
Commit

Uses PurePosixPath instead of Path when preparing paths for DMSS, to ensure OS-independent path creation for DMSS.
l3abak committed Nov 25, 2024
1 parent 32b460c commit 9b340f4
Showing 6 changed files with 24 additions and 23 deletions.
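
For context on why the swap matters: `str()` on a concrete `Path` uses the host OS separator, so the same code yields backslash-separated strings on Windows and forward-slash strings on Linux/macOS, while `PurePosixPath` always renders with `/`. A minimal sketch of the difference (illustrative only, with made-up package names; not part of the commit):

```python
from pathlib import Path, PurePosixPath

# A relative path built from parts, e.g. a package address given to the CLI.
p = Path("DemoApplication") / "models" / "CarPackage"

# str(Path) uses the OS separator:
#   Windows      -> 'DemoApplication\\models\\CarPackage'
#   Linux/macOS  -> 'DemoApplication/models/CarPackage'
print(str(p))

# Re-wrapping the already-parsed Path in a PurePosixPath keeps its components
# but always joins them with '/', which is the OS-independent form DMSS expects.
print(str(PurePosixPath(p)))  # 'DemoApplication/models/CarPackage' on every OS

# Caveat: this works because p is a parsed Path; PurePosixPath("a\\b") built
# from a raw backslash string would treat the whole string as one component.
```
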
6 changes: 3 additions & 3 deletions dm_cli/bin/dm
@@ -1,7 +1,7 @@
-#! /usr/bin/env python
+#!C:\dev\dm-app-simpos\git\.py13env\Scripts\python.exe
import io
import os
-from pathlib import Path
+from pathlib import Path, PurePosixPath
from typing import Optional, List
from zipfile import ZipFile
import emoji
@@ -66,7 +66,7 @@ def create_lookup(name: Annotated[str, typer.Argument(help="Name of the lookup (
Create a named Ui-/StorageRecipe-lookup-table from all RecipeLinks in a package existing in DMSS (requires admin privileges).
"""
# TODO change type of paths argument to be list of str to avoid this path as strings conversion below
-paths_as_strings = [str(path) for path in paths]
+paths_as_strings = [str(PurePosixPath(path)) for path in paths]
print(f"Creating lookup table from paths: {paths_as_strings}")
dmss_exception_wrapper(dmss_api.create_lookup, recipe_package=paths_as_strings, application=name)

8 changes: 4 additions & 4 deletions dm_cli/command_group/data_source.py
@@ -1,5 +1,5 @@
import json
-from pathlib import Path
+from pathlib import PurePosixPath, Path

import emoji
import typer
@@ -96,7 +96,7 @@ def initialize_data_source(
The remote packages in a data source will be deleted before new data sources are imported.
"""
# Check for presence of expected directories, 'data_sources' and 'data'
-data_sources_dir, data_dir = get_app_dir_structure(Path(path))
+data_sources_dir, data_dir = get_app_dir_structure(PurePosixPath(path))

data_source_definitions = get_json_files_in_dir(data_sources_dir)
if not data_source_definitions:
@@ -111,7 +111,7 @@
def import_data_source_file(
data_sources_dir: str, data_dir: str, data_source_definition_filename: str, resolve_local_ids: bool
):
-data_source_definition_filepath = Path(data_sources_dir).joinpath(data_source_definition_filename)
+data_source_definition_filepath = PurePosixPath(data_sources_dir).joinpath(data_source_definition_filename)
data_source_name = data_source_definition_filename.replace(".json", "")

data_source_data_dir = data_dir / data_source_name
@@ -156,7 +156,7 @@ def reset_data_source(
"""
Reset a single data source (deletes and re-uploads root-packages)
"""
-app_dir = Path(path)
+app_dir = PurePosixPath(path)
if not app_dir.is_dir():
raise FileNotFoundError(f"The path '{path}' is not a directory.")

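A side note on the `reset_data_source` hunk: pure paths such as `PurePosixPath` are purely computational and expose no filesystem methods, so checks like `is_dir()` exist only on concrete `Path` objects. A common pattern is to keep a concrete `Path` for local checks and switch to `PurePosixPath` only for the DMSS-facing string; a sketch with a hypothetical helper name (not code from this repository):

```python
from pathlib import Path, PurePosixPath

def local_dir_as_dmss_path(path: str) -> str:
    """Hypothetical helper: validate a local directory with a concrete Path,
    then return the forward-slash form used in DMSS addresses."""
    app_dir = Path(path)  # concrete path: supports is_dir(), exists(), iterdir(), ...
    if not app_dir.is_dir():
        raise FileNotFoundError(f"The path '{path}' is not a directory.")
    return str(PurePosixPath(app_dir))  # pure path: always rendered with '/'
```
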
8 changes: 4 additions & 4 deletions dm_cli/import_entity.py
@@ -1,7 +1,7 @@
import io
import json
from json import JSONDecodeError
-from pathlib import Path
+from pathlib import PurePosixPath, Path
from zipfile import ZipFile

from requests import Response
@@ -58,12 +58,12 @@ def import_document(source_path: Path, destination: str, document: dict):


def import_single_entity(source_path: Path, destination: str, validate: bool = False):
-ensure_package_structure(Path(destination))
+ensure_package_structure(PurePosixPath(destination))
print(f"Importing ENTITY '{source_path.name}' --> '{destination}'")

try: # Load the JSON document
with open(source_path, "r") as fh:
-if Path(source_path).suffix == ".json":
+if PurePosixPath(source_path).suffix == ".json":
content = json.load(fh)
if validate:
print(f"Validating {source_path}", end="")
@@ -95,7 +95,7 @@ def import_folder_entity(
raw_package_import: bool = False,
resolve_local_ids: bool = False,
) -> dict:
-destination_path = Path(destination)
+destination_path = PurePosixPath(destination)

# Check if target already exists on remote. Then delete or raise exception
target = f"{destination}/{source_path.name}"
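
In `import_single_entity` and `import_folder_entity`, `destination` is a DMSS address rather than a local file, so a pure path fits well: component accessors such as `.parts`, `.name`, `.parent` and `.suffix` work without any filesystem. A small illustration (the address is made up):

```python
from pathlib import PurePosixPath

destination = PurePosixPath("DemoDataSource/models/CarPackage")

print(destination.parts)   # ('DemoDataSource', 'models', 'CarPackage')
print(destination.name)    # 'CarPackage'
print(destination.parent)  # DemoDataSource/models

# Suffix checks behave exactly as they do on a concrete Path:
print(PurePosixPath("car_entity.json").suffix == ".json")  # True
```
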
10 changes: 5 additions & 5 deletions dm_cli/import_package.py
@@ -1,7 +1,7 @@
import io
import json
from json import JSONDecodeError
-from pathlib import Path
+from pathlib import PurePosixPath, Path
from typing import Dict, List
from uuid import uuid4

@@ -42,7 +42,7 @@ def add_object_to_package(path: Path, package: Package, object: io.BytesIO) -> N
package.content.append(sub_folder)

new_path = str(path).split("/", 1)[1] # Remove first element in path before stepping down
-return add_object_to_package(Path(new_path), sub_folder, object)
+return add_object_to_package(PurePosixPath(new_path), sub_folder, object)


def add_file_to_package(path: Path, package: Package, document: dict) -> None:
@@ -61,7 +61,7 @@ def add_file_to_package(path: Path, package: Package, document: dict) -> None:
package.content.append(sub_folder)

new_path = str(path).split("/", 1)[1] # Remove first element in path before stepping down
-return add_file_to_package(Path(new_path), sub_folder, document)
+return add_file_to_package(PurePosixPath(new_path), sub_folder, document)


def add_package_to_package(path: Path, package: Package) -> None:
@@ -75,7 +75,7 @@ def add_package_to_package(path: Path, package: Package) -> None:
package.content.append(sub_folder)

new_path = str(path).split("/", 1)[1] # Remove first element in path before stepping down
-return add_package_to_package(Path(new_path), sub_folder)
+return add_package_to_package(PurePosixPath(new_path), sub_folder)


def import_package_tree(package: Package, destination: str, raw_package_import: bool, resolve_local_ids: bool) -> None:
@@ -116,7 +116,7 @@ def import_package_content(package: Package, data_source: str, destination: str,

def upload_global_file(address: str) -> str:
"""Handling uploading of global files."""
-filepath = Path(address)
+filepath = PurePosixPath(address)
if not filepath.is_file():
raise ApplicationException(
f"Tried to upload file with address '{address}'. The file was not found", data=package.to_dict()
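
The recursive `add_*_to_package` helpers above step down the package tree by splitting `str(path)` on `/`. With `PurePosixPath` the rendered string is guaranteed to use `/` on every OS, whereas `str()` of a concrete `Path` on Windows yields backslashes and the split would never advance. A simplified sketch of just the stepping logic (no `Package` model involved):

```python
from pathlib import PurePosixPath

def step_down(path: PurePosixPath) -> PurePosixPath:
    """Drop the first component, as add_*_to_package does before recursing."""
    new_path = str(path).split("/", 1)[1]  # remove first element in path
    return PurePosixPath(new_path)

p = PurePosixPath("rootPackage/subPackage/car_entity.json")
print(step_down(p))             # subPackage/car_entity.json
print(step_down(step_down(p)))  # car_entity.json
```

An equivalent that avoids string handling altogether would be `PurePosixPath(*path.parts[1:])`, but the sketch mirrors the `split("/", 1)` used in the diff.
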
14 changes: 7 additions & 7 deletions dm_cli/package_tree_from_zip.py
@@ -1,7 +1,7 @@
import io
import json
from json import JSONDecodeError
-from pathlib import Path
+from pathlib import PurePosixPath, Path
from typing import Dict, Union
from zipfile import ZipFile

@@ -64,20 +64,20 @@ def package_tree_from_zip(
if file_info.is_dir():
if filename == "": # Skip rootPackage
continue
-add_package_to_package(Path(filename), root_package)
+add_package_to_package(PurePosixPath(filename), root_package)
continue
-if Path(filename).suffix != ".json":
+if PurePosixPath(filename).suffix != ".json":
file_like = io.BytesIO(zip_file.read(f"{folder_name}/{filename}"))
-file_like.name = Path(filename).name # stem
-file_like.destination = Path(f"/{destination}/{folder_name}/{filename}").parent
-add_object_to_package(Path(filename), root_package, file_like)
+file_like.name = PurePosixPath(filename).name # stem
+file_like.destination = PurePosixPath(f"/{destination}/{folder_name}/{filename}").parent
+add_object_to_package(PurePosixPath(filename), root_package, file_like)
continue
try:
json_doc = json.loads(zip_file.read(f"{folder_name}/{filename}"))
except JSONDecodeError:
raise Exception(f"Failed to load the file '{filename}' as a JSON document")

-add_file_to_package(Path(filename), root_package, json_doc)
+add_file_to_package(PurePosixPath(filename), root_package, json_doc)

# Add dependencies from entity to the global dependencies list
dependencies = concat_dependencies(
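
ZIP member names conventionally use `/` as the separator regardless of the OS that wrote or reads the archive, so parsing them with `PurePosixPath` matches the archive's own convention, while a concrete `Path` would re-render them with the host separator. A small sketch of the `.name`/`.parent`/destination handling from the hunk above (archive contents and destination are made up):

```python
import io
from pathlib import PurePosixPath
from zipfile import ZipFile

# Build a tiny in-memory archive; member names use '/' separators.
buffer = io.BytesIO()
with ZipFile(buffer, "w") as zip_file:
    zip_file.writestr("rootPackage/files/logo.png", b"not really a png")

destination = "DemoDataSource/DemoApp"
with ZipFile(buffer) as zip_file:
    for filename in zip_file.namelist():
        member = PurePosixPath(filename)
        print(member.name)    # logo.png
        print(member.parent)  # rootPackage/files
        print(PurePosixPath(f"/{destination}/{filename}").parent)
        # -> /DemoDataSource/DemoApp/rootPackage/files
```
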
1 change: 1 addition & 0 deletions dm_cli/utils/reference.py
@@ -20,6 +20,7 @@ def resolve_reference(
return resolve_dependency(reference, dependencies)
if reference[0] == ".":
normalized_dotted_ref: str = normpath(f"{file_path}/{reference}")
+normalized_dotted_ref = Path(normalized_dotted_ref).as_posix()
return f"dmss://{destination}/{normalized_dotted_ref}"
if reference[0] == "/":
data_source = destination.split("/")[0]
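
The single added line compensates for `os.path.normpath`, which on Windows also rewrites separators to backslashes; `Path(...).as_posix()` converts the normalized result back into a forward-slash string before it is embedded in the `dmss://` address. A sketch of the round trip (the file path, reference and data source name are illustrative):

```python
from os.path import normpath
from pathlib import Path

file_path = "rootPackage/subPackage"
reference = "../common/CarEntity"

normalized = normpath(f"{file_path}/{reference}")
# Linux/macOS: 'rootPackage/common/CarEntity'
# Windows:     'rootPackage\\common\\CarEntity'

posix_form = Path(normalized).as_posix()  # 'rootPackage/common/CarEntity' on every OS
print(f"dmss://DemoDataSource/DemoApp/{posix_form}")
```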
