diff --git a/docker/dockerfiles/Dockerfile.onnx.lambda b/docker/dockerfiles/Dockerfile.onnx.lambda
index 9ef86db9b..74c64cf63 100644
--- a/docker/dockerfiles/Dockerfile.onnx.lambda
+++ b/docker/dockerfiles/Dockerfile.onnx.lambda
@@ -71,6 +71,7 @@ ENV ALLOW_NON_HTTPS_URL_INPUT=False
 ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
 ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False
 ENV CORE_MODEL_TROCR_ENABLED=false
+ENV USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS=False

 WORKDIR ${LAMBDA_TASK_ROOT}
 RUN rm -rf /build
diff --git a/docker/dockerfiles/Dockerfile.onnx.lambda.slim b/docker/dockerfiles/Dockerfile.onnx.lambda.slim
index b6891d560..c2d76de76 100644
--- a/docker/dockerfiles/Dockerfile.onnx.lambda.slim
+++ b/docker/dockerfiles/Dockerfile.onnx.lambda.slim
@@ -66,6 +66,7 @@ ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
 ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False
 ENV CORE_MODEL_TROCR_ENABLED=false
 ENV ENABLE_WORKFLOWS_PROFILING=True
+ENV USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS=False

 WORKDIR ${LAMBDA_TASK_ROOT}
diff --git a/inference/core/env.py b/inference/core/env.py
index cfc559a67..d5a083d9f 100644
--- a/inference/core/env.py
+++ b/inference/core/env.py
@@ -436,3 +436,6 @@ WORKFLOWS_DEFINITION_CACHE_EXPIRY = int(
     os.getenv("WORKFLOWS_DEFINITION_CACHE_EXPIRY", 15 * 60)
 )
+USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS = str2bool(
+    os.getenv("USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS", "True")
+)
diff --git a/inference/core/roboflow_api.py b/inference/core/roboflow_api.py
index 3fba809df..44a1f553c 100644
--- a/inference/core/roboflow_api.py
+++ b/inference/core/roboflow_api.py
@@ -22,6 +22,7 @@ from inference.core.env import (
     API_BASE_URL,
     MODEL_CACHE_DIR,
+    USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS,
     WORKFLOWS_DEFINITION_CACHE_EXPIRY,
 )
 from inference.core.exceptions import (
@@ -391,21 +392,35 @@ def get_roboflow_labeling_jobs(
     return _get_from_url(url=api_url)


-def get_workflow_cache_file(workspace_id: WorkspaceID, workflow_id: str):
+def get_workflow_cache_file(
+    workspace_id: WorkspaceID, workflow_id: str, api_key: str
+) -> str:
     sanitized_workspace_id = sanitize_path_segment(workspace_id)
     sanitized_workflow_id = sanitize_path_segment(workflow_id)
-    return os.path.join(
-        MODEL_CACHE_DIR,
-        "workflow",
-        sanitized_workspace_id,
-        f"{sanitized_workflow_id}.json",
+    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest()
+    prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
+    result = os.path.abspath(
+        os.path.join(
+            prefix,
+            sanitized_workspace_id,
+            f"{sanitized_workflow_id}_{api_key_hash}.json",
+        )
     )
+    if not result.startswith(prefix):
+        raise ValueError(
+            "Detected attempt to save workflow definition in insecure location"
+        )
+    return result


 def cache_workflow_response(
-    workspace_id: WorkspaceID, workflow_id: str, response: dict
+    workspace_id: WorkspaceID, workflow_id: str, api_key: str, response: dict
 ):
-    workflow_cache_file = get_workflow_cache_file(workspace_id, workflow_id)
+    workflow_cache_file = get_workflow_cache_file(
+        workspace_id=workspace_id,
+        workflow_id=workflow_id,
+        api_key=api_key,
+    )
     workflow_cache_dir = os.path.dirname(workflow_cache_file)
     if not os.path.exists(workflow_cache_dir):
         os.makedirs(workflow_cache_dir, exist_ok=True)
@@ -414,24 +429,40 @@ def cache_workflow_response(


 def delete_cached_workflow_response_if_exists(
-    workspace_id: WorkspaceID, workflow_id: str
+    workspace_id: WorkspaceID,
+    workflow_id: str,
+    api_key: str,
 ) -> None:
-    workflow_cache_file = get_workflow_cache_file(workspace_id, workflow_id)
+    workflow_cache_file = get_workflow_cache_file(
+        workspace_id=workspace_id,
+        workflow_id=workflow_id,
+        api_key=api_key,
+    )
     if os.path.exists(workflow_cache_file):
         os.remove(workflow_cache_file)


 def load_cached_workflow_response(
-    workspace_id: WorkspaceID, workflow_id: str
+    workspace_id: WorkspaceID,
+    workflow_id: str,
+    api_key: str,
 ) -> Optional[dict]:
-    workflow_cache_file = get_workflow_cache_file(workspace_id, workflow_id)
+    workflow_cache_file = get_workflow_cache_file(
+        workspace_id=workspace_id,
+        workflow_id=workflow_id,
+        api_key=api_key,
+    )
     if not os.path.exists(workflow_cache_file):
         return None
     try:
         with open(workflow_cache_file, "r") as f:
             return json.load(f)
     except:
-        delete_cached_workflow_response_if_exists(workspace_id, workflow_id)
+        delete_cached_workflow_response_if_exists(
+            workspace_id=workspace_id,
+            workflow_id=workflow_id,
+            api_key=api_key,
+        )


 @wrap_roboflow_api_errors()
@@ -458,12 +489,21 @@ def get_workflow_specification(
     )
     try:
         response = _get_from_url(url=api_url)
-        if use_cache:
-            cache_workflow_response(workspace_id, workflow_id, response)
+        if USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS:
+            cache_workflow_response(
+                workspace_id=workspace_id,
+                workflow_id=workflow_id,
+                api_key=api_key,
+                response=response,
+            )
     except (requests.exceptions.ConnectionError, ConnectionError) as error:
-        if not use_cache:
+        if not USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS:
             raise error
-        response = load_cached_workflow_response(workspace_id, workflow_id)
+        response = load_cached_workflow_response(
+            workspace_id=workspace_id,
+            workflow_id=workflow_id,
+            api_key=api_key,
+        )
         if response is None:
             raise error
     if "workflow" not in response or "config" not in response["workflow"]:
diff --git a/inference/core/version.py b/inference/core/version.py
index f3c0a588e..309f0166c 100644
--- a/inference/core/version.py
+++ b/inference/core/version.py
@@ -1,4 +1,4 @@
-__version__ = "0.22.0rc1"
+__version__ = "0.22.0"


 if __name__ == "__main__":
diff --git a/inference/core/workflows/core_steps/analytics/path_deviation/v1.py b/inference/core/workflows/core_steps/analytics/path_deviation/v1.py
index 4d2fbc228..276e27caf 100644
--- a/inference/core/workflows/core_steps/analytics/path_deviation/v1.py
+++ b/inference/core/workflows/core_steps/analytics/path_deviation/v1.py
@@ -45,6 +45,10 @@ class PathDeviationManifest(WorkflowBlockManifest):
             "long_description": LONG_DESCRIPTION,
             "license": "Apache-2.0",
             "block_type": "analytics",
+            "ui_manifest": {
+                "section": "video",
+                "icon": "far fa-tower-observation",
+            },
         }
     )
     type: Literal["roboflow_core/path_deviation_analytics@v1"]
diff --git a/inference/core/workflows/core_steps/classical_cv/size_measurement/v1.py b/inference/core/workflows/core_steps/classical_cv/size_measurement/v1.py
index 905fdebca..7ee90ea86 100644
--- a/inference/core/workflows/core_steps/classical_cv/size_measurement/v1.py
+++ b/inference/core/workflows/core_steps/classical_cv/size_measurement/v1.py
@@ -47,6 +47,11 @@ class SizeMeasurementManifest(WorkflowBlockManifest):
             "long_description": LONG_DESCRIPTION,
             "license": "Apache-2.0",
             "block_type": "transformation",
+            "ui_manifest": {
+                "section": "classical_cv",
+                "icon": "far fa-ruler",
+                "opencv": True,
+            },
         }
     )
     type: Literal[f"roboflow_core/size_measurement@v1"]
diff --git a/inference/core/workflows/core_steps/models/foundation/google_vision_ocr/v1.py b/inference/core/workflows/core_steps/models/foundation/google_vision_ocr/v1.py
index 12948d139..7f6273a6a 100644
--- a/inference/core/workflows/core_steps/models/foundation/google_vision_ocr/v1.py
+++ b/inference/core/workflows/core_steps/models/foundation/google_vision_ocr/v1.py
@@ -53,6 +53,10 @@ class BlockManifest(WorkflowBlockManifest):
             "long_description": LONG_DESCRIPTION,
             "license": "Apache-2.0",
             "block_type": "model",
+            "ui_manifest": {
+                "section": "model",
+                "icon": "far fa-google",
+            },
         },
         protected_namespaces=(),
     )
diff --git a/inference/core/workflows/core_steps/models/foundation/stability_ai/inpainting/v1.py b/inference/core/workflows/core_steps/models/foundation/stability_ai/inpainting/v1.py
index d9b42254f..8ae57f5b4 100644
--- a/inference/core/workflows/core_steps/models/foundation/stability_ai/inpainting/v1.py
+++ b/inference/core/workflows/core_steps/models/foundation/stability_ai/inpainting/v1.py
@@ -57,6 +57,10 @@ class BlockManifest(WorkflowBlockManifest):
                 "inpainting",
                 "image generation",
             ],
+            "ui_manifest": {
+                "section": "model",
+                "icon": "far fa-palette",
+            },
         }
     )
     type: Literal["roboflow_core/stability_ai_inpainting@v1"]
diff --git a/inference/core/workflows/core_steps/transformations/stitch_images/v1.py b/inference/core/workflows/core_steps/transformations/stitch_images/v1.py
index 65f68a3f6..6cc6c431b 100644
--- a/inference/core/workflows/core_steps/transformations/stitch_images/v1.py
+++ b/inference/core/workflows/core_steps/transformations/stitch_images/v1.py
@@ -39,6 +39,11 @@ class BlockManifest(WorkflowBlockManifest):
             "long_description": LONG_DESCRIPTION,
             "license": "Apache-2.0",
             "block_type": "transformation",
+            "ui_manifest": {
+                "section": "transformation",
+                "icon": "far fa-object-union",
+                "opencv": True,
+            },
         }
     )
     type: Literal["roboflow_core/stitch_images@v1"]
diff --git a/tests/inference/unit_tests/core/test_roboflow_api.py b/tests/inference/unit_tests/core/test_roboflow_api.py
index 10b9507e7..83f454e06 100644
--- a/tests/inference/unit_tests/core/test_roboflow_api.py
+++ b/tests/inference/unit_tests/core/test_roboflow_api.py
@@ -1694,7 +1694,11 @@ def test_get_workflow_specification_when_connection_error_occurs_and_no_cache_to
     get_mock: MagicMock,
 ) -> None:
     # given
-    delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
+    delete_cached_workflow_response_if_exists(
+        workspace_id="my_workspace",
+        workflow_id="some_workflow",
+        api_key="my_api_key",
+    )
     get_mock.side_effect = ConnectionError()

     # when
@@ -1712,7 +1716,11 @@ def test_get_workflow_specification_when_connection_error_occurs_but_file_is_cac
     get_mock: MagicMock,
 ) -> None:
     # given
-    delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
+    delete_cached_workflow_response_if_exists(
+        workspace_id="my_workspace",
+        workflow_id="some_workflow",
+        api_key="my_api_key",
+    )
     get_mock.return_value = MagicMock(
         status_code=200,
         json=MagicMock(
@@ -1744,7 +1752,11 @@ def test_get_workflow_specification_when_consecutive_request_hits_ephemeral_cach
     get_mock: MagicMock,
 ) -> None:
     # given
-    delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
+    delete_cached_workflow_response_if_exists(
+        workspace_id="my_workspace",
+        workflow_id="some_workflow",
+        api_key="my_api_key",
+    )
     get_mock.return_value = MagicMock(
         status_code=200,
         json=MagicMock(
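
For context on the roboflow_api.py change, here is a minimal, self-contained sketch of the new cache-path scheme. It is an illustration, not the shipped helper: MODEL_CACHE_DIR is hard-coded and sanitize_path_segment is omitted for brevity (the real implementation is get_workflow_cache_file in inference/core/roboflow_api.py). The API key is folded into the cached filename as an MD5 hash, so definitions fetched with different keys never collide, and the abspath prefix check rejects workspace/workflow IDs that would resolve outside the cache directory.

import hashlib
import os

MODEL_CACHE_DIR = "/tmp/cache"  # assumed value, for illustration only


def workflow_cache_path(workspace_id: str, workflow_id: str, api_key: str) -> str:
    # Hash the API key so two keys never share a cached definition file
    # and the secret itself never lands on the filesystem.
    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest()
    prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
    result = os.path.abspath(
        os.path.join(prefix, workspace_id, f"{workflow_id}_{api_key_hash}.json")
    )
    # Reject IDs that escape the cache prefix (e.g. workspace_id="../../etc").
    if not result.startswith(prefix):
        raise ValueError(
            "Detected attempt to save workflow definition in insecure location"
        )
    return result


print(workflow_cache_path("my_workspace", "some_workflow", "my_api_key"))
# -> /tmp/cache/workflow/my_workspace/some_workflow_<md5 hex digest>.json

The new USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS flag (default True, set to False in both Lambda images above) gates both writing these files and falling back to them when the Roboflow API is unreachable.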