Skip to content

Commit

Permalink
Merge pull request #723 from roboflow/feature/add_metadata_to_new_blocks
Browse files Browse the repository at this point in the history
Add metadata for new blocks
  • Loading branch information
PawelPeczek-Roboflow authored Oct 4, 2024
2 parents b9e4074 + 2ad85f0 commit c70cc32
Show file tree
Hide file tree
Showing 11 changed files with 100 additions and 21 deletions.
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ ENV ALLOW_NON_HTTPS_URL_INPUT=False
ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False
ENV CORE_MODEL_TROCR_ENABLED=false
ENV USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS=False

WORKDIR ${LAMBDA_TASK_ROOT}
RUN rm -rf /build
Expand Down
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda.slim
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False
ENV CORE_MODEL_TROCR_ENABLED=false
ENV ENABLE_WORKFLOWS_PROFILING=True
ENV USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS=False

WORKDIR ${LAMBDA_TASK_ROOT}

Expand Down
3 changes: 3 additions & 0 deletions inference/core/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,3 +436,6 @@
# TTL (seconds) for cached workflow definitions; defaults to 15 minutes.
WORKFLOWS_DEFINITION_CACHE_EXPIRY = int(
    os.getenv("WORKFLOWS_DEFINITION_CACHE_EXPIRY", 15 * 60)
)
# When True (the default), workflow definitions fetched from the API are also
# written to a file cache so they can be served if the API is unreachable.
USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS = str2bool(
    os.getenv("USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS", "True")
)
74 changes: 57 additions & 17 deletions inference/core/roboflow_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from inference.core.env import (
API_BASE_URL,
MODEL_CACHE_DIR,
USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS,
WORKFLOWS_DEFINITION_CACHE_EXPIRY,
)
from inference.core.exceptions import (
Expand Down Expand Up @@ -391,21 +392,35 @@ def get_roboflow_labeling_jobs(
return _get_from_url(url=api_url)


def get_workflow_cache_file(
    workspace_id: WorkspaceID, workflow_id: str, api_key: str
) -> str:
    """Build the on-disk cache path for a workflow definition.

    The path is namespaced by workspace and keyed by both the workflow id and
    an MD5 digest of the API key, so definitions fetched with different keys
    do not leak between callers that share the same cache directory.

    Args:
        workspace_id: Workspace the workflow belongs to.
        workflow_id: Identifier of the workflow.
        api_key: API key used to fetch the definition; only its hash is
            embedded in the filename, never the key itself.

    Returns:
        Absolute path of the JSON cache file under MODEL_CACHE_DIR/workflow.

    Raises:
        ValueError: If the resolved path escapes the cache directory (e.g.
            via traversal segments that survived sanitization).
    """
    sanitized_workspace_id = sanitize_path_segment(workspace_id)
    sanitized_workflow_id = sanitize_path_segment(workflow_id)
    # MD5 here is a cache-key fingerprint only, not a security primitive.
    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest()
    prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
    result = os.path.abspath(
        os.path.join(
            prefix,
            sanitized_workspace_id,
            f"{sanitized_workflow_id}_{api_key_hash}.json",
        )
    )
    # Compare against prefix + separator: a bare startswith(prefix) would also
    # accept sibling paths such as "<prefix>-evil/..." or a traversal that
    # lands beside the prefix with a name beginning with "workflow".
    if not result.startswith(prefix + os.sep):
        raise ValueError(
            "Detected attempt to save workflow definition in insecure location"
        )
    return result


def cache_workflow_response(
workspace_id: WorkspaceID, workflow_id: str, response: dict
workspace_id: WorkspaceID, workflow_id: str, api_key: str, response: dict
):
workflow_cache_file = get_workflow_cache_file(workspace_id, workflow_id)
workflow_cache_file = get_workflow_cache_file(
workspace_id=workspace_id,
workflow_id=workflow_id,
api_key=api_key,
)
workflow_cache_dir = os.path.dirname(workflow_cache_file)
if not os.path.exists(workflow_cache_dir):
os.makedirs(workflow_cache_dir, exist_ok=True)
Expand All @@ -414,24 +429,40 @@ def cache_workflow_response(


def delete_cached_workflow_response_if_exists(
    workspace_id: WorkspaceID,
    workflow_id: str,
    api_key: str,
) -> None:
    """Remove the cached workflow definition for the given (workspace,
    workflow, api_key) triple; a no-op when no cache file is present."""
    cache_path = get_workflow_cache_file(
        workspace_id=workspace_id,
        workflow_id=workflow_id,
        api_key=api_key,
    )
    if not os.path.exists(cache_path):
        return
    os.remove(cache_path)


def load_cached_workflow_response(
    workspace_id: WorkspaceID,
    workflow_id: str,
    api_key: str,
) -> Optional[dict]:
    """Load a previously cached workflow definition from disk.

    Args:
        workspace_id: Workspace the workflow belongs to.
        workflow_id: Identifier of the workflow.
        api_key: API key whose hash is part of the cache filename.

    Returns:
        The cached response dict, or None when no cache entry exists or the
        cached file is unreadable/corrupted — in which case the stale entry
        is deleted so the next successful fetch repopulates it.
    """
    workflow_cache_file = get_workflow_cache_file(
        workspace_id=workspace_id,
        workflow_id=workflow_id,
        api_key=api_key,
    )
    if not os.path.exists(workflow_cache_file):
        return None
    try:
        with open(workflow_cache_file, "r") as f:
            return json.load(f)
    # Narrowed from a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit. OSError covers read failures and
    # ValueError covers json.JSONDecodeError (its subclass).
    except (OSError, ValueError):
        delete_cached_workflow_response_if_exists(
            workspace_id=workspace_id,
            workflow_id=workflow_id,
            api_key=api_key,
        )
        return None


@wrap_roboflow_api_errors()
Expand All @@ -458,12 +489,21 @@ def get_workflow_specification(
)
try:
response = _get_from_url(url=api_url)
if use_cache:
cache_workflow_response(workspace_id, workflow_id, response)
if USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS:
cache_workflow_response(
workspace_id=workspace_id,
workflow_id=workflow_id,
api_key=api_key,
response=response,
)
except (requests.exceptions.ConnectionError, ConnectionError) as error:
if not use_cache:
if not USE_FILE_CACHE_FOR_WORKFLOWS_DEFINITIONS:
raise error
response = load_cached_workflow_response(workspace_id, workflow_id)
response = load_cached_workflow_response(
workspace_id=workspace_id,
workflow_id=workflow_id,
api_key=api_key,
)
if response is None:
raise error
if "workflow" not in response or "config" not in response["workflow"]:
Expand Down
2 changes: 1 addition & 1 deletion inference/core/version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "0.22.0rc1"
__version__ = "0.22.0"


if __name__ == "__main__":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@ class PathDeviationManifest(WorkflowBlockManifest):
"long_description": LONG_DESCRIPTION,
"license": "Apache-2.0",
"block_type": "analytics",
"ui_manifest": {
"section": "video",
"icon": "far fa-tower-observation",
},
}
)
type: Literal["roboflow_core/path_deviation_analytics@v1"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,11 @@ class SizeMeasurementManifest(WorkflowBlockManifest):
"long_description": LONG_DESCRIPTION,
"license": "Apache-2.0",
"block_type": "transformation",
"ui_manifest": {
"section": "classical_cv",
"icon": "far fa-ruler",
"opencv": True,
},
}
)
type: Literal[f"roboflow_core/size_measurement@v1"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,10 @@ class BlockManifest(WorkflowBlockManifest):
"long_description": LONG_DESCRIPTION,
"license": "Apache-2.0",
"block_type": "model",
"ui_manifest": {
"section": "model",
"icon": "far fa-google",
},
},
protected_namespaces=(),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ class BlockManifest(WorkflowBlockManifest):
"inpainting",
"image generation",
],
"ui_manifest": {
"section": "model",
"icon": "far fa-palette",
},
}
)
type: Literal["roboflow_core/stability_ai_inpainting@v1"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,11 @@ class BlockManifest(WorkflowBlockManifest):
"long_description": LONG_DESCRIPTION,
"license": "Apache-2.0",
"block_type": "transformation",
"ui_manifest": {
"section": "transformation",
"icon": "far fa-object-union",
"opencv": True,
},
}
)
type: Literal["roboflow_core/stitch_images@v1"]
Expand Down
18 changes: 15 additions & 3 deletions tests/inference/unit_tests/core/test_roboflow_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -1694,7 +1694,11 @@ def test_get_workflow_specification_when_connection_error_occurs_and_no_cache_to
get_mock: MagicMock,
) -> None:
# given
delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
delete_cached_workflow_response_if_exists(
workspace_id="my_workspace",
workflow_id="some_workflow",
api_key="my_api_key",
)
get_mock.side_effect = ConnectionError()

# when
Expand All @@ -1712,7 +1716,11 @@ def test_get_workflow_specification_when_connection_error_occurs_but_file_is_cac
get_mock: MagicMock,
) -> None:
# given
delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
delete_cached_workflow_response_if_exists(
workspace_id="my_workspace",
workflow_id="some_workflow",
api_key="my_api_key",
)
get_mock.return_value = MagicMock(
status_code=200,
json=MagicMock(
Expand Down Expand Up @@ -1744,7 +1752,11 @@ def test_get_workflow_specification_when_consecutive_request_hits_ephemeral_cach
get_mock: MagicMock,
) -> None:
# given
delete_cached_workflow_response_if_exists("my_workspace", "some_workflow")
delete_cached_workflow_response_if_exists(
workspace_id="my_workspace",
workflow_id="some_workflow",
api_key="my_api_key",
)
get_mock.return_value = MagicMock(
status_code=200,
json=MagicMock(
Expand Down

0 comments on commit c70cc32

Please sign in to comment.