Fix SAM ONNX export requirements with transformers 4.32, export vision encoder separately #1301
Changes from all commits
```diff
@@ -36,6 +36,7 @@
     _get_submodels_for_export_stable_diffusion,
     get_decoder_models_for_export,
     get_encoder_decoder_models_for_export,
+    get_sam_models_for_export,
     get_stable_diffusion_models_for_export,
 )
 
```
```diff
@@ -61,6 +62,7 @@ def _get_submodels_and_onnx_configs(
     monolith: bool,
     custom_onnx_configs: Dict,
     custom_architecture: bool,
+    _variant: str,
     fn_get_submodels: Optional[Callable] = None,
     preprocessors: Optional[List[Any]] = None,
 ):
```

Member: Why make it a protected parameter?

Contributor (Author): I'm thinking to keep it "private" for now, and support it correctly once we move to this API fully instead of
```diff
@@ -75,6 +77,12 @@
         )
         onnx_config = onnx_config_constructor(model.config, preprocessors=preprocessors)
 
+        onnx_config.variant = _variant
+        all_variants = "\n".join(
+            [f"\t- {name}: {description}" for name, description in onnx_config.VARIANTS.items()]
+        )
+        logger.info(f"Using the export variant {onnx_config.variant}. Available variants are:\n{all_variants}")
+
         if (
             model.config.is_encoder_decoder
             and task.startswith(TasksManager._ENCODER_DECODER_TASKS)
```
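For readers unfamiliar with the variant mechanism, the added block relies on the ONNX config exposing a `VARIANTS` mapping of variant names to descriptions. Below is a minimal, self-contained sketch of that pattern; the `DummyOnnxConfig` class and its variant names are illustrative assumptions, not optimum's actual definitions.

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DummyOnnxConfig:
    # Hypothetical config: maps each export variant name to a human-readable description.
    VARIANTS = {
        "default": "Export the model as a single ONNX file.",
        "split": "Export submodels (e.g. vision encoder and mask decoder) as separate ONNX files.",
    }
    variant = "default"


def select_variant(onnx_config, _variant: str) -> None:
    # Mirrors the added lines: record the requested variant and log the available ones.
    onnx_config.variant = _variant
    all_variants = "\n".join(
        f"\t- {name}: {description}" for name, description in onnx_config.VARIANTS.items()
    )
    logger.info(f"Using the export variant {onnx_config.variant}. Available variants are:\n{all_variants}")


select_variant(DummyOnnxConfig(), "split")
```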
```diff
@@ -83,6 +91,8 @@
             models_and_onnx_configs = get_encoder_decoder_models_for_export(model, onnx_config)
         elif task.startswith("text-generation") and not monolith:
             models_and_onnx_configs = get_decoder_models_for_export(model, onnx_config)
+        elif model.config.model_type == "sam":
+            models_and_onnx_configs = get_sam_models_for_export(model, onnx_config)
         else:
             models_and_onnx_configs = {"model": (model, onnx_config)}
 
```
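To make the SAM branch concrete, here is a rough sketch of what a `get_sam_models_for_export`-style helper could return. The submodel names, the `vision_encoder` constructor kwarg, and the `"split"` variant name are assumptions for illustration, not necessarily what optimum implements.

```python
from typing import Any, Dict, Tuple

from transformers import PreTrainedModel


def get_sam_models_for_export_sketch(
    model: PreTrainedModel, onnx_config: Any
) -> Dict[str, Tuple[PreTrainedModel, Any]]:
    """Hypothetical split of a SAM model into separately exported submodels."""
    models_for_export: Dict[str, Tuple[PreTrainedModel, Any]] = {}

    if onnx_config.variant == "split":  # assumed variant name
        # Vision encoder exported on its own, so image embeddings can be computed
        # once and reused across many prompts.
        vision_encoder_config = onnx_config.__class__(model.config, vision_encoder=True)  # hypothetical kwarg
        models_for_export["vision_encoder"] = (model, vision_encoder_config)

        # Prompt encoder + mask decoder consume the cached image embeddings and the prompts.
        decoder_config = onnx_config.__class__(model.config, vision_encoder=False)  # hypothetical kwarg
        models_for_export["prompt_encoder_mask_decoder"] = (model, decoder_config)
    else:
        # Monolithic export: the whole model goes into a single ONNX file.
        models_for_export["model"] = (model, onnx_config)

    return models_for_export
```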
```diff
@@ -156,6 +166,7 @@ def main_export(
     custom_onnx_configs: Optional[Dict[str, "OnnxConfig"]] = None,
     fn_get_submodels: Optional[Callable] = None,
     use_subprocess: bool = False,
+    _variant: str = "default",
     **kwargs_shapes,
 ):
     """
```
```diff
@@ -230,6 +241,8 @@
             exporting on CUDA device, where ORT does not release memory at inference session
             destruction. When set to `True`, the `main_export` call should be guarded in
             `if __name__ == "__main__":` block.
+        _variant (`str`, defaults to `default`):
+            Specify the variant of the ONNX export to use.
         **kwargs_shapes (`Dict`):
             Shapes to use during inference. This argument allows to override the default shapes used during the ONNX export.
 
```
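Assuming the parameter is reachable from the Python API, a call could look like the sketch below; the checkpoint name and the `"split"` value are placeholders rather than values prescribed by this PR.

```python
from optimum.exporters.onnx import main_export

# Hedged example: export a SAM checkpoint with a non-default export variant.
# "facebook/sam-vit-base" and "split" are placeholder values; check the SAM ONNX
# config's VARIANTS for the names it actually defines.
main_export(
    model_name_or_path="facebook/sam-vit-base",
    output="sam_onnx",
    _variant="split",
)
```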
```diff
@@ -373,6 +386,7 @@
         custom_architecture=custom_architecture,
         fn_get_submodels=fn_get_submodels,
         preprocessors=preprocessors,
+        _variant=_variant,
     )
 
     if not is_stable_diffusion:
```
Review comment: Maybe add a set of possible choices here.

Reply: It would be a bit tricky given that the choices are dynamic (dependent on the ONNX config).
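One possible way to reconcile the two comments, sketched here purely as an assumption about how it could be done (not what the PR implements), is to defer validation until the ONNX config is resolved and check the requested value against its `VARIANTS` keys:

```python
def validate_variant(onnx_config, requested: str) -> str:
    # The set of valid choices depends on the ONNX config, so it can only be
    # checked once the config has been resolved, not at argparse time.
    choices = list(getattr(onnx_config, "VARIANTS", {"default": ""}).keys())
    if requested not in choices:
        raise ValueError(
            f"Unknown export variant {requested!r}. Available variants: {', '.join(choices)}"
        )
    return requested
```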