diff --git a/onnxruntime/python/tools/transformers/models/gpt2/convert_to_onnx.py b/onnxruntime/python/tools/transformers/models/gpt2/convert_to_onnx.py
index a4015f50fdc13..841421a353b07 100644
--- a/onnxruntime/python/tools/transformers/models/gpt2/convert_to_onnx.py
+++ b/onnxruntime/python/tools/transformers/models/gpt2/convert_to_onnx.py
@@ -21,6 +21,7 @@
 import os
 import shutil
 import sys
+import warnings
 from pathlib import Path
 
 import numpy
@@ -243,6 +244,13 @@ def get_latency_name(batch_size, sequence_length, past_sequence_length):
 
 
 def main(argv=None, experiment_name: str = "", run_id: str = "0", csv_filename: str = "gpt2_parity_results.csv"):
+    warnings.warn(
+        "This example is deprecated. Use the Olive recipe instead: "
+        "https://github.com/microsoft/olive-recipes/tree/main",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
     result = {}
     if version.parse(transformers_version) < version.parse(
         "3.1.0"
diff --git a/onnxruntime/python/tools/transformers/models/llama/README.md b/onnxruntime/python/tools/transformers/models/llama/README.md
index cd8a8756d681e..eccfb46582fbc 100644
--- a/onnxruntime/python/tools/transformers/models/llama/README.md
+++ b/onnxruntime/python/tools/transformers/models/llama/README.md
@@ -1,3 +1,5 @@
+> **Deprecated:** This example is deprecated. Use the Olive recipes instead: https://github.com/microsoft/olive-recipes/tree/main
+
 # Contents
 - [LLaMA-2](#llama-2)
   - [Prerequisites](#prerequisites)
diff --git a/onnxruntime/python/tools/transformers/models/llama/convert_to_onnx.py b/onnxruntime/python/tools/transformers/models/llama/convert_to_onnx.py
index 6411dca00b5de..2cb6a733c5bc7 100644
--- a/onnxruntime/python/tools/transformers/models/llama/convert_to_onnx.py
+++ b/onnxruntime/python/tools/transformers/models/llama/convert_to_onnx.py
@@ -12,6 +12,7 @@
 import subprocess
 import sys
 import tempfile
+import warnings
 from itertools import chain
 
 import onnx
@@ -801,6 +802,12 @@ def get_args():
 
 
 def main():
+    warnings.warn(
+        "This example is deprecated. Use the Olive recipe instead: "
+        "https://github.com/microsoft/olive-recipes/tree/main",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if version.parse(torch.__version__) < version.parse("2.2.0"):
         logger.error(f"Detected PyTorch version {torch.__version__}. Please upgrade and use v2.2.0 or newer.")
         return
diff --git a/onnxruntime/python/tools/transformers/models/phi2/README.md b/onnxruntime/python/tools/transformers/models/phi2/README.md
index da62bba0f02fb..eab31680e64c7 100644
--- a/onnxruntime/python/tools/transformers/models/phi2/README.md
+++ b/onnxruntime/python/tools/transformers/models/phi2/README.md
@@ -1,3 +1,5 @@
+> **Deprecated:** This example is deprecated. Use the Olive recipes instead: https://github.com/microsoft/olive-recipes/tree/main
+
 # Phi2 Optimizations
 ## Prerequisites
 A Linux machine for [TorchDynamo-based ONNX Exporter](https://pytorch.org/docs/stable/onnx.html#torchdynamo-based-onnx-exporter)\
diff --git a/onnxruntime/python/tools/transformers/models/phi2/convert_to_onnx.py b/onnxruntime/python/tools/transformers/models/phi2/convert_to_onnx.py
index dd0accc5dd9e8..ebdb5e32b7184 100644
--- a/onnxruntime/python/tools/transformers/models/phi2/convert_to_onnx.py
+++ b/onnxruntime/python/tools/transformers/models/phi2/convert_to_onnx.py
@@ -7,6 +7,7 @@
 import argparse
 import logging
 import os
+import warnings
 from pathlib import Path
 
 import onnx
@@ -375,6 +376,12 @@ def parse_arguments():
 
 
 def main():
+    warnings.warn(
+        "This example is deprecated. Use the Olive recipe instead: "
+        "https://github.com/microsoft/olive-recipes/tree/main",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     args = parse_arguments()
     device = torch.device("cuda", args.device_id) if torch.cuda.is_available() else torch.device("cpu")
 
diff --git a/onnxruntime/python/tools/transformers/models/stable_diffusion/README.md b/onnxruntime/python/tools/transformers/models/stable_diffusion/README.md
index 12e6df53de577..4afede881fb93 100644
--- a/onnxruntime/python/tools/transformers/models/stable_diffusion/README.md
+++ b/onnxruntime/python/tools/transformers/models/stable_diffusion/README.md
@@ -1,3 +1,5 @@
+> **Deprecated:** This example is deprecated. Use the Olive recipes instead: https://github.com/microsoft/olive-recipes/tree/main
+
 # Stable Diffusion GPU Optimization
 
 ONNX Runtime uses the following optimizations to speed up Stable Diffusion in CUDA:
diff --git a/onnxruntime/python/tools/transformers/models/stable_diffusion/optimize_pipeline.py b/onnxruntime/python/tools/transformers/models/stable_diffusion/optimize_pipeline.py
index eb4d7242f72fc..33397cf75e127 100644
--- a/onnxruntime/python/tools/transformers/models/stable_diffusion/optimize_pipeline.py
+++ b/onnxruntime/python/tools/transformers/models/stable_diffusion/optimize_pipeline.py
@@ -20,6 +20,7 @@
 import os
 import shutil
 import tempfile
+import warnings
 from pathlib import Path
 
 import coloredlogs
@@ -569,6 +570,12 @@ def parse_arguments(argv: list[str] | None = None):
 
 
 def main(argv: list[str] | None = None):
+    warnings.warn(
+        "This example is deprecated. Use the Olive recipe instead: "
+        "https://github.com/microsoft/olive-recipes/tree/main",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     args = parse_arguments(argv)
     logger.info("Arguments: %s", str(args))
 
diff --git a/onnxruntime/python/tools/transformers/models/whisper/README.md b/onnxruntime/python/tools/transformers/models/whisper/README.md
index 9056ac07cc286..44a041d789b5d 100644
--- a/onnxruntime/python/tools/transformers/models/whisper/README.md
+++ b/onnxruntime/python/tools/transformers/models/whisper/README.md
@@ -1,3 +1,5 @@
+> **Deprecated:** This example is deprecated. Use the Olive recipes instead: https://github.com/microsoft/olive-recipes/tree/main
+
 # Whisper
 
 ## Prerequisites
diff --git a/onnxruntime/python/tools/transformers/models/whisper/convert_to_onnx.py b/onnxruntime/python/tools/transformers/models/whisper/convert_to_onnx.py
index 79b508047da55..93b509eec6982 100644
--- a/onnxruntime/python/tools/transformers/models/whisper/convert_to_onnx.py
+++ b/onnxruntime/python/tools/transformers/models/whisper/convert_to_onnx.py
@@ -7,6 +7,7 @@
 import argparse
 import logging
 import os
+import warnings
 
 import onnx
 import torch
@@ -493,6 +494,12 @@ def export_onnx_models(
 
 
 def main(argv=None):
+    warnings.warn(
+        "This example is deprecated. Use the Olive recipe instead: "
+        "https://github.com/microsoft/olive-recipes/tree/main",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     args = parse_arguments(argv)
     setup_logger(args.verbose)
 