|
15 | 15 | """ Generates C/C++ source code capable of performing inference for a model. """
|
16 | 16 |
|
17 | 17 | import os
|
18 |
| -import pathlib |
19 | 18 |
|
20 | 19 | from absl import app
|
21 | 20 | from absl import flags
|
22 | 21 | from collections.abc import Sequence
|
23 | 22 |
|
24 | 23 | from tflite_micro.codegen import inference_generator
|
25 | 24 | from tflite_micro.codegen import graph
|
| 25 | +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils |
26 | 26 |
|
27 | 27 | # Usage information:
|
28 | 28 | # Default:
|
|
48 | 48 | "'model' basename."),
|
49 | 49 | required=False)
|
50 | 50 |
|
51 |
| -_QUIET = flags.DEFINE_bool( |
52 |
| - name="quiet", |
53 |
| - default=False, |
54 |
| - help="Suppress informational output (e.g., for use in for build system)", |
55 |
| - required=False) |
56 |
| - |
57 | 51 |
|
def main(argv: Sequence[str]) -> None:
  """Generates C/C++ inference sources for the model given via flags.

  Resolves the output location from the --output_dir and --output_name
  flags, falling back to the model file's own directory and basename,
  reads the .tflite flatbuffer, and emits <output_name>.h/.cc via
  inference_generator.

  Args:
    argv: Positional command-line arguments (required by absl.app.run,
      unused here).
  """
  # `or` falls back when the optional flags are unset: an empty/None flag
  # value defers to the model path's directory and extension-less basename.
  output_dir = _OUTPUT_DIR.value or os.path.dirname(_MODEL_PATH.value)
  output_name = _OUTPUT_NAME.value or os.path.splitext(
      os.path.basename(_MODEL_PATH.value))[0]

  model = flatbuffer_utils.read_model(_MODEL_PATH.value)

  print("Generating inference code for model: {}".format(_MODEL_PATH.value))

  inference_generator.generate(output_dir, output_name,
                               graph.OpCodeTable([model]), graph.Graph(model))
|
|
0 commit comments