Skip to content

Commit f91dd91

Browse files
authored
build(codegen): suppress noise in console output (#2742)
build(codegen): suppress noise in console output Redirect output from the code_generator binary so that when it's used within the build system, it doesn't print unexpected, distracting noise to the console. Generally, compiler or generator commands don't print output unless there's an error. This reverts "build(codegen): suppress noise in console output (#2708)", commit d249577, which attempted to fix the same problem by adding a --quiet flag. A subsequent upgrade to the Tensorflow Python package caused new noise that wasn't possible to suppress with the flag. BUG=description
1 parent 182c8c7 commit f91dd91

File tree

3 files changed

+20
-23
lines changed

3 files changed

+20
-23
lines changed

codegen/build_def.bzl

+17-3
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,24 @@ def tflm_inference_library(
1818
native.genrule(
1919
name = generated_target,
2020
srcs = [tflite_model],
21-
outs = [name + ".h", name + ".cc"],
21+
outs = [
22+
name + ".h",
23+
name + ".cc",
24+
name + ".log",
25+
],
2226
tools = ["//codegen:code_generator"],
23-
cmd = "$(location //codegen:code_generator) --quiet " +
24-
"--model=$< --output_dir=$(RULEDIR) --output_name=%s" % name,
27+
cmd = """
28+
# code_generator (partially because it uses Tensorflow) outputs
29+
# much noise to the console. Instead, write output to a logfile to
30+
# prevent noise in the error-free bazel output.
31+
NAME=%s
32+
LOGFILE=$(RULEDIR)/$$NAME.log
33+
$(location //codegen:code_generator) \
34+
--model=$< \
35+
--output_dir=$(RULEDIR) \
36+
--output_name=$$NAME \
37+
>$$LOGFILE 2>&1
38+
""" % name,
2539
visibility = ["//visibility:private"],
2640
)
2741

codegen/code_generator.py

+2-20
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,14 @@
1515
""" Generates C/C++ source code capable of performing inference for a model. """
1616

1717
import os
18-
import pathlib
1918

2019
from absl import app
2120
from absl import flags
2221
from collections.abc import Sequence
2322

2423
from tflite_micro.codegen import inference_generator
2524
from tflite_micro.codegen import graph
25+
from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
2626

2727
# Usage information:
2828
# Default:
@@ -48,33 +48,15 @@
4848
"'model' basename."),
4949
required=False)
5050

51-
_QUIET = flags.DEFINE_bool(
52-
name="quiet",
53-
default=False,
54-
help="Suppress informational output (e.g., for use in for build system)",
55-
required=False)
56-
5751

5852
def main(argv: Sequence[str]) -> None:
59-
if _QUIET.value:
60-
restore = os.environ.get("TF_CPP_MIN_LOG_LEVEL", "0")
61-
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
62-
from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
63-
os.environ["TF_CPP_MIN_LOG_LEVEL"] = restore
64-
else:
65-
from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
66-
6753
output_dir = _OUTPUT_DIR.value or os.path.dirname(_MODEL_PATH.value)
6854
output_name = _OUTPUT_NAME.value or os.path.splitext(
6955
os.path.basename(_MODEL_PATH.value))[0]
7056

7157
model = flatbuffer_utils.read_model(_MODEL_PATH.value)
7258

73-
if not _QUIET.value:
74-
print("Generating inference code for model: {}".format(_MODEL_PATH.value))
75-
output_path = pathlib.Path(output_dir) / output_name
76-
print(f"Generating {output_path}.h")
77-
print(f"Generating {output_path}.cc")
59+
print("Generating inference code for model: {}".format(_MODEL_PATH.value))
7860

7961
inference_generator.generate(output_dir, output_name,
8062
graph.OpCodeTable([model]), graph.Graph(model))

codegen/inference_generator.py

+1
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ class ModelData(TypedDict):
3535

3636
def _render(output_file: pathlib.Path, template_file: pathlib.Path,
3737
model_data: ModelData) -> None:
38+
print("Generating {}".format(output_file))
3839
t = template.Template(filename=str(template_file))
3940
with output_file.open('w+') as file:
4041
file.write(t.render(**model_data))

0 commit comments

Comments
 (0)