build(codegen): suppress noise in console output (tensorflow#2708)
Add a --quiet option to the code_generator binary so that when it's used
within the build system, it doesn't print unexpected, distracting noise
to the console. Generally, compiler or generator commands don't print
output unless there's an error.

BUG=description
rkuester authored Oct 1, 2024
1 parent 6411584 commit d249577
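
In code_generator.py, the option is wired up with absl.flags (full diff below). A rough standalone sketch of that pattern, where the "working..." message is a hypothetical stand-in for the generator's informational output, not code from this commit:

from absl import app
from absl import flags

_QUIET = flags.DEFINE_bool("quiet", False, "Suppress informational output.")


def main(argv):
  del argv  # unused by this sketch
  if not _QUIET.value:
    print("working...")  # informational chatter that --quiet silences


if __name__ == "__main__":
  app.run(main)

Passing --quiet on the command line (or --noquiet to be explicit about the default) is handled by absl's flag parsing inside app.run.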
Showing 3 changed files with 21 additions and 4 deletions.
2 changes: 1 addition & 1 deletion codegen/build_def.bzl
@@ -20,7 +20,7 @@ def tflm_inference_library(
         srcs = [tflite_model],
         outs = [name + ".h", name + ".cc"],
         tools = ["//codegen:code_generator"],
-        cmd = "$(location //codegen:code_generator) " +
+        cmd = "$(location //codegen:code_generator) --quiet " +
               "--model=$< --output_dir=$(RULEDIR) --output_name=%s" % name,
         visibility = ["//visibility:private"],
     )
22 changes: 20 additions & 2 deletions codegen/code_generator.py
@@ -15,14 +15,14 @@
 """ Generates C/C++ source code capable of performing inference for a model. """

 import os
+import pathlib

 from absl import app
 from absl import flags
 from collections.abc import Sequence

 from tflite_micro.codegen import inference_generator
 from tflite_micro.codegen import graph
-from tflite_micro.tensorflow.lite.tools import flatbuffer_utils

 # Usage information:
 # Default:
@@ -48,15 +48,33 @@
           "'model' basename."),
     required=False)

+_QUIET = flags.DEFINE_bool(
+    name="quiet",
+    default=False,
+    help="Suppress informational output (e.g., for use in a build system)",
+    required=False)
+

 def main(argv: Sequence[str]) -> None:
+  if _QUIET.value:
+    restore = os.environ.get("TF_CPP_MIN_LOG_LEVEL", "0")
+    os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
+    from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
+    os.environ["TF_CPP_MIN_LOG_LEVEL"] = restore
+  else:
+    from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
+
   output_dir = _OUTPUT_DIR.value or os.path.dirname(_MODEL_PATH.value)
   output_name = _OUTPUT_NAME.value or os.path.splitext(
       os.path.basename(_MODEL_PATH.value))[0]

   model = flatbuffer_utils.read_model(_MODEL_PATH.value)

-  print("Generating inference code for model: {}".format(_MODEL_PATH.value))
+  if not _QUIET.value:
+    print("Generating inference code for model: {}".format(_MODEL_PATH.value))
+    output_path = pathlib.Path(output_dir) / output_name
+    print(f"Generating {output_path}.h")
+    print(f"Generating {output_path}.cc")

   inference_generator.generate(output_dir, output_name,
                                graph.OpCodeTable([model]), graph.Graph(model))
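
The deferred import is what makes this work: TensorFlow's C++ backend reads TF_CPP_MIN_LOG_LEVEL when the library is first loaded ("0" logs everything, "1" hides INFO, "2" also hides WARNING, "3" also hides ERROR), so the variable must be set before flatbuffer_utils pulls TensorFlow in. A standalone sketch of the same pattern; the helper name is illustrative, and the try/finally (an addition over the commit's version) restores the variable even if the import fails:

import os


def import_flatbuffer_utils_quietly():
  # The variable must be set before the first TensorFlow import; the C++
  # logger reads it at load time, not per log call.
  saved = os.environ.get("TF_CPP_MIN_LOG_LEVEL", "0")
  os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
  try:
    from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
  finally:
    os.environ["TF_CPP_MIN_LOG_LEVEL"] = saved
  return flatbuffer_utils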
1 change: 0 additions & 1 deletion codegen/inference_generator.py
@@ -35,7 +35,6 @@ class ModelData(TypedDict):

 def _render(output_file: pathlib.Path, template_file: pathlib.Path,
             model_data: ModelData) -> None:
-  print("Generating {}".format(output_file))
   t = template.Template(filename=str(template_file))
   with output_file.open('w+') as file:
     file.write(t.render(**model_data))
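
The removed print was the last informational message outside main's --quiet gate; code_generator.py now announces both output paths itself. For context, template.Template here appears to come from the Mako templating package; assuming so, a minimal sketch of the same render-to-file flow, with illustrative template text and data:

import pathlib

from mako import template

# Hypothetical model data; the real ModelData dict is built from the model.
model_data = {"model_name": "hello_world"}
t = template.Template(text="// Inference code for ${model_name}\n")
output_file = pathlib.Path("hello_world.cc")
with output_file.open('w+') as file:
  file.write(t.render(**model_data))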
