codegen: Add Bazel macro for inference library (#2366)
The TFLM code_generator takes a TFLite model and generates C/C++ source code that can then be compiled into a binary. This change adds a Bazel macro, tflm_inference_library, that invokes the code generator and creates a cc_library from the generated sources. It also adds a BUILD file for the checked-in hello_world example and updates update_example_source.sh to use Bazel instead of make.
BUG=b/294230402
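
For context, a minimal sketch of how a downstream BUILD file might use the new macro, based on the signature in codegen/build_def.bzl; the target names, model label, and source file below are illustrative placeholders, not part of this change (the real usage is the hello_world BUILD file in the diff):

```
load("//codegen:build_def.bzl", "tflm_inference_library")

# Runs //codegen:code_generator on the .tflite file and wraps the resulting
# my_model.h / my_model.cc in a cc_library named :my_model.
tflm_inference_library(
    name = "my_model",
    tflite_model = ":my_model.tflite",  # hypothetical model label
)

# Downstream targets depend on the generated library and include the header
# by its full workspace-relative path, e.g. "my/package/my_model.h".
cc_binary(
    name = "my_app",
    srcs = ["my_app.cc"],  # hypothetical source
    deps = [":my_model"],
)
```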
diff --git a/codegen/build_def.bzl b/codegen/build_def.bzl
new file mode 100644
index 0000000..28b6232
--- /dev/null
+++ b/codegen/build_def.bzl
@@ -0,0 +1,44 @@
+""" Build rule for generating ML inference code from TFLite model. """
+
+load("//tensorflow/lite/micro:build_def.bzl", "micro_copts")
+
+def tflm_inference_library(
+ name,
+ tflite_model,
+ visibility = None):
+ """Creates a C++ library capable of performing ML inference of the provided
+ model.
+
+ Args:
+ name: Target name.
+ tflite_model: TFLite Model to generate inference from.
+ visibility: Visibility for the C++ library.
+ """
+ generated_target = name + "_gen"
+ native.genrule(
+ name = generated_target,
+ srcs = [tflite_model],
+ outs = [name + ".h", name + ".cc"],
+ tools = ["//codegen:code_generator"],
+ cmd = "$(location //codegen:code_generator) " +
+ "--model=$< --output_dir=$(RULEDIR) --output_name=%s" % name,
+ visibility = ["//visibility:private"],
+ )
+
+ native.cc_library(
+ name = name,
+ hdrs = [name + ".h"],
+ srcs = [name + ".cc"],
+ deps = [
+ generated_target,
+ "//codegen/runtime:micro_codegen_context",
+ "//tensorflow/lite/c:common",
+ "//tensorflow/lite/c:c_api_types",
+ "//tensorflow/lite/kernels/internal:compatibility",
+ "//tensorflow/lite/micro/kernels:micro_ops",
+ "//tensorflow/lite/micro:micro_common",
+ "//tensorflow/lite/micro:micro_context",
+ ],
+ copts = micro_copts(),
+ visibility = visibility,
+ )
diff --git a/codegen/examples/hello_world/BUILD b/codegen/examples/hello_world/BUILD
new file mode 100644
index 0000000..88d33cf
--- /dev/null
+++ b/codegen/examples/hello_world/BUILD
@@ -0,0 +1,14 @@
+load("//codegen:build_def.bzl", "tflm_inference_library")
+
+package(default_visibility = ["//visibility:public"])
+
+tflm_inference_library(
+ name = "hello_world_model",
+ tflite_model = "//tensorflow/lite/micro/examples/hello_world/models:hello_world_int8.tflite",
+)
+
+cc_binary(
+ name = "hello_world",
+ srcs = ["hello_world.cc"],
+ deps = [":hello_world_model"],
+)
diff --git a/codegen/examples/hello_world/README.md b/codegen/examples/hello_world/README.md
index dc64db9..eb68b1b 100644
--- a/codegen/examples/hello_world/README.md
+++ b/codegen/examples/hello_world/README.md
@@ -7,7 +7,7 @@
Please note that this will execute Bazel from make as part of the process.
```
-make -f tensorflow/lite/micro/tools/make/Makefile codegen_hello_world
+bazel build //codegen/examples/hello_world:hello_world
```
## Running the example
@@ -16,7 +16,7 @@
the data structures fully populated yet.
```
-make -f tensorflow/lite/micro/tools/make/Makefile run_codegen_hello_world
+bazel run //codegen/examples/hello_world:hello_world
```
## Updating the generated sources
diff --git a/codegen/examples/hello_world/hello_world.cc b/codegen/examples/hello_world/hello_world.cc
index 9e8caea..70d665b 100644
--- a/codegen/examples/hello_world/hello_world.cc
+++ b/codegen/examples/hello_world/hello_world.cc
@@ -13,7 +13,7 @@
limitations under the License.
==============================================================================*/
-#include "hello_world_model.h"
+#include "codegen/examples/hello_world/hello_world_model.h"
#include "tensorflow/lite/c/c_api_types.h"
int main(int argc, char** argv) {
diff --git a/codegen/examples/hello_world/update_example_source.sh b/codegen/examples/hello_world/update_example_source.sh
index df5e2ac..a381fed 100755
--- a/codegen/examples/hello_world/update_example_source.sh
+++ b/codegen/examples/hello_world/update_example_source.sh
@@ -24,6 +24,9 @@
ROOT_DIR=${SCRIPT_DIR}/../../..
cd "${ROOT_DIR}"
-make -j8 -f tensorflow/lite/micro/tools/make/Makefile codegen_hello_world
-cp ./gen/linux_x86_64_default/genfiles/hello_world_model.h ${SCRIPT_DIR}
-cp ./gen/linux_x86_64_default/genfiles/hello_world_model.cc ${SCRIPT_DIR}
+bazel build //codegen/examples/hello_world:hello_world_model
+cp ./bazel-bin/codegen/examples/hello_world/hello_world_model.h ${SCRIPT_DIR}
+cp ./bazel-bin/codegen/examples/hello_world/hello_world_model.cc ${SCRIPT_DIR}
+clang-format --style=google -i \
+ ${SCRIPT_DIR}/hello_world_model.h \
+ ${SCRIPT_DIR}/hello_world_model.cc
diff --git a/codegen/runtime/BUILD b/codegen/runtime/BUILD
new file mode 100644
index 0000000..a1cb6c1
--- /dev/null
+++ b/codegen/runtime/BUILD
@@ -0,0 +1,18 @@
+load("//tensorflow/lite/micro:build_def.bzl", "micro_copts")
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+ name = "micro_codegen_context",
+ srcs = ["micro_codegen_context.cc"],
+ hdrs = ["micro_codegen_context.h"],
+ copts = micro_copts(),
+ deps = [
+ "//tensorflow/lite/c:common",
+ "//tensorflow/lite/kernels:op_macros",
+ "//tensorflow/lite/kernels/internal:compatibility",
+ "//tensorflow/lite/micro:micro_context",
+ "//tensorflow/lite/micro:micro_graph",
+ "//tensorflow/lite/micro:micro_log",
+ ],
+)
diff --git a/tensorflow/lite/micro/examples/hello_world/models/BUILD b/tensorflow/lite/micro/examples/hello_world/models/BUILD
index 4f025b0..4c9441b 100644
--- a/tensorflow/lite/micro/examples/hello_world/models/BUILD
+++ b/tensorflow/lite/micro/examples/hello_world/models/BUILD
@@ -9,7 +9,10 @@
"hello_world_float.tflite",
"hello_world_int8.tflite",
],
- visibility = ["//tensorflow/lite/micro/examples/hello_world:__subpackages__"],
+ visibility = [
+ "//codegen/examples/hello_world:__subpackages__",
+ "//tensorflow/lite/micro/examples/hello_world:__subpackages__",
+ ],
)
generate_cc_arrays(