Patch over some TensorFlow Python API changes. (#14709)
Working towards getting our sample Colab notebooks working again
(https://github.com/openxla/iree/actions/workflows/samples.yml). I could
try to update the Colab notebooks in-place, but it might just be easier
to take an incremental approach: land these fixes that help with local
builds, wait for a nightly release, then debug the notebooks with that
release build.
We're also short on presubmit / unit test coverage for the Python tools.
Resolves this discussion:
https://github.com/openxla/iree/pull/14303#discussion_r1260364221
Fixes https://github.com/openxla/iree/issues/14695
diff --git a/compiler/bindings/python/iree/compiler/tools/tf.py b/compiler/bindings/python/iree/compiler/tools/tf.py
index 729cb15..9eb28a2 100644
--- a/compiler/bindings/python/iree/compiler/tools/tf.py
+++ b/compiler/bindings/python/iree/compiler/tools/tf.py
@@ -129,11 +129,11 @@
elif options.save_temp_iree_input:
# Saving the file, use tfs.
tf_iree_input = tfs.alloc_optional(
- "tf-iree-input.mlir", export_as=options.save_temp_iree_input
+ "tf-iree-input.mlirbc", export_as=options.save_temp_iree_input
)
else:
# Not saving the file, so generate a loose temp file without tfs.
- tf_iree_input = os.path.join(tmpdir, "tf-iree-input.mlir")
+ tf_iree_input = os.path.join(tmpdir, "tf-iree-input.mlirbc")
__main__.import_saved_model(
output_path=tf_iree_input,
@@ -146,7 +146,7 @@
if options.import_only:
if options.output_file:
return None
- with open(tf_iree_input, "r") as f:
+ with open(tf_iree_input, "rb") as f:
return f.read()
# Run IREE compilation pipeline
diff --git a/integrations/tensorflow/python_projects/iree_tf/iree/tools/tf/scripts/iree_import_tf/__main__.py b/integrations/tensorflow/python_projects/iree_tf/iree/tools/tf/scripts/iree_import_tf/__main__.py
index 692c7de..5229f99 100644
--- a/integrations/tensorflow/python_projects/iree_tf/iree/tools/tf/scripts/iree_import_tf/__main__.py
+++ b/integrations/tensorflow/python_projects/iree_tf/iree/tools/tf/scripts/iree_import_tf/__main__.py
@@ -64,12 +64,24 @@
*, output_path, saved_model_dir, exported_names, import_type, tags
):
# From here there be dragons.
- from tensorflow.mlir.experimental import (
- convert_saved_model,
- convert_saved_model_v1,
- run_pass_pipeline,
- write_bytecode,
- )
+ try:
+ # Available from TF 2.14.
+ from tensorflow.mlir.experimental import (
+ convert_saved_model,
+ convert_saved_model_v1,
+ run_pass_pipeline,
+ write_bytecode,
+ )
+ except ImportError as e:
+ # Try the old names for the same API instead, e.g. with TF 2.12.
+ # Yes, this is brittle. Yes, we may need to completely change this
+ # if/when these APIs change again. Such is working with TensorFlow.
+ from tensorflow.python import pywrap_mlir
+
+ convert_saved_model = pywrap_mlir.experimental_convert_saved_model_to_mlir
+ convert_saved_model_v1 = pywrap_mlir.experimental_convert_saved_model_v1_to_mlir
+ run_pass_pipeline = pywrap_mlir.experimental_run_pass_pipeline
+ write_bytecode = pywrap_mlir.experimental_write_bytecode
if import_type == "savedmodel_v2":
result = convert_saved_model(