Enable intended double underscores in `tf_test_utils.py`; small spacing fix.

`tf_test_utils.save_and_compile_tf_module` appears to intend to save its artifacts with double underscores via `"__".join(target_backends)` (e.g. as `compiled__iree_vmla__iree_llvmjit.vmfb`). However, a regex on the same line immediately collapses the double underscores back to single underscores.

This change modifies the regex so that the double underscores are preserved, and uses double underscores in the debug artifact filename prefixes as well. A small spacing fix is also included in two error messages.
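For illustration, a minimal sketch of the before/after regex behavior, assuming `target_backends = ["iree_vmla", "iree_llvmjit"]` as in the example filename above:

```python
import re

target_backends = ["iree_vmla", "iree_llvmjit"]
joined = "__".join(target_backends)  # "iree_vmla__iree_llvmjit"

# Old pattern: "_" is not in the allowed class, so runs of underscores
# collapse to a single "_".
old = re.sub("[^0-9a-zA-Z]+", "_", joined)
print(old)  # iree_vmla_iree_llvmjit

# New pattern: "_" is allowed, so the intended double underscores survive
# while other punctuation is still normalized to "_".
new = re.sub("[^0-9a-zA-Z_]+", "_", joined)
print(new)  # iree_vmla__iree_llvmjit
```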

Closes https://github.com/google/iree/pull/2289

COPYBARA_INTEGRATE_REVIEW=https://github.com/google/iree/pull/2289 from phoenix-meadowlark:tf-formatting 3c0d5e6ed352843a00996e140a78308df676db93
PiperOrigin-RevId: 317942035
diff --git a/integrations/tensorflow/bindings/python/pyiree/tf/compiler/register_tensorflow.cc b/integrations/tensorflow/bindings/python/pyiree/tf/compiler/register_tensorflow.cc
index c9a4da4..1dcc0c2 100644
--- a/integrations/tensorflow/bindings/python/pyiree/tf/compiler/register_tensorflow.cc
+++ b/integrations/tensorflow/bindings/python/pyiree/tf/compiler/register_tensorflow.cc
@@ -63,7 +63,7 @@
                               absl::MakeSpan(mutable_exported_names));
   if (!module_or.status().ok()) {
     std::stringstream msg;
-    msg << "Failed to convert saved model to MLIR'" << saved_model_dir
+    msg << "Failed to convert saved model to MLIR '" << saved_model_dir
         << "': " << module_or.status();
     throw RaisePyError(PyExc_RuntimeError, msg.str().c_str());
   }
@@ -93,7 +93,7 @@
                                 context_bundle->mlir_context());
   if (!module_or.status().ok()) {
     std::stringstream msg;
-    msg << "Failed to convert saved model to MLIR'" << saved_model_dir
+    msg << "Failed to convert saved model to MLIR '" << saved_model_dir
         << "': " << module_or.status();
     throw RaisePyError(PyExc_RuntimeError, msg.str().c_str());
   }
diff --git a/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py b/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
index 37da253..90673e5 100644
--- a/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
+++ b/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
@@ -83,11 +83,11 @@
         pass_pipeline=())
 
     # Save the input MLIR module.
-    flattened_target_backends = re.sub("[^0-9a-zA-Z]+", "_",
+    flattened_target_backends = re.sub("[^0-9a-zA-Z_]+", "_",
                                        "__".join(target_backends))
     if global_debug_dir:
       mlir_path = os.path.join(global_debug_dir,
-                               "raw_%s.mlir" % flattened_target_backends)
+                               "raw__%s.mlir" % flattened_target_backends)
       logging.info("Saving raw TF input MLIR to: %s", mlir_path)
       with open(mlir_path, "w") as f:
         f.write(compiler_module.to_asm())
@@ -97,7 +97,7 @@
 
     if global_debug_dir:
       mlir_path = os.path.join(global_debug_dir,
-                               "input_%s.mlir" % flattened_target_backends)
+                               "input__%s.mlir" % flattened_target_backends)
       logging.info("Saving IREE input MLIR to: %s", mlir_path)
       with open(mlir_path, "w") as f:
         f.write(compiler_module.to_asm())
@@ -105,7 +105,7 @@
     compiled_module = compiler_module.compile(target_backends=target_backends)
     if global_debug_dir:
       compiled_path = os.path.join(
-          global_debug_dir, "compiled_%s.vmfb" % flattened_target_backends)
+          global_debug_dir, "compiled__%s.vmfb" % flattened_target_backends)
       logging.info("Saving compiled IREE module to: %s", compiled_path)
       with open(compiled_path, "wb") as f:
         f.write(compiled_module)