Use recommended python3.6 APIs and document 3.7 upgrades (#4134)

- Remove remaining %-string substitutions (`absl.logging` excluded).
- Replace all `subprocess.check_*` calls with the recommended `subprocess.run(check=True, ...)` (see the sketch below).
- Add TODO(#4131) for the following:
  - `python>=3.7: Use postponed type annotations.`
  - `python>=3.7: Consider using a dataclass.`
  - `python>=3.7: Use capture_output=True.`
  - `python>=3.7: Replace 'universal_newlines' with 'text'.`
  - `python>=3.7: Remove redundant OrderedDict.`
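
For illustration, a minimal sketch of the Python 3.6-compatible pattern this change converges on (the command and messages are placeholders, not code from this repository; the TODOs mirror the #4131 upgrade notes):

```python
import subprocess
import sys

# %-string substitution replaced with an f-string.
print(f"Using interpreter: {sys.executable}")

# subprocess.check_output(...) replaced with subprocess.run(..., check=True).
completed = subprocess.run(
    [sys.executable, "--version"],
    check=True,
    # TODO(#4131) python>=3.7: Use capture_output=True.
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
    universal_newlines=True)
print(f"Resolved: {completed.stdout.strip()}")
```

On Python 3.6, `subprocess.run` already returns a `CompletedProcess`, so call sites can read `.stdout` directly instead of decoding the bytes returned by `check_output`.
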
diff --git a/bindings/python/build_tools/python/generate_build.py b/bindings/python/build_tools/python/generate_build.py
index 5d3a5d5..71d3491 100644
--- a/bindings/python/build_tools/python/generate_build.py
+++ b/bindings/python/build_tools/python/generate_build.py
@@ -42,8 +42,8 @@
                                                       minor=sys.version_info[1])
   implib_abs_path = os.path.join(exec_prefix, "libs", implib_basename)
   if not os.path.exists(implib_abs_path):
-    raise RuntimeError("Could not find Windows python import library: %s" %
-                       (implib_abs_path,))
+    raise RuntimeError(
+        f"Could not find Windows python import library: {implib_abs_path}")
   implib_ws_path = "libs/" + implib_basename
   print("# SYMLINK: {abs}\t{ws}".format(abs=implib_abs_path, ws=implib_ws_path))
   extra_srcs.append(implib_ws_path)
diff --git a/bindings/python/pyiree/compiler2/core.py b/bindings/python/pyiree/compiler2/core.py
index cdc7900..2f13f18 100644
--- a/bindings/python/pyiree/compiler2/core.py
+++ b/bindings/python/pyiree/compiler2/core.py
@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 from enum import Enum
 import subprocess
 from typing import Any, Dict, List, Optional, Sequence, Union
@@ -58,6 +60,7 @@
     return OutputFormat[spec]
 
 
+# TODO(#4131) python>=3.7: Consider using a dataclass.
 class CompilerOptions:
   """Options to the compiler backend.
 
diff --git a/bindings/python/pyiree/compiler2/tf.py b/bindings/python/pyiree/compiler2/tf.py
index c9608af..ab95890 100644
--- a/bindings/python/pyiree/compiler2/tf.py
+++ b/bindings/python/pyiree/compiler2/tf.py
@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 from enum import Enum
 import logging
 import tempfile
@@ -76,6 +78,7 @@
     return ImportType[spec]
 
 
+# TODO(#4131) python>=3.7: Consider using a dataclass.
 class ImportOptions(CompilerOptions):
   """Import options layer on top of the backend compiler options."""
 
diff --git a/bindings/python/pyiree/compiler2/tools.py b/bindings/python/pyiree/compiler2/tools.py
index 480600b..18260c2 100644
--- a/bindings/python/pyiree/compiler2/tools.py
+++ b/bindings/python/pyiree/compiler2/tools.py
@@ -149,7 +149,7 @@
       run_args["input"] = immediate_input
 
     # Capture output.
-    # Upgrade note: Python >= 3.7 can just use capture_output=True
+    # TODO(#4131) python>=3.7: Use capture_output=True.
     run_args["stdout"] = subprocess.PIPE
     run_args["stderr"] = subprocess.PIPE
     process = subprocess.run(command_line, **run_args)
diff --git a/bindings/python/pyiree/rt/system_api.py b/bindings/python/pyiree/rt/system_api.py
index c75fbad..427e318 100644
--- a/bindings/python/pyiree/rt/system_api.py
+++ b/bindings/python/pyiree/rt/system_api.py
@@ -23,6 +23,8 @@
 # pylint: disable=unused-argument
 # pylint: disable=g-explicit-length-test
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 __all__ = ["load_module", "load_modules", "Config", "SystemContext"]
 
 import os
@@ -58,7 +60,7 @@
   driver_exceptions = {}
   for driver_name in driver_names:
     if driver_name not in available_driver_names:
-      print("Could not create driver %s (not registered)" % driver_name,
+      print(f"Could not create driver {driver_name} (not registered)",
             file=sys.stderr)
       continue
     try:
@@ -66,7 +68,7 @@
       # TODO(laurenzo): Remove these prints to stderr (for now, more information
       # is better and there is no better way to report it yet).
     except Exception as ex:  # pylint: disable=broad-except
-      print("Could not create default driver %s: %r" % (driver_name, ex),
+      print(f"Could not create default driver {driver_name}: {ex:!r}",
             file=sys.stderr)
       driver_exceptions[driver_name] = ex
       continue
@@ -78,18 +80,18 @@
     try:
       device = driver.create_default_device()
     except Exception as ex:
-      print("Could not create default driver device %s: %r" % (driver_name, ex),
+      print(f"Could not create default driver device {driver_name}: {ex:!r}",
             file=sys.stderr)
       driver_exceptions[driver_name] = ex
       continue
 
-    print("Created IREE driver %s: %r" % (driver_name, driver), file=sys.stderr)
+    print("Created IREE driver {driver_name}: {driver:!r}", file=sys.stderr)
     return driver
 
   # All failed.
-  raise RuntimeError("Could not create any requested driver "
-                     "%r (available=%r) : %r" %
-                     (driver_names, available_driver_names, driver_exceptions))
+  raise RuntimeError(
+      f"Could not create any requested driver {repr(driver_names)} (available="
+      f"{repr(available_driver_names)}) : {repr(driver_exceptions)}")
 
 
 class Config:
@@ -147,10 +149,7 @@
     return unpacked_results
 
   def __repr__(self):
-    return "<BoundFunction %r (%r)>" % (
-        self._abi,
-        self._vm_function,
-    )
+    return f"<BoundFunction {repr(self._abi)} ({repr(self._vm_function)})>"
 
   def get_serialized_values(self):
     if self._serialized_inputs is None:
@@ -187,14 +186,13 @@
 
     vm_function = self._vm_module.lookup_function(name)
     if vm_function is None:
-      raise KeyError("Function '%s' not found in module '%s'" %
-                     (name, self.name))
+      raise KeyError(f"Function '{name}' not found in module '{self.name}'")
     bound_function = BoundFunction(self._context, vm_function)
     self._lazy_functions[name] = bound_function
     return bound_function
 
   def __repr__(self):
-    return "<BoundModule %r>" % (self._vm_module,)
+    return f"<BoundModule {self._vm_module:!r}>"
 
 
 class Modules(dict):
@@ -212,7 +210,8 @@
 
   def __init__(self, modules=None, config: Optional[Config] = None):
     self._config = config if config is not None else _get_global_config()
-    print("SystemContext driver=%r" % self._config.driver, file=sys.stderr)
+    # repr() is spelled out here rather than using the !r conversion.
+    print(f"SystemContext driver={repr(self._config.driver)}", file=sys.stderr)
     self._is_dynamic = modules is None
     if not self._is_dynamic:
       init_modules = self._config.default_modules + tuple(modules)
@@ -258,7 +257,7 @@
     for m in modules:
       name = m.name
       if name in self._modules:
-        raise ValueError("Attempt to register duplicate module: '%s'" % (name,))
+        raise ValueError(f"Attempt to register duplicate module: '{name}'")
       self._modules[m.name] = BoundModule(self, m)
     self._vm_context.register_modules(modules)
 
diff --git a/bindings/python/setup.py b/bindings/python/setup.py
index 3a669cc..f0f7cec 100644
--- a/bindings/python/setup.py
+++ b/bindings/python/setup.py
@@ -30,7 +30,7 @@
   sub_path = os.path.join(this_dir, f"{name}.py")
   args = [sys.executable, sub_path] + sys.argv[1:]
   print(f"##### Running sub setup: {' '.join(args)}")
-  subprocess.check_call(args)
+  subprocess.run(args, check=True)
   print("")
 
 
diff --git a/build_tools/bazel_to_cmake/bazel_to_cmake_targets.py b/build_tools/bazel_to_cmake/bazel_to_cmake_targets.py
index b3375ff..07a508b 100644
--- a/build_tools/bazel_to_cmake/bazel_to_cmake_targets.py
+++ b/build_tools/bazel_to_cmake/bazel_to_cmake_targets.py
@@ -131,4 +131,4 @@
     # All Bazel targets map to a single CMake target.
     return ["ruy"]
 
-  raise KeyError("No conversion found for target '%s'" % target)
+  raise KeyError(f"No conversion found for target '{target}'")
diff --git a/build_tools/docker/utils.py b/build_tools/docker/utils.py
index f9a041b..5fc68c9 100644
--- a/build_tools/docker/utils.py
+++ b/build_tools/docker/utils.py
@@ -25,23 +25,25 @@
                 dry_run: bool = False,
                 check: bool = True,
                 capture_output: bool = False,
-                universal_newlines: bool = True,
+                text: bool = True,
                 **run_kwargs) -> subprocess.CompletedProcess:
   """Thin wrapper around subprocess.run"""
   print(f"Running: `{' '.join(command)}`")
-  if not dry_run:
-    if capture_output:
-      # Hardcode support for python <= 3.6.
-      run_kwargs["stdout"] = subprocess.PIPE
-      run_kwargs["stderr"] = subprocess.PIPE
+  if dry_run:
+    # Dummy CompletedProcess with a successful returncode.
+    return subprocess.CompletedProcess(command, returncode=0)
 
-    completed_process = subprocess.run(command,
-                                       universal_newlines=universal_newlines,
-                                       check=check,
-                                       **run_kwargs)
-    return completed_process
-  # Dummy CompletedProess with successful returncode.
-  return subprocess.CompletedProcess(command, returncode=0)
+  if capture_output:
+    # TODO(#4131) python>=3.7: Use capture_output=True.
+    run_kwargs["stdout"] = subprocess.PIPE
+    run_kwargs["stderr"] = subprocess.PIPE
+
+  # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+  completed_process = subprocess.run(command,
+                                     universal_newlines=text,
+                                     check=check,
+                                     **run_kwargs)
+  return completed_process
 
 
 def check_gcloud_auth(dry_run: bool = False):
diff --git a/build_tools/manylinux_py_setup.py b/build_tools/manylinux_py_setup.py
index abdd11d..5a049b9 100755
--- a/build_tools/manylinux_py_setup.py
+++ b/build_tools/manylinux_py_setup.py
@@ -47,7 +47,7 @@
         "numpy",
     ]
     print("EXEC:", " ".join(args))
-    subprocess.check_call(args)
+    subprocess.run(args, check=True)
 
 
 def dump_current(identifier):
@@ -65,8 +65,8 @@
     identifier = python_exe.parent.parent.name
     versions_ids.append(identifier)
     # Invoke ourselves with a different interpreter/args to dump config.
-    subprocess.check_call(
-        [str(python_exe), __file__, "_current_args", identifier])
+    subprocess.run([str(python_exe), __file__, "_current_args", identifier],
+                   check=True)
   print("-DIREE_MULTIPY_VERSIONS='{}'".format(";".join(versions_ids)))
 
 
diff --git a/colab/start_colab_kernel.py b/colab/start_colab_kernel.py
index 3a9b3a1..77bfab9 100755
--- a/colab/start_colab_kernel.py
+++ b/colab/start_colab_kernel.py
@@ -53,36 +53,43 @@
 
   # Determine the repository root (one dir-level up).
   repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-  print("Repository root: %s" % (repo_root,))
+  print(f"Repository root: {repo_root}")
 
   # Use 'bazelisk' instead of 'bazel' if it exists on the path.
   # Bazelisk is an optional utility that pick versions of Bazel to use and
   # passes through all command-line arguments to the real Bazel binary:
   # https://github.com/bazelbuild/bazelisk
   bazel_exe = "bazelisk" if shutil.which("bazelisk") else "bazel"
-  print("Using bazel executable: %s" % (bazel_exe))
+  print(f"Using bazel executable: {bazel_exe}")
 
   # Detect python and query bazel for its output.
-  print("Setting Bazel PYTHON_BIN=%s" % (sys.executable,))
+  print(f"Setting Bazel PYTHON_BIN={sys.executable}")
   bazel_env["PYTHON_BIN"] = sys.executable
-  bazel_bin = subprocess.check_output([bazel_exe, "info", "bazel-bin"],
-                                      cwd=repo_root,
-                                      env=bazel_env).decode("utf-8")
-  bazel_bin = bazel_bin.splitlines()[0]
+  completed_process = subprocess.run(
+      [bazel_exe, "info", "bazel-bin"],
+      cwd=repo_root,
+      env=bazel_env,
+      check=True,
+      # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+      universal_newlines=True,
+      # TODO(#4131) python>=3.7: Use capture_output=True.
+      stdout=subprocess.PIPE)
+  bazel_bin = completed_process.stdout.splitlines()[0]
   # Bazel always reports the path with '/'. On windows, switch it
   # since we need native path manipulation code below to have it the
   # right way.
   if os.path.sep == "\\":
     bazel_bin = bazel_bin.replace("/", "\\")
-  print("Found Bazel bin: %s" % (bazel_bin))
+  print(f"Found Bazel bin: {bazel_bin}")
 
 
 def build():
   """Builds the python bundle."""
   print("Building python bindings...")
-  subprocess.check_call([bazel_exe, "build", "//colab:everything_for_colab"],
-                        cwd=repo_root,
-                        env=bazel_env)
+  subprocess.run([bazel_exe, "build", "//colab:everything_for_colab"],
+                 cwd=repo_root,
+                 env=bazel_env,
+                 check=True)
 
 
 def run():
@@ -139,9 +146,9 @@
 def show_install_instructions():
   """Prints some install instructions."""
   print("ERROR: Unable to load Jupyter. Ensure that it is installed:")
-  print("  %s -m pip install --upgrade pip" % (sys.executable,))
-  print("  %s -m pip install jupyter" % (sys.executable,))
-  print("  %s -m pip install jupyter_http_over_ws" % (sys.executable,))
+  print(f"  {sys.executable} -m pip install --upgrade pip")
+  print(f"  {sys.executable} -m pip install jupyter")
+  print(f"  {sys.executable} -m pip install jupyter_http_over_ws")
   print("  jupyter serverextension enable --py jupyter_http_over_ws")
 
 
diff --git a/configure_bazel.py b/configure_bazel.py
index 18be4a4..b86b34f 100644
--- a/configure_bazel.py
+++ b/configure_bazel.py
@@ -46,8 +46,15 @@
   # For some reason, bazel doesn't always find the user site path, which
   # is typically where "pip install --user" libraries end up. Inject it.
   try:
-    user_site = subprocess.check_output(
-        [sys.executable, "-m", "site", "--user-site"]).decode("utf-8").strip()
+    user_site = subprocess.run(
+        [sys.executable, "-m", "site", "--user-site"],
+        check=True,
+        # TODO(#4131) python>=3.7: Use capture_output=True.
+        stderr=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+        universal_newlines=True,
+    ).stdout.strip()
     print("Found user site directory:", user_site)
   except subprocess.CalledProcessError:
     print("Could not resolve user site directory")
diff --git a/integrations/tensorflow/bindings/python/pyiree/tf/support/module_utils.py b/integrations/tensorflow/bindings/python/pyiree/tf/support/module_utils.py
index fb55501..d5a57f1 100644
--- a/integrations/tensorflow/bindings/python/pyiree/tf/support/module_utils.py
+++ b/integrations/tensorflow/bindings/python/pyiree/tf/support/module_utils.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 """Utilities for compiling 'tf.Module's"""
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 import collections
 import os
 import tempfile
diff --git a/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py b/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
index 1961a9a..24e83aa 100644
--- a/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
+++ b/integrations/tensorflow/bindings/python/pyiree/tf/support/tf_test_utils.py
@@ -22,6 +22,8 @@
 #   ref: reference – for the reference CompiledModule
 #   tar: target - for one of the target CompiledModules
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 import collections
 import copy
 import itertools
@@ -111,7 +113,7 @@
     backends = module_utils.BackendInfo.get_all_backends()
   return backends
 
-
+# TODO(#4131) python>=3.7: Consider using a (frozen) dataclass.
 Modules = collections.namedtuple("Modules",
                                  ["ref_module", "tar_modules", "artifacts_dir"])
 
diff --git a/integrations/tensorflow/bindings/python/pyiree/tf/support/trace_utils.py b/integrations/tensorflow/bindings/python/pyiree/tf/support/trace_utils.py
index 1a0c789..e6c4265 100644
--- a/integrations/tensorflow/bindings/python/pyiree/tf/support/trace_utils.py
+++ b/integrations/tensorflow/bindings/python/pyiree/tf/support/trace_utils.py
@@ -18,6 +18,8 @@
 #   ref: reference – for the reference CompiledModule
 #   tar: target - for one of the target CompiledModules
 
+# TODO(#4131) python>=3.7: Use postponed type annotations.
+
 import copy
 import glob
 import inspect
diff --git a/scripts/get_e2e_artifacts.py b/scripts/get_e2e_artifacts.py
index 21d9259..3b5fd62 100755
--- a/scripts/get_e2e_artifacts.py
+++ b/scripts/get_e2e_artifacts.py
@@ -163,7 +163,7 @@
     command = ['bazel', 'test', *test_suites, '--color=yes']
     print(f'Running: `{" ".join(command)}`')
     if not FLAGS.dry_run:
-      subprocess.check_call(command)
+      subprocess.run(command, check=True)
     print()
 
   written_paths = set()
diff --git a/scripts/git/submodule_versions.py b/scripts/git/submodule_versions.py
index d3e2c61..d578286 100755
--- a/scripts/git/submodule_versions.py
+++ b/scripts/git/submodule_versions.py
@@ -46,7 +46,7 @@
   raw_status = utils.execute(["git", "submodule", "status"],
                              cwd=repo_dir,
                              silent=True,
-                             capture_output=True).decode("UTF-8")
+                             capture_output=True).stdout
   status_lines = []
   for line in raw_status.splitlines():
     # Format is a status char followed by revision, space and path.
@@ -105,7 +105,7 @@
              "still in the version file") % (path,))
       continue
     if written is None:
-      print("Warning: Submodule %s is not in the version file" % (current,))
+      print(f"Warning: Submodule '{current}' is not in the version file")
       continue
     # Directly update the submodule commit hash in the index.
     # See: https://stackoverflow.com/questions/33514642
@@ -138,7 +138,7 @@
         "  ./scripts/git/submodule_versions.py export # Use version in git state ('actual')"
     )
     for k, (current, written) in diff_versions.items():
-      print("%s : actual=%s written=%s" % (k, current, written))
+      print(f"{k} : actual={current} written={written}")
     return False
   return True
 
diff --git a/scripts/git/update_to_llvm_syncpoint.py b/scripts/git/update_to_llvm_syncpoint.py
index d543b48..0775bb1 100755
--- a/scripts/git/update_to_llvm_syncpoint.py
+++ b/scripts/git/update_to_llvm_syncpoint.py
@@ -152,8 +152,7 @@
   return utils.execute(["git", "rev-parse", rev],
                        cwd=path,
                        silent=True,
-                       capture_output=True,
-                       universal_newlines=True).strip()
+                       capture_output=True).stdout.strip()
 
 
 def find_new_llvm_bazel_commit(llvm_bazel_path, llvm_commit, llvm_bazel_commit):
@@ -192,8 +191,7 @@
   return utils.execute(
       ["git", "submodule", "status", "third_party/llvm-project"],
       capture_output=True,
-      universal_newlines=True,
-      cwd=llvm_bazel_path).split()[0].lstrip("+-")
+      cwd=llvm_bazel_path).stdout.split()[0].lstrip("+-")
 
 
 def find_new_tf_commit(tensorflow_path, llvm_commit, tf_commit):
@@ -233,8 +231,7 @@
           "tensorflow/workspace.bzl"
       ],
       capture_output=True,
-      universal_newlines=True,
-      cwd=tensorflow_path).split()
+      cwd=tensorflow_path).stdout.split()
   if len(tf_integrate_commits) > 2:
     raise RuntimeError(
         f"Expected one or two TF commits to involve LLVM commit {llvm_commit},"
diff --git a/scripts/git/utils.py b/scripts/git/utils.py
index b4328e7..cbeb29e 100644
--- a/scripts/git/utils.py
+++ b/scripts/git/utils.py
@@ -25,7 +25,7 @@
   return execute(["git", "rev-parse", "--show-toplevel"],
                  cwd=os.path.dirname(__file__),
                  capture_output=True,
-                 silent=True).strip().decode("UTF-8")
+                 silent=True).stdout.strip()
 
 
 def str2bool(v):
@@ -42,22 +42,30 @@
     raise argparse.ArgumentTypeError("Boolean value expected.")
 
 
-def execute(args, cwd, capture_output=False, silent=False, **kwargs):
+def execute(args, cwd, capture_output=False, text=True, silent=False, **kwargs):
   """Executes a command.
 
   Args:
     args: List of command line arguments.
     cwd: Directory to execute in.
     capture_output: Whether to capture the output.
+    text: Whether or not to treat std* as text (as opposed to binary streams).
     silent: Whether to skip logging the invocation.
     **kwargs: Extra arguments to pass to subprocess.exec
 
   Returns:
-    The output if capture_output, otherwise None.
+    A subprocess.CompletedProcess.
   """
   if not silent:
-    print("+", " ".join(args), "  [from %s]" % cwd)
+    print(f"+{' '.join(args)}  [from {cwd}]")
   if capture_output:
-    return subprocess.check_output(args, cwd=cwd, **kwargs)
-  else:
-    return subprocess.check_call(args, cwd=cwd, **kwargs)
+    # TODO(#4131) python>=3.7: Use capture_output=True.
+    kwargs["stdout"] = subprocess.PIPE
+    kwargs["stderr"] = subprocess.PIPE
+  return subprocess.run(
+      args,
+      cwd=cwd,
+      check=True,
+      # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+      universal_newlines=text,
+      **kwargs)
diff --git a/scripts/update_e2e_coverage.py b/scripts/update_e2e_coverage.py
index fba79b2..3de3738 100755
--- a/scripts/update_e2e_coverage.py
+++ b/scripts/update_e2e_coverage.py
@@ -30,6 +30,7 @@
 REFERENCE_BACKEND = 'tf'
 # Assumes that tests are expanded for the tf, iree_vmla, and
 # iree_vulkan backends.
+# TODO(#4131) python>=3.7: Remove redundant OrderedDict.
 BACKENDS_TO_TITLES = collections.OrderedDict([
     ('tf', 'tensorflow'),
     ('tflite', 'tflite'),
diff --git a/scripts/update_op_coverage.py b/scripts/update_op_coverage.py
index 575a390..77ce782 100755
--- a/scripts/update_op_coverage.py
+++ b/scripts/update_op_coverage.py
@@ -79,9 +79,15 @@
 def get_tested_ops_for_backends(build_dir):
   """Parses current op tests for each backend."""
 
-  ctest_output = subprocess.check_output(
-      ['ctest', '-N', '-L', E2E_XLA_OPS_PATH], cwd=build_dir)
-  tests = ctest_output.decode('ascii').strip().split('\n')
+  completed_process = subprocess.run(
+      ['ctest', '-N', '-L', E2E_XLA_OPS_PATH],
+      cwd=build_dir,
+      # TODO(#4131) python>=3.7: Use capture_output=True.
+      stderr=subprocess.PIPE,
+      stdout=subprocess.PIPE,
+      # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+      universal_newlines=True)
+  tests = completed_process.stdout.strip().split('\n')
   res = collections.defaultdict(list)
   for t in tests:
     if not t.endswith('.mlir'):
diff --git a/scripts/utils.py b/scripts/utils.py
index 870044b..313dd65 100644
--- a/scripts/utils.py
+++ b/scripts/utils.py
@@ -33,10 +33,13 @@
   print(f'Running: `{" ".join(command)}`')
   if dry_run:
     return None, None
-  process = subprocess.run(command,
-                           stderr=subprocess.PIPE,
-                           stdout=subprocess.PIPE,
-                           universal_newlines=True)
+  process = subprocess.run(
+      command,
+      # TODO(#4131) python>=3.7: Use capture_output=True.
+      stderr=subprocess.PIPE,
+      stdout=subprocess.PIPE,
+      # TODO(#4131) python>=3.7: Replace 'universal_newlines' with 'text'.
+      universal_newlines=True)
 
   if log_stderr:
     for line in process.stderr.splitlines():