Allow updating docker digests without rebuilding (#9984)

This is useful when the digests somehow get out of sync, as happened in
https://github.com/iree-org/iree/pull/9983. I can also turn this
digest-consistency check into a lint check later.

Also updates quote style to be consistent with all our other Python
stuff.
diff --git a/build_tools/docker/manage_images.py b/build_tools/docker/manage_images.py
index c362d74..bf55cf1 100755
--- a/build_tools/docker/manage_images.py
+++ b/build_tools/docker/manage_images.py
@@ -18,7 +18,7 @@
   python3 build_tools/docker/manage_images.py --image cmake
 
 Print out output for rebuilding the cmake image and all images that
-transitively on depend on it, but don't take side-effecting actions:
+transitively depend on it, but don't take side-effecting actions:
   python3 build_tools/docker/manage_images.py --image cmake --dry-run
 
 Rebuild and push all images and update references to them in the repository:
@@ -36,24 +36,24 @@
 
 import utils
 
-IREE_GCR_URL = 'gcr.io/iree-oss/'
-DIGEST_REGEX = r'sha256:[a-zA-Z0-9]+'
-DOCKER_DIR = 'build_tools/docker/'.replace('/', os.sep)
+IREE_GCR_URL = "gcr.io/iree-oss/"
+DIGEST_REGEX = r"sha256:[a-zA-Z0-9]+"
+DOCKER_DIR = "build_tools/docker/".replace("/", os.sep)
 
 # Map from image names to images that they depend on.
 IMAGES_TO_DEPENDENCIES = {
-    'base': [],
-    'manylinux2014_x86_64-release': [],
-    'android': ['base'],
-    'emscripten': ['base'],
-    'nvidia': ['base'],
-    'riscv': ['base'],
-    'gradle-android': ['base'],
-    'frontends': ['android'],
-    'swiftshader': ['base'],
-    'samples': ['swiftshader'],
-    'frontends-swiftshader': ['frontends', 'swiftshader'],
-    'frontends-nvidia': ['frontends'],
+    "base": [],
+    "manylinux2014_x86_64-release": [],
+    "android": ["base"],
+    "emscripten": ["base"],
+    "nvidia": ["base"],
+    "riscv": ["base"],
+    "gradle-android": ["base"],
+    "frontends": ["android"],
+    "swiftshader": ["base"],
+    "samples": ["swiftshader"],
+    "frontends-swiftshader": ["frontends", "swiftshader"],
+    "frontends-nvidia": ["frontends"],
 }
 
 IMAGES_TO_DEPENDENT_IMAGES = {k: [] for k in IMAGES_TO_DEPENDENCIES}
@@ -61,36 +61,42 @@
   for dependency in dependencies:
     IMAGES_TO_DEPENDENT_IMAGES[dependency].append(image)
 
-IMAGES_HELP = [f'`{name}`' for name in IMAGES_TO_DEPENDENCIES]
-IMAGES_HELP = f'{", ".join(IMAGES_HELP)} or `all`'
+IMAGES_HELP = [f"`{name}`" for name in IMAGES_TO_DEPENDENCIES]
+IMAGES_HELP = f"{', '.join(IMAGES_HELP)} or `all`"
 
 
 def parse_arguments():
   """Parses command-line options."""
   parser = argparse.ArgumentParser(
       description="Build IREE's Docker images and optionally push them to GCR.")
-  parser.add_argument('--images',
-                      '--image',
+  parser.add_argument("--images",
+                      "--image",
                       type=str,
                       required=True,
-                      action='append',
-                      help=f'Name of the image to build: {IMAGES_HELP}.')
+                      action="append",
+                      help=f"Name of the image to build: {IMAGES_HELP}.")
   parser.add_argument(
-      '--dry_run',
-      '--dry-run',
-      '-n',
-      action='store_true',
-      help='Print output without building or pushing any images.')
+      "--dry_run",
+      "--dry-run",
+      "-n",
+      action="store_true",
+      help="Print output without building or pushing any images.")
+  parser.add_argument(
+      "--only_references",
+      "--only-references",
+      action="store_true",
+      help=
+      "Just update references to images using the digests in prod_digests.txt")
 
   args = parser.parse_args()
   for image in args.images:
-    if image == 'all':
+    if image == "all":
       # Sort for a determinstic order
       args.images = sorted(IMAGES_TO_DEPENDENCIES.keys())
     elif image not in IMAGES_TO_DEPENDENCIES:
-      raise parser.error('Expected --image to be one of:\n'
-                         f'  {IMAGES_HELP}\n'
-                         f'but got `{image}`.')
+      raise parser.error("Expected --image to be one of:\n"
+                         f"  {IMAGES_HELP}\n"
+                         f"but got `{image}`.")
   return args
 
 
@@ -123,12 +129,12 @@
 
 def get_repo_digest(tagged_image_url: str, dry_run: bool = False) -> str:
   inspect_command = [
-      'docker',
-      'image',
-      'inspect',
+      "docker",
+      "image",
+      "inspect",
       tagged_image_url,
-      '-f',
-      '{{index .RepoDigests 0}}',
+      "-f",
+      "{{index .RepoDigests 0}}",
   ]
   try:
     completed_process = utils.run_command(
@@ -141,35 +147,35 @@
       return ""
     else:
       raise RuntimeError(
-          f'Computing the repository digest for {tagged_image_url} failed. Has '
-          'it been pushed to GCR?') from error
-  _, repo_digest = completed_process.stdout.strip().split('@')
+          f"Computing the repository digest for {tagged_image_url} failed. Has "
+          "it been pushed to GCR?") from error
+  _, repo_digest = completed_process.stdout.strip().split("@")
   return repo_digest
 
 
 def update_references(image_url: str, digest: str, dry_run: bool = False):
-  """Updates all references to 'image_url' with a sha256 digest."""
-  print(f'Updating references to {image_url}')
+  """Updates all references to "image_url" with a sha256 digest."""
+  print(f"Updating references to {image_url}")
 
-  grep_command = ['git', 'grep', '-l', f'{image_url}@sha256']
+  grep_command = ["git", "grep", "-l", f"{image_url}@sha256"]
   try:
     completed_process = utils.run_command(grep_command,
                                           capture_output=True,
                                           timeout=5)
   except subprocess.CalledProcessError as error:
     if error.returncode == 1:
-      print(f'Found no references to {image_url}')
+      print(f"Found no references to {image_url}")
       return
     raise error
 
   # Update references in all grepped files.
   files = completed_process.stdout.split()
-  print(f'Updating references in {len(files)} files: {files}')
+  print(f"Updating references in {len(files)} files: {files}")
   if not dry_run:
     for line in fileinput.input(files=files, inplace=True):
-      print(re.sub(f'{image_url}@{DIGEST_REGEX}', f'{image_url}@{digest}',
+      print(re.sub(f"{image_url}@{DIGEST_REGEX}", f"{image_url}@{digest}",
                    line),
-            end='')
+            end="")
 
 
 def parse_prod_digests() -> Dict[str, str]:
@@ -181,50 +187,54 @@
   return image_urls_to_prod_digests
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
   args = parse_arguments()
-
-  # Ensure the user has the correct authorization to push to GCR.
-  utils.check_gcloud_auth(dry_run=args.dry_run)
-
-  images_to_process = get_ordered_images_to_process(args.images)
-  print(f'Also processing dependent images. Will process: {images_to_process}')
-
-  dependencies = get_dependencies(images_to_process)
-  print(f'Pulling image dependencies: {dependencies}')
   image_urls_to_prod_digests = parse_prod_digests()
-  for dependency in dependencies:
-    dependency_url = posixpath.join(IREE_GCR_URL, dependency)
-    # If `dependency` is a new image then it may not have a prod digest yet.
-    if dependency_url in image_urls_to_prod_digests:
-      digest = image_urls_to_prod_digests[dependency_url]
-      dependency_with_digest = f'{dependency_url}@{digest}'
-      utils.run_command(["docker", "pull", dependency_with_digest],
-                        dry_run=args.dry_run)
+  images_to_process = get_ordered_images_to_process(args.images)
+  print(f"Also processing dependent images. Will process: {images_to_process}")
+
+  if not args.only_references:
+    # Ensure the user has the correct authorization to push to GCR.
+    utils.check_gcloud_auth(dry_run=args.dry_run)
+
+    dependencies = get_dependencies(images_to_process)
+    print(f"Pulling image dependencies: {dependencies}")
+    for dependency in dependencies:
+      dependency_url = posixpath.join(IREE_GCR_URL, dependency)
+      # If `dependency` is a new image then it may not have a prod digest yet.
+      if dependency_url in image_urls_to_prod_digests:
+        digest = image_urls_to_prod_digests[dependency_url]
+        dependency_with_digest = f"{dependency_url}@{digest}"
+        utils.run_command(["docker", "pull", dependency_with_digest],
+                          dry_run=args.dry_run)
 
   for image in images_to_process:
-    print('\n' * 5 + f'Processing image {image}')
+    print("\n" * 5 + f"Processing image {image}")
     image_url = posixpath.join(IREE_GCR_URL, image)
-    tagged_image_url = f'{image_url}'
+    tagged_image_url = f"{image_url}"
     image_path = os.path.join(DOCKER_DIR, image)
 
-    utils.run_command(
-        ['docker', 'build', '--tag', tagged_image_url, image_path],
-        dry_run=args.dry_run)
+    if args.only_references:
+      digest = image_urls_to_prod_digests[image_url]
+    else:
+      utils.run_command(
+          ["docker", "build", "--tag", tagged_image_url, image_path],
+          dry_run=args.dry_run)
 
-    utils.run_command(['docker', 'push', tagged_image_url],
-                      dry_run=args.dry_run)
+      utils.run_command(["docker", "push", tagged_image_url],
+                        dry_run=args.dry_run)
 
-    digest = get_repo_digest(tagged_image_url, args.dry_run)
+      digest = get_repo_digest(tagged_image_url, args.dry_run)
 
-    # Check that the image is in 'prod_digests.txt' and append it to the list
-    # in the file if it isn't.
-    if image_url not in image_urls_to_prod_digests:
-      image_with_digest = f'{image_url}@{digest}'
-      print(
-          f'Adding new image {image_with_digest} to {utils.PROD_DIGESTS_PATH}')
-      if not args.dry_run:
-        with open(utils.PROD_DIGESTS_PATH, 'a') as f:
-          f.write(f'{image_with_digest}\n')
+      # Check that the image is in "prod_digests.txt" and append it to the list
+      # in the file if it isn't.
+      if image_url not in image_urls_to_prod_digests:
+        image_with_digest = f"{image_url}@{digest}"
+        print(
+            f"Adding new image {image_with_digest} to {utils.PROD_DIGESTS_PATH}"
+        )
+        if not args.dry_run:
+          with open(utils.PROD_DIGESTS_PATH, "a") as f:
+            f.write(f"{image_with_digest}\n")
 
     update_references(image_url, digest, dry_run=args.dry_run)