{
  "commit": "7ce8c8e9f8f8a0234f97bbd576072b1cdf756f92",
  "tree": "b430a0bbe93b1d1950fb2818d941165902f6301e",
  "parents": [
    "4de493af31e370ca2eb1bb590469ebbf76fc8d5b"
  ],
  "author": {
    "name": "MaheshRavishankar",
    "email": "1663364+MaheshRavishankar@users.noreply.github.com",
    "time": "Wed Jul 17 11:27:14 2024 -0700"
  },
  "committer": {
    "name": "GitHub",
    "email": "noreply@github.com",
    "time": "Wed Jul 17 11:27:14 2024 -0700"
  },
  "message": "[Preprocessing] Add a one-off pattern to fuse attention with transpose. (#17901)\n\nThe attention ops in SDXL models are usually followed by a\r\n`tensor.expand_shape` and a `transpose`. It is more natural to fold\r\nthese in with the attention for codegeneration. This is done as a\r\none-off pattern for now. Ideally the attention ops can be fused with any\r\nof its elementwise consumers when attention is handled natively by the\r\nbackend pass-pipelines.\r\nMore details are in https://github.com/iree-org/iree/issues/17673.\r\n\r\nSigned-off-by: MaheshRavishankar \u003cmahesh.ravishankar@gmail.com\u003e",
  "tree_diff": [
    {
      "type": "modify",
      "old_id": "1692c78bf800651d5dcefc69e50ed1c486cad8db",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Preprocessing/Common/BUILD.bazel",
      "new_id": "a4818888559289eef34992385ef195a5abc9eedc",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/BUILD.bazel"
    },
    {
      "type": "modify",
      "old_id": "4613d4bb404b35297b93d77eaec9d297d9615624",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Preprocessing/Common/CMakeLists.txt",
      "new_id": "1bc4c1e859722dfd1ace5ef13b93eca7bd82b669",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/CMakeLists.txt"
    },
    {
      "type": "add",
      "old_id": "0000000000000000000000000000000000000000",
      "old_mode": 0,
      "old_path": "/dev/null",
      "new_id": "2c84abbba06fa625520daa4f55de01252a4bf749",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/FoldAttentionWithTranspose.cpp"
    },
    {
      "type": "modify",
      "old_id": "e4921b81fe883f53d4ba7c3ace2f3df533360cde",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Preprocessing/Common/Passes.td",
      "new_id": "edc17057b55b6f0c21ffe5cfeec6d403fe014a4d",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/Passes.td"
    },
    {
      "type": "modify",
      "old_id": "54ebb1176caac198e6df71a7fd1a52f93fc4312d",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Preprocessing/Common/test/BUILD.bazel",
      "new_id": "e484b956de10cf66929ba066623722c8bbd0531d",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/test/BUILD.bazel"
    },
    {
      "type": "modify",
      "old_id": "03c92b7423bc6e59088da3e1ed1b5625164252a3",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Preprocessing/Common/test/CMakeLists.txt",
      "new_id": "2c105a2b0636298dfcdf7e6743dac0bfc5c7902c",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/test/CMakeLists.txt"
    },
    {
      "type": "add",
      "old_id": "0000000000000000000000000000000000000000",
      "old_mode": 0,
      "old_path": "/dev/null",
      "new_id": "cbb6ebd7835c07e494cc484ef853810d67dabd54",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Preprocessing/Common/test/fold_attention_with_transpose.mlir"
    }
  ]
}
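
For context, the fold described in the commit message matches an attention op whose result flows through a `tensor.expand_shape` into a `transpose`, and rewrites the sequence so the transposition is absorbed by the attention op. The C++ outline below is a minimal structural sketch of such a one-off rewrite pattern, not the code added in FoldAttentionWithTranspose.cpp: the pattern name, the choice of linalg::TransposeOp to represent the transpose, and the elided rewrite step are illustrative assumptions.

// Sketch of a one-off pattern matching attention -> expand_shape -> transpose.
// Names below are assumptions for illustration; see
// FoldAttentionWithTranspose.cpp in this diff for the actual implementation.
#include "iree/compiler/Dialect/LinalgExt/IR/LinalgExtOps.h"
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/Tensor/IR/Tensor.h"
#include "mlir/IR/PatternMatch.h"

namespace mlir::iree_compiler {

// Hypothetical pattern name for this sketch.
struct FoldAttentionWithTransposePattern final
    : OpRewritePattern<IREE::LinalgExt::AttentionOp> {
  using OpRewritePattern<IREE::LinalgExt::AttentionOp>::OpRewritePattern;

  LogicalResult matchAndRewrite(IREE::LinalgExt::AttentionOp attentionOp,
                                PatternRewriter &rewriter) const override {
    // The attention result must have a single tensor.expand_shape consumer.
    if (!attentionOp->hasOneUse())
      return rewriter.notifyMatchFailure(attentionOp, "expected single use");
    auto expandOp =
        dyn_cast<tensor::ExpandShapeOp>(*attentionOp->getUsers().begin());
    if (!expandOp || !expandOp->hasOneUse())
      return rewriter.notifyMatchFailure(attentionOp,
                                         "expected lone expand_shape user");
    // The expand_shape must in turn feed a single transpose.
    auto transposeOp =
        dyn_cast<linalg::TransposeOp>(*expandOp->getUsers().begin());
    if (!transposeOp)
      return rewriter.notifyMatchFailure(expandOp,
                                         "expected transpose consumer");

    // Rewrite step elided: build a replacement attention op whose output
    // layout reflects the permutation, reshape to the final type, and
    // replace transposeOp with the result.
    return failure();  // placeholder so the sketch stands alone
  }
};

} // namespace mlir::iree_compiler

Such a pattern would typically be collected into a RewritePatternSet inside the pass's runOnOperation and applied with applyPatternsAndFoldGreedily, the usual driver for one-off preprocessing rewrites in MLIR.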
