{
  "commit": "108e050bb707dfae4e7e4d8b52759bd795b52a97",
  "tree": "27553bf0c132bae9b331c76f99521b22a23d6620",
  "parents": [
    "1dcc62e1d563cdb7085ef6223b591730379c0904"
  ],
  "author": {
    "name": "Rob Suderman",
    "email": "suderman@google.com",
    "time": "Mon Jul 22 15:34:35 2024 -0700"
  },
  "committer": {
    "name": "GitHub",
    "email": "noreply@github.com",
    "time": "Mon Jul 22 15:34:35 2024 -0700"
  },
  "message": "Make fp8 attention using static quantized softmax (#17949)\n\nAdded a flag that uses a statically selected softmax range for fp8\r\nattention. This allows the value\r\nto be tuned without plumbing through the attention operation. Eventually\r\nthis should be a parameter\r\npassed to `linalg_ext.attention`.",
  "tree_diff": [
    {
      "type": "modify",
      "old_id": "cfe994a5c1390e130333335e5e3c38f218b0bec7",
      "old_mode": 33188,
      "old_path": "compiler/src/iree/compiler/Dialect/LinalgExt/Transforms/AggregatedOpInterfaceImpl.cpp",
      "new_id": "23ba79c2f6658a5a5f4744c043d9d05b0cb5d630",
      "new_mode": 33188,
      "new_path": "compiler/src/iree/compiler/Dialect/LinalgExt/Transforms/AggregatedOpInterfaceImpl.cpp"
    }
  ]
}
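
The commit message above describes swapping the dynamically computed softmax maximum for a statically selected range, so that an fp8 quantization scale can be chosen ahead of time rather than plumbed through the attention operation. The following is a minimal numpy sketch of that idea only, not the IREE implementation; the constant `STATIC_MAX` and the function names `softmax_dynamic` / `softmax_static` are hypothetical and chosen purely for illustration.

```python
# Minimal sketch: dynamically scaled softmax vs. a statically scaled one.
# This is NOT the IREE code path; it only illustrates the idea in the commit message.
import numpy as np

STATIC_MAX = 8.0  # assumed, tunable static softmax range (hypothetical value)

def softmax_dynamic(logits):
    # Standard numerically stable softmax: subtract the per-row max computed at runtime.
    m = logits.max(axis=-1, keepdims=True)
    e = np.exp(logits - m)
    return e / e.sum(axis=-1, keepdims=True)

def softmax_static(logits, static_max=STATIC_MAX):
    # Static variant: subtract a compile-time constant instead of the runtime max,
    # so an fp8 quantization scale can be fixed up front (and tuned via a flag)
    # instead of depending on the runtime maximum of the logits.
    e = np.exp(logits - static_max)
    return e / e.sum(axis=-1, keepdims=True)

if __name__ == "__main__":
    x = np.random.randn(2, 4).astype(np.float32)
    # The shift cancels in the normalization, so both give the same result in
    # exact arithmetic; the static form only changes the intermediate value range.
    print(np.allclose(softmax_dynamic(x), softmax_static(x)))
```

Because the subtracted constant cancels in the normalization, the static variant is mathematically equivalent to the dynamic one; its purpose is to make the intermediate exponent range predictable so the fp8 scaling factor can be selected (and tuned) statically.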
