Integrate llvm-project @4f3c9dabecc6074f8455ca23ba70020d5c556e63 (#17827)
Reverted commits:
- https://github.com/iree-org/llvm-project/commit/fa0666876cdf11162af341911b99311a56be2274
- https://github.com/iree-org/llvm-project/commit/2718654c542c742e2dd18dcda8b93de1d4d3b640
- https://github.com/iree-org/llvm-project/commit/9abb574f9a68b1c0c32f49745f9dad8e1a7db1f9
---------
Signed-off-by: yzhang93 <zhyuhang88@gmail.com>
Signed-off-by: hanhanW <hanhan0912@gmail.com>
Co-authored-by: hanhanW <hanhan0912@gmail.com>
Co-authored-by: Jakub Kuderski <jakub@nod-labs.com>
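The main API adaptation in this integrate: the `linalg::ControlPropagationFn` callback taken by `linalg::populateDataLayoutPropagationPatterns` now receives an `OpOperand *` instead of an `Operation *`, so callers gate propagation per use-def edge (producer and consumer both visible) rather than per op. A minimal sketch of the new callback shape, mirroring the PackToIntrinsics change in the diff below; the null guard on `producer` is an extra precaution for block-argument operands and is not part of the in-tree change:

```cpp
// Requires mlir/Dialect/Linalg/Transforms/Transforms.h for the typedef and
// the populate function; getLoweringConfig is IREE's codegen config lookup.
linalg::ControlPropagationFn control = [](OpOperand *opOperand) -> bool {
  // The operand connects a producer (e.g. pack/unpack or reshape) to its
  // consumer; both ends of the edge are available to the caller now.
  Operation *producer = opOperand->get().getDefiningOp(); // null for block args
  Operation *consumer = opOperand->getOwner();
  // Only propagate when at least one side has not been assigned a config yet.
  return producer &&
         (!getLoweringConfig(producer) || !getLoweringConfig(consumer));
};
linalg::populateDataLayoutPropagationPatterns(patterns, control);
```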
diff --git a/compiler/src/iree/compiler/Codegen/Common/TransformExtensions/CommonExtensions.cpp b/compiler/src/iree/compiler/Codegen/Common/TransformExtensions/CommonExtensions.cpp
index 15a0c6a..a8aae48 100644
--- a/compiler/src/iree/compiler/Codegen/Common/TransformExtensions/CommonExtensions.cpp
+++ b/compiler/src/iree/compiler/Codegen/Common/TransformExtensions/CommonExtensions.cpp
@@ -253,7 +253,7 @@
void transform_dialect::ApplyBubblePackUnpackPatternsOp::populatePatterns(
RewritePatternSet &patterns) {
linalg::populateDataLayoutPropagationPatterns(
- patterns, [](Operation *op) { return true; });
+ patterns, [](OpOperand *opOperand) { return true; });
}
void transform_dialect::ApplyFoldReshapeIntoTensorHalInterfacePatternsOp::
diff --git a/compiler/src/iree/compiler/Codegen/Dialect/GPU/Transforms/PackToIntrinsics.cpp b/compiler/src/iree/compiler/Codegen/Dialect/GPU/Transforms/PackToIntrinsics.cpp
index 91977fb..4e9b40e 100644
--- a/compiler/src/iree/compiler/Codegen/Dialect/GPU/Transforms/PackToIntrinsics.cpp
+++ b/compiler/src/iree/compiler/Codegen/Dialect/GPU/Transforms/PackToIntrinsics.cpp
@@ -97,8 +97,10 @@
// Run layout propagation patterns to pull in adjacent un-configured ops.
RewritePatternSet patterns(context);
- linalg::ControlPropagationFn control = [](Operation *op) -> bool {
- return !getLoweringConfig(op);
+ linalg::ControlPropagationFn control = [](OpOperand *opOperand) -> bool {
+ Operation *producer = opOperand->get().getDefiningOp();
+ Operation *consumer = opOperand->getOwner();
+ return !getLoweringConfig(producer) || !getLoweringConfig(consumer);
};
linalg::populateDataLayoutPropagationPatterns(patterns, control);
diff --git a/compiler/src/iree/compiler/GlobalOptimization/DataLayoutPropagation.cpp b/compiler/src/iree/compiler/GlobalOptimization/DataLayoutPropagation.cpp
index 43d6800..58929fc 100644
--- a/compiler/src/iree/compiler/GlobalOptimization/DataLayoutPropagation.cpp
+++ b/compiler/src/iree/compiler/GlobalOptimization/DataLayoutPropagation.cpp
@@ -23,14 +23,18 @@
FunctionOpInterface funcOp = getOperation();
RewritePatternSet patterns(context);
- linalg::populateDataLayoutPropagationPatterns(patterns, [](Operation *op) {
- // Currently only bubble up/push down pack/unpack through collapse/expand
- // shape ops.
- // TODO(#17734): The propagation through expand_shape ops is broken.
- // Enable the propagation once we find it useful and the upstream issue is
- // fixed.
- return isa<tensor::CollapseShapeOp>(op);
- });
+ linalg::populateDataLayoutPropagationPatterns(
+ patterns, [](OpOperand *opOperand) {
+ Operation *producer = opOperand->get().getDefiningOp();
+ Operation *consumer = opOperand->getOwner();
+ (void)consumer;
+ // Currently only bubble up/push down pack/unpack through
+ // collapse/expand shape ops.
+ // TODO(#17734): The propagation through expand_shape ops is broken.
+ // Enable the propagation once we find it useful and the upstream
+ // issue is fixed.
+ return isa<tensor::CollapseShapeOp>(producer);
+ });
if (failed(applyPatternsAndFoldGreedily(funcOp, std::move(patterns)))) {
funcOp.emitOpError("folding patterns failed");
return signalPassFailure();
diff --git a/compiler/src/iree/compiler/GlobalOptimization/GeneralizeLinalgNamedOps.cpp b/compiler/src/iree/compiler/GlobalOptimization/GeneralizeLinalgNamedOps.cpp
index 3ab937a..5f30902 100644
--- a/compiler/src/iree/compiler/GlobalOptimization/GeneralizeLinalgNamedOps.cpp
+++ b/compiler/src/iree/compiler/GlobalOptimization/GeneralizeLinalgNamedOps.cpp
@@ -40,7 +40,7 @@
linalg::DivUnsignedOp, linalg::ElemwiseBinaryOp,
linalg::ElemwiseUnaryOp, linalg::ExpOp, linalg::FloorOp,
linalg::LogOp, linalg::MapOp, linalg::MaxOp,
- linalg::MulOp, linalg::NegfOp, linalg::ReduceOp,
+ linalg::MulOp, linalg::NegFOp, linalg::ReduceOp,
linalg::SubOp, linalg::TransposeOp>(
linalgOp.getOperation())) {
namedOpCandidates.push_back(linalgOp);
diff --git a/compiler/src/iree/compiler/Preprocessing/Common/ConvertConvToChannelsLast.cpp b/compiler/src/iree/compiler/Preprocessing/Common/ConvertConvToChannelsLast.cpp
index ac41bd8..cdef914 100644
--- a/compiler/src/iree/compiler/Preprocessing/Common/ConvertConvToChannelsLast.cpp
+++ b/compiler/src/iree/compiler/Preprocessing/Common/ConvertConvToChannelsLast.cpp
@@ -686,7 +686,7 @@
GreedyRewriteConfig config;
config.maxIterations = GreedyRewriteConfig::kNoLimit;
linalg::populateDataLayoutPropagationPatterns(
- patterns, [](Operation *op) { return true; });
+ patterns, [](OpOperand *opOperand) { return true; });
if (failed(
applyPatternsAndFoldGreedily(op, std::move(patterns), config))) {
return signalPassFailure();
diff --git a/third_party/llvm-project b/third_party/llvm-project
index c5bb6d3..fe82af3 160000
--- a/third_party/llvm-project
+++ b/third_party/llvm-project
@@ -1 +1 @@
-Subproject commit c5bb6d3e2eb870b5ae454b410ac190ea05045303
+Subproject commit fe82af3d2d9b0487e281b9349c61d2831594469f