================
@@ -124,63 +124,74 @@ xegpu::DistributeLayoutAttr
xegpu::getDistributeLayoutAttr(const Value value) {
Operation *defOp = result.getDefiningOp();
assert(defOp && "result must have a defining op");
- // For ConvertLayoutOp, the layout is stored in the targetLayoutAttr
- if (auto convertOp = dyn_cast<xegpu::ConvertLayoutOp>(defOp))
- return convertOp.getTargetLayoutAttr();
-
- // for LoadNdOp, the layout is stored in the tensor descriptor
- if (auto loadNd = dyn_cast<xegpu::LoadNdOp>(defOp))
- return getDistributeLayoutAttr(loadNd.getTensorDesc());
-
- // for LoadMatrixOp, the layout is attached to the property of the op
- if (auto loadOp = dyn_cast<xegpu::LoadMatrixOp>(defOp))
- return loadOp.getLayoutAttr();
-
- // for StoreMatrixOp, the layout is attached to the property of the op
- if (auto storeOp = dyn_cast<xegpu::StoreMatrixOp>(defOp))
- return storeOp.getLayoutAttr();
- std::string layoutName = getLayoutName(result);
- if (defOp->hasAttr(layoutName))
- return defOp->getAttrOfType<xegpu::DistributeLayoutAttr>(layoutName);
-
- // check for "permament" layout only after "temporary" layout name lookup
- // for backward compatibility
- if (auto loadGatherOp = dyn_cast<xegpu::LoadGatherOp>(defOp))
- return loadGatherOp.getLayoutAttr();
+ if (auto anchorOp = dyn_cast<xegpu::AnchorLayoutInterface>(defOp)) {
+ auto layout = anchorOp.getAnchorLayout();
+ return layout;
+ }
+
+ std::string layoutName = getTempLayoutName(result);
+ if (defOp->hasAttr(layoutName)) {
+ auto layout =
+ defOp->getAttrOfType<xegpu::DistributeLayoutAttr>(layoutName);
+ return layout;
+ }
}
if (auto arg = dyn_cast<BlockArgument>(value)) {
auto *parentOp = arg.getOwner()->getParentOp();
if (auto loop = dyn_cast<LoopLikeOpInterface>(parentOp)) {
OpOperand *tiedInit = loop.getTiedLoopInit(arg);
- if (tiedInit)
- return getDistributeLayoutAttr(tiedInit->get());
+ if (tiedInit) {
+ auto layout = getDistributeLayoutAttr(tiedInit->get());
+ return layout;
+ }
----------------
tkarna wrote:
nit: functionally identical to the previous implementation, revert?
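
For reference, a minimal sketch of the revert (this just restores the one-liner removed in the hunk above, in the same LoopLikeOpInterface branch; behavior is unchanged):

  if (tiedInit)
    return getDistributeLayoutAttr(tiedInit->get());

The extra local and braces add nothing over the direct early return.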
https://github.com/llvm/llvm-project/pull/172125