/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "llvm/ADT/SmallVector.h"
#include "mlir/IR/Attributes.h"  // from @llvm-project
#include "mlir/IR/BuiltinTypes.h"  // from @llvm-project
#include "mlir/Pass/Pass.h"  // from @llvm-project
#include "mlir/Pass/PassRegistry.h"  // from @llvm-project
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"
#include "tensorflow/compiler/mlir/tensorflow/transforms/passes_detail.h"

namespace mlir {
namespace TF {

namespace {

// Rewrites RecvTPUEmbeddingActivationsOp and SendTPUEmbeddingGradients ops to
// internal variants by introducing XlaRecvTPUEmbeddingDeduplicationData op.
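//
// As a schematic illustration only (result types, full operand lists, and
// most attributes are elided; %grads stands in for the variadic gradient
// operands):
//
//   %activations = "tf.RecvTPUEmbeddingActivations"() {config = "..."}
//   "tf.SendTPUEmbeddingGradients"(%grads) {config = "..."}
//
// is rewritten to:
//
//   %dedup = "tf.XlaRecvTPUEmbeddingDeduplicationData"() {config = "..."}
//   %activations = "tf.XlaRecvTPUEmbeddingActivations"(%dedup) {config = "..."}
//   "tf.XlaSendTPUEmbeddingGradients"(%grads, %dedup) {config = "..."}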
struct RewriteTPUEmbeddingOps
    : public RewriteTPUEmbeddingOpsPassBase<RewriteTPUEmbeddingOps> {
  void runOnOperation() override;
};

// Replaces the given op with an `OpT` op that takes the original operands
// plus `operand` appended at the end, forwards all uses to the new op's
// results, and erases the original op.
template <typename OpT>
OpT AddOperandAndRewriteAs(Operation* op, Value operand, OpBuilder* builder) {
  builder->setInsertionPoint(op);
  auto operands = llvm::to_vector<4>(op->getOperands());
  operands.push_back(operand);
  auto new_op = builder->create<OpT>(op->getLoc(), op->getResultTypes(),
                                     operands, op->getAttrs());
  op->replaceAllUsesWith(new_op.getOperation()->getResults());
  op->erase();
  return new_op;
}

// Returns success if the region has at most one op of the template type and
// assigns it to `result`, if present. If there are multiple such ops, returns
// failure.
template <typename OpT>
LogicalResult GetOp(Region* region, OpT* result) {
  *result = {};
  for (auto op : region->getOps<OpT>()) {
    if (*result) return op.emitError("should be unique within a function");
    *result = op;
  }
  return success();
}

LogicalResult RunOnRegion(Region* region) {
  RecvTPUEmbeddingActivationsOp recv_op;
  if (failed(GetOp(region, &recv_op))) return failure();

  SendTPUEmbeddingGradientsOp send_op;
  if (failed(GetOp(region, &send_op))) return failure();

  // No TPU embedding ops.
  if (!recv_op && !send_op) return success();

  Location loc = recv_op ? recv_op.getLoc() : send_op.getLoc();
  StringRef config = recv_op ? recv_op.config() : send_op.config();

  // Create XlaRecvTPUEmbeddingDeduplicationData op.
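  // The deduplication data is materialized once per region as a scalar
  // variant tensor; it is then appended as a trailing operand to both
  // internal ops created below.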
  OpBuilder builder(region);
  auto output_ty =
      RankedTensorType::get({}, VariantType::get(region->getContext()));
  auto dedup_op = builder.create<XlaRecvTPUEmbeddingDeduplicationDataOp>(
      loc, output_ty, config);

  // Rewrite RecvTPUEmbeddingActivations op to the corresponding internal op.
  if (recv_op)
    AddOperandAndRewriteAs<XlaRecvTPUEmbeddingActivationsOp>(recv_op, dedup_op,
                                                             &builder);

  // Rewrite SendTPUEmbeddingGradients op to the corresponding internal op and
  // then update the OperandSegmentSize attribute.
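  // Note: the internal send op's operands end up grouped as
  // [gradients..., learning_rates..., deduplication_data], so the segment
  // sizes below are {N, NN, 1}, with the trailing 1 covering the newly
  // appended deduplication data operand.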
  if (send_op) {
    int32_t operand_sizes[] = {static_cast<int32_t>(send_op.N()),
                               static_cast<int32_t>(send_op.NN()), 1};
    auto operand_size_attr = builder.getDenseI32ArrayAttr(operand_sizes);

    auto new_send_op = AddOperandAndRewriteAs<XlaSendTPUEmbeddingGradientsOp>(
        send_op, dedup_op, &builder);
    new_send_op->setAttr(new_send_op.getOperandSegmentSizeAttr(),
                         operand_size_attr);
  }
  return success();
}

void RewriteTPUEmbeddingOps::runOnOperation() {
  func::FuncOp func = getOperation();
  if (failed(RunOnRegion(&func.getBody()))) return signalPassFailure();

  func.walk([&](Operation* op) {
    for (Region& region : op->getRegions()) {
      if (failed(RunOnRegion(&region))) return signalPassFailure();
    }
  });
}

}  // anonymous namespace

std::unique_ptr<OperationPass<func::FuncOp>>
CreateRewriteTPUEmbeddingOpsPass() {
  return std::make_unique<RewriteTPUEmbeddingOps>();
}

}  // namespace TF
}  // namespace mlir