/*
 * Copyright 2016 WebAssembly Community Group participants
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef wasm_ir_utils_h
#define wasm_ir_utils_h

#include "wasm.h"
#include "wasm-traversal.h"
#include "wasm-builder.h"
#include "pass.h"
#include "ir/branch-utils.h"

namespace wasm {

// Measures the size of an AST: the number of Expression nodes it contains.

struct Measurer : public PostWalker<Measurer, UnifiedExpressionVisitor<Measurer>> {
  Index size = 0;

  void visitExpression(Expression* curr) {
    size++;
  }

  static Index measure(Expression* tree) {
    Measurer measurer;
    measurer.walk(tree);
    return measurer.size;
  }
};
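
// Illustrative usage (a sketch, not part of this header's API): count the nodes
// in an expression tree, e.g. to gate a size-sensitive decision. `expr` and the
// 100-node threshold are hypothetical.
//
//   Index nodes = Measurer::measure(expr);
//   if (nodes > 100) {
//     // too large, e.g. skip a transform that would duplicate the tree
//   }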

struct ExpressionAnalyzer {
  // Given a stack of expressions, checks if the topmost is used as a result.
  // For example, if the parent is a block and the node is before the last
  // position, it is not used.
  static bool isResultUsed(std::vector<Expression*> stack, Function* func);

  // Checks if a value is dropped.
  static bool isResultDropped(std::vector<Expression*> stack);

  // Checks if a break is simple: no condition, no value, just a plain branch.
  static bool isSimple(Break* curr) {
    return !curr->condition && !curr->value;
  }

  using ExprComparer = std::function<bool(Expression*, Expression*)>;
  static bool flexibleEqual(Expression* left, Expression* right, ExprComparer comparer);

  static bool equal(Expression* left, Expression* right) {
    auto comparer = [](Expression* left, Expression* right) {
      return false;
    };
    return flexibleEqual(left, right, comparer);
  }

  // Hashes an expression, ignoring superficial details like specific internal names.
  static uint32_t hash(Expression* curr);
};
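
// Illustrative usage (a sketch): structural comparison of two expressions, plus
// a flexible comparison where a custom comparer may declare a pair of nodes
// equal. `a` and `b` are hypothetical Expression* values, and the comparer
// policy below is made up for the example.
//
//   bool same = ExpressionAnalyzer::equal(a, b);
//   bool sameIgnoringConstValues = ExpressionAnalyzer::flexibleEqual(a, b,
//     [](Expression* left, Expression* right) {
//       return left->is<Const>() && right->is<Const>(); // treat any two consts as equal
//     });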

// Re-finalizes all node types.
// This removes "unnecessary" block/if/loop types, i.e., types that were added
// explicitly, as in
//  (block (result i32) (unreachable))
// vs
//  (block (unreachable))
// This converts to the latter form.
struct ReFinalize : public WalkerPass<PostWalker<ReFinalize, OverriddenVisitor<ReFinalize>>> {
  bool isFunctionParallel() override { return true; }

  Pass* create() override { return new ReFinalize; }

  ReFinalize() { name = "refinalize"; }

  // Block finalization is O(bad) if we do each block by itself, so do it in bulk,
  // tracking break value types so we just do a linear pass.

  std::map<Name, WasmType> breakValues;

  void visitBlock(Block* curr) {
    if (curr->list.size() == 0) {
      curr->type = none;
      return;
    }
    // do this quickly, without any validation
    auto old = curr->type;
    // the last element determines the type
    curr->type = curr->list.back()->type;
    // if concrete, it doesn't matter if we have an unreachable child, and we
    // don't need to look at breaks
    if (isConcreteWasmType(curr->type)) return;
    // otherwise, we have no final fallthrough element to determine the type;
    // it could be determined by breaks
    if (curr->name.is()) {
      auto iter = breakValues.find(curr->name);
      if (iter != breakValues.end()) {
        // there is a break to here
        auto type = iter->second;
        if (type == unreachable) {
          // all we have are breaks with values of type unreachable, and no
          // concrete fallthrough either. we must have had an existing type, then
          curr->type = old;
          assert(isConcreteWasmType(curr->type));
        } else {
          curr->type = type;
        }
        return;
      }
    }
    if (curr->type == unreachable) return;
    // the type is none, but we might be unreachable
    if (curr->type == none) {
      for (auto* child : curr->list) {
        if (child->type == unreachable) {
          curr->type = unreachable;
          break;
        }
      }
    }
  }
  void visitIf(If* curr) { curr->finalize(); }
  void visitLoop(Loop* curr) { curr->finalize(); }
  void visitBreak(Break* curr) {
    curr->finalize();
    updateBreakValueType(curr->name, getValueType(curr->value));
  }
  void visitSwitch(Switch* curr) {
    curr->finalize();
    auto valueType = getValueType(curr->value);
    for (auto target : curr->targets) {
      updateBreakValueType(target, valueType);
    }
    updateBreakValueType(curr->default_, valueType);
  }
  void visitCall(Call* curr) { curr->finalize(); }
  void visitCallImport(CallImport* curr) { curr->finalize(); }
  void visitCallIndirect(CallIndirect* curr) { curr->finalize(); }
  void visitGetLocal(GetLocal* curr) { curr->finalize(); }
  void visitSetLocal(SetLocal* curr) { curr->finalize(); }
  void visitGetGlobal(GetGlobal* curr) { curr->finalize(); }
  void visitSetGlobal(SetGlobal* curr) { curr->finalize(); }
  void visitLoad(Load* curr) { curr->finalize(); }
  void visitStore(Store* curr) { curr->finalize(); }
  void visitAtomicRMW(AtomicRMW* curr) { curr->finalize(); }
  void visitAtomicCmpxchg(AtomicCmpxchg* curr) { curr->finalize(); }
  void visitAtomicWait(AtomicWait* curr) { curr->finalize(); }
  void visitAtomicWake(AtomicWake* curr) { curr->finalize(); }
  void visitConst(Const* curr) { curr->finalize(); }
  void visitUnary(Unary* curr) { curr->finalize(); }
  void visitBinary(Binary* curr) { curr->finalize(); }
  void visitSelect(Select* curr) { curr->finalize(); }
  void visitDrop(Drop* curr) { curr->finalize(); }
  void visitReturn(Return* curr) { curr->finalize(); }
  void visitHost(Host* curr) { curr->finalize(); }
  void visitNop(Nop* curr) { curr->finalize(); }
  void visitUnreachable(Unreachable* curr) { curr->finalize(); }

  void visitFunction(Function* curr) {
    // we may have changed the body from unreachable to none, which might be bad
    // if the function has a return value
    if (curr->result != none && curr->body->type == none) {
      Builder builder(*getModule());
      curr->body = builder.blockify(curr->body, builder.makeUnreachable());
    }
  }

  void visitFunctionType(FunctionType* curr) { WASM_UNREACHABLE(); }
  void visitImport(Import* curr) { WASM_UNREACHABLE(); }
  void visitExport(Export* curr) { WASM_UNREACHABLE(); }
  void visitGlobal(Global* curr) { WASM_UNREACHABLE(); }
  void visitTable(Table* curr) { WASM_UNREACHABLE(); }
  void visitMemory(Memory* curr) { WASM_UNREACHABLE(); }
  void visitModule(Module* curr) { WASM_UNREACHABLE(); }

  WasmType getValueType(Expression* value) {
    return value ? value->type : none;
  }

  void updateBreakValueType(Name name, WasmType type) {
    if (type != unreachable || breakValues.count(name) == 0) {
      breakValues[name] = type;
    }
  }
};
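
// Illustrative usage (a sketch): this is a regular pass, so it can be added to a
// pass pipeline by name, or driven directly on one function, as AutoDrop does
// below. `func`, `module`, and `runner` are hypothetical.
//
//   ReFinalize().walkFunctionInModule(func, module); // one function
//   runner.add("refinalize");                        // or as part of a PassRunner pipeline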

// Re-finalizes a single node. This is slow; if you want to refinalize
// an entire AST, use ReFinalize above.
struct ReFinalizeNode : public OverriddenVisitor<ReFinalizeNode> {
  void visitBlock(Block* curr) { curr->finalize(); }
  void visitIf(If* curr) { curr->finalize(); }
  void visitLoop(Loop* curr) { curr->finalize(); }
  void visitBreak(Break* curr) { curr->finalize(); }
  void visitSwitch(Switch* curr) { curr->finalize(); }
  void visitCall(Call* curr) { curr->finalize(); }
  void visitCallImport(CallImport* curr) { curr->finalize(); }
  void visitCallIndirect(CallIndirect* curr) { curr->finalize(); }
  void visitGetLocal(GetLocal* curr) { curr->finalize(); }
  void visitSetLocal(SetLocal* curr) { curr->finalize(); }
  void visitGetGlobal(GetGlobal* curr) { curr->finalize(); }
  void visitSetGlobal(SetGlobal* curr) { curr->finalize(); }
  void visitLoad(Load* curr) { curr->finalize(); }
  void visitStore(Store* curr) { curr->finalize(); }
  void visitAtomicRMW(AtomicRMW* curr) { curr->finalize(); }
  void visitAtomicCmpxchg(AtomicCmpxchg* curr) { curr->finalize(); }
  void visitAtomicWait(AtomicWait* curr) { curr->finalize(); }
  void visitAtomicWake(AtomicWake* curr) { curr->finalize(); }
  void visitConst(Const* curr) { curr->finalize(); }
  void visitUnary(Unary* curr) { curr->finalize(); }
  void visitBinary(Binary* curr) { curr->finalize(); }
  void visitSelect(Select* curr) { curr->finalize(); }
  void visitDrop(Drop* curr) { curr->finalize(); }
  void visitReturn(Return* curr) { curr->finalize(); }
  void visitHost(Host* curr) { curr->finalize(); }
  void visitNop(Nop* curr) { curr->finalize(); }
  void visitUnreachable(Unreachable* curr) { curr->finalize(); }

  void visitFunctionType(FunctionType* curr) { WASM_UNREACHABLE(); }
  void visitImport(Import* curr) { WASM_UNREACHABLE(); }
  void visitExport(Export* curr) { WASM_UNREACHABLE(); }
  void visitGlobal(Global* curr) { WASM_UNREACHABLE(); }
  void visitTable(Table* curr) { WASM_UNREACHABLE(); }
  void visitMemory(Memory* curr) { WASM_UNREACHABLE(); }
  void visitModule(Module* curr) { WASM_UNREACHABLE(); }

  // Given a stack of nested expressions, update them all, from child to parent.
  static void updateStack(std::vector<Expression*>& expressionStack) {
    for (int i = int(expressionStack.size()) - 1; i >= 0; i--) {
      auto* curr = expressionStack[i];
      ReFinalizeNode().visit(curr);
    }
  }
};
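
// Illustrative usage (a sketch): after replacing a child in place, recompute
// just the parent's type. `block` and `newChild` are hypothetical.
//
//   block->list[0] = newChild;
//   ReFinalizeNode().visit(block);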

// Adds drop() operations where necessary, so that code generation does not need
// to worry about inserting drops itself.
// This also refinalizes before and after, as dropping can change types, and it
// depends on types being cleaned up - no unnecessary block/if/loop types (see ReFinalize).
// TODO: optimize that, interleave them
struct AutoDrop : public WalkerPass<ExpressionStackWalker<AutoDrop>> {
  bool isFunctionParallel() override { return true; }

  Pass* create() override { return new AutoDrop; }

  AutoDrop() { name = "autodrop"; }

  bool maybeDrop(Expression*& child) {
    bool acted = false;
    if (isConcreteWasmType(child->type)) {
      expressionStack.push_back(child);
      if (!ExpressionAnalyzer::isResultUsed(expressionStack, getFunction()) &&
          !ExpressionAnalyzer::isResultDropped(expressionStack)) {
        child = Builder(*getModule()).makeDrop(child);
        acted = true;
      }
      expressionStack.pop_back();
    }
    return acted;
  }

  void reFinalize() {
    ReFinalizeNode::updateStack(expressionStack);
  }

  void visitBlock(Block* curr) {
    if (curr->list.size() == 0) return;
    for (Index i = 0; i < curr->list.size() - 1; i++) {
      auto* child = curr->list[i];
      if (isConcreteWasmType(child->type)) {
        curr->list[i] = Builder(*getModule()).makeDrop(child);
      }
    }
    if (maybeDrop(curr->list.back())) {
      reFinalize();
      assert(curr->type == none || curr->type == unreachable);
    }
  }

  void visitIf(If* curr) {
    bool acted = false;
    if (maybeDrop(curr->ifTrue)) acted = true;
    if (curr->ifFalse) {
      if (maybeDrop(curr->ifFalse)) acted = true;
    }
    if (acted) {
      reFinalize();
      assert(curr->type == none);
    }
  }

  void doWalkFunction(Function* curr) {
    ReFinalize().walkFunctionInModule(curr, getModule());
    walk(curr->body);
    if (curr->result == none && isConcreteWasmType(curr->body->type)) {
      curr->body = Builder(*getModule()).makeDrop(curr->body);
    }
    ReFinalize().walkFunctionInModule(curr, getModule());
  }
};
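
// Illustrative effect of AutoDrop, in wat (hypothetical input): a concrete value
// in a non-final block position is unused, so a drop is inserted around it.
//
//   (block (i32.const 1) (nop))   =>   (block (drop (i32.const 1)) (nop))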

// Helpers for splitting an i64 into two i32 halves and recombining them.
struct I64Utilities {
  // Builds an i64 from two i32 expressions: extend_u(low) | (extend_u(high) << 32).
  static Expression* recreateI64(Builder& builder, Expression* low, Expression* high) {
    return builder.makeBinary(
      OrInt64,
      builder.makeUnary(ExtendUInt32, low),
      builder.makeBinary(
        ShlInt64,
        builder.makeUnary(ExtendUInt32, high),
        builder.makeConst(Literal(int64_t(32)))
      )
    );
  }

  // As above, but reads the low and high halves from i32 locals.
  static Expression* recreateI64(Builder& builder, Index low, Index high) {
    return recreateI64(builder, builder.makeGetLocal(low, i32), builder.makeGetLocal(high, i32));
  }

  // Extracts the high 32 bits of an i64 local as an i32: wrap(local >> 32).
  static Expression* getI64High(Builder& builder, Index index) {
    return builder.makeUnary(
      WrapInt64,
      builder.makeBinary(
        ShrUInt64,
        builder.makeGetLocal(index, i64),
        builder.makeConst(Literal(int64_t(32)))
      )
    );
  }

  // Extracts the low 32 bits of an i64 local as an i32: wrap(local).
  static Expression* getI64Low(Builder& builder, Index index) {
    return builder.makeUnary(
      WrapInt64,
      builder.makeGetLocal(index, i64)
    );
  }
};
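
// Illustrative usage (a sketch), e.g. when legalizing an i64 value across a
// 32-bit boundary: split an i64 local into two i32 expressions, and later
// rebuild an i64 from two i32 locals. The local indices are hypothetical.
//
//   auto* low  = I64Utilities::getI64Low(builder, i64LocalIndex);
//   auto* high = I64Utilities::getI64High(builder, i64LocalIndex);
//   auto* combined = I64Utilities::recreateI64(builder, lowLocalIndex, highLocalIndex);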

} // namespace wasm

#endif // wasm_ir_utils_h