Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/CacheIRCompiler.h"
8 :
9 : #include <utility>
10 :
11 : #include "jit/IonIC.h"
12 : #include "jit/SharedICHelpers.h"
13 :
14 : #include "builtin/Boolean-inl.h"
15 :
16 : #include "jit/MacroAssembler-inl.h"
17 : #include "vm/Realm-inl.h"
18 :
19 : using namespace js;
20 : using namespace js::jit;
21 :
22 : using mozilla::BitwiseCast;
23 :
24 : ValueOperand
25 0 : CacheRegisterAllocator::useValueRegister(MacroAssembler& masm, ValOperandId op)
26 : {
27 0 : OperandLocation& loc = operandLocations_[op.id()];
28 :
29 0 : switch (loc.kind()) {
30 : case OperandLocation::ValueReg:
31 1176 : currentOpRegs_.add(loc.valueReg());
32 588 : return loc.valueReg();
33 :
34 : case OperandLocation::ValueStack: {
35 0 : ValueOperand reg = allocateValueRegister(masm);
36 0 : popValue(masm, &loc, reg);
37 0 : return reg;
38 : }
39 :
40 : case OperandLocation::BaselineFrame: {
41 0 : ValueOperand reg = allocateValueRegister(masm);
42 0 : Address addr = addressOf(masm, loc.baselineFrameSlot());
43 0 : masm.loadValue(addr, reg);
44 4 : loc.setValueReg(reg);
45 4 : return reg;
46 : }
47 :
48 : case OperandLocation::Constant: {
49 0 : ValueOperand reg = allocateValueRegister(masm);
50 0 : masm.moveValue(loc.constant(), reg);
51 0 : loc.setValueReg(reg);
52 0 : return reg;
53 : }
54 :
55 : case OperandLocation::PayloadReg: {
56 : // Temporarily add the payload register to currentOpRegs_ so
57 : // allocateValueRegister will stay away from it.
58 0 : currentOpRegs_.add(loc.payloadReg());
59 0 : ValueOperand reg = allocateValueRegister(masm);
60 0 : masm.tagValue(loc.payloadType(), loc.payloadReg(), reg);
61 0 : currentOpRegs_.take(loc.payloadReg());
62 0 : availableRegs_.add(loc.payloadReg());
63 20 : loc.setValueReg(reg);
64 20 : return reg;
65 : }
66 :
67 : case OperandLocation::PayloadStack: {
68 0 : ValueOperand reg = allocateValueRegister(masm);
69 0 : popPayload(masm, &loc, reg.scratchReg());
70 0 : masm.tagValue(loc.payloadType(), reg.scratchReg(), reg);
71 0 : loc.setValueReg(reg);
72 0 : return reg;
73 : }
74 :
75 : case OperandLocation::DoubleReg: {
76 0 : ValueOperand reg = allocateValueRegister(masm);
77 0 : masm.boxDouble(loc.doubleReg(), reg, ScratchDoubleReg);
78 0 : loc.setValueReg(reg);
79 0 : return reg;
80 : }
81 :
82 : case OperandLocation::Uninitialized:
83 : break;
84 : }
85 :
86 0 : MOZ_CRASH();
87 : }
88 :
89 : ValueOperand
90 75 : CacheRegisterAllocator::useFixedValueRegister(MacroAssembler& masm, ValOperandId valId,
91 : ValueOperand reg)
92 : {
93 0 : allocateFixedValueRegister(masm, reg);
94 :
95 75 : OperandLocation& loc = operandLocations_[valId.id()];
96 0 : switch (loc.kind()) {
97 : case OperandLocation::ValueReg:
98 0 : masm.moveValue(loc.valueReg(), reg);
99 92 : MOZ_ASSERT(!currentOpRegs_.aliases(loc.valueReg()), "Register shouldn't be in use");
100 46 : availableRegs_.add(loc.valueReg());
101 : break;
102 : case OperandLocation::ValueStack:
103 0 : popValue(masm, &loc, reg);
104 0 : break;
105 : case OperandLocation::BaselineFrame: {
106 29 : Address addr = addressOf(masm, loc.baselineFrameSlot());
107 29 : masm.loadValue(addr, reg);
108 : break;
109 : }
110 : case OperandLocation::Constant:
111 0 : masm.moveValue(loc.constant(), reg);
112 0 : break;
113 : case OperandLocation::PayloadReg:
114 0 : masm.tagValue(loc.payloadType(), loc.payloadReg(), reg);
115 0 : MOZ_ASSERT(!currentOpRegs_.has(loc.payloadReg()), "Register shouldn't be in use");
116 0 : availableRegs_.add(loc.payloadReg());
117 0 : break;
118 : case OperandLocation::PayloadStack:
119 0 : popPayload(masm, &loc, reg.scratchReg());
120 0 : masm.tagValue(loc.payloadType(), reg.scratchReg(), reg);
121 0 : break;
122 : case OperandLocation::DoubleReg:
123 0 : masm.boxDouble(loc.doubleReg(), reg, ScratchDoubleReg);
124 : break;
125 : case OperandLocation::Uninitialized:
126 0 : MOZ_CRASH();
127 : }
128 :
129 75 : loc.setValueReg(reg);
130 75 : return reg;
131 : }
132 :
133 : Register
134 0 : CacheRegisterAllocator::useRegister(MacroAssembler& masm, TypedOperandId typedId)
135 : {
136 0 : MOZ_ASSERT(!addedFailurePath_);
137 :
138 3299 : OperandLocation& loc = operandLocations_[typedId.id()];
139 0 : switch (loc.kind()) {
140 : case OperandLocation::PayloadReg:
141 2856 : currentOpRegs_.add(loc.payloadReg());
142 2856 : return loc.payloadReg();
143 :
144 : case OperandLocation::ValueReg: {
145 : // It's possible the value is still boxed: as an optimization, we unbox
146 : // the first time we use a value as object.
147 0 : ValueOperand val = loc.valueReg();
148 0 : availableRegs_.add(val);
149 0 : Register reg = val.scratchReg();
150 0 : availableRegs_.take(reg);
151 0 : masm.unboxNonDouble(val, reg, typedId.type());
152 0 : loc.setPayloadReg(reg, typedId.type());
153 423 : currentOpRegs_.add(reg);
154 423 : return reg;
155 : }
156 :
157 : case OperandLocation::PayloadStack: {
158 0 : Register reg = allocateRegister(masm);
159 6 : popPayload(masm, &loc, reg);
160 6 : return reg;
161 : }
162 :
163 : case OperandLocation::ValueStack: {
164 : // The value is on the stack, but boxed. If it's on top of the stack we
165 : // unbox it and then remove it from the stack, else we just unbox.
166 0 : Register reg = allocateRegister(masm);
167 0 : if (loc.valueStack() == stackPushed_) {
168 0 : masm.unboxNonDouble(Address(masm.getStackPointer(), 0), reg, typedId.type());
169 0 : masm.addToStackPtr(Imm32(sizeof(js::Value)));
170 0 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
171 0 : stackPushed_ -= sizeof(js::Value);
172 : } else {
173 0 : MOZ_ASSERT(loc.valueStack() < stackPushed_);
174 0 : masm.unboxNonDouble(Address(masm.getStackPointer(), stackPushed_ - loc.valueStack()),
175 0 : reg, typedId.type());
176 : }
177 0 : loc.setPayloadReg(reg, typedId.type());
178 0 : return reg;
179 : }
180 :
181 : case OperandLocation::BaselineFrame: {
182 0 : Register reg = allocateRegister(masm);
183 0 : Address addr = addressOf(masm, loc.baselineFrameSlot());
184 0 : masm.unboxNonDouble(addr, reg, typedId.type());
185 0 : loc.setPayloadReg(reg, typedId.type());
186 0 : return reg;
187 : };
188 :
189 : case OperandLocation::Constant: {
190 0 : Value v = loc.constant();
191 0 : Register reg = allocateRegister(masm);
192 0 : if (v.isString())
193 0 : masm.movePtr(ImmGCPtr(v.toString()), reg);
194 14 : else if (v.isSymbol())
195 1 : masm.movePtr(ImmGCPtr(v.toSymbol()), reg);
196 : else
197 0 : MOZ_CRASH("Unexpected Value");
198 28 : loc.setPayloadReg(reg, v.extractNonDoubleType());
199 14 : return reg;
200 : }
201 :
202 : case OperandLocation::DoubleReg:
203 : case OperandLocation::Uninitialized:
204 : break;
205 : }
206 :
207 0 : MOZ_CRASH();
208 : }
209 :
210 : ConstantOrRegister
211 0 : CacheRegisterAllocator::useConstantOrRegister(MacroAssembler& masm, ValOperandId val)
212 : {
213 0 : MOZ_ASSERT(!addedFailurePath_);
214 :
215 83 : OperandLocation& loc = operandLocations_[val.id()];
216 0 : switch (loc.kind()) {
217 : case OperandLocation::Constant:
218 0 : return loc.constant();
219 :
220 : case OperandLocation::PayloadReg:
221 : case OperandLocation::PayloadStack: {
222 0 : JSValueType payloadType = loc.payloadType();
223 90 : Register reg = useRegister(masm, TypedOperandId(val, payloadType));
224 135 : return TypedOrValueRegister(MIRTypeFromValueType(payloadType), AnyRegister(reg));
225 : }
226 :
227 : case OperandLocation::ValueReg:
228 : case OperandLocation::ValueStack:
229 : case OperandLocation::BaselineFrame:
230 114 : return TypedOrValueRegister(useValueRegister(masm, val));
231 :
232 : case OperandLocation::DoubleReg:
233 0 : return TypedOrValueRegister(MIRType::Double, AnyRegister(loc.doubleReg()));
234 :
235 : case OperandLocation::Uninitialized:
236 : break;
237 : }
238 :
239 0 : MOZ_CRASH();
240 : }
241 :
242 : Register
243 0 : CacheRegisterAllocator::defineRegister(MacroAssembler& masm, TypedOperandId typedId)
244 : {
245 0 : MOZ_ASSERT(!addedFailurePath_);
246 :
247 590 : OperandLocation& loc = operandLocations_[typedId.id()];
248 0 : MOZ_ASSERT(loc.kind() == OperandLocation::Uninitialized);
249 :
250 0 : Register reg = allocateRegister(masm);
251 1180 : loc.setPayloadReg(reg, typedId.type());
252 590 : return reg;
253 : }
254 :
255 : ValueOperand
256 0 : CacheRegisterAllocator::defineValueRegister(MacroAssembler& masm, ValOperandId val)
257 : {
258 0 : MOZ_ASSERT(!addedFailurePath_);
259 :
260 20 : OperandLocation& loc = operandLocations_[val.id()];
261 0 : MOZ_ASSERT(loc.kind() == OperandLocation::Uninitialized);
262 :
263 0 : ValueOperand reg = allocateValueRegister(masm);
264 20 : loc.setValueReg(reg);
265 20 : return reg;
266 : }
267 :
268 : void
269 1356 : CacheRegisterAllocator::freeDeadOperandLocations(MacroAssembler& masm)
270 : {
271 : // See if any operands are dead so we can reuse their registers. Note that
272 : // we skip the input operands, as those are also used by failure paths, and
273 : // we currently don't track those uses.
274 2587 : for (size_t i = writer_.numInputOperands(); i < operandLocations_.length(); i++) {
275 2462 : if (!writer_.operandIsDead(i, currentInstruction_))
276 : continue;
277 :
278 691 : OperandLocation& loc = operandLocations_[i];
279 0 : switch (loc.kind()) {
280 : case OperandLocation::PayloadReg:
281 415 : availableRegs_.add(loc.payloadReg());
282 0 : break;
283 : case OperandLocation::ValueReg:
284 3 : availableRegs_.add(loc.valueReg());
285 : break;
286 : case OperandLocation::PayloadStack:
287 0 : masm.propagateOOM(freePayloadSlots_.append(loc.payloadStack()));
288 0 : break;
289 : case OperandLocation::ValueStack:
290 0 : masm.propagateOOM(freeValueSlots_.append(loc.valueStack()));
291 0 : break;
292 : case OperandLocation::Uninitialized:
293 : case OperandLocation::BaselineFrame:
294 : case OperandLocation::Constant:
295 : case OperandLocation::DoubleReg:
296 : break;
297 : }
298 0 : loc.setUninitialized();
299 : }
300 1356 : }
301 :
302 : void
303 2566 : CacheRegisterAllocator::discardStack(MacroAssembler& masm)
304 : {
305 : // This should only be called when we are no longer using the operands,
306 : // as we're discarding everything from the native stack. Set all operand
307 : // locations to Uninitialized to catch bugs.
308 9059 : for (size_t i = 0; i < operandLocations_.length(); i++)
309 0 : operandLocations_[i].setUninitialized();
310 :
311 0 : if (stackPushed_ > 0) {
312 298 : masm.addToStackPtr(Imm32(stackPushed_));
313 0 : stackPushed_ = 0;
314 : }
315 0 : freePayloadSlots_.clear();
316 2566 : freeValueSlots_.clear();
317 2566 : }
318 :
319 : Register
320 0 : CacheRegisterAllocator::allocateRegister(MacroAssembler& masm)
321 : {
322 0 : MOZ_ASSERT(!addedFailurePath_);
323 :
324 6780 : if (availableRegs_.empty())
325 0 : freeDeadOperandLocations(masm);
326 :
327 6780 : if (availableRegs_.empty()) {
328 : // Still no registers available, try to spill unused operands to
329 : // the stack.
330 0 : for (size_t i = 0; i < operandLocations_.length(); i++) {
331 0 : OperandLocation& loc = operandLocations_[i];
332 0 : if (loc.kind() == OperandLocation::PayloadReg) {
333 0 : Register reg = loc.payloadReg();
334 792 : if (currentOpRegs_.has(reg))
335 0 : continue;
336 :
337 0 : spillOperandToStack(masm, &loc);
338 154 : availableRegs_.add(reg);
339 0 : break; // We got a register, so break out of the loop.
340 : }
341 0 : if (loc.kind() == OperandLocation::ValueReg) {
342 0 : ValueOperand reg = loc.valueReg();
343 0 : if (currentOpRegs_.aliases(reg))
344 0 : continue;
345 :
346 0 : spillOperandToStack(masm, &loc);
347 0 : availableRegs_.add(reg);
348 0 : break; // Break out of the loop.
349 : }
350 : }
351 : }
352 :
353 0 : if (availableRegs_.empty() && !availableRegsAfterSpill_.empty()) {
354 0 : Register reg = availableRegsAfterSpill_.takeAny();
355 176 : masm.push(reg);
356 0 : stackPushed_ += sizeof(uintptr_t);
357 :
358 0 : masm.propagateOOM(spilledRegs_.append(SpilledRegister(reg, stackPushed_)));
359 :
360 88 : availableRegs_.add(reg);
361 : }
362 :
363 : // At this point, there must be a free register.
364 0 : MOZ_RELEASE_ASSERT(!availableRegs_.empty());
365 :
366 0 : Register reg = availableRegs_.takeAny();
367 3390 : currentOpRegs_.add(reg);
368 3390 : return reg;
369 : }
370 :
371 : void
372 0 : CacheRegisterAllocator::allocateFixedRegister(MacroAssembler& masm, Register reg)
373 : {
374 914 : MOZ_ASSERT(!addedFailurePath_);
375 :
376 : // Fixed registers should be allocated first, to ensure they're
377 : // still available.
378 0 : MOZ_ASSERT(!currentOpRegs_.has(reg), "Register is in use");
379 :
380 0 : freeDeadOperandLocations(masm);
381 :
382 0 : if (availableRegs_.has(reg)) {
383 0 : availableRegs_.take(reg);
384 469 : currentOpRegs_.add(reg);
385 469 : return;
386 : }
387 :
388 : // The register must be used by some operand. Spill it to the stack.
389 0 : for (size_t i = 0; i < operandLocations_.length(); i++) {
390 0 : OperandLocation& loc = operandLocations_[i];
391 626 : if (loc.kind() == OperandLocation::PayloadReg) {
392 822 : if (loc.payloadReg() != reg)
393 : continue;
394 :
395 0 : spillOperandToStackOrRegister(masm, &loc);
396 297 : currentOpRegs_.add(reg);
397 0 : return;
398 : }
399 0 : if (loc.kind() == OperandLocation::ValueReg) {
400 296 : if (!loc.valueReg().aliases(reg))
401 0 : continue;
402 :
403 148 : ValueOperand valueReg = loc.valueReg();
404 0 : spillOperandToStackOrRegister(masm, &loc);
405 :
406 0 : availableRegs_.add(valueReg);
407 0 : availableRegs_.take(reg);
408 148 : currentOpRegs_.add(reg);
409 148 : return;
410 : }
411 : }
412 :
413 0 : MOZ_CRASH("Invalid register");
414 : }
415 :
416 : void
417 0 : CacheRegisterAllocator::allocateFixedValueRegister(MacroAssembler& masm, ValueOperand reg)
418 : {
419 : #ifdef JS_NUNBOX32
420 : allocateFixedRegister(masm, reg.payloadReg());
421 : allocateFixedRegister(masm, reg.typeReg());
422 : #else
423 1 : allocateFixedRegister(masm, reg.valueReg());
424 : #endif
425 0 : }
426 :
427 : ValueOperand
428 0 : CacheRegisterAllocator::allocateValueRegister(MacroAssembler& masm)
429 : {
430 : #ifdef JS_NUNBOX32
431 : Register reg1 = allocateRegister(masm);
432 : Register reg2 = allocateRegister(masm);
433 : return ValueOperand(reg1, reg2);
434 : #else
435 44 : Register reg = allocateRegister(masm);
436 44 : return ValueOperand(reg);
437 : #endif
438 : }
439 :
440 : bool
441 0 : CacheRegisterAllocator::init()
442 : {
443 0 : if (!origInputLocations_.resize(writer_.numInputOperands()))
444 : return false;
445 0 : if (!operandLocations_.resize(writer_.numOperandIds()))
446 : return false;
447 922 : return true;
448 : }
449 :
450 : void
451 543 : CacheRegisterAllocator::initAvailableRegsAfterSpill()
452 : {
453 : // Registers not in availableRegs_ and not used by input operands are
454 : // available after being spilled.
455 0 : availableRegsAfterSpill_.set() =
456 0 : GeneralRegisterSet::Intersect(GeneralRegisterSet::Not(availableRegs_.set()),
457 1629 : GeneralRegisterSet::Not(inputRegisterSet()));
458 543 : }
459 :
460 : void
461 628 : CacheRegisterAllocator::fixupAliasedInputs(MacroAssembler& masm)
462 : {
463 : // If IC inputs alias each other, make sure they are stored in different
464 : // locations so we don't have to deal with this complexity in the rest of
465 : // the allocator.
466 : //
467 : // Note that this can happen in IonMonkey with something like |o.foo = o|
468 : // or |o[i] = i|.
469 :
470 628 : size_t numInputs = writer_.numInputOperands();
471 0 : MOZ_ASSERT(origInputLocations_.length() == numInputs);
472 :
473 0 : for (size_t i = 1; i < numInputs; i++) {
474 420 : OperandLocation& loc1 = operandLocations_[i];
475 840 : if (!loc1.isInRegister())
476 : continue;
477 :
478 0 : for (size_t j = 0; j < i; j++) {
479 482 : OperandLocation& loc2 = operandLocations_[j];
480 482 : if (!loc1.aliasesReg(loc2))
481 : continue;
482 :
483 : // loc1 and loc2 alias so we spill one of them. If one is a
484 : // ValueReg and the other is a PayloadReg, we have to spill the
485 : // PayloadReg: spilling the ValueReg instead would leave its type
486 : // register unallocated on 32-bit platforms.
487 0 : if (loc1.kind() == OperandLocation::ValueReg) {
488 0 : spillOperandToStack(masm, &loc2);
489 : } else {
490 0 : MOZ_ASSERT(loc1.kind() == OperandLocation::PayloadReg);
491 0 : spillOperandToStack(masm, &loc1);
492 0 : break; // Spilled loc1, so nothing else will alias it.
493 : }
494 : }
495 : }
496 :
497 : #ifdef DEBUG
498 0 : assertValidState();
499 : #endif
500 628 : }
501 :
502 : GeneralRegisterSet
503 0 : CacheRegisterAllocator::inputRegisterSet() const
504 : {
505 628 : MOZ_ASSERT(origInputLocations_.length() == writer_.numInputOperands());
506 :
507 : AllocatableGeneralRegisterSet result;
508 0 : for (size_t i = 0; i < writer_.numInputOperands(); i++) {
509 1048 : const OperandLocation& loc = operandLocations_[i];
510 0 : MOZ_ASSERT(loc == origInputLocations_[i]);
511 :
512 0 : switch (loc.kind()) {
513 : case OperandLocation::PayloadReg:
514 982 : result.addUnchecked(loc.payloadReg());
515 : continue;
516 : case OperandLocation::ValueReg:
517 51 : result.addUnchecked(loc.valueReg());
518 : continue;
519 : case OperandLocation::PayloadStack:
520 : case OperandLocation::ValueStack:
521 : case OperandLocation::BaselineFrame:
522 : case OperandLocation::Constant:
523 : case OperandLocation::DoubleReg:
524 : continue;
525 : case OperandLocation::Uninitialized:
526 : break;
527 : }
528 0 : MOZ_CRASH("Invalid kind");
529 : }
530 :
531 1256 : return result.set();
532 : }
533 :
534 : JSValueType
535 0 : CacheRegisterAllocator::knownType(ValOperandId val) const
536 : {
537 0 : const OperandLocation& loc = operandLocations_[val.id()];
538 :
539 1324 : switch (loc.kind()) {
540 : case OperandLocation::ValueReg:
541 : case OperandLocation::ValueStack:
542 : case OperandLocation::BaselineFrame:
543 : return JSVAL_TYPE_UNKNOWN;
544 :
545 : case OperandLocation::PayloadStack:
546 : case OperandLocation::PayloadReg:
547 813 : return loc.payloadType();
548 :
549 : case OperandLocation::Constant:
550 0 : return loc.constant().isDouble()
551 28 : ? JSVAL_TYPE_DOUBLE
552 14 : : loc.constant().extractNonDoubleType();
553 :
554 : case OperandLocation::DoubleReg:
555 0 : return JSVAL_TYPE_DOUBLE;
556 :
557 : case OperandLocation::Uninitialized:
558 : break;
559 : }
560 :
561 0 : MOZ_CRASH("Invalid kind");
562 : }
563 :
564 : void
565 0 : CacheRegisterAllocator::initInputLocation(size_t i, const TypedOrValueRegister& reg)
566 : {
567 0 : if (reg.hasValue()) {
568 0 : initInputLocation(i, reg.valueReg());
569 0 : } else if (reg.typedReg().isFloat()) {
570 0 : MOZ_ASSERT(reg.type() == MIRType::Double);
571 0 : initInputLocation(i, reg.typedReg().fpu());
572 : } else {
573 0 : initInputLocation(i, reg.typedReg().gpr(), ValueTypeFromMIRType(reg.type()));
574 : }
575 848 : }
576 :
577 : void
578 0 : CacheRegisterAllocator::initInputLocation(size_t i, const ConstantOrRegister& value)
579 : {
580 388 : if (value.constant())
581 0 : initInputLocation(i, value.value());
582 : else
583 374 : initInputLocation(i, value.reg());
584 388 : }
585 :
586 : void
587 0 : CacheRegisterAllocator::spillOperandToStack(MacroAssembler& masm, OperandLocation* loc)
588 : {
589 0 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
590 :
591 1 : if (loc->kind() == OperandLocation::ValueReg) {
592 1 : if (!freeValueSlots_.empty()) {
593 0 : uint32_t stackPos = freeValueSlots_.popCopy();
594 0 : MOZ_ASSERT(stackPos <= stackPushed_);
595 0 : masm.storeValue(loc->valueReg(), Address(masm.getStackPointer(),
596 0 : stackPushed_ - stackPos));
597 : loc->setValueStack(stackPos);
598 : return;
599 : }
600 0 : stackPushed_ += sizeof(js::Value);
601 36 : masm.pushValue(loc->valueReg());
602 18 : loc->setValueStack(stackPushed_);
603 : return;
604 : }
605 :
606 0 : MOZ_ASSERT(loc->kind() == OperandLocation::PayloadReg);
607 :
608 0 : if (!freePayloadSlots_.empty()) {
609 0 : uint32_t stackPos = freePayloadSlots_.popCopy();
610 0 : MOZ_ASSERT(stackPos <= stackPushed_);
611 0 : masm.storePtr(loc->payloadReg(), Address(masm.getStackPointer(),
612 0 : stackPushed_ - stackPos));
613 0 : loc->setPayloadStack(stackPos, loc->payloadType());
614 : return;
615 : }
616 0 : stackPushed_ += sizeof(uintptr_t);
617 308 : masm.push(loc->payloadReg());
618 154 : loc->setPayloadStack(stackPushed_, loc->payloadType());
619 : }
620 :
621 : void
622 0 : CacheRegisterAllocator::spillOperandToStackOrRegister(MacroAssembler& masm, OperandLocation* loc)
623 : {
624 890 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
625 :
626 : // If enough registers are available, use them.
627 0 : if (loc->kind() == OperandLocation::ValueReg) {
628 : static const size_t BoxPieces = sizeof(Value) / sizeof(uintptr_t);
629 0 : if (availableRegs_.set().size() >= BoxPieces) {
630 0 : ValueOperand reg = availableRegs_.takeAnyValue();
631 148 : masm.moveValue(loc->valueReg(), reg);
632 148 : loc->setValueReg(reg);
633 : return;
634 : }
635 : } else {
636 0 : MOZ_ASSERT(loc->kind() == OperandLocation::PayloadReg);
637 0 : if (!availableRegs_.empty()) {
638 0 : Register reg = availableRegs_.takeAny();
639 594 : masm.movePtr(loc->payloadReg(), reg);
640 594 : loc->setPayloadReg(reg, loc->payloadType());
641 : return;
642 : }
643 : }
644 :
645 : // Not enough registers available, spill to the stack.
646 0 : spillOperandToStack(masm, loc);
647 : }
648 :
649 : void
650 0 : CacheRegisterAllocator::popPayload(MacroAssembler& masm, OperandLocation* loc, Register dest)
651 : {
652 662 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
653 331 : MOZ_ASSERT(stackPushed_ >= sizeof(uintptr_t));
654 :
655 : // The payload is on the stack. If it's on top of the stack we can just
656 : // pop it, else we emit a load.
657 0 : if (loc->payloadStack() == stackPushed_) {
658 406 : masm.pop(dest);
659 0 : stackPushed_ -= sizeof(uintptr_t);
660 : } else {
661 0 : MOZ_ASSERT(loc->payloadStack() < stackPushed_);
662 256 : masm.loadPtr(Address(masm.getStackPointer(), stackPushed_ - loc->payloadStack()), dest);
663 256 : masm.propagateOOM(freePayloadSlots_.append(loc->payloadStack()));
664 : }
665 :
666 662 : loc->setPayloadReg(dest, loc->payloadType());
667 331 : }
668 :
669 : void
670 0 : CacheRegisterAllocator::popValue(MacroAssembler& masm, OperandLocation* loc, ValueOperand dest)
671 : {
672 8 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
673 4 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
674 :
675 : // The Value is on the stack. If it's on top of the stack we can just
676 : // pop it, else we emit a load.
677 0 : if (loc->valueStack() == stackPushed_) {
678 8 : masm.popValue(dest);
679 0 : stackPushed_ -= sizeof(js::Value);
680 : } else {
681 0 : MOZ_ASSERT(loc->valueStack() < stackPushed_);
682 0 : masm.loadValue(Address(masm.getStackPointer(), stackPushed_ - loc->valueStack()), dest);
683 0 : masm.propagateOOM(freeValueSlots_.append(loc->valueStack()));
684 : }
685 :
686 4 : loc->setValueReg(dest);
687 4 : }
688 :
689 : #ifdef DEBUG
690 : void
691 6319 : CacheRegisterAllocator::assertValidState() const
692 : {
693 : // Assert different operands don't have aliasing storage. We depend on this
694 : // when spilling registers, for instance.
695 :
696 6319 : if (!JitOptions.fullDebugChecks)
697 : return;
698 :
699 0 : for (size_t i = 0; i < operandLocations_.length(); i++) {
700 0 : const auto& loc1 = operandLocations_[i];
701 0 : if (loc1.isUninitialized())
702 : continue;
703 :
704 0 : for (size_t j = 0; j < i; j++) {
705 0 : const auto& loc2 = operandLocations_[j];
706 0 : if (loc2.isUninitialized())
707 : continue;
708 0 : MOZ_ASSERT(!loc1.aliasesReg(loc2));
709 : }
710 : }
711 : }
712 : #endif
713 :
714 : bool
715 0 : OperandLocation::aliasesReg(const OperandLocation& other) const
716 : {
717 0 : MOZ_ASSERT(&other != this);
718 :
719 0 : switch (other.kind_) {
720 : case PayloadReg:
721 0 : return aliasesReg(other.payloadReg());
722 : case ValueReg:
723 516 : return aliasesReg(other.valueReg());
724 : case PayloadStack:
725 : case ValueStack:
726 : case BaselineFrame:
727 : case Constant:
728 : case DoubleReg:
729 : return false;
730 : case Uninitialized:
731 : break;
732 : }
733 :
734 0 : MOZ_CRASH("Invalid kind");
735 : }
736 :
737 : void
738 0 : CacheRegisterAllocator::restoreInputState(MacroAssembler& masm, bool shouldDiscardStack)
739 : {
740 2048 : size_t numInputOperands = origInputLocations_.length();
741 0 : MOZ_ASSERT(writer_.numInputOperands() == numInputOperands);
742 :
743 0 : for (size_t j = 0; j < numInputOperands; j++) {
744 0 : const OperandLocation& dest = origInputLocations_[j];
745 3596 : OperandLocation& cur = operandLocations_[j];
746 3596 : if (dest == cur)
747 : continue;
748 :
749 4068 : auto autoAssign = mozilla::MakeScopeExit([&] { cur = dest; });
750 :
751 : // We have a cycle if a destination register will be used later
752 : // as source register. If that happens, just push the current value
753 : // on the stack and later get it from there.
754 0 : for (size_t k = j + 1; k < numInputOperands; k++) {
755 0 : OperandLocation& laterSource = operandLocations_[k];
756 491 : if (dest.aliasesReg(laterSource))
757 18 : spillOperandToStack(masm, &laterSource);
758 : }
759 :
760 0 : if (dest.kind() == OperandLocation::ValueReg) {
761 : // We have to restore a Value register.
762 0 : switch (cur.kind()) {
763 : case OperandLocation::ValueReg:
764 22 : masm.moveValue(cur.valueReg(), dest.valueReg());
765 0 : continue;
766 : case OperandLocation::PayloadReg:
767 545 : masm.tagValue(cur.payloadType(), cur.payloadReg(), dest.valueReg());
768 0 : continue;
769 : case OperandLocation::PayloadStack: {
770 0 : Register scratch = dest.valueReg().scratchReg();
771 0 : popPayload(masm, &cur, scratch);
772 0 : masm.tagValue(cur.payloadType(), scratch, dest.valueReg());
773 : continue;
774 : }
775 : case OperandLocation::ValueStack:
776 4 : popValue(masm, &cur, dest.valueReg());
777 4 : continue;
778 : case OperandLocation::Constant:
779 : case OperandLocation::BaselineFrame:
780 : case OperandLocation::DoubleReg:
781 : case OperandLocation::Uninitialized:
782 : break;
783 : }
784 0 : } else if (dest.kind() == OperandLocation::PayloadReg) {
785 : // We have to restore a payload register.
786 0 : switch (cur.kind()) {
787 : case OperandLocation::ValueReg:
788 0 : MOZ_ASSERT(dest.payloadType() != JSVAL_TYPE_DOUBLE);
789 38 : masm.unboxNonDouble(cur.valueReg(), dest.payloadReg(), dest.payloadType());
790 0 : continue;
791 : case OperandLocation::PayloadReg:
792 26 : MOZ_ASSERT(cur.payloadType() == dest.payloadType());
793 26 : masm.mov(cur.payloadReg(), dest.payloadReg());
794 : continue;
795 : case OperandLocation::PayloadStack: {
796 0 : MOZ_ASSERT(cur.payloadType() == dest.payloadType());
797 325 : popPayload(masm, &cur, dest.payloadReg());
798 325 : continue;
799 : }
800 : case OperandLocation::ValueStack:
801 0 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
802 0 : MOZ_ASSERT(cur.valueStack() <= stackPushed_);
803 0 : MOZ_ASSERT(dest.payloadType() != JSVAL_TYPE_DOUBLE);
804 0 : masm.unboxNonDouble(Address(masm.getStackPointer(), stackPushed_ - cur.valueStack()),
805 0 : dest.payloadReg(), dest.payloadType());
806 0 : continue;
807 : case OperandLocation::Constant:
808 : case OperandLocation::BaselineFrame:
809 : case OperandLocation::DoubleReg:
810 : case OperandLocation::Uninitialized:
811 : break;
812 : }
813 0 : } else if (dest.kind() == OperandLocation::Constant ||
814 57 : dest.kind() == OperandLocation::BaselineFrame ||
815 0 : dest.kind() == OperandLocation::DoubleReg)
816 : {
817 : // Nothing to do.
818 : continue;
819 : }
820 :
821 0 : MOZ_CRASH("Invalid kind");
822 : }
823 :
824 4272 : for (const SpilledRegister& spill : spilledRegs_) {
825 0 : MOZ_ASSERT(stackPushed_ >= sizeof(uintptr_t));
826 :
827 0 : if (spill.stackPushed == stackPushed_) {
828 340 : masm.pop(spill.reg);
829 0 : stackPushed_ -= sizeof(uintptr_t);
830 : } else {
831 0 : MOZ_ASSERT(spill.stackPushed < stackPushed_);
832 18 : masm.loadPtr(Address(masm.getStackPointer(), stackPushed_ - spill.stackPushed),
833 6 : spill.reg);
834 : }
835 : }
836 :
837 0 : if (shouldDiscardStack)
838 1963 : discardStack(masm);
839 2048 : }
840 :
841 : size_t
842 0 : CacheIRStubInfo::stubDataSize() const
843 : {
844 22752 : size_t field = 0;
845 0 : size_t size = 0;
846 : while (true) {
847 0 : StubField::Type type = fieldType(field++);
848 0 : if (type == StubField::Type::Limit)
849 0 : return size;
850 59503 : size += StubField::sizeInBytes(type);
851 59503 : }
852 : }
853 :
854 : void
855 0 : CacheIRStubInfo::copyStubData(ICStub* src, ICStub* dest) const
856 : {
857 0 : uint8_t* srcBytes = reinterpret_cast<uint8_t*>(src);
858 0 : uint8_t* destBytes = reinterpret_cast<uint8_t*>(dest);
859 :
860 0 : size_t field = 0;
861 0 : size_t offset = 0;
862 : while (true) {
863 0 : StubField::Type type = fieldType(field);
864 0 : switch (type) {
865 : case StubField::Type::RawWord:
866 0 : *reinterpret_cast<uintptr_t*>(destBytes + offset) =
867 0 : *reinterpret_cast<uintptr_t*>(srcBytes + offset);
868 0 : break;
869 : case StubField::Type::RawInt64:
870 : case StubField::Type::DOMExpandoGeneration:
871 0 : *reinterpret_cast<uint64_t*>(destBytes + offset) =
872 0 : *reinterpret_cast<uint64_t*>(srcBytes + offset);
873 0 : break;
874 : case StubField::Type::Shape:
875 0 : getStubField<ICStub, Shape*>(dest, offset).init(getStubField<ICStub, Shape*>(src, offset));
876 : break;
877 : case StubField::Type::JSObject:
878 0 : getStubField<ICStub, JSObject*>(dest, offset).init(getStubField<ICStub, JSObject*>(src, offset));
879 0 : break;
880 : case StubField::Type::ObjectGroup:
881 0 : getStubField<ICStub, ObjectGroup*>(dest, offset).init(getStubField<ICStub, ObjectGroup*>(src, offset));
882 : break;
883 : case StubField::Type::Symbol:
884 0 : getStubField<ICStub, JS::Symbol*>(dest, offset).init(getStubField<ICStub, JS::Symbol*>(src, offset));
885 : break;
886 : case StubField::Type::String:
887 0 : getStubField<ICStub, JSString*>(dest, offset).init(getStubField<ICStub, JSString*>(src, offset));
888 0 : break;
889 : case StubField::Type::Id:
890 0 : getStubField<ICStub, jsid>(dest, offset).init(getStubField<ICStub, jsid>(src, offset));
891 : break;
892 : case StubField::Type::Value:
893 0 : getStubField<ICStub, Value>(dest, offset).init(getStubField<ICStub, Value>(src, offset));
894 0 : break;
895 : case StubField::Type::Limit:
896 0 : return; // Done.
897 : }
898 0 : field++;
899 0 : offset += StubField::sizeInBytes(type);
900 0 : }
901 : }
902 :
903 : template <typename T>
904 : static GCPtr<T>*
905 : AsGCPtr(uintptr_t* ptr)
906 : {
907 : return reinterpret_cast<GCPtr<T>*>(ptr);
908 : }
909 :
910 : uintptr_t
911 0 : CacheIRStubInfo::getStubRawWord(ICStub* stub, uint32_t offset) const {
912 0 : uint8_t* stubData = (uint8_t*)stub + stubDataOffset_;
913 0 : MOZ_ASSERT(uintptr_t(stubData) % sizeof(uintptr_t) == 0);
914 0 : return *(uintptr_t*)(stubData + offset);
915 : }
916 :
917 : template<class Stub, class T>
918 : GCPtr<T>&
919 0 : CacheIRStubInfo::getStubField(Stub* stub, uint32_t offset) const
920 : {
921 1036 : uint8_t* stubData = (uint8_t*)stub + stubDataOffset_;
922 0 : MOZ_ASSERT(uintptr_t(stubData) % sizeof(uintptr_t) == 0);
923 :
924 1036 : return *AsGCPtr<T>((uintptr_t*)(stubData + offset));
925 : }
926 :
927 : template GCPtr<Shape*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
928 : template GCPtr<ObjectGroup*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
929 : template GCPtr<JSObject*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
930 : template GCPtr<JSString*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
931 : template GCPtr<JS::Symbol*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
932 : template GCPtr<JS::Value>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
933 : template GCPtr<jsid>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
934 :
// Initialize (no pre-barrier; init, not assignment) a GCPtr<T> field at
// |ptr| from a raw word/int64 value |val|.
template <typename T, typename V>
static void
InitGCPtr(uintptr_t* ptr, V val)
{
    AsGCPtr<T>(ptr)->init(mozilla::BitwiseCast<T>(val));
}
941 :
// Copy the writer's accumulated stub fields into the freshly allocated stub
// data area at |dest|, using GCPtr::init for GC things so that post-barriers
// are handled correctly. Field order and sizes must match what the stub's
// CacheIRStubInfo describes.
void
CacheIRWriter::copyStubData(uint8_t* dest) const
{
    MOZ_ASSERT(!failed());

    uintptr_t* destWords = reinterpret_cast<uintptr_t*>(dest);

    for (const StubField& field : stubFields_) {
        switch (field.type()) {
          case StubField::Type::RawWord:
            // Plain data: no barriers needed.
            *destWords = field.asWord();
            break;
          case StubField::Type::Shape:
            InitGCPtr<Shape*>(destWords, field.asWord());
            break;
          case StubField::Type::JSObject:
            InitGCPtr<JSObject*>(destWords, field.asWord());
            break;
          case StubField::Type::ObjectGroup:
            InitGCPtr<ObjectGroup*>(destWords, field.asWord());
            break;
          case StubField::Type::Symbol:
            InitGCPtr<JS::Symbol*>(destWords, field.asWord());
            break;
          case StubField::Type::String:
            InitGCPtr<JSString*>(destWords, field.asWord());
            break;
          case StubField::Type::Id:
            AsGCPtr<jsid>(destWords)->init(jsid::fromRawBits(field.asWord()));
            break;
          case StubField::Type::RawInt64:
          case StubField::Type::DOMExpandoGeneration:
            // 64-bit non-GC data; on 32-bit targets this spans two words.
            *reinterpret_cast<uint64_t*>(destWords) = field.asInt64();
            break;
          case StubField::Type::Value:
            AsGCPtr<Value>(destWords)->init(Value::fromRawBits(uint64_t(field.asInt64())));
            break;
          case StubField::Type::Limit:
            MOZ_CRASH("Invalid type");
        }
        // Advance by the field's size in words.
        destWords += StubField::sizeInBytes(field.type()) / sizeof(uintptr_t);
    }
}
985 :
// Trace all GC-thing fields in a CacheIR stub's data area. Walks the field
// type list stored in |stubInfo| until the Type::Limit terminator, tracing
// each GC field at its byte offset. Instantiated for both Baseline (ICStub)
// and Ion (IonICStub) stubs below.
template <typename T>
void
jit::TraceCacheIRStub(JSTracer* trc, T* stub, const CacheIRStubInfo* stubInfo)
{
    uint32_t field = 0;
    size_t offset = 0;
    while (true) {
        StubField::Type fieldType = stubInfo->fieldType(field);
        switch (fieldType) {
          case StubField::Type::RawWord:
          case StubField::Type::RawInt64:
          case StubField::Type::DOMExpandoGeneration:
            // Non-GC data: nothing to trace.
            break;
          case StubField::Type::Shape:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, Shape*>(stub, offset),
                              "cacheir-shape");
            break;
          case StubField::Type::ObjectGroup:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, ObjectGroup*>(stub, offset),
                              "cacheir-group");
            break;
          case StubField::Type::JSObject:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JSObject*>(stub, offset),
                              "cacheir-object");
            break;
          case StubField::Type::Symbol:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JS::Symbol*>(stub, offset),
                              "cacheir-symbol");
            break;
          case StubField::Type::String:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JSString*>(stub, offset),
                              "cacheir-string");
            break;
          case StubField::Type::Id:
            TraceEdge(trc, &stubInfo->getStubField<T, jsid>(stub, offset), "cacheir-id");
            break;
          case StubField::Type::Value:
            TraceEdge(trc, &stubInfo->getStubField<T, JS::Value>(stub, offset),
                      "cacheir-value");
            break;
          case StubField::Type::Limit:
            return; // Done.
        }
        field++;
        offset += StubField::sizeInBytes(fieldType);
    }
}
1033 :
1034 : template
1035 : void jit::TraceCacheIRStub(JSTracer* trc, ICStub* stub, const CacheIRStubInfo* stubInfo);
1036 :
1037 : template
1038 : void jit::TraceCacheIRStub(JSTracer* trc, IonICStub* stub, const CacheIRStubInfo* stubInfo);
1039 :
// Compare the writer's stub fields against an existing stub's data. Returns
// true if they are considered equal (so no new stub needs to be attached).
// As a special case, a stub that differs only in DOMExpandoGeneration fields
// has its data overwritten in place and |*updated| set to true.
bool
CacheIRWriter::stubDataEqualsMaybeUpdate(uint8_t* stubData, bool* updated) const
{
    MOZ_ASSERT(!failed());

    *updated = false;
    const uintptr_t* stubDataWords = reinterpret_cast<const uintptr_t*>(stubData);

    // If DOMExpandoGeneration fields are different but all other stub fields
    // are exactly the same, we overwrite the old stub data instead of attaching
    // a new stub, as the old stub is never going to succeed. This works because
    // even Ion stubs read the DOMExpandoGeneration field from the stub instead
    // of baking it in.
    bool expandoGenerationIsDifferent = false;

    for (const StubField& field : stubFields_) {
        if (field.sizeIsWord()) {
            if (field.asWord() != *stubDataWords)
                return false;
            stubDataWords++;
            continue;
        }

        // 64-bit field: only DOMExpandoGeneration mismatches are tolerated.
        if (field.asInt64() != *reinterpret_cast<const uint64_t*>(stubDataWords)) {
            if (field.type() != StubField::Type::DOMExpandoGeneration)
                return false;
            expandoGenerationIsDifferent = true;
        }
        stubDataWords += sizeof(uint64_t) / sizeof(uintptr_t);
    }

    if (expandoGenerationIsDifferent) {
        copyStubData(stubData);
        *updated = true;
    }

    return true;
}
1078 :
1079 : HashNumber
1080 0 : CacheIRStubKey::hash(const CacheIRStubKey::Lookup& l)
1081 : {
1082 0 : HashNumber hash = mozilla::HashBytes(l.code, l.length);
1083 0 : hash = mozilla::AddToHash(hash, uint32_t(l.kind));
1084 24216 : hash = mozilla::AddToHash(hash, uint32_t(l.engine));
1085 12108 : return hash;
1086 : }
1087 :
1088 : bool
1089 0 : CacheIRStubKey::match(const CacheIRStubKey& entry, const CacheIRStubKey::Lookup& l)
1090 : {
1091 22530 : if (entry.stubInfo->kind() != l.kind)
1092 : return false;
1093 :
1094 22530 : if (entry.stubInfo->engine() != l.engine)
1095 : return false;
1096 :
1097 11265 : if (entry.stubInfo->codeLength() != l.length)
1098 : return false;
1099 :
1100 11265 : if (!mozilla::PodEqual(entry.stubInfo->code(), l.code, l.length))
1101 : return false;
1102 :
1103 11265 : return true;
1104 : }
1105 :
// Construct a reader spanning the full CacheIR bytecode stored in |stubInfo|.
CacheIRReader::CacheIRReader(const CacheIRStubInfo* stubInfo)
  : CacheIRReader(stubInfo->code(), stubInfo->code() + stubInfo->codeLength())
{}
1109 :
// Allocate a CacheIRStubInfo in a single malloc'ed chunk laid out as:
// [CacheIRStubInfo | CacheIR code bytes | field-type bytes + Limit terminator].
// Returns nullptr on OOM.
CacheIRStubInfo*
CacheIRStubInfo::New(CacheKind kind, ICStubEngine engine, bool makesGCCalls,
                     uint32_t stubDataOffset, const CacheIRWriter& writer)
{
    size_t numStubFields = writer.numStubFields();
    size_t bytesNeeded = sizeof(CacheIRStubInfo) +
                         writer.codeLength() +
                         (numStubFields + 1); // +1 for the GCType::Limit terminator.
    uint8_t* p = js_pod_malloc<uint8_t>(bytesNeeded);
    if (!p)
        return nullptr;

    // Copy the CacheIR code.
    uint8_t* codeStart = p + sizeof(CacheIRStubInfo);
    mozilla::PodCopy(codeStart, writer.codeStart(), writer.codeLength());

    static_assert(sizeof(StubField::Type) == sizeof(uint8_t),
                  "StubField::Type must fit in uint8_t");

    // Copy the stub field types; the trailing Limit byte lets consumers walk
    // the list without a separate count.
    uint8_t* fieldTypes = codeStart + writer.codeLength();
    for (size_t i = 0; i < numStubFields; i++)
        fieldTypes[i] = uint8_t(writer.stubFieldType(i));
    fieldTypes[numStubFields] = uint8_t(StubField::Type::Limit);

    // Placement-new the header at the start of the chunk.
    return new(p) CacheIRStubInfo(kind, engine, makesGCCalls, stubDataOffset, codeStart,
                                  writer.codeLength(), fieldTypes);
}
1138 :
// Two operand locations are equal when they have the same kind and the
// kind-specific payload (register, stack slot, frame slot, or constant)
// also matches.
bool
OperandLocation::operator==(const OperandLocation& other) const
{
    if (kind_ != other.kind_)
        return false;

    switch (kind()) {
      case Uninitialized:
        return true;
      case PayloadReg:
        return payloadReg() == other.payloadReg() && payloadType() == other.payloadType();
      case ValueReg:
        return valueReg() == other.valueReg();
      case PayloadStack:
        return payloadStack() == other.payloadStack() && payloadType() == other.payloadType();
      case ValueStack:
        return valueStack() == other.valueStack();
      case BaselineFrame:
        return baselineFrameSlot() == other.baselineFrameSlot();
      case Constant:
        return constant() == other.constant();
      case DoubleReg:
        return doubleReg() == other.doubleReg();
    }

    MOZ_CRASH("Invalid OperandLocation kind");
}
1166 :
// RAII guard that reserves the IC's output register(s) in the allocator for
// the duration of an op's compilation. Float outputs need no reservation
// here (only GPRs/value registers are managed by the allocator).
AutoOutputRegister::AutoOutputRegister(CacheIRCompiler& compiler)
  : output_(compiler.outputUnchecked_.ref()),
    alloc_(compiler.allocator)
{
    if (output_.hasValue())
        alloc_.allocateFixedValueRegister(compiler.masm, output_.valueReg());
    else if (!output_.typedReg().isFloat())
        alloc_.allocateFixedRegister(compiler.masm, output_.typedReg().gpr());
}
1176 :
// Release the output register(s) reserved by the constructor, mirroring its
// value / typed-GPR / float cases.
AutoOutputRegister::~AutoOutputRegister()
{
    if (output_.hasValue())
        alloc_.releaseValueRegister(output_.valueReg());
    else if (!output_.typedReg().isFloat())
        alloc_.releaseRegister(output_.typedReg().gpr());
}
1184 :
1185 : bool
1186 0 : FailurePath::canShareFailurePath(const FailurePath& other) const
1187 : {
1188 1863 : if (stackPushed_ != other.stackPushed_)
1189 : return false;
1190 :
1191 1716 : if (spilledRegs_.length() != other.spilledRegs_.length())
1192 : return false;
1193 :
1194 1716 : for (size_t i = 0; i < spilledRegs_.length(); i++) {
1195 0 : if (spilledRegs_[i] != other.spilledRegs_[i])
1196 : return false;
1197 : }
1198 :
1199 0 : MOZ_ASSERT(inputs_.length() == other.inputs_.length());
1200 :
1201 6656 : for (size_t i = 0; i < inputs_.length(); i++) {
1202 5838 : if (inputs_[i] != other.inputs_[i])
1203 : return false;
1204 : }
1205 : return true;
1206 : }
1207 :
// Record a failure path capturing the current operand locations, spilled
// registers and stack depth, so the guard's failure branch can restore the
// input state before jumping to the next stub. Returns false on OOM.
bool
CacheIRCompiler::addFailurePath(FailurePath** failure)
{
#ifdef DEBUG
    allocator.setAddedFailurePath();
#endif

    FailurePath newFailure;
    for (size_t i = 0; i < writer_.numInputOperands(); i++) {
        if (!newFailure.appendInput(allocator.operandLocation(i)))
            return false;
    }
    if (!newFailure.setSpilledRegs(allocator.spilledRegs()))
        return false;
    newFailure.setStackPushed(allocator.stackPushed());

    // Reuse the previous failure path if the current one is the same, to
    // avoid emitting duplicate code.
    if (failurePaths.length() > 0 && failurePaths.back().canShareFailurePath(newFailure)) {
        *failure = &failurePaths.back();
        return true;
    }

    if (!failurePaths.append(std::move(newFailure)))
        return false;

    *failure = &failurePaths.back();
    return true;
}
1237 :
// Emit the code for failure path |index|: restore the allocator to the state
// recorded when the path was created, bind its label, and emit the code that
// moves the inputs back to their original locations. Returns false on OOM.
bool
CacheIRCompiler::emitFailurePath(size_t index)
{
    FailurePath& failure = failurePaths[index];

    allocator.setStackPushed(failure.stackPushed());

    for (size_t i = 0; i < writer_.numInputOperands(); i++)
        allocator.setOperandLocation(i, failure.input(i));

    if (!allocator.setSpilledRegs(failure.spilledRegs()))
        return false;

    masm.bind(failure.label());
    allocator.restoreInputState(masm);
    return true;
}
1255 :
// Guard the input value is a number (int32 or double); jump to the failure
// path otherwise. Elided entirely when the type is statically known.
bool
CacheIRCompiler::emitGuardIsNumber()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType knownType = allocator.knownType(inputId);

    // Doubles and ints are numbers!
    if (knownType == JSVAL_TYPE_DOUBLE || knownType == JSVAL_TYPE_INT32)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestNumber(Assembler::NotEqual, input, failure->label());
    return true;
}
1274 :
// Guard the input value is an object; no code is emitted when the type is
// already statically known to be an object.
bool
CacheIRCompiler::emitGuardIsObject()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_OBJECT)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestObject(Assembler::NotEqual, input, failure->label());
    return true;
}
1289 :
// Guard the input value is null or undefined; jump to the failure path for
// any other tag.
bool
CacheIRCompiler::emitGuardIsNullOrUndefined()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType knownType = allocator.knownType(inputId);
    if (knownType == JSVAL_TYPE_UNDEFINED || knownType == JSVAL_TYPE_NULL)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Accept null immediately; otherwise require undefined.
    Label success;
    masm.branchTestNull(Assembler::Equal, input, &success);
    masm.branchTestUndefined(Assembler::NotEqual, input, failure->label());

    masm.bind(&success);
    return true;
}
1310 :
// Guard the input value is an object or null; jump to the failure path for
// any other tag.
bool
CacheIRCompiler::emitGuardIsObjectOrNull()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType knownType = allocator.knownType(inputId);
    if (knownType == JSVAL_TYPE_OBJECT || knownType == JSVAL_TYPE_NULL)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Accept objects immediately; otherwise require null.
    Label done;
    masm.branchTestObject(Assembler::Equal, input, &done);
    masm.branchTestNull(Assembler::NotEqual, input, failure->label());
    masm.bind(&done);
    return true;
}
1330 :
// Guard the input value is a string; elided when statically known.
bool
CacheIRCompiler::emitGuardIsString()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_STRING)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestString(Assembler::NotEqual, input, failure->label());
    return true;
}
1345 :
// Guard the input value is a symbol; elided when statically known.
bool
CacheIRCompiler::emitGuardIsSymbol()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_SYMBOL)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestSymbol(Assembler::NotEqual, input, failure->label());
    return true;
}
1360 :
1361 : bool
1362 0 : CacheIRCompiler::emitGuardIsInt32()
1363 : {
1364 0 : ValOperandId inputId = reader.valOperandId();
1365 0 : Register output = allocator.defineRegister(masm, reader.int32OperandId());
1366 :
1367 0 : if (allocator.knownType(inputId) == JSVAL_TYPE_INT32) {
1368 0 : Register input = allocator.useRegister(masm, Int32OperandId(inputId.id()));
1369 0 : masm.move32(input, output);
1370 : return true;
1371 : }
1372 0 : ValueOperand input = allocator.useValueRegister(masm, inputId);
1373 :
1374 : FailurePath* failure;
1375 0 : if (!addFailurePath(&failure))
1376 : return false;
1377 :
1378 0 : Label notInt32, done;
1379 0 : masm.branchTestInt32(Assembler::NotEqual, input, failure->label());
1380 0 : masm.unboxInt32(input, output);
1381 : return true;
1382 : }
1383 :
// Guard the input value can be used as an int32 index and unbox it into the
// output register. Int32 values are unboxed directly; doubles that convert
// exactly to int32 (including -0.0 -> 0) are accepted via FloatReg0; all
// other values jump to the failure path.
bool
CacheIRCompiler::emitGuardIsInt32Index()
{
    ValOperandId inputId = reader.valOperandId();
    Register output = allocator.defineRegister(masm, reader.int32OperandId());

    // If the operand is statically known to be an int32, just copy it.
    if (allocator.knownType(inputId) == JSVAL_TYPE_INT32) {
        Register input = allocator.useRegister(masm, Int32OperandId(inputId.id()));
        masm.move32(input, output);
        return true;
    }

    ValueOperand input = allocator.useValueRegister(masm, inputId);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label notInt32, done;
    masm.branchTestInt32(Assembler::NotEqual, input, &notInt32);
    masm.unboxInt32(input, output);
    masm.jump(&done);

    masm.bind(&notInt32);

    if (cx_->runtime()->jitSupportsFloatingPoint) {
        masm.branchTestDouble(Assembler::NotEqual, input, failure->label());

        // If we're compiling a Baseline IC, FloatReg0 is always available.
        // Otherwise it may be live, so save and restore it around the use.
        Label failurePopReg;
        if (mode_ != Mode::Baseline)
            masm.push(FloatReg0);

        masm.unboxDouble(input, FloatReg0);
        // ToPropertyKey(-0.0) is "0", so we can truncate -0.0 to 0 here.
        masm.convertDoubleToInt32(FloatReg0, output,
                                  (mode_ == Mode::Baseline) ? failure->label() : &failurePopReg,
                                  false);
        if (mode_ != Mode::Baseline) {
            masm.pop(FloatReg0);
            masm.jump(&done);

            // Conversion failed: restore FloatReg0 before failing.
            masm.bind(&failurePopReg);
            masm.pop(FloatReg0);
            masm.jump(failure->label());
        }
    } else {
        // No FP support: doubles can't be handled, so fail.
        masm.jump(failure->label());
    }

    masm.bind(&done);
    return true;
}
1437 :
// Guard the input value has the JSValueType encoded in the CacheIR stream;
// jump to the failure path on any other tag. Elided when the type is
// statically known to match.
bool
CacheIRCompiler::emitGuardType()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType type = reader.valueType();

    if (allocator.knownType(inputId) == type)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    switch (type) {
      case JSVAL_TYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_SYMBOL:
        masm.branchTestSymbol(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_INT32:
        masm.branchTestInt32(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_DOUBLE:
        masm.branchTestDouble(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_UNDEFINED:
        masm.branchTestUndefined(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_NULL:
        masm.branchTestNull(Assembler::NotEqual, input, failure->label());
        break;
      default:
        MOZ_CRASH("Unexpected type");
    }

    return true;
}
1481 :
// Guard the object has the Class selected by the GuardClassKind in the
// CacheIR stream. Uses the Spectre-hardened class check unless the allocator
// knows mitigations are unnecessary for this operand.
bool
CacheIRCompiler::emitGuardClass()
{
    ObjOperandId objId = reader.objOperandId();
    Register obj = allocator.useRegister(masm, objId);
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    const Class* clasp = nullptr;
    switch (reader.guardClassKind()) {
      case GuardClassKind::Array:
        clasp = &ArrayObject::class_;
        break;
      case GuardClassKind::MappedArguments:
        clasp = &MappedArgumentsObject::class_;
        break;
      case GuardClassKind::UnmappedArguments:
        clasp = &UnmappedArgumentsObject::class_;
        break;
      case GuardClassKind::WindowProxy:
        clasp = cx_->runtime()->maybeWindowProxyClass();
        break;
      case GuardClassKind::JSFunction:
        clasp = &JSFunction::class_;
        break;
    }
    MOZ_ASSERT(clasp);

    if (objectGuardNeedsSpectreMitigations(objId)) {
        masm.branchTestObjClass(Assembler::NotEqual, obj, clasp, scratch, obj, failure->label());
    } else {
        masm.branchTestObjClassNoSpectreMitigations(Assembler::NotEqual, obj, clasp, scratch,
                                                    failure->label());
    }

    return true;
}
1522 :
// Guard the object is a JSFunction whose native entry point equals the
// JSNative baked into the CacheIR stream.
bool
CacheIRCompiler::emitGuardIsNativeFunction()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSNative nativeFunc = reinterpret_cast<JSNative>(reader.pointer());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Ensure obj is a function.
    const Class* clasp = &JSFunction::class_;
    masm.branchTestObjClass(Assembler::NotEqual, obj, clasp, scratch, obj, failure->label());

    // Ensure function native matches.
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSFunction::offsetOfNativeOrEnv()),
                   ImmPtr(nativeFunc), failure->label());
    return true;
}
1543 :
// Guard the object is a native object (not a proxy or other non-native).
bool
CacheIRCompiler::emitGuardIsNativeObject()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchIfNonNativeObj(obj, scratch, failure->label());
    return true;
}
1557 :
// Guard the object is a proxy; jump to the failure path if it is not.
bool
CacheIRCompiler::emitGuardIsProxy()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // |false| = branch when the object is NOT a proxy.
    masm.branchTestObjectIsProxy(false, obj, scratch, failure->label());
    return true;
}
1571 :
// Guard the proxy object is NOT a DOM proxy (its handler family differs
// from the DOM proxy handler family).
bool
CacheIRCompiler::emitGuardNotDOMProxy()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestProxyHandlerFamily(Assembler::Equal, obj, scratch,
                                      GetDOMProxyHandlerFamily(), failure->label());
    return true;
}
1586 :
// Guard an int32 operand compares as |cond| against an immediate from the
// CacheIR stream; the condition is inverted to branch to the failure path.
bool
CacheIRCompiler::emitGuardSpecificInt32Immediate()
{
    Register reg = allocator.useRegister(masm, reader.int32OperandId());
    int32_t ival = reader.int32Immediate();
    Assembler::Condition cond = (Assembler::Condition) reader.readByte();

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branch32(Assembler::InvertCondition(cond), reg, Imm32(ival), failure->label());
    return true;
}
1601 :
// Guard the input is the magic value (JSWhyMagic) from the CacheIR stream.
bool
CacheIRCompiler::emitGuardMagicValue()
{
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    JSWhyMagic magic = reader.whyMagic();

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestMagicValue(Assembler::NotEqual, val, magic, failure->label());
    return true;
}
1615 :
// Guard an unboxed plain object has no expando object (expando slot is null).
bool
CacheIRCompiler::emitGuardNoUnboxedExpando()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Address expandoAddr(obj, UnboxedPlainObject::offsetOfExpando());
    masm.branchPtr(Assembler::NotEqual, expandoAddr, ImmWord(0), failure->label());
    return true;
}
1629 :
// Guard an unboxed plain object HAS an expando and load the expando pointer
// into the output register; a null expando jumps to the failure path.
bool
CacheIRCompiler::emitGuardAndLoadUnboxedExpando()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register output = allocator.defineRegister(masm, reader.objOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Address expandoAddr(obj, UnboxedPlainObject::offsetOfExpando());
    masm.loadPtr(expandoAddr, output);
    masm.branchTestPtr(Assembler::Zero, output, output, failure->label());
    return true;
}
1645 :
// Guard no typed object in the zone has had its storage detached.
bool
CacheIRCompiler::emitGuardNoDetachedTypedObjects()
{
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // All stubs manipulating typed objects must check the zone-wide flag
    // indicating whether their underlying storage might be detached, to bail
    // out if needed.
    uint32_t* address = &cx_->zone()->detachedTypedObjects;
    masm.branch32(Assembler::NotEqual, AbsoluteAddress(address), Imm32(0), failure->label());
    return true;
}
1660 :
// Guard the native object has no initialized dense elements
// (initializedLength == 0).
bool
CacheIRCompiler::emitGuardNoDenseElements()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Make sure there are no dense elements.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::NotEqual, initLength, Imm32(0), failure->label());
    return true;
}
1679 :
// Convert a string operand to an int32 index in the output register. Fast
// path: the index cached in the string's header. Slow path: call
// GetIndexFromString via the ABI with volatile registers saved; a negative
// return value means the string is not an index and jumps to failure.
bool
CacheIRCompiler::emitGuardAndGetIndexFromString()
{
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    Register output = allocator.defineRegister(masm, reader.int32OperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label vmCall, done;
    masm.loadStringIndexValue(str, output, &vmCall);
    masm.jump(&done);

    {
        masm.bind(&vmCall);
        // Save volatile registers across the ABI call.
        LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
        masm.PushRegsInMask(save);

        masm.setupUnalignedABICall(output);
        masm.passABIArg(str);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, GetIndexFromString));
        masm.mov(ReturnReg, output);

        // Restore everything except the output register, which holds the result.
        LiveRegisterSet ignore;
        ignore.add(output);
        masm.PopRegsInMaskIgnore(save, ignore);

        // GetIndexFromString returns a negative value on failure.
        masm.branchTest32(Assembler::Signed, output, output, failure->label());
    }

    masm.bind(&done);
    return true;
}
1715 :
// Load the object's prototype into the output object register.
bool
CacheIRCompiler::emitLoadProto()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());
    masm.loadObjProto(obj, reg);
    return true;
}
1724 :
// Load an environment object's enclosing environment into the output register.
bool
CacheIRCompiler::emitLoadEnclosingEnvironment()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());
    masm.extractObject(Address(obj, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
    return true;
}
1733 :
// Load a wrapper (proxy) object's target from its private reserved slot
// into the output object register.
bool
CacheIRCompiler::emitLoadWrapperTarget()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());

    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), reg);
    masm.unboxObject(Address(reg, detail::ProxyReservedSlots::offsetOfPrivateSlot()), reg);
    return true;
}
1744 :
// Extract the type tag of a Value into the output register.
bool
CacheIRCompiler::emitLoadValueTag()
{
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    Register res = allocator.defineRegister(masm, reader.valueTagOperandId());

    // extractTag may use |res| as scratch and return it, or return another
    // register; copy only if the tag ended up elsewhere.
    Register tag = masm.extractTag(val, res);
    if (tag != res)
        masm.mov(tag, res);
    return true;
}
1756 :
// Load a DOM proxy's expando Value from its private reserved slot into the
// output value register (scratchReg is used to hold the slots pointer).
bool
CacheIRCompiler::emitLoadDOMExpandoValue()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand val = allocator.defineValueRegister(masm, reader.valOperandId());

    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), val.scratchReg());
    masm.loadValue(Address(val.scratchReg(),
                           detail::ProxyReservedSlots::offsetOfPrivateSlot()),
                   val);
    return true;
}
1769 :
// Load the expando Value out of a DOM proxy's ExpandoAndGeneration structure,
// ignoring the generation counter (used when the stub rechecks it elsewhere).
bool
CacheIRCompiler::emitLoadDOMExpandoValueIgnoreGeneration()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand output = allocator.defineValueRegister(masm, reader.valOperandId());

    // Determine the expando's Address.
    Register scratch = output.scratchReg();
    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), scratch);
    Address expandoAddr(scratch, detail::ProxyReservedSlots::offsetOfPrivateSlot());

#ifdef DEBUG
    // Private values are stored as doubles, so assert we have a double.
    Label ok;
    masm.branchTestDouble(Assembler::Equal, expandoAddr, &ok);
    masm.assumeUnreachable("DOM expando is not a PrivateValue!");
    masm.bind(&ok);
#endif

    // Load the ExpandoAndGeneration* from the PrivateValue.
    masm.loadPrivate(expandoAddr, scratch);

    // Load expandoAndGeneration->expando into the output Value register.
    masm.loadValue(Address(scratch, ExpandoAndGeneration::offsetOfExpando()), output);
    return true;
}
1796 :
// Store |undefined| as the IC result. A typed (non-Value) output means the
// type monitor should never have let undefined through.
bool
CacheIRCompiler::emitLoadUndefinedResult()
{
    AutoOutputRegister output(*this);
    if (output.hasValue())
        masm.moveValue(UndefinedValue(), output.valueReg());
    else
        masm.assumeUnreachable("Should have monitored undefined result");
    return true;
}
1807 :
// Store the boolean constant |b| into the IC output, boxed as a Value or as
// a raw word depending on the output's representation.
static void
EmitStoreBoolean(MacroAssembler& masm, bool b, const AutoOutputRegister& output)
{
    if (output.hasValue()) {
        Value val = BooleanValue(b);
        masm.moveValue(val, output.valueReg());
    } else {
        MOZ_ASSERT(output.type() == JSVAL_TYPE_BOOLEAN);
        masm.movePtr(ImmWord(b), output.typedReg().gpr());
    }
}
1819 :
// Store a boolean constant from the CacheIR stream as the IC result.
bool
CacheIRCompiler::emitLoadBooleanResult()
{
    AutoOutputRegister output(*this);
    bool b = reader.readBool();
    EmitStoreBoolean(masm, b, output);

    return true;
}
1829 :
// Store a typed payload register |reg| of JSValueType |type| into the IC
// output: tag it into a Value output, convert int32 -> double for a float
// output, or move it directly when the typed output matches. Any other
// combination should have been rejected by type monitoring.
static void
EmitStoreResult(MacroAssembler& masm, Register reg, JSValueType type,
                const AutoOutputRegister& output)
{
    if (output.hasValue()) {
        masm.tagValue(type, reg, output.valueReg());
        return;
    }
    if (type == JSVAL_TYPE_INT32 && output.typedReg().isFloat()) {
        masm.convertInt32ToDouble(reg, output.typedReg().fpu());
        return;
    }
    if (type == output.type()) {
        masm.mov(reg, output.typedReg().gpr());
        return;
    }
    masm.assumeUnreachable("Should have monitored result");
}
1848 :
// Load a dense array's length as an int32 result; fails if the length does
// not fit in an int32 (sign bit set after the 32-bit load).
bool
CacheIRCompiler::emitLoadInt32ArrayLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
    masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);

    // Guard length fits in an int32.
    masm.branchTest32(Assembler::Signed, scratch, scratch, failure->label());
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1868 :
// Int32NegationResult: negate an int32 operand and box the result.
// Fails (rather than producing a double) for 0 and INT32_MIN.
bool
CacheIRCompiler::emitInt32NegationResult()
{
    AutoOutputRegister output(*this);
    Register val = allocator.useRegister(masm, reader.int32OperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Guard against 0 and MIN_INT by checking if low 31-bits are all zero.
    // Both of these result in a double.
    masm.branchTest32(Assembler::Zero, val, Imm32(0x7fffffff), failure->label());
    // NOTE(review): neg32 clobbers the operand's register in place;
    // assumes the allocator allows this use to be consumed — confirm.
    masm.neg32(val);
    masm.tagValue(JSVAL_TYPE_INT32, val, output.valueReg());
    return true;
}
1886 :
// Int32NotResult: bitwise-NOT an int32 operand and box the result.
// Cannot fail: ~x of any int32 is always an int32.
bool
CacheIRCompiler::emitInt32NotResult()
{
    AutoOutputRegister output(*this);
    Register val = allocator.useRegister(masm, reader.int32OperandId());
    // NOTE(review): not32 clobbers the operand's register in place;
    // assumes the allocator allows this use to be consumed — confirm.
    masm.not32(val);
    masm.tagValue(JSVAL_TYPE_INT32, val, output.valueReg());
    return true;
}
1896 :
// DoubleNegationResult: negate a double-typed Value. Outside Baseline,
// FloatReg0 may be live, so it is saved around the operation; the
// |failurePopReg| path restores it before jumping to the failure label.
bool
CacheIRCompiler::emitDoubleNegationResult()
{
    AutoOutputRegister output(*this);
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // If we're compiling a Baseline IC, FloatReg0 is always available.
    Label failurePopReg, done;
    if (mode_ != Mode::Baseline)
        masm.push(FloatReg0);

    // Unbox/convert the operand to a double; int32 inputs are converted,
    // non-numbers take the (possibly reg-restoring) failure path.
    masm.ensureDouble(val, FloatReg0, (mode_ != Mode::Baseline) ? &failurePopReg : failure->label());
    masm.negateDouble(FloatReg0);
    masm.boxDouble(FloatReg0, output.valueReg(), FloatReg0);

    if (mode_ != Mode::Baseline) {
        // Success: restore the caller's FloatReg0 and skip the failure path.
        masm.pop(FloatReg0);
        masm.jump(&done);

        masm.bind(&failurePopReg);
        masm.pop(FloatReg0);
        masm.jump(failure->label());
    }

    masm.bind(&done);
    return true;
}
1928 :
// TruncateDoubleToUInt32: truncate a double-typed Value into an int32
// register using ECMA ToInt32 (modulo 2^32) semantics. Fast path uses the
// hardware truncate; out-of-range inputs fall back to an ABI call to
// JS::ToInt32. FloatReg0 is saved/restored outside Baseline.
bool
CacheIRCompiler::emitTruncateDoubleToUInt32()
{
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    Register res = allocator.defineRegister(masm, reader.int32OperandId());

    Label doneTruncate, truncateABICall;
    if (mode_ != Mode::Baseline)
        masm.push(FloatReg0);

    masm.unboxDouble(val, FloatReg0);
    masm.branchTruncateDoubleMaybeModUint32(FloatReg0, res, &truncateABICall);
    masm.jump(&doneTruncate);

    // Slow path: call JS::ToInt32 with all volatile registers saved,
    // except FloatReg0 which carries the argument.
    masm.bind(&truncateABICall);
    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    save.takeUnchecked(FloatReg0);
    // Bug 1451976
    save.takeUnchecked(FloatReg0.asSingle());
    masm.PushRegsInMask(save);

    masm.setupUnalignedABICall(res);
    masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
    masm.callWithABI(BitwiseCast<void*, int32_t(*)(double)>(JS::ToInt32),
                     MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckOther);
    masm.storeCallInt32Result(res);

    // Restore everything except |res|, which now holds the result.
    LiveRegisterSet ignore;
    ignore.add(res);
    masm.PopRegsInMaskIgnore(save, ignore);

    masm.bind(&doneTruncate);
    if (mode_ != Mode::Baseline)
        masm.pop(FloatReg0);
    return true;
}
1965 :
// LoadArgumentsObjectLengthResult: load the length of an arguments object,
// failing if the length property has been overridden by script.
bool
CacheIRCompiler::emitLoadArgumentsObjectLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get initial length value.
    masm.unboxInt32(Address(obj, ArgumentsObject::getInitialLengthSlotOffset()), scratch);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratch,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      failure->label());

    // Shift out arguments length and return it. No need to type monitor
    // because this stub always returns int32.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratch);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1992 :
// LoadFunctionLengthResult: compute a JSFunction's |length| as an int32.
// Dispatches on the function's flags: bound functions read their extended
// slot, interpreted functions read the script's function length, and native
// functions use nargs. Fails for lazy scripts or a resolved length property.
bool
CacheIRCompiler::emitLoadFunctionLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get the JSFunction flags.
    masm.load16ZeroExtend(Address(obj, JSFunction::offsetOfFlags()), scratch);

    // Functions with lazy scripts don't store their length.
    // If the length was resolved before the length property might be shadowed.
    masm.branchTest32(Assembler::NonZero,
                      scratch,
                      Imm32(JSFunction::INTERPRETED_LAZY |
                            JSFunction::RESOLVED_LENGTH),
                      failure->label());

    // Bound-function check must come first: a bound function may also be
    // interpreted, and its length lives in the extended slot.
    Label boundFunction;
    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSFunction::BOUND_FUN), &boundFunction);
    Label interpreted;
    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSFunction::INTERPRETED), &interpreted);

    // Load the length of the native function.
    masm.load16ZeroExtend(Address(obj, JSFunction::offsetOfNargs()), scratch);
    Label done;
    masm.jump(&done);

    masm.bind(&boundFunction);
    // Bound functions might have a non-int32 length.
    Address boundLength(obj, FunctionExtended::offsetOfExtendedSlot(BOUND_FUN_LENGTH_SLOT));
    masm.branchTestInt32(Assembler::NotEqual, boundLength, failure->label());
    masm.unboxInt32(boundLength, scratch);
    masm.jump(&done);

    masm.bind(&interpreted);
    // Load the length from the function's script.
    masm.loadPtr(Address(obj, JSFunction::offsetOfScript()), scratch);
    masm.load16ZeroExtend(Address(scratch, JSScript::offsetOfFunLength()), scratch);

    masm.bind(&done);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
2041 :
// LoadStringLengthResult: load a string's length as an int32 result.
// Infallible, so no failure path is needed.
bool
CacheIRCompiler::emitLoadStringLengthResult()
{
    AutoOutputRegister output(*this);
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    masm.loadStringLength(str, scratch);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
2053 :
// LoadStringCharResult: load str[index] as a string result. Only succeeds
// when the char code maps to an interned static unit string; other chars
// (and out-of-bounds indices) take the failure path.
bool
CacheIRCompiler::emitLoadStringCharResult()
{
    AutoOutputRegister output(*this);
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Bounds check, load string char.
    masm.spectreBoundsCheck32(index, Address(str, JSString::offsetOfLength()), scratch1,
                              failure->label());
    masm.loadStringChar(str, index, scratch1, scratch2, failure->label());

    // Load StaticString for this char.
    masm.boundsCheck32PowerOfTwo(scratch1, StaticStrings::UNIT_STATIC_LIMIT, failure->label());
    masm.movePtr(ImmPtr(&cx_->staticStrings().unitStaticTable), scratch2);
    masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch2);

    EmitStoreResult(masm, scratch2, JSVAL_TYPE_STRING, output);
    return true;
}
2080 :
// LoadArgumentsObjectArgResult: load arguments[index] from an arguments
// object's ArgumentsData. Fails when length/elements were overridden, the
// index is out of bounds, elements were deleted (RareArgumentsData present),
// or the slot holds the forwarded-to-call-slot magic value.
bool
CacheIRCompiler::emitLoadArgumentsObjectArgResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegisterMaybeOutput scratch2(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get initial length value.
    masm.unboxInt32(Address(obj, ArgumentsObject::getInitialLengthSlotOffset()), scratch1);

    // Ensure no overridden length/element.
    masm.branchTest32(Assembler::NonZero,
                      scratch1,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT |
                            ArgumentsObject::ELEMENT_OVERRIDDEN_BIT),
                      failure->label());

    // Bounds check.
    masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratch1);
    masm.spectreBoundsCheck32(index, scratch1, scratch2, failure->label());

    // Load ArgumentsData.
    masm.loadPrivate(Address(obj, ArgumentsObject::getDataSlotOffset()), scratch1);

    // Fail if we have a RareArgumentsData (elements were deleted).
    masm.branchPtr(Assembler::NotEqual,
                   Address(scratch1, offsetof(ArgumentsData, rareData)),
                   ImmWord(0),
                   failure->label());

    // Guard the argument is not a FORWARD_TO_CALL_SLOT MagicValue.
    BaseValueIndex argValue(scratch1, index, ArgumentsData::offsetOfArgs());
    masm.branchTestMagic(Assembler::Equal, argValue, failure->label());
    masm.loadValue(argValue, output.valueReg());
    return true;
}
2123 :
// LoadDenseElementResult: load obj's dense element at |index|. Fails on
// out-of-bounds indices and on holes (magic values).
bool
CacheIRCompiler::emitLoadDenseElementResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegisterMaybeOutput scratch2(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch1);

    // Bounds check.
    Address initLength(scratch1, ObjectElements::offsetOfInitializedLength());
    masm.spectreBoundsCheck32(index, initLength, scratch2, failure->label());

    // Hole check.
    BaseObjectElementIndex element(scratch1, index);
    masm.branchTestMagic(Assembler::Equal, element, failure->label());
    masm.loadTypedOrValue(element, output);
    return true;
}
2150 :
// GuardIndexIsNonNegative: fail the stub if the int32 index is negative.
bool
CacheIRCompiler::emitGuardIndexIsNonNegative()
{
    Register index = allocator.useRegister(masm, reader.int32OperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branch32(Assembler::LessThan, index, Imm32(0), failure->label());
    return true;
}
2163 :
// GuardTagNotEqual: guard that two value tags differ. Equal tags fail
// immediately; additionally, two distinct *number* tags (int32 vs double)
// still count as "equal" for this guard and also fail.
bool
CacheIRCompiler::emitGuardTagNotEqual()
{
    Register lhs = allocator.useRegister(masm, reader.valueTagOperandId());
    Register rhs = allocator.useRegister(masm, reader.valueTagOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label done;
    masm.branch32(Assembler::Equal, lhs, rhs, failure->label());

    // If both lhs and rhs are numbers, can't use tag comparison to do inequality comparison
    masm.branchTestNumber(Assembler::NotEqual, lhs, &done);
    masm.branchTestNumber(Assembler::NotEqual, rhs, &done);
    masm.jump(failure->label());

    masm.bind(&done);
    return true;
}
2185 :
// LoadDenseElementHoleResult: like LoadDenseElementResult, but out-of-bounds
// (non-negative) indices and holes produce |undefined| instead of failing.
// Requires a Value output, since undefined cannot be stored in a typed reg.
bool
CacheIRCompiler::emitLoadDenseElementHoleResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegisterMaybeOutput scratch2(allocator, masm, output);

    if (!output.hasValue()) {
        masm.assumeUnreachable("Should have monitored undefined value after attaching stub");
        return true;
    }

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Make sure the index is nonnegative.
    masm.branch32(Assembler::LessThan, index, Imm32(0), failure->label());

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch1);

    // Guard on the initialized length.
    Label hole;
    Address initLength(scratch1, ObjectElements::offsetOfInitializedLength());
    masm.spectreBoundsCheck32(index, initLength, scratch2, &hole);

    // Load the value.
    Label done;
    masm.loadValue(BaseObjectElementIndex(scratch1, index), output.valueReg());
    masm.branchTestMagic(Assembler::NotEqual, output.valueReg(), &done);

    // Load undefined for the hole.
    masm.bind(&hole);
    masm.moveValue(UndefinedValue(), output.valueReg());

    masm.bind(&done);
    return true;
}
2227 :
// LoadTypedElementExistsResult: store a boolean indicating whether |index|
// is within the length of a typed array / typed object. Infallible.
bool
CacheIRCompiler::emitLoadTypedElementExistsResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    TypedThingLayout layout = reader.typedThingLayout();
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    Label outOfBounds, done;

    // Bound check.
    LoadTypedThingLength(masm, layout, obj, scratch);
    masm.branch32(Assembler::BelowOrEqual, scratch, index, &outOfBounds);
    EmitStoreBoolean(masm, true, output);
    masm.jump(&done);

    masm.bind(&outOfBounds);
    EmitStoreBoolean(masm, false, output);

    masm.bind(&done);
    return true;
}
2251 :
// LoadDenseElementExistsResult: store |true| when the dense element at
// |index| exists and is not a hole; out-of-bounds or hole fails the stub
// so the next IC (or slow path) handles it.
bool
CacheIRCompiler::emitLoadDenseElementExistsResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Bounds check. Unsigned compare sends negative indices to next IC.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, failure->label());

    // Hole check.
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, failure->label());

    EmitStoreBoolean(masm, true, output);
    return true;
}
2278 :
// LoadDenseElementHoleExistsResult: like LoadDenseElementExistsResult, but
// out-of-bounds indices and holes store |false| instead of failing.
// Negative indices still fail the stub.
bool
CacheIRCompiler::emitLoadDenseElementHoleExistsResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Make sure the index is nonnegative.
    masm.branch32(Assembler::LessThan, index, Imm32(0), failure->label());

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Guard on the initialized length.
    Label hole;
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, &hole);

    // Load value and replace with true.
    Label done;
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, &hole);
    EmitStoreBoolean(masm, true, output);
    masm.jump(&done);

    // Load false for the hole.
    masm.bind(&hole);
    EmitStoreBoolean(masm, false, output);

    masm.bind(&done);
    return true;
}
2316 :
// ArrayJoinResult: fast paths for Array.prototype.join. Handles only a
// zero-length array (returns the empty string) and a one-element array
// whose single initialized element is a string (returns that string);
// everything else fails to the generic path.
bool
CacheIRCompiler::emitArrayJoinResult()
{
    ObjOperandId objId = reader.objOperandId();

    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, objId);
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements in scratch.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
    Address lengthAddr(scratch, ObjectElements::offsetOfLength());

    // If array length is 0, return empty string.
    Label finished;

    {
        Label arrayNotEmpty;
        masm.branch32(Assembler::NotEqual, lengthAddr, Imm32(0), &arrayNotEmpty);
        masm.movePtr(ImmGCPtr(cx_->names().empty), scratch);
        masm.tagValue(JSVAL_TYPE_STRING, scratch, output.valueReg());
        masm.jump(&finished);
        masm.bind(&arrayNotEmpty);
    }

    // Otherwise, handle array length 1 case.
    masm.branch32(Assembler::NotEqual, lengthAddr, Imm32(1), failure->label());

    // But only if initializedLength is also 1.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::NotEqual, initLength, Imm32(1), failure->label());

    // And only if elem0 is a string.
    Address elementAddr(scratch, 0);
    masm.branchTestString(Assembler::NotEqual, elementAddr, failure->label());

    // Store the value.
    masm.loadValue(elementAddr, output.valueReg());

    masm.bind(&finished);

    return true;
}
2364 :
// LoadTypedElementResult: load element |index| of scalar type |type| from a
// typed array / typed object with the given layout. Verifies up front that
// a typed (non-Value) output has a type the scalar can produce; otherwise
// the stub should not have been attached.
bool
CacheIRCompiler::emitLoadTypedElementResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    TypedThingLayout layout = reader.typedThingLayout();
    Scalar::Type type = reader.scalarType();

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegisterMaybeOutput scratch2(allocator, masm, output);

    if (!output.hasValue()) {
        if (type == Scalar::Float32 || type == Scalar::Float64) {
            if (output.type() != JSVAL_TYPE_DOUBLE) {
                masm.assumeUnreachable("Should have monitored double after attaching stub");
                return true;
            }
        } else {
            if (output.type() != JSVAL_TYPE_INT32 && output.type() != JSVAL_TYPE_DOUBLE) {
                masm.assumeUnreachable("Should have monitored int32 after attaching stub");
                return true;
            }
        }
    }

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Bounds check.
    LoadTypedThingLength(masm, layout, obj, scratch1);
    masm.spectreBoundsCheck32(index, scratch1, scratch2, failure->label());

    // Load the elements vector.
    LoadTypedThingData(masm, layout, obj, scratch1);

    // Load the value.
    BaseIndex source(scratch1, index, ScaleFromElemWidth(Scalar::byteSize(type)));
    if (output.hasValue()) {
        masm.loadFromTypedArray(type, source, output.valueReg(), *allowDoubleResult_, scratch1,
                                failure->label());
    } else {
        bool needGpr = (type == Scalar::Int8 || type == Scalar::Uint8 ||
                        type == Scalar::Int16 || type == Scalar::Uint16 ||
                        type == Scalar::Uint8Clamped || type == Scalar::Int32);
        if (needGpr && output.type() == JSVAL_TYPE_DOUBLE) {
            // Load the element as integer, then convert it to double.
            // NOTE(review): the convert reads |source| from memory rather than
            // the just-loaded scratch1 — presumably intentional, but confirm
            // this matches all convertInt32ToDouble overloads in use.
            masm.loadFromTypedArray(type, source, AnyRegister(scratch1), scratch1,
                                    failure->label());
            masm.convertInt32ToDouble(source, output.typedReg().fpu());
        } else {
            masm.loadFromTypedArray(type, source, output.typedReg(), scratch1, failure->label());
        }
    }
    return true;
}
2422 :
// Shared helper: load a typed-object field at |fieldAddr| into the Value
// output. Scalar fields go through loadFromTypedArray; reference fields are
// boxed according to their ReferenceTypeDescr kind (any/object/string),
// with null object pointers boxed as the null Value.
void
CacheIRCompiler::emitLoadTypedObjectResultShared(const Address& fieldAddr, Register scratch,
                                                 uint32_t typeDescr,
                                                 const AutoOutputRegister& output)
{
    MOZ_ASSERT(output.hasValue());

    if (SimpleTypeDescrKeyIsScalar(typeDescr)) {
        Scalar::Type type = ScalarTypeFromSimpleTypeDescrKey(typeDescr);
        // nullptr failure label: scalar loads here cannot fail since
        // doubles are always allowed.
        masm.loadFromTypedArray(type, fieldAddr, output.valueReg(),
                                /* allowDouble = */ true, scratch, nullptr);
    } else {
        ReferenceTypeDescr::Type type = ReferenceTypeFromSimpleTypeDescrKey(typeDescr);
        switch (type) {
          case ReferenceTypeDescr::TYPE_ANY:
            masm.loadValue(fieldAddr, output.valueReg());
            break;

          case ReferenceTypeDescr::TYPE_OBJECT: {
            // A null object pointer is stored as the null Value.
            Label notNull, done;
            masm.loadPtr(fieldAddr, scratch);
            masm.branchTestPtr(Assembler::NonZero, scratch, scratch, &notNull);
            masm.moveValue(NullValue(), output.valueReg());
            masm.jump(&done);
            masm.bind(&notNull);
            masm.tagValue(JSVAL_TYPE_OBJECT, scratch, output.valueReg());
            masm.bind(&done);
            break;
          }

          case ReferenceTypeDescr::TYPE_STRING:
            masm.loadPtr(fieldAddr, scratch);
            masm.tagValue(JSVAL_TYPE_STRING, scratch, output.valueReg());
            break;

          default:
            MOZ_CRASH("Invalid ReferenceTypeDescr");
        }
    }
}
2463 :
// LoadObjectResult: store an object operand into the output, boxing it
// when the output is a Value register.
bool
CacheIRCompiler::emitLoadObjectResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());

    if (output.hasValue())
        masm.tagValue(JSVAL_TYPE_OBJECT, obj, output.valueReg());
    else
        masm.mov(obj, output.typedReg().gpr());

    return true;
}
2477 :
// LoadTypeOfObjectResult: compute |typeof obj| for an object operand as a
// string result. Fast paths handle plain objects, callables and
// undefined-emulating objects; proxies and other tricky cases fall back to
// an ABI call to TypeOfObject.
bool
CacheIRCompiler::emitLoadTypeOfObjectResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    Label slowCheck, isObject, isCallable, isUndefined, done;
    masm.typeOfObject(obj, scratch, &slowCheck, &isObject, &isCallable, &isUndefined);

    masm.bind(&isCallable);
    masm.moveValue(StringValue(cx_->names().function), output.valueReg());
    masm.jump(&done);

    masm.bind(&isUndefined);
    masm.moveValue(StringValue(cx_->names().undefined), output.valueReg());
    masm.jump(&done);

    masm.bind(&isObject);
    masm.moveValue(StringValue(cx_->names().object), output.valueReg());
    masm.jump(&done);

    {
        // Slow path: call TypeOfObject(obj, runtime) and box the returned
        // JSString*. All volatile registers are preserved around the call.
        masm.bind(&slowCheck);
        LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
        masm.PushRegsInMask(save);

        masm.setupUnalignedABICall(scratch);
        masm.passABIArg(obj);
        masm.movePtr(ImmPtr(cx_->runtime()), scratch);
        masm.passABIArg(scratch);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, TypeOfObject));
        masm.mov(ReturnReg, scratch);

        LiveRegisterSet ignore;
        ignore.add(scratch);
        masm.PopRegsInMaskIgnore(save, ignore);

        masm.tagValue(JSVAL_TYPE_STRING, scratch, output.valueReg());
    }

    masm.bind(&done);
    return true;
}
2522 :
// LoadInt32TruthyResult: store the truthiness of an int32-typed Value
// (false only for 0) as a boolean result.
bool
CacheIRCompiler::emitLoadInt32TruthyResult()
{
    AutoOutputRegister output(*this);
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());

    Label ifFalse, done;
    masm.branchTestInt32Truthy(false, val, &ifFalse);
    masm.moveValue(BooleanValue(true), output.valueReg());
    masm.jump(&done);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), output.valueReg());

    masm.bind(&done);
    return true;
}
2540 :
// LoadStringTruthyResult: store the truthiness of a string (false only for
// the empty string, i.e. length 0) as a boolean result.
bool
CacheIRCompiler::emitLoadStringTruthyResult()
{
    AutoOutputRegister output(*this);
    Register str = allocator.useRegister(masm, reader.stringOperandId());

    Label ifFalse, done;
    masm.branch32(Assembler::Equal, Address(str, JSString::offsetOfLength()), Imm32(0), &ifFalse);
    masm.moveValue(BooleanValue(true), output.valueReg());
    masm.jump(&done);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), output.valueReg());

    masm.bind(&done);
    return true;
}
2558 :
2559 : bool
2560 0 : CacheIRCompiler::emitLoadDoubleTruthyResult()
2561 : {
2562 2 : AutoOutputRegister output(*this);
2563 0 : ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
2564 :
2565 4 : Label ifFalse, done, failurePopReg;
2566 :
2567 : // If we're compiling a Baseline IC, FloatReg0 is always available.
2568 1 : if (mode_ != Mode::Baseline)
2569 0 : masm.push(FloatReg0);
2570 :
2571 0 : masm.unboxDouble(val, FloatReg0);
2572 :
2573 0 : masm.branchTestDoubleTruthy(false, FloatReg0, &ifFalse);
2574 2 : masm.moveValue(BooleanValue(true), output.valueReg());
2575 0 : masm.jump(&done);
2576 :
2577 1 : masm.bind(&ifFalse);
2578 0 : masm.moveValue(BooleanValue(false), output.valueReg());
2579 :
2580 0 : if (mode_ != Mode::Baseline)
2581 0 : masm.pop(FloatReg0);
2582 1 : masm.bind(&done);
2583 1 : return true;
2584 : }
2585 :
// LoadObjectTruthyResult: store the truthiness of an object. Objects are
// truthy unless they emulate undefined; the class-based check handles most
// cases inline, with an ABI call to js::EmulatesUndefined as the slow path.
bool
CacheIRCompiler::emitLoadObjectTruthyResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);


    Label emulatesUndefined, slowPath, done;
    masm.branchIfObjectEmulatesUndefined(obj, scratch, &slowPath, &emulatesUndefined);
    masm.moveValue(BooleanValue(true), output.valueReg());
    masm.jump(&done);

    masm.bind(&emulatesUndefined);
    masm.moveValue(BooleanValue(false), output.valueReg());
    masm.jump(&done);

    masm.bind(&slowPath);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, js::EmulatesUndefined));
    masm.convertBoolToInt32(ReturnReg, ReturnReg);
    // Invert: truthy == !EmulatesUndefined.
    masm.xor32(Imm32(1), ReturnReg);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, output.valueReg());

    masm.bind(&done);
    return true;
}
2614 :
// Shared implementation for object/symbol identity comparisons: compares
// the two operand pointers with the condition derived from the JSOp and
// stores the boolean result. |symbol| selects symbol vs object operands.
bool
CacheIRCompiler::emitComparePointerResultShared(bool symbol)
{
    AutoOutputRegister output(*this);

    Register left = symbol ? allocator.useRegister(masm, reader.symbolOperandId())
                           : allocator.useRegister(masm, reader.objOperandId());
    Register right = symbol ? allocator.useRegister(masm, reader.symbolOperandId())
                            : allocator.useRegister(masm, reader.objOperandId());
    JSOp op = reader.jsop();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    Label ifTrue, done;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), output.valueReg());
    masm.jump(&done);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), output.valueReg());
    masm.bind(&done);
    return true;
}
2639 :
2640 :
// CompareObjectResult: pointer-identity comparison of two objects.
bool
CacheIRCompiler::emitCompareObjectResult()
{
    return emitComparePointerResultShared(false);
}
2646 :
// CompareSymbolResult: pointer-identity comparison of two symbols.
bool
CacheIRCompiler::emitCompareSymbolResult()
{
    return emitComparePointerResultShared(true);
}
2652 :
// CallPrintString: debugging aid — emit code that prints a C string whose
// pointer is embedded in the CacheIR stream.
bool
CacheIRCompiler::emitCallPrintString()
{
    const char* str = reinterpret_cast<char*>(reader.pointer());
    masm.printf(str);
    return true;
}
2660 :
// Breakpoint: debugging aid — emit a hardware breakpoint instruction.
bool
CacheIRCompiler::emitBreakpoint()
{
    masm.breakpoint();
    return true;
}
2667 :
// Store |val| into a typed-object reference field at |dest|, emitting the
// appropriate pre-barrier for the field's MIR type. Object fields store the
// null Value as a null pointer. Callers are responsible for the post-barrier.
void
CacheIRCompiler::emitStoreTypedObjectReferenceProp(ValueOperand val, ReferenceTypeDescr::Type type,
                                                   const Address& dest, Register scratch)
{
    // Callers will post-barrier this store.

    switch (type) {
      case ReferenceTypeDescr::TYPE_ANY:
        EmitPreBarrier(masm, dest, MIRType::Value);
        masm.storeValue(val, dest);
        break;

      case ReferenceTypeDescr::TYPE_OBJECT: {
        EmitPreBarrier(masm, dest, MIRType::Object);
        Label isNull, done;
        masm.branchTestObject(Assembler::NotEqual, val, &isNull);
        masm.unboxObject(val, scratch);
        masm.storePtr(scratch, dest);
        masm.jump(&done);
        masm.bind(&isNull);
        // Non-object values must be null here; store a null pointer.
        masm.storePtr(ImmWord(0), dest);
        masm.bind(&done);
        break;
      }

      case ReferenceTypeDescr::TYPE_STRING:
        EmitPreBarrier(masm, dest, MIRType::String);
        masm.unboxString(val, scratch);
        masm.storePtr(scratch, dest);
        break;
    }
}
2700 :
// Emit code that links |iter| (a NativeIterator) into the doubly-linked
// enumerators list headed by |enumeratorsList|, inserting it just before
// the list head.
void
CacheIRCompiler::emitRegisterEnumerator(Register enumeratorsList, Register iter, Register scratch)
{
    // iter->next = list
    masm.storePtr(enumeratorsList, Address(iter, NativeIterator::offsetOfNext()));

    // iter->prev = list->prev
    masm.loadPtr(Address(enumeratorsList, NativeIterator::offsetOfPrev()), scratch);
    masm.storePtr(scratch, Address(iter, NativeIterator::offsetOfPrev()));

    // list->prev->next = iter
    masm.storePtr(iter, Address(scratch, NativeIterator::offsetOfNext()));

    // list->prev = ni
    masm.storePtr(iter, Address(enumeratorsList, NativeIterator::offsetOfPrev()));
}
2717 :
// Emit a generational-GC post-write barrier for storing |val| into |obj|.
// Skips the barrier entirely when the nursery doesn't exist, when |val| is
// a constant (asserted non-nursery), when a typed |val| cannot be a GC cell
// needing a barrier, when the stored cell is not in the nursery, or when
// |obj| itself is in the nursery. Otherwise calls the C++ barrier, using
// the element variant when |maybeIndex| is a valid register.
void
CacheIRCompiler::emitPostBarrierShared(Register obj, const ConstantOrRegister& val,
                                       Register scratch, Register maybeIndex)
{
    if (!cx_->nursery().exists())
        return;

    if (val.constant()) {
        MOZ_ASSERT_IF(val.value().isGCThing(), !IsInsideNursery(val.value().toGCThing()));
        return;
    }

    TypedOrValueRegister reg = val.reg();
    if (reg.hasTyped()) {
        // Only objects and strings can be nursery-allocated here.
        if (reg.type() != MIRType::Object && reg.type() != MIRType::String)
            return;
    }

    Label skipBarrier;
    if (reg.hasValue()) {
        masm.branchValueIsNurseryCell(Assembler::NotEqual, reg.valueReg(), scratch, &skipBarrier);
    } else {
        masm.branchPtrInNurseryChunk(Assembler::NotEqual, reg.typedReg().gpr(), scratch,
                                     &skipBarrier);
    }
    masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);

    // Call one of these, depending on maybeIndex:
    //
    //   void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
    //   void PostWriteElementBarrier(JSRuntime* rt, JSObject* obj,
    //                                int32_t index);
    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    masm.PushRegsInMask(save);
    masm.setupUnalignedABICall(scratch);
    masm.movePtr(ImmPtr(cx_->runtime()), scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    if (maybeIndex != InvalidReg) {
        masm.passABIArg(maybeIndex);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*,
                                             (PostWriteElementBarrier<IndexInBounds::Yes>)));
    } else {
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));
    }
    masm.PopRegsInMask(save);

    masm.bind(&skipBarrier);
}
2767 :
// Wrap the object in the output register via WrapObjectPure, leaving
// non-object values untouched. Jumps to the failure path if no wrapper
// could be obtained (WrapObjectPure returned null).
bool
CacheIRCompiler::emitWrapResult()
{
    AutoOutputRegister output(*this);
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label done;
    // We only have to wrap objects, because we are in the same zone.
    masm.branchTestObject(Assembler::NotEqual, output.valueReg(), &done);

    // Unbox into the output's own scratch register; the boxed output value
    // is dead from here on and gets re-tagged below.
    Register obj = output.valueReg().scratchReg();
    masm.unboxObject(output.valueReg(), obj);

    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    masm.PushRegsInMask(save);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, WrapObjectPure));
    masm.mov(ReturnReg, obj);

    // Don't restore |obj|: it now holds the call's result (the wrapper).
    LiveRegisterSet ignore;
    ignore.add(obj);
    masm.PopRegsInMaskIgnore(save, ignore);

    // We could not get a wrapper for this object.
    masm.branchTestPtr(Assembler::Zero, obj, obj, failure->label());

    // We clobbered the output register, so we have to retag.
    masm.tagValue(JSVAL_TYPE_OBJECT, obj, output.valueReg());

    masm.bind(&done);
    return true;
}
2808 :
// Megamorphic property load keyed by a Value id: calls
// GetNativeDataPropertyByValue through the ABI, passing a pointer to a
// two-Value stack area (vp[0] = id on entry, vp[1] = result on success).
// |handleMissing| selects the template instantiation that tolerates a
// missing property. Jumps to the failure path if the C++ call returns false.
bool
CacheIRCompiler::emitMegamorphicLoadSlotByValueResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
    bool handleMissing = reader.readBool();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // The object must be Native.
    masm.branchIfNonNativeObj(obj, scratch, failure->label());

    // idVal will be in vp[0], result will be stored in vp[1].
    masm.reserveStack(sizeof(Value));
    masm.Push(idVal);
    masm.moveStackPtrTo(idVal.scratchReg());

    // Keep |scratch| (holds the return value afterwards) and |idVal| (points
    // at the stack area) out of the save/restore set.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch);
    volatileRegs.takeUnchecked(idVal);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.passABIArg(idVal.scratchReg());
    if (handleMissing)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataPropertyByValue<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataPropertyByValue<false>)));
    masm.mov(ReturnReg, scratch);
    masm.PopRegsInMask(volatileRegs);

    masm.Pop(idVal);

    // On failure, pop the remaining reserved Value before bailing; record
    // framePushed so the success path can be reset after the branch.
    Label ok;
    uint32_t framePushed = masm.framePushed();
    masm.branchIfTrueBool(scratch, &ok);
    masm.adjustStack(sizeof(Value));
    masm.jump(failure->label());

    masm.bind(&ok);
    // Spectre mitigation: fence before using data produced by the C++ call.
    if (JitOptions.spectreJitToCxxCalls)
        masm.speculationBarrier();
    masm.setFramePushed(framePushed);
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));
    return true;
}
2865 :
// Megamorphic HasProperty / HasOwnProperty: calls HasNativeDataProperty
// through the ABI with a pointer to a two-Value stack area (vp[0] = id on
// entry, vp[1] = boolean result on success). |hasOwn| selects the own-
// property-only instantiation. Jumps to the failure path if the C++ call
// returns false.
//
// NOTE(review): unlike emitMegamorphicLoadSlotByValueResult and
// emitMegamorphicLoadSlotResult, no spectreJitToCxxCalls speculation
// barrier is emitted on the success path here — confirm whether one is
// needed for the boolean result.
bool
CacheIRCompiler::emitMegamorphicHasPropResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
    bool hasOwn = reader.readBool();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // idVal will be in vp[0], result will be stored in vp[1].
    masm.reserveStack(sizeof(Value));
    masm.Push(idVal);
    masm.moveStackPtrTo(idVal.scratchReg());

    // Keep |scratch| (return value) and |idVal| (stack pointer) out of the
    // save/restore set.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch);
    volatileRegs.takeUnchecked(idVal);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.passABIArg(idVal.scratchReg());
    if (hasOwn)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, HasNativeDataProperty<true>));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, HasNativeDataProperty<false>));
    masm.mov(ReturnReg, scratch);
    masm.PopRegsInMask(volatileRegs);

    masm.Pop(idVal);

    // On failure, drop the reserved Value before bailing.
    Label ok;
    uint32_t framePushed = masm.framePushed();
    masm.branchIfTrueBool(scratch, &ok);
    masm.adjustStack(sizeof(Value));
    masm.jump(failure->label());

    masm.bind(&ok);
    masm.setFramePushed(framePushed);
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));
    return true;
}
2917 :
// Sparse-element 'has' check: calls HasNativeElement through the ABI. The
// result Value is written by the callee into a one-Value stack area whose
// address is passed in |scratch2|. Jumps to the failure path if the C++
// call returns false.
bool
CacheIRCompiler::emitCallObjectHasSparseElementResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());

    AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Reserve the result slot and pass its address to the callee.
    masm.reserveStack(sizeof(Value));
    masm.moveStackPtrTo(scratch2.get());

    // Keep |scratch1| (holds the return value afterwards) and |index| out of
    // the save/restore set.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(index);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    masm.passABIArg(index);
    masm.passABIArg(scratch2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, HasNativeElement));
    masm.mov(ReturnReg, scratch1);
    masm.PopRegsInMask(volatileRegs);

    // On failure, drop the reserved Value before bailing.
    Label ok;
    uint32_t framePushed = masm.framePushed();
    masm.branchIfTrueBool(scratch1, &ok);
    masm.adjustStack(sizeof(Value));
    masm.jump(failure->label());

    masm.bind(&ok);
    masm.setFramePushed(framePushed);
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));
    return true;
}
2963 :
2964 : /*
2965 : * Move a constant value into register dest.
2966 : */
2967 0 : void CacheIRCompiler::emitLoadStubFieldConstant(StubFieldOffset val, Register dest) {
2968 33 : MOZ_ASSERT(mode_ == Mode::Ion);
2969 0 : switch (val.getStubFieldType()) {
2970 : case StubField::Type::Shape:
2971 0 : masm.movePtr(ImmGCPtr(shapeStubField(val.getOffset())),dest);
2972 0 : break;
2973 : case StubField::Type::String:
2974 6 : masm.movePtr(ImmGCPtr(stringStubField(val.getOffset())), dest);
2975 0 : break;
2976 : case StubField::Type::ObjectGroup:
2977 0 : masm.movePtr(ImmGCPtr(groupStubField(val.getOffset())), dest);
2978 0 : break;
2979 : case StubField::Type::JSObject:
2980 60 : masm.movePtr(ImmGCPtr(objectStubField(val.getOffset())), dest);
2981 30 : break;
2982 : default:
2983 0 : MOZ_CRASH("Unhandled stub field constant type");
2984 : }
2985 0 : }
2986 :
2987 : /*
2988 : * After this is done executing, dest contains the value; either through a constant load
2989 : * or through the load from the stub data.
2990 : *
2991 : * The current policy is that Baseline will use loads from the stub data (to allow IC
2992 : * sharing), where as Ion doesn't share ICs, and so we can safely use constants in the
2993 : * IC.
2994 : */
2995 0 : void CacheIRCompiler::emitLoadStubField(StubFieldOffset val, Register dest) {
2996 0 : if (stubFieldPolicy_ == StubFieldPolicy::Constant) {
2997 33 : emitLoadStubFieldConstant(val, dest);
2998 : } else {
2999 0 : Address load(ICStubReg, stubDataOffset_ + val.getOffset());
3000 80 : masm.loadPtr(load, dest);
3001 : }
3002 113 : }
3003 :
// instanceof fast path: walks |lhs|'s prototype chain looking for |proto|.
// Stores true if found, false if the chain ends (null proto) or |lhs| is
// not an object. Bails to the failure path on LazyProto, which requires a
// VM call to resolve.
bool
CacheIRCompiler::emitLoadInstanceOfObjectResult()
{
    AutoOutputRegister output(*this);
    ValueOperand lhs = allocator.useValueRegister(masm, reader.valOperandId());
    Register proto = allocator.useRegister(masm, reader.objOperandId());

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label returnFalse, returnTrue, done;
    // Primitives are never an instance of anything.
    masm.branchTestObject(Assembler::NotEqual, lhs, &returnFalse);

    // LHS is an object. Load its proto.
    masm.unboxObject(lhs, scratch);
    masm.loadObjProto(scratch, scratch);
    {
        // Walk the proto chain until we either reach the target object,
        // nullptr or LazyProto.
        Label loop;
        masm.bind(&loop);

        masm.branchPtr(Assembler::Equal, scratch, proto, &returnTrue);
        masm.branchTestPtr(Assembler::Zero, scratch, scratch, &returnFalse);

        // LazyProto is the tagged value 1; resolving it needs a VM call.
        MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
        masm.branchPtr(Assembler::Equal, scratch, ImmWord(1), failure->label());

        masm.loadObjProto(scratch, scratch);
        masm.jump(&loop);
    }

    masm.bind(&returnFalse);
    EmitStoreBoolean(masm, false, output);
    masm.jump(&done);

    masm.bind(&returnTrue);
    EmitStoreBoolean(masm, true, output);
    //fallthrough
    masm.bind(&done);
    return true;
}
3050 :
// Megamorphic property load keyed by a property name stored as a stub
// field: calls GetNativeDataProperty through the ABI, with the result
// written by the callee into a one-Value stack area (pre-filled with
// undefined). |handleMissing| selects the instantiation that tolerates a
// missing property. Jumps to the failure path if the C++ call returns false.
bool
CacheIRCompiler::emitMegamorphicLoadSlotResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    StubFieldOffset name(reader.stubOffset(), StubField::Type::String);
    bool handleMissing = reader.readBool();

    AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
    AutoScratchRegister scratch2(allocator, masm);
    AutoScratchRegister scratch3(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // The object must be Native.
    masm.branchIfNonNativeObj(obj, scratch3, failure->label());

    // Result slot, initialized to undefined; its address goes in scratch3.
    masm.Push(UndefinedValue());
    masm.moveStackPtrTo(scratch3.get());

    // Keep all three scratch registers out of the save/restore set: scratch2
    // holds the return value afterwards, scratch3 points at the result slot.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    volatileRegs.takeUnchecked(scratch3);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    emitLoadStubField(name, scratch2);
    masm.passABIArg(scratch2);
    masm.passABIArg(scratch3);
    if (handleMissing)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataProperty<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataProperty<false>)));
    masm.mov(ReturnReg, scratch2);
    masm.PopRegsInMask(volatileRegs);

    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));

    masm.branchIfFalseBool(scratch2, failure->label());
    // Spectre mitigation: fence before using data produced by the C++ call.
    if (JitOptions.spectreJitToCxxCalls)
        masm.speculationBarrier();

    return true;
}
3103 :
// Megamorphic property store keyed by a property name stored as a stub
// field: calls SetNativeDataProperty through the ABI, passing a pointer to
// the pushed |val|. |needsTypeBarrier| selects the instantiation that
// updates type information. The (possibly updated by the callee — TODO
// confirm) value is reloaded from the stack afterwards. Jumps to the
// failure path if the C++ call returns false.
bool
CacheIRCompiler::emitMegamorphicStoreSlot()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    StubFieldOffset name(reader.stubOffset(), StubField::Type::String);
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    bool needsTypeBarrier = reader.readBool();

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Spill |val| to the stack and pass its address.
    masm.Push(val);
    masm.moveStackPtrTo(val.scratchReg());

    // Keep |scratch1| (holds the return value afterwards), |scratch2| and
    // |val| out of the save/restore set.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    volatileRegs.takeUnchecked(val);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    emitLoadStubField(name, scratch2);
    masm.passABIArg(scratch2);
    masm.passABIArg(val.scratchReg());
    if (needsTypeBarrier)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (SetNativeDataProperty<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (SetNativeDataProperty<false>)));
    masm.mov(ReturnReg, scratch1);
    masm.PopRegsInMask(volatileRegs);

    // Reload the value and pop the spill slot.
    masm.loadValue(Address(masm.getStackPointer(), 0), val);
    masm.adjustStack(sizeof(Value));

    masm.branchIfFalseBool(scratch1, failure->label());
    return true;
}
3148 :
3149 : bool
3150 : CacheIRCompiler::emitGuardGroupHasUnanalyzedNewScript()
3151 : {
3152 : StubFieldOffset group(reader.stubOffset(), StubField::Type::ObjectGroup);
3153 : AutoScratchRegister scratch1(allocator, masm);
3154 : AutoScratchRegister scratch2(allocator, masm);
3155 :
3156 : FailurePath* failure;
3157 : if (!addFailurePath(&failure))
3158 : return false;
3159 :
3160 : emitLoadStubField(group, scratch1);
3161 : masm.guardGroupHasUnanalyzedNewScript(scratch1, scratch2, failure->label());
3162 : return true;
3163 : }
3164 :
3165 : bool
3166 : CacheIRCompiler::emitLoadObject()
3167 : {
3168 : Register reg = allocator.defineRegister(masm, reader.objOperandId());
3169 : StubFieldOffset obj(reader.stubOffset(), StubField::Type::JSObject);
3170 : emitLoadStubField(obj, reg);
3171 : return true;
3172 : }
|