Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "mozilla/DebugOnly.h"
8 : #include "mozilla/Maybe.h"
9 :
10 : #include "jit/BaselineIC.h"
11 : #include "jit/CacheIRCompiler.h"
12 : #include "jit/IonIC.h"
13 : #include "jit/JSJitFrameIter.h"
14 : #include "jit/Linker.h"
15 : #include "jit/SharedICHelpers.h"
16 : #include "jit/VMFunctions.h"
17 : #include "proxy/DeadObjectProxy.h"
18 : #include "proxy/Proxy.h"
19 :
20 : #include "jit/JSJitFrameIter-inl.h"
21 : #include "jit/MacroAssembler-inl.h"
22 : #include "vm/Realm-inl.h"
23 : #include "vm/TypeInference-inl.h"
24 :
25 : using namespace js;
26 : using namespace js::jit;
27 :
28 : using mozilla::DebugOnly;
29 : using mozilla::Maybe;
30 :
31 : namespace js {
32 : namespace jit {
33 :
34 : // IonCacheIRCompiler compiles CacheIR to IonIC native code.
35 1647 : class MOZ_RAII IonCacheIRCompiler : public CacheIRCompiler
36 : {
37 : public:
38 : friend class AutoSaveLiveRegisters;
39 :
40 549 : IonCacheIRCompiler(JSContext* cx, const CacheIRWriter& writer, IonIC* ic, IonScript* ionScript,
41 : IonICStub* stub, const PropertyTypeCheckInfo* typeCheckInfo, uint32_t stubDataOffset)
42 549 : : CacheIRCompiler(cx, writer, stubDataOffset, Mode::Ion, StubFieldPolicy::Constant),
43 : writer_(writer),
44 : ic_(ic),
45 : ionScript_(ionScript),
46 : stub_(stub),
47 : typeCheckInfo_(typeCheckInfo),
48 : #ifdef DEBUG
49 : calledPrepareVMCall_(false),
50 : #endif
51 2745 : savedLiveRegs_(false)
52 : {
53 0 : MOZ_ASSERT(ic_);
54 0 : MOZ_ASSERT(ionScript_);
55 549 : }
56 :
57 : MOZ_MUST_USE bool init();
58 : JitCode* compile();
59 :
60 : private:
61 : const CacheIRWriter& writer_;
62 : IonIC* ic_;
63 : IonScript* ionScript_;
64 :
65 : // The stub we're generating code for.
66 : IonICStub* stub_;
67 :
68 : // Information necessary to generate property type checks. Non-null iff
69 : // this is a SetProp/SetElem stub.
70 : const PropertyTypeCheckInfo* typeCheckInfo_;
71 :
72 : CodeOffsetJump rejoinOffset_;
73 : Vector<CodeOffset, 4, SystemAllocPolicy> nextCodeOffsets_;
74 : Maybe<LiveRegisterSet> liveRegs_;
75 : Maybe<CodeOffset> stubJitCodeOffset_;
76 :
77 : #ifdef DEBUG
78 : bool calledPrepareVMCall_;
79 : #endif
80 : bool savedLiveRegs_;
81 :
82 :
83 : template <typename T>
84 : T rawWordStubField(uint32_t offset) {
85 : static_assert(sizeof(T) == sizeof(uintptr_t), "T must have word size");
86 0 : return (T)readStubWord(offset, StubField::Type::RawWord);
87 : }
88 : template <typename T>
89 : T rawInt64StubField(uint32_t offset) {
90 : static_assert(sizeof(T) == sizeof(int64_t), "T must have int64 size");
91 : return (T)readStubInt64(offset, StubField::Type::RawInt64);
92 : }
93 :
94 0 : uint64_t* expandoGenerationStubFieldPtr(uint32_t offset) {
95 : DebugOnly<uint64_t> generation =
96 0 : readStubInt64(offset, StubField::Type::DOMExpandoGeneration);
97 0 : uint64_t* ptr = reinterpret_cast<uint64_t*>(stub_->stubDataStart() + offset);
98 0 : MOZ_ASSERT(*ptr == generation);
99 0 : return ptr;
100 : }
101 :
102 : void prepareVMCall(MacroAssembler& masm);
103 : MOZ_MUST_USE bool callVM(MacroAssembler& masm, const VMFunction& fun);
104 :
105 : MOZ_MUST_USE bool emitAddAndStoreSlotShared(CacheOp op);
106 :
107 : bool needsPostBarrier() const {
108 138 : return ic_->asSetPropertyIC()->needsPostBarrier();
109 : }
110 :
111 0 : void pushStubCodePointer() {
112 0 : stubJitCodeOffset_.emplace(masm.PushWithPatch(ImmPtr((void*)-1)));
113 84 : }
114 :
115 : #define DEFINE_OP(op) MOZ_MUST_USE bool emit##op();
116 : CACHE_IR_OPS(DEFINE_OP)
117 : #undef DEFINE_OP
118 : };
119 :
120 : // AutoSaveLiveRegisters must be used when we make a call that can GC. The
121 : // constructor ensures all live registers are stored on the stack (where the GC
122 : // expects them) and the destructor restores these registers.
123 : class MOZ_RAII AutoSaveLiveRegisters
124 : {
125 : IonCacheIRCompiler& compiler_;
126 :
127 : AutoSaveLiveRegisters(const AutoSaveLiveRegisters&) = delete;
128 : void operator=(const AutoSaveLiveRegisters&) = delete;
129 :
130 : public:
131 0 : explicit AutoSaveLiveRegisters(IonCacheIRCompiler& compiler)
132 84 : : compiler_(compiler)
133 : {
134 0 : MOZ_ASSERT(compiler_.liveRegs_.isSome());
135 0 : compiler_.allocator.saveIonLiveRegisters(compiler_.masm,
136 0 : compiler_.liveRegs_.ref(),
137 0 : compiler_.ic_->scratchRegisterForEntryJump(),
138 0 : compiler_.ionScript_);
139 0 : compiler_.savedLiveRegs_ = true;
140 0 : }
141 0 : ~AutoSaveLiveRegisters() {
142 0 : MOZ_ASSERT(compiler_.stubJitCodeOffset_.isSome(), "Must have pushed JitCode* pointer");
143 0 : compiler_.allocator.restoreIonLiveRegisters(compiler_.masm, compiler_.liveRegs_.ref());
144 0 : MOZ_ASSERT(compiler_.masm.framePushed() == compiler_.ionScript_->frameSize());
145 84 : }
146 : };
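// A minimal usage sketch (the call-like emitters below follow this shape;
// emitSomeCallOp is hypothetical):
//
//     bool IonCacheIRCompiler::emitSomeCallOp() {
//         AutoSaveLiveRegisters save(*this); // spill live regs to the stack
//         // ... emit the GC-capable call ...
//         return true;                       // regs restored when |save| dies
//     }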
147 :
148 : } // namespace jit
149 : } // namespace js
150 :
151 : #define DEFINE_SHARED_OP(op) \
152 : bool IonCacheIRCompiler::emit##op() { return CacheIRCompiler::emit##op(); }
153 1461 : CACHE_IR_SHARED_OPS(DEFINE_SHARED_OP)
154 : #undef DEFINE_SHARED_OP
155 :
156 : void
157 84 : CacheRegisterAllocator::saveIonLiveRegisters(MacroAssembler& masm, LiveRegisterSet liveRegs,
158 : Register scratch, IonScript* ionScript)
159 : {
160 : // We have to push all registers in liveRegs on the stack. It's possible we
161 : // stored other values in our live registers and stored operands on the
162 : // stack (where our live registers should go), so this requires some careful
163 : // work. Try to keep it simple by taking one small step at a time.
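// As a rough sketch, once the steps below complete the stack looks like
// this (higher addresses first):
//
//     ... Ion frame (ionScript->frameSize() bytes) ...
//     saved live registers (sizeOfLiveRegsInBytes)
//     re-pushed non-input operands (stackPushed_ bytes)  <- stack pointer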
164 :
165 : // Step 1. Discard any dead operands so we can reuse their registers.
166 84 : freeDeadOperandLocations(masm);
167 :
168 : // Step 2. Figure out the size of our live regs.
169 : size_t sizeOfLiveRegsInBytes =
170 0 : liveRegs.gprs().size() * sizeof(intptr_t) +
171 252 : liveRegs.fpus().getPushSizeInBytes();
172 :
173 84 : MOZ_ASSERT(sizeOfLiveRegsInBytes > 0);
174 :
175 : // Step 3. Ensure all non-input operands are on the stack.
176 0 : size_t numInputs = writer_.numInputOperands();
177 0 : for (size_t i = numInputs; i < operandLocations_.length(); i++) {
178 0 : OperandLocation& loc = operandLocations_[i];
179 0 : if (loc.isInRegister())
180 0 : spillOperandToStack(masm, &loc);
181 : }
182 :
183 : // Step 4. Restore the register state, but don't discard the stack as
184 : // non-input operands are stored there.
185 84 : restoreInputState(masm, /* shouldDiscardStack = */ false);
186 :
187 : // We just restored the input state, so no input operands should be stored
188 : // on the stack.
189 : #ifdef DEBUG
190 0 : for (size_t i = 0; i < numInputs; i++) {
191 0 : const OperandLocation& loc = operandLocations_[i];
192 214 : MOZ_ASSERT(!loc.isOnStack());
193 : }
194 : #endif
195 :
196 : // Step 5. At this point our register state is correct. Stack values,
197 : // however, may cover the space where we have to store the live registers.
198 : // Move them out of the way.
199 :
200 : bool hasOperandOnStack = false;
201 0 : for (size_t i = numInputs; i < operandLocations_.length(); i++) {
202 0 : OperandLocation& loc = operandLocations_[i];
203 16 : if (!loc.isOnStack())
204 : continue;
205 :
206 0 : hasOperandOnStack = true;
207 :
208 0 : size_t operandSize = loc.stackSizeInBytes();
209 0 : size_t operandStackPushed = loc.stackPushed();
210 0 : MOZ_ASSERT(operandSize > 0);
211 0 : MOZ_ASSERT(stackPushed_ >= operandStackPushed);
212 0 : MOZ_ASSERT(operandStackPushed >= operandSize);
213 :
214 : // If this operand doesn't cover the live register space, there's
215 : // nothing to do.
216 0 : if (operandStackPushed - operandSize >= sizeOfLiveRegsInBytes) {
217 0 : MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
218 : continue;
219 : }
220 :
221 : // Reserve stack space for the live registers if needed.
222 0 : if (sizeOfLiveRegsInBytes > stackPushed_) {
223 0 : size_t extraBytes = sizeOfLiveRegsInBytes - stackPushed_;
224 0 : MOZ_ASSERT((extraBytes % sizeof(uintptr_t)) == 0);
225 0 : masm.subFromStackPtr(Imm32(extraBytes));
226 0 : stackPushed_ += extraBytes;
227 : }
228 :
229 : // Push the operand below the live register space.
230 0 : if (loc.kind() == OperandLocation::PayloadStack) {
231 0 : masm.push(Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
232 0 : stackPushed_ += operandSize;
233 0 : loc.setPayloadStack(stackPushed_, loc.payloadType());
234 : continue;
235 : }
236 0 : MOZ_ASSERT(loc.kind() == OperandLocation::ValueStack);
237 0 : masm.pushValue(Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
238 0 : stackPushed_ += operandSize;
239 0 : loc.setValueStack(stackPushed_);
240 : }
241 :
242 : // Step 6. If we have any operands on the stack, adjust their stackPushed
243 : // values to not include sizeOfLiveRegsInBytes (this simplifies code down
244 : // the line). Then push/store the live registers.
245 0 : if (hasOperandOnStack) {
246 0 : MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
247 0 : stackPushed_ -= sizeOfLiveRegsInBytes;
248 :
249 0 : for (size_t i = numInputs; i < operandLocations_.length(); i++) {
250 0 : OperandLocation& loc = operandLocations_[i];
251 0 : if (loc.isOnStack())
252 0 : loc.adjustStackPushed(-int32_t(sizeOfLiveRegsInBytes));
253 : }
254 :
255 0 : size_t stackBottom = stackPushed_ + sizeOfLiveRegsInBytes;
256 0 : masm.storeRegsInMask(liveRegs, Address(masm.getStackPointer(), stackBottom), scratch);
257 0 : masm.setFramePushed(masm.framePushed() + sizeOfLiveRegsInBytes);
258 : } else {
259 : // If no operands are on the stack, discard the unused stack space.
260 0 : if (stackPushed_ > 0) {
261 0 : masm.addToStackPtr(Imm32(stackPushed_));
262 1 : stackPushed_ = 0;
263 : }
264 84 : masm.PushRegsInMask(liveRegs);
265 : }
266 0 : freePayloadSlots_.clear();
267 84 : freeValueSlots_.clear();
268 :
269 84 : MOZ_ASSERT(masm.framePushed() == ionScript->frameSize() + sizeOfLiveRegsInBytes);
270 :
271 : // Step 7. All live registers and non-input operands are stored on the stack
272 : // now, so at this point all registers except for the input registers are
273 : // available.
274 0 : availableRegs_.set() = GeneralRegisterSet::Not(inputRegisterSet());
275 84 : availableRegsAfterSpill_.set() = GeneralRegisterSet();
276 :
277 : // Step 8. We restored our input state, so we have to fix up aliased input
278 : // registers again.
279 0 : fixupAliasedInputs(masm);
280 84 : }
281 :
282 : void
283 0 : CacheRegisterAllocator::restoreIonLiveRegisters(MacroAssembler& masm, LiveRegisterSet liveRegs)
284 : {
285 84 : masm.PopRegsInMask(liveRegs);
286 :
287 0 : availableRegs_.set() = GeneralRegisterSet();
288 1 : availableRegsAfterSpill_.set() = GeneralRegisterSet::All();
289 0 : }
290 :
291 : static void*
292 84 : GetReturnAddressToIonCode(JSContext* cx)
293 : {
294 0 : JSJitFrameIter frame(cx->activation()->asJit());
295 84 : MOZ_ASSERT(frame.type() == JitFrame_Exit,
296 : "An exit frame is expected as update functions are called with a VMFunction.");
297 :
298 168 : void* returnAddr = frame.returnAddress();
299 : #ifdef DEBUG
300 0 : ++frame;
301 84 : MOZ_ASSERT(frame.isIonJS());
302 : #endif
303 84 : return returnAddr;
304 : }
305 :
306 : void
307 8 : IonCacheIRCompiler::prepareVMCall(MacroAssembler& masm)
308 : {
309 0 : uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
310 0 : IonICCallFrameLayout::Size());
311 0 : pushStubCodePointer();
312 0 : masm.Push(Imm32(descriptor));
313 8 : masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));
314 :
315 : #ifdef DEBUG
316 8 : calledPrepareVMCall_ = true;
317 : #endif
318 8 : }
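// Sketch of the IonICCallFrameLayout header the pushes above leave on the
// stack (most recently pushed last):
//
//     [ stub JitCode*  ]  from pushStubCodePointer(), patched in compile()
//     [ descriptor     ]  framePushed | JitFrame_IonJS
//     [ return address ]  into the Ion script, via GetReturnAddressToIonCode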
319 :
320 : bool
321 8 : IonCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
322 : {
323 8 : MOZ_ASSERT(calledPrepareVMCall_);
324 :
325 24 : TrampolinePtr code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
326 :
327 16 : uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*);
328 : uint32_t descriptor = MakeFrameDescriptor(frameSize, JitFrame_IonICCall,
329 0 : ExitFrameLayout::Size());
330 0 : masm.Push(Imm32(descriptor));
331 8 : masm.callJit(code);
332 :
333 : // Remove the rest of the frame left on the stack. The return address is
334 : // popped implicitly when returning.
335 8 : int framePop = sizeof(ExitFrameLayout) - sizeof(void*);
336 :
337 : // Pop arguments from framePushed.
338 0 : masm.implicitPop(frameSize + framePop);
339 0 : masm.freeStack(IonICCallFrameLayout::Size());
340 8 : return true;
341 : }
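// The VM-calling emitters below use these two helpers in a fixed pattern;
// a sketch, where SomeFunctionInfo stands in for a concrete VMFunction:
//
//     allocator.discardStack(masm);
//     prepareVMCall(masm);
//     masm.Push(secondArg); // arguments are pushed in reverse order
//     masm.Push(firstArg);
//     if (!callVM(masm, SomeFunctionInfo))
//         return false;
//     masm.storeCallResultValue(output);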
342 :
343 : bool
344 549 : IonCacheIRCompiler::init()
345 : {
346 549 : if (!allocator.init())
347 : return false;
348 :
349 549 : size_t numInputs = writer_.numInputOperands();
350 :
351 549 : AllocatableGeneralRegisterSet available;
352 :
353 549 : switch (ic_->kind()) {
354 : case CacheKind::GetProp:
355 : case CacheKind::GetElem: {
356 0 : IonGetPropertyIC* ic = ic_->asGetPropertyIC();
357 796 : TypedOrValueRegister output = ic->output();
358 :
359 0 : if (output.hasValue())
360 0 : available.add(output.valueReg());
361 0 : else if (!output.typedReg().isFloat())
362 6 : available.add(output.typedReg().gpr());
363 :
364 0 : if (ic->maybeTemp() != InvalidReg)
365 0 : available.add(ic->maybeTemp());
366 :
367 0 : liveRegs_.emplace(ic->liveRegs());
368 398 : outputUnchecked_.emplace(output);
369 :
370 796 : allowDoubleResult_.emplace(ic->allowDoubleResult());
371 :
372 398 : MOZ_ASSERT(numInputs == 1 || numInputs == 2);
373 :
374 0 : allocator.initInputLocation(0, ic->value());
375 0 : if (numInputs > 1)
376 360 : allocator.initInputLocation(1, ic->id());
377 : break;
378 : }
379 : case CacheKind::GetPropSuper:
380 : case CacheKind::GetElemSuper: {
381 0 : IonGetPropSuperIC* ic = ic_->asGetPropSuperIC();
382 4 : TypedOrValueRegister output = ic->output();
383 :
384 4 : available.add(output.valueReg());
385 :
386 0 : liveRegs_.emplace(ic->liveRegs());
387 2 : outputUnchecked_.emplace(output);
388 :
389 2 : allowDoubleResult_.emplace(true);
390 :
391 2 : MOZ_ASSERT(numInputs == 2 || numInputs == 3);
392 :
393 2 : allocator.initInputLocation(0, ic->object(), JSVAL_TYPE_OBJECT);
394 :
395 0 : if (ic->kind() == CacheKind::GetPropSuper) {
396 0 : MOZ_ASSERT(numInputs == 2);
397 4 : allocator.initInputLocation(1, ic->receiver());
398 : } else {
399 0 : MOZ_ASSERT(numInputs == 3);
400 0 : allocator.initInputLocation(1, ic->id());
401 0 : allocator.initInputLocation(2, ic->receiver());
402 : }
403 : break;
404 : }
405 : case CacheKind::SetProp:
406 : case CacheKind::SetElem: {
407 76 : IonSetPropertyIC* ic = ic_->asSetPropertyIC();
408 :
409 76 : available.add(ic->temp());
410 :
411 76 : liveRegs_.emplace(ic->liveRegs());
412 :
413 76 : allocator.initInputLocation(0, ic->object(), JSVAL_TYPE_OBJECT);
414 :
415 0 : if (ic->kind() == CacheKind::SetProp) {
416 0 : MOZ_ASSERT(numInputs == 2);
417 12 : allocator.initInputLocation(1, ic->rhs());
418 : } else {
419 0 : MOZ_ASSERT(numInputs == 3);
420 0 : allocator.initInputLocation(1, ic->id());
421 140 : allocator.initInputLocation(2, ic->rhs());
422 : }
423 : break;
424 : }
425 : case CacheKind::GetName: {
426 0 : IonGetNameIC* ic = ic_->asGetNameIC();
427 0 : ValueOperand output = ic->output();
428 :
429 0 : available.add(output);
430 0 : available.add(ic->temp());
431 :
432 0 : liveRegs_.emplace(ic->liveRegs());
433 0 : outputUnchecked_.emplace(output);
434 :
435 0 : MOZ_ASSERT(numInputs == 1);
436 0 : allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
437 : break;
438 : }
439 : case CacheKind::BindName: {
440 0 : IonBindNameIC* ic = ic_->asBindNameIC();
441 0 : Register output = ic->output();
442 :
443 0 : available.add(output);
444 0 : available.add(ic->temp());
445 :
446 0 : liveRegs_.emplace(ic->liveRegs());
447 0 : outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Object, AnyRegister(output)));
448 :
449 0 : MOZ_ASSERT(numInputs == 1);
450 0 : allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
451 : break;
452 : }
453 : case CacheKind::GetIterator: {
454 0 : IonGetIteratorIC* ic = ic_->asGetIteratorIC();
455 0 : Register output = ic->output();
456 :
457 0 : available.add(output);
458 0 : available.add(ic->temp1());
459 0 : available.add(ic->temp2());
460 :
461 0 : liveRegs_.emplace(ic->liveRegs());
462 0 : outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Object, AnyRegister(output)));
463 :
464 0 : MOZ_ASSERT(numInputs == 1);
465 0 : allocator.initInputLocation(0, ic->value());
466 : break;
467 : }
468 : case CacheKind::In: {
469 0 : IonInIC* ic = ic_->asInIC();
470 66 : Register output = ic->output();
471 :
472 66 : available.add(output);
473 :
474 0 : liveRegs_.emplace(ic->liveRegs());
475 132 : outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));
476 :
477 0 : MOZ_ASSERT(numInputs == 2);
478 0 : allocator.initInputLocation(0, ic->key());
479 0 : allocator.initInputLocation(1, TypedOrValueRegister(MIRType::Object,
480 66 : AnyRegister(ic->object())));
481 : break;
482 : }
483 : case CacheKind::HasOwn: {
484 0 : IonHasOwnIC* ic = ic_->asHasOwnIC();
485 7 : Register output = ic->output();
486 :
487 7 : available.add(output);
488 :
489 0 : liveRegs_.emplace(ic->liveRegs());
490 14 : outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));
491 :
492 0 : MOZ_ASSERT(numInputs == 2);
493 0 : allocator.initInputLocation(0, ic->id());
494 14 : allocator.initInputLocation(1, ic->value());
495 : break;
496 : }
497 : case CacheKind::InstanceOf: {
498 0 : IonInstanceOfIC* ic = ic_->asInstanceOfIC();
499 0 : Register output = ic->output();
500 0 : available.add(output);
501 0 : liveRegs_.emplace(ic->liveRegs());
502 0 : outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));
503 :
504 0 : MOZ_ASSERT(numInputs == 2);
505 0 : allocator.initInputLocation(0, ic->lhs());
506 0 : allocator.initInputLocation(1, TypedOrValueRegister(MIRType::Object,
507 0 : AnyRegister(ic->rhs())));
508 : break;
509 : }
510 : case CacheKind::UnaryArith: {
511 0 : IonUnaryArithIC *ic = ic_->asUnaryArithIC();
512 0 : ValueOperand output = ic->output();
513 :
514 0 : available.add(output);
515 :
516 0 : liveRegs_.emplace(ic->liveRegs());
517 0 : outputUnchecked_.emplace(TypedOrValueRegister(output));
518 :
519 0 : MOZ_ASSERT(numInputs == 1);
520 0 : allocator.initInputLocation(0, ic->input());
521 : break;
522 : }
523 : case CacheKind::Call:
524 : case CacheKind::Compare:
525 : case CacheKind::TypeOf:
526 : case CacheKind::ToBool:
527 : case CacheKind::GetIntrinsic:
528 0 : MOZ_CRASH("Unsupported IC");
529 : }
530 :
531 0 : if (liveRegs_)
532 2196 : liveFloatRegs_ = LiveFloatRegisterSet(liveRegs_->fpus());
533 :
534 0 : allocator.initAvailableRegs(available);
535 0 : allocator.initAvailableRegsAfterSpill();
536 549 : return true;
537 : }
538 :
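// compile() emits the main IC code and its failure paths, then patches
// three kinds of offsets into the finished JitCode: the rejoin jump back
// into the Ion script, the next-stub jumps taken from failure paths, and
// the stub's own JitCode* pushed for VM calls.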
539 : JitCode*
540 549 : IonCacheIRCompiler::compile()
541 : {
542 0 : masm.setFramePushed(ionScript_->frameSize());
543 0 : if (cx_->runtime()->geckoProfiler().enabled())
544 0 : masm.enableProfilingInstrumentation();
545 :
546 549 : allocator.fixupAliasedInputs(masm);
547 :
548 0 : do {
549 6832 : switch (reader.readOp()) {
550 : #define DEFINE_OP(op) \
551 : case CacheOp::op: \
552 : if (!emit##op()) \
553 : return nullptr; \
554 : break;
555 1839 : CACHE_IR_OPS(DEFINE_OP)
556 : #undef DEFINE_OP
557 :
558 : default:
559 0 : MOZ_CRASH("Invalid op");
560 : }
561 :
562 0 : allocator.nextOp();
563 3416 : } while (reader.more());
564 :
565 1098 : MOZ_RELEASE_ASSERT(nextStubField_ == writer_.numStubFields());
566 :
567 549 : masm.assumeUnreachable("Should have returned from IC");
568 :
569 : // Done emitting the main IC code. Now emit the failure paths.
570 0 : for (size_t i = 0; i < failurePaths.length(); i++) {
571 1 : if (!emitFailurePath(i))
572 0 : return nullptr;
573 0 : Register scratch = ic_->scratchRegisterForEntryJump();
574 0 : CodeOffset offset = masm.movWithPatch(ImmWord(-1), scratch);
575 0 : masm.jump(Address(scratch, 0));
576 706 : if (!nextCodeOffsets_.append(offset))
577 : return nullptr;
578 : }
579 :
580 0 : Linker linker(masm);
581 0 : AutoFlushICache afc("getStubCode");
582 0 : Rooted<JitCode*> newStubCode(cx_, linker.newCode(cx_, CodeKind::Ion));
583 0 : if (!newStubCode) {
584 0 : cx_->recoverFromOutOfMemory();
585 0 : return nullptr;
586 : }
587 :
588 0 : rejoinOffset_.fixup(&masm);
589 0 : CodeLocationJump rejoinJump(newStubCode, rejoinOffset_);
590 549 : PatchJump(rejoinJump, ic_->rejoinLabel());
591 :
592 0 : for (CodeOffset offset : nextCodeOffsets_) {
593 0 : Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, offset),
594 0 : ImmPtr(stub_->nextCodeRawPtr()),
595 706 : ImmPtr((void*)-1));
596 : }
597 0 : if (stubJitCodeOffset_) {
598 0 : Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, *stubJitCodeOffset_),
599 0 : ImmPtr(newStubCode.get()),
600 84 : ImmPtr((void*)-1));
601 : }
602 :
603 549 : return newStubCode;
604 : }
605 :
606 : bool
607 588 : IonCacheIRCompiler::emitGuardShape()
608 : {
609 0 : ObjOperandId objId = reader.objOperandId();
610 0 : Register obj = allocator.useRegister(masm, objId);
611 1176 : Shape* shape = shapeStubField(reader.stubOffset());
612 :
613 588 : bool needSpectreMitigations = objectGuardNeedsSpectreMitigations(objId);
614 :
615 0 : Maybe<AutoScratchRegister> maybeScratch;
616 0 : if (needSpectreMitigations)
617 419 : maybeScratch.emplace(allocator, masm);
618 :
619 : FailurePath* failure;
620 588 : if (!addFailurePath(&failure))
621 : return false;
622 :
623 0 : if (needSpectreMitigations) {
624 0 : masm.branchTestObjShape(Assembler::NotEqual, obj, shape, *maybeScratch, obj,
625 419 : failure->label());
626 : } else {
627 0 : masm.branchTestObjShapeNoSpectreMitigations(Assembler::NotEqual, obj, shape,
628 169 : failure->label());
629 : }
630 :
631 : return true;
632 : }
633 :
634 : bool
635 117 : IonCacheIRCompiler::emitGuardGroup()
636 : {
637 0 : ObjOperandId objId = reader.objOperandId();
638 0 : Register obj = allocator.useRegister(masm, objId);
639 234 : ObjectGroup* group = groupStubField(reader.stubOffset());
640 :
641 117 : bool needSpectreMitigations = objectGuardNeedsSpectreMitigations(objId);
642 :
643 0 : Maybe<AutoScratchRegister> maybeScratch;
644 0 : if (needSpectreMitigations)
645 113 : maybeScratch.emplace(allocator, masm);
646 :
647 : FailurePath* failure;
648 117 : if (!addFailurePath(&failure))
649 : return false;
650 :
651 0 : if (needSpectreMitigations) {
652 0 : masm.branchTestObjGroup(Assembler::NotEqual, obj, group, *maybeScratch, obj,
653 113 : failure->label());
654 : } else {
655 0 : masm.branchTestObjGroupNoSpectreMitigations(Assembler::NotEqual, obj, group,
656 4 : failure->label());
657 : }
658 :
659 : return true;
660 : }
661 :
662 : bool
663 0 : IonCacheIRCompiler::emitGuardProto()
664 : {
665 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
666 0 : JSObject* proto = objectStubField(reader.stubOffset());
667 :
668 0 : AutoScratchRegister scratch(allocator, masm);
669 :
670 : FailurePath* failure;
671 0 : if (!addFailurePath(&failure))
672 : return false;
673 :
674 0 : masm.loadObjProto(obj, scratch);
675 0 : masm.branchPtr(Assembler::NotEqual, scratch, ImmGCPtr(proto), failure->label());
676 0 : return true;
677 : }
678 :
679 : bool
680 40 : IonCacheIRCompiler::emitGuardCompartment()
681 : {
682 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
683 0 : JSObject* globalWrapper = objectStubField(reader.stubOffset());
684 0 : JS::Compartment* compartment = compartmentStubField(reader.stubOffset());
685 80 : AutoScratchRegister scratch(allocator, masm);
686 :
687 : FailurePath* failure;
688 40 : if (!addFailurePath(&failure))
689 : return false;
690 :
691 : // Verify that the global wrapper is still valid, as
692 : // it is a prerequisite for doing the compartment check.
693 0 : masm.movePtr(ImmGCPtr(globalWrapper), scratch);
694 0 : Address handlerAddr(scratch, ProxyObject::offsetOfHandler());
695 120 : masm.branchPtr(Assembler::Equal, handlerAddr, ImmPtr(&DeadObjectProxy::singleton), failure->label());
696 :
697 0 : masm.branchTestObjCompartment(Assembler::NotEqual, obj, compartment, scratch,
698 0 : failure->label());
699 40 : return true;
700 : }
701 :
702 : bool
703 0 : IonCacheIRCompiler::emitGuardAnyClass()
704 : {
705 0 : ObjOperandId objId = reader.objOperandId();
706 0 : Register obj = allocator.useRegister(masm, objId);
707 0 : AutoScratchRegister scratch(allocator, masm);
708 :
709 0 : const Class* clasp = classStubField(reader.stubOffset());
710 :
711 : FailurePath* failure;
712 0 : if (!addFailurePath(&failure))
713 : return false;
714 :
715 0 : if (objectGuardNeedsSpectreMitigations(objId)) {
716 0 : masm.branchTestObjClass(Assembler::NotEqual, obj, clasp, scratch, obj, failure->label());
717 : } else {
718 0 : masm.branchTestObjClassNoSpectreMitigations(Assembler::NotEqual, obj, clasp, scratch,
719 0 : failure->label());
720 : }
721 :
722 : return true;
723 : }
724 :
725 : bool
726 40 : IonCacheIRCompiler::emitGuardHasProxyHandler()
727 : {
728 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
729 80 : const void* handler = proxyHandlerStubField(reader.stubOffset());
730 :
731 : FailurePath* failure;
732 40 : if (!addFailurePath(&failure))
733 : return false;
734 :
735 0 : Address handlerAddr(obj, ProxyObject::offsetOfHandler());
736 0 : masm.branchPtr(Assembler::NotEqual, handlerAddr, ImmPtr(handler), failure->label());
737 40 : return true;
738 : }
739 :
740 : bool
741 0 : IonCacheIRCompiler::emitGuardSpecificObject()
742 : {
743 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
744 0 : JSObject* expected = objectStubField(reader.stubOffset());
745 :
746 : FailurePath* failure;
747 0 : if (!addFailurePath(&failure))
748 : return false;
749 :
750 0 : masm.branchPtr(Assembler::NotEqual, obj, ImmGCPtr(expected), failure->label());
751 0 : return true;
752 : }
753 :
754 : bool
755 225 : IonCacheIRCompiler::emitGuardSpecificAtom()
756 : {
757 0 : Register str = allocator.useRegister(masm, reader.stringOperandId());
758 450 : AutoScratchRegister scratch(allocator, masm);
759 :
760 450 : JSAtom* atom = &stringStubField(reader.stubOffset())->asAtom();
761 :
762 : FailurePath* failure;
763 225 : if (!addFailurePath(&failure))
764 : return false;
765 :
766 0 : Label done;
767 450 : masm.branchPtr(Assembler::Equal, str, ImmGCPtr(atom), &done);
768 :
769 : // The pointers are not equal, so if the input string is also an atom it
770 : // must be a different string.
771 0 : masm.branchTest32(Assembler::Zero, Address(str, JSString::offsetOfFlags()),
772 225 : Imm32(JSString::NON_ATOM_BIT), failure->label());
773 :
774 : // Check the length.
775 0 : masm.branch32(Assembler::NotEqual, Address(str, JSString::offsetOfLength()),
776 450 : Imm32(atom->length()), failure->label());
777 :
778 : // We have a non-atomized string with the same length. Call a helper
779 : // function to do the comparison.
780 0 : LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
781 225 : masm.PushRegsInMask(volatileRegs);
782 :
783 0 : masm.setupUnalignedABICall(scratch);
784 0 : masm.movePtr(ImmGCPtr(atom), scratch);
785 0 : masm.passABIArg(scratch);
786 0 : masm.passABIArg(str);
787 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, EqualStringsHelper));
788 450 : masm.mov(ReturnReg, scratch);
789 :
790 0 : LiveRegisterSet ignore;
791 0 : ignore.add(scratch);
792 0 : masm.PopRegsInMaskIgnore(volatileRegs, ignore);
793 675 : masm.branchIfFalseBool(scratch, failure->label());
794 :
795 225 : masm.bind(&done);
796 : return true;
797 : }
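// EqualStringsHelper (see jit/VMFunctions) does the character comparison
// the jitted code above cannot do inline; in essence (a sketch, not the
// verbatim implementation):
//
//     bool EqualStringsHelper(JSString* str1, JSString* str2) {
//         // str1 is the atom; str2 is a non-atom string of equal length.
//         JSLinearString* str2Linear = str2->ensureLinear(nullptr);
//         if (!str2Linear)
//             return false;
//         return EqualChars(&str1->asLinear(), str2Linear);
//     }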
798 :
799 : bool
800 16 : IonCacheIRCompiler::emitGuardSpecificSymbol()
801 : {
802 0 : Register sym = allocator.useRegister(masm, reader.symbolOperandId());
803 32 : JS::Symbol* expected = symbolStubField(reader.stubOffset());
804 :
805 : FailurePath* failure;
806 16 : if (!addFailurePath(&failure))
807 : return false;
808 :
809 0 : masm.branchPtr(Assembler::NotEqual, sym, ImmGCPtr(expected), failure->label());
810 16 : return true;
811 : }
812 :
813 : bool
814 0 : IonCacheIRCompiler::emitGuardXrayExpandoShapeAndDefaultProto()
815 : {
816 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
817 0 : bool hasExpando = reader.readBool();
818 0 : JSObject* shapeWrapper = objectStubField(reader.stubOffset());
819 0 : MOZ_ASSERT(hasExpando == !!shapeWrapper);
820 :
821 0 : AutoScratchRegister scratch(allocator, masm);
822 0 : Maybe<AutoScratchRegister> scratch2, scratch3;
823 0 : if (hasExpando) {
824 0 : scratch2.emplace(allocator, masm);
825 0 : scratch3.emplace(allocator, masm);
826 : }
827 :
828 : FailurePath* failure;
829 0 : if (!addFailurePath(&failure))
830 : return false;
831 :
832 0 : masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), scratch);
833 0 : Address holderAddress(scratch, sizeof(Value) * GetXrayJitInfo()->xrayHolderSlot);
834 0 : Address expandoAddress(scratch, NativeObject::getFixedSlotOffset(GetXrayJitInfo()->holderExpandoSlot));
835 :
836 0 : if (hasExpando) {
837 0 : masm.branchTestObject(Assembler::NotEqual, holderAddress, failure->label());
838 0 : masm.unboxObject(holderAddress, scratch);
839 0 : masm.branchTestObject(Assembler::NotEqual, expandoAddress, failure->label());
840 0 : masm.unboxObject(expandoAddress, scratch);
841 :
842 : // Unwrap the expando before checking its shape.
843 0 : masm.loadPtr(Address(scratch, ProxyObject::offsetOfReservedSlots()), scratch);
844 0 : masm.unboxObject(Address(scratch, detail::ProxyReservedSlots::offsetOfPrivateSlot()), scratch);
845 :
846 0 : masm.movePtr(ImmGCPtr(shapeWrapper), scratch2.ref());
847 0 : LoadShapeWrapperContents(masm, scratch2.ref(), scratch2.ref(), failure->label());
848 0 : masm.branchTestObjShape(Assembler::NotEqual, scratch, *scratch2, *scratch3, scratch,
849 0 : failure->label());
850 :
851 : // The reserved slots on the expando should all be in fixed slots.
852 0 : Address protoAddress(scratch, NativeObject::getFixedSlotOffset(GetXrayJitInfo()->expandoProtoSlot));
853 0 : masm.branchTestUndefined(Assembler::NotEqual, protoAddress, failure->label());
854 : } else {
855 0 : Label done;
856 0 : masm.branchTestObject(Assembler::NotEqual, holderAddress, &done);
857 0 : masm.unboxObject(holderAddress, scratch);
858 0 : masm.branchTestObject(Assembler::Equal, expandoAddress, failure->label());
859 0 : masm.bind(&done);
860 : }
861 :
862 : return true;
863 : }
864 :
865 : bool
866 0 : IonCacheIRCompiler::emitGuardFunctionPrototype()
867 : {
868 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
869 0 : Register prototypeObject = allocator.useRegister(masm, reader.objOperandId());
870 :
871 : // Allocate registers before the failure path to make sure they're registered
872 : // by addFailurePath.
873 0 : AutoScratchRegister scratch1(allocator, masm);
874 0 : AutoScratchRegister scratch2(allocator, masm);
875 :
876 : FailurePath* failure;
877 0 : if (!addFailurePath(&failure))
878 : return false;
879 :
880 : // Guard on the .prototype object.
881 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch1);
882 0 : uintptr_t slot = readStubWord(reader.stubOffset(), StubField::Type::RawWord);
883 0 : masm.move32(Imm32(slot), scratch2);
884 0 : BaseValueIndex prototypeSlot(scratch1, scratch2);
885 0 : masm.branchTestObject(Assembler::NotEqual, prototypeSlot, failure->label());
886 0 : masm.unboxObject(prototypeSlot, scratch1);
887 0 : masm.branchPtr(Assembler::NotEqual,
888 : prototypeObject,
889 0 : scratch1, failure->label());
890 :
891 0 : return true;
892 : }
893 :
894 : bool
895 0 : IonCacheIRCompiler::emitLoadValueResult()
896 : {
897 0 : MOZ_CRASH("Baseline-specific op");
898 : }
899 :
900 :
901 : bool
902 84 : IonCacheIRCompiler::emitLoadFixedSlotResult()
903 : {
904 0 : AutoOutputRegister output(*this);
905 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
906 0 : int32_t offset = int32StubField(reader.stubOffset());
907 0 : masm.loadTypedOrValue(Address(obj, offset), output);
908 84 : return true;
909 : }
910 :
911 : bool
912 89 : IonCacheIRCompiler::emitLoadDynamicSlotResult()
913 : {
914 0 : AutoOutputRegister output(*this);
915 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
916 178 : int32_t offset = int32StubField(reader.stubOffset());
917 :
918 0 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
919 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
920 0 : masm.loadTypedOrValue(Address(scratch, offset), output);
921 178 : return true;
922 : }
923 :
924 : bool
925 12 : IonCacheIRCompiler::emitGuardHasGetterSetter()
926 : {
927 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
928 0 : Shape* shape = shapeStubField(reader.stubOffset());
929 :
930 24 : AutoScratchRegister scratch1(allocator, masm);
931 0 : AutoScratchRegister scratch2(allocator, masm);
932 :
933 : FailurePath* failure;
934 12 : if (!addFailurePath(&failure))
935 : return false;
936 :
937 48 : LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
938 0 : volatileRegs.takeUnchecked(scratch1);
939 0 : volatileRegs.takeUnchecked(scratch2);
940 12 : masm.PushRegsInMask(volatileRegs);
941 :
942 0 : masm.setupUnalignedABICall(scratch1);
943 0 : masm.loadJSContext(scratch1);
944 0 : masm.passABIArg(scratch1);
945 0 : masm.passABIArg(obj);
946 24 : masm.movePtr(ImmGCPtr(shape), scratch2);
947 0 : masm.passABIArg(scratch2);
948 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, ObjectHasGetterSetter));
949 0 : masm.mov(ReturnReg, scratch1);
950 0 : masm.PopRegsInMask(volatileRegs);
951 :
952 0 : masm.branchIfFalseBool(scratch1, failure->label());
953 0 : return true;
954 : }
955 :
956 : bool
957 0 : IonCacheIRCompiler::emitCallScriptedGetterResult()
958 : {
959 0 : AutoSaveLiveRegisters save(*this);
960 72 : AutoOutputRegister output(*this);
961 :
962 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
963 72 : JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
964 0 : AutoScratchRegister scratch(allocator, masm);
965 :
966 36 : allocator.discardStack(masm);
967 :
968 36 : uint32_t framePushedBefore = masm.framePushed();
969 :
970 : // Construct IonICCallFrameLayout.
971 0 : uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
972 0 : IonICCallFrameLayout::Size());
973 36 : pushStubCodePointer();
974 0 : masm.Push(Imm32(descriptor));
975 0 : masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));
976 :
977 : // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
978 : // so we just have to make sure the stack is aligned after we push the
979 : // |this| + argument Values.
980 72 : uint32_t argSize = (target->nargs() + 1) * sizeof(Value);
981 0 : uint32_t padding = ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
982 0 : MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
983 0 : MOZ_ASSERT(padding < JitStackAlignment);
984 0 : masm.reserveStack(padding);
985 :
986 0 : for (size_t i = 0; i < target->nargs(); i++)
987 0 : masm.Push(UndefinedValue());
988 0 : masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
989 :
990 0 : masm.movePtr(ImmGCPtr(target), scratch);
991 :
992 0 : descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonICCall,
993 0 : JitFrameLayout::Size());
994 0 : masm.Push(Imm32(0)); // argc
995 36 : masm.Push(scratch);
996 0 : masm.Push(Imm32(descriptor));
997 :
998 : // Check stack alignment. Add sizeof(uintptr_t) for the return address.
999 36 : MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);
1000 :
1001 : // The getter currently has a jit entry or a non-lazy script. We only
1002 : // relazify during a shrinking GC, and when that happens we also purge
1003 : // IC stubs.
1004 0 : MOZ_ASSERT(target->hasJitEntry());
1005 36 : masm.loadJitCodeRaw(scratch, scratch);
1006 0 : masm.callJit(scratch);
1007 0 : masm.storeCallResultValue(output);
1008 :
1009 36 : masm.freeStack(masm.framePushed() - framePushedBefore);
1010 0 : return true;
1011 : }
1012 :
1013 : bool
1014 1 : IonCacheIRCompiler::emitCallNativeGetterResult()
1015 : {
1016 0 : AutoSaveLiveRegisters save(*this);
1017 0 : AutoOutputRegister output(*this);
1018 :
1019 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1020 2 : JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
1021 1 : MOZ_ASSERT(target->isNative());
1022 :
1023 2 : AutoScratchRegister argJSContext(allocator, masm);
1024 0 : AutoScratchRegister argUintN(allocator, masm);
1025 0 : AutoScratchRegister argVp(allocator, masm);
1026 0 : AutoScratchRegister scratch(allocator, masm);
1027 :
1028 0 : allocator.discardStack(masm);
1029 :
1030 : // Native functions have the signature:
1031 : // bool (*)(JSContext*, unsigned, Value* vp)
1032 : // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
1033 : // are the function arguments.
1034 :
1035 : // Construct vp array:
1036 : // Push object value for |this|
1037 0 : masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
1038 : // Push callee/outparam.
1039 0 : masm.Push(ObjectValue(*target));
1040 :
1041 : // Preload arguments into registers.
1042 1 : masm.loadJSContext(argJSContext);
1043 0 : masm.move32(Imm32(0), argUintN);
1044 2 : masm.moveStackPtrTo(argVp.get());
1045 :
1046 : // Push marking data for later use.
1047 1 : masm.Push(argUintN);
1048 0 : pushStubCodePointer();
1049 :
1050 0 : if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
1051 : return false;
1052 2 : masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLNative);
1053 :
1054 : // Construct and execute call.
1055 1 : masm.setupUnalignedABICall(scratch);
1056 2 : masm.passABIArg(argJSContext);
1057 2 : masm.passABIArg(argUintN);
1058 0 : masm.passABIArg(argVp);
1059 2 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, target->native()), MoveOp::GENERAL,
1060 0 : CheckUnsafeCallWithABI::DontCheckHasExitFrame);
1061 :
1062 : // Test for failure.
1063 0 : masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
1064 :
1065 : // Load the outparam vp[0] into output register(s).
1066 2 : Address outparam(masm.getStackPointer(), IonOOLNativeExitFrameLayout::offsetOfResult());
1067 0 : masm.loadValue(outparam, output.valueReg());
1068 :
1069 0 : if (JitOptions.spectreJitToCxxCalls)
1070 0 : masm.speculationBarrier();
1071 :
1072 0 : masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
1073 1 : return true;
1074 : }
1075 :
1076 : bool
1077 39 : IonCacheIRCompiler::emitCallProxyGetResult()
1078 : {
1079 78 : AutoSaveLiveRegisters save(*this);
1080 78 : AutoOutputRegister output(*this);
1081 :
1082 117 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1083 0 : jsid id = idStubField(reader.stubOffset());
1084 :
1085 : // ProxyGetProperty(JSContext* cx, HandleObject proxy, HandleId id,
1086 : // MutableHandleValue vp)
1087 0 : AutoScratchRegisterMaybeOutput argJSContext(allocator, masm, output);
1088 0 : AutoScratchRegister argProxy(allocator, masm);
1089 78 : AutoScratchRegister argId(allocator, masm);
1090 78 : AutoScratchRegister argVp(allocator, masm);
1091 0 : AutoScratchRegister scratch(allocator, masm);
1092 :
1093 39 : allocator.discardStack(masm);
1094 :
1095 : // Push stubCode for marking.
1096 0 : pushStubCodePointer();
1097 :
1098 : // Push args on stack first so we can take pointers to make handles.
1099 0 : masm.Push(UndefinedValue());
1100 0 : masm.moveStackPtrTo(argVp.get());
1101 :
1102 0 : masm.Push(id, scratch);
1103 0 : masm.moveStackPtrTo(argId.get());
1104 :
1105 : // Push the proxy. Also used as receiver.
1106 39 : masm.Push(obj);
1107 0 : masm.moveStackPtrTo(argProxy.get());
1108 :
1109 39 : masm.loadJSContext(argJSContext);
1110 :
1111 0 : if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
1112 : return false;
1113 0 : masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLProxy);
1114 :
1115 : // Make the call.
1116 0 : masm.setupUnalignedABICall(scratch);
1117 0 : masm.passABIArg(argJSContext);
1118 78 : masm.passABIArg(argProxy);
1119 78 : masm.passABIArg(argId);
1120 78 : masm.passABIArg(argVp);
1121 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, ProxyGetProperty), MoveOp::GENERAL,
1122 39 : CheckUnsafeCallWithABI::DontCheckHasExitFrame);
1123 :
1124 : // Test for failure.
1125 117 : masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
1126 :
1127 : // Load the outparam vp[0] into output register(s).
1128 78 : Address outparam(masm.getStackPointer(), IonOOLProxyExitFrameLayout::offsetOfResult());
1129 39 : masm.loadValue(outparam, output.valueReg());
1130 :
1131 : // Spectre mitigation in case of speculative execution within C++ code.
1132 0 : if (JitOptions.spectreJitToCxxCalls)
1133 0 : masm.speculationBarrier();
1134 :
1135 : // masm.leaveExitFrame & pop locals
1136 39 : masm.adjustStack(IonOOLProxyExitFrameLayout::Size());
1137 0 : return true;
1138 : }
1139 :
1140 : typedef bool (*ProxyGetPropertyByValueFn)(JSContext*, HandleObject, HandleValue, MutableHandleValue);
1141 1 : static const VMFunction ProxyGetPropertyByValueInfo =
1142 3 : FunctionInfo<ProxyGetPropertyByValueFn>(ProxyGetPropertyByValue, "ProxyGetPropertyByValue");
1143 :
1144 : bool
1145 1 : IonCacheIRCompiler::emitCallProxyGetByValueResult()
1146 : {
1147 0 : AutoSaveLiveRegisters save(*this);
1148 2 : AutoOutputRegister output(*this);
1149 :
1150 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1151 0 : ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
1152 :
1153 0 : allocator.discardStack(masm);
1154 :
1155 0 : prepareVMCall(masm);
1156 :
1157 0 : masm.Push(idVal);
1158 1 : masm.Push(obj);
1159 :
1160 0 : if (!callVM(masm, ProxyGetPropertyByValueInfo))
1161 : return false;
1162 :
1163 0 : masm.storeCallResultValue(output);
1164 0 : return true;
1165 : }
1166 :
1167 : typedef bool (*ProxyHasFn)(JSContext*, HandleObject, HandleValue, MutableHandleValue);
1168 2 : static const VMFunction ProxyHasInfo = FunctionInfo<ProxyHasFn>(ProxyHas, "ProxyHas");
1169 :
1170 : typedef bool (*ProxyHasOwnFn)(JSContext*, HandleObject, HandleValue, MutableHandleValue);
1171 2 : static const VMFunction ProxyHasOwnInfo = FunctionInfo<ProxyHasOwnFn>(ProxyHasOwn, "ProxyHasOwn");
1172 :
1173 : bool
1174 0 : IonCacheIRCompiler::emitCallProxyHasPropResult()
1175 : {
1176 0 : AutoSaveLiveRegisters save(*this);
1177 0 : AutoOutputRegister output(*this);
1178 :
1179 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1180 0 : ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
1181 0 : bool hasOwn = reader.readBool();
1182 :
1183 0 : allocator.discardStack(masm);
1184 :
1185 0 : prepareVMCall(masm);
1186 :
1187 0 : masm.Push(idVal);
1188 0 : masm.Push(obj);
1189 :
1190 0 : if (hasOwn) {
1191 0 : if (!callVM(masm, ProxyHasOwnInfo))
1192 : return false;
1193 : } else {
1194 0 : if (!callVM(masm, ProxyHasInfo))
1195 : return false;
1196 : }
1197 :
1198 0 : masm.storeCallResultValue(output);
1199 0 : return true;
1200 : }
1201 :
1202 : bool
1203 42 : IonCacheIRCompiler::emitLoadUnboxedPropertyResult()
1204 : {
1205 84 : AutoOutputRegister output(*this);
1206 126 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1207 :
1208 0 : JSValueType fieldType = reader.valueType();
1209 84 : int32_t fieldOffset = int32StubField(reader.stubOffset());
1210 126 : masm.loadUnboxedProperty(Address(obj, fieldOffset), fieldType, output);
1211 42 : return true;
1212 : }
1213 :
1214 : bool
1215 0 : IonCacheIRCompiler::emitGuardFrameHasNoArgumentsObject()
1216 : {
1217 0 : MOZ_CRASH("Baseline-specific op");
1218 : }
1219 :
1220 : bool
1221 0 : IonCacheIRCompiler::emitLoadFrameCalleeResult()
1222 : {
1223 0 : MOZ_CRASH("Baseline-specific op");
1224 : }
1225 :
1226 : bool
1227 0 : IonCacheIRCompiler::emitLoadFrameNumActualArgsResult()
1228 : {
1229 0 : MOZ_CRASH("Baseline-specific op");
1230 : }
1231 :
1232 : bool
1233 0 : IonCacheIRCompiler::emitLoadFrameArgumentResult()
1234 : {
1235 0 : MOZ_CRASH("Baseline-specific op");
1236 : }
1237 :
1238 : bool
1239 0 : IonCacheIRCompiler::emitLoadEnvironmentFixedSlotResult()
1240 : {
1241 0 : AutoOutputRegister output(*this);
1242 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1243 0 : int32_t offset = int32StubField(reader.stubOffset());
1244 :
1245 : FailurePath* failure;
1246 0 : if (!addFailurePath(&failure))
1247 : return false;
1248 :
1249 : // Check for uninitialized lexicals.
1250 0 : Address slot(obj, offset);
1251 0 : masm.branchTestMagic(Assembler::Equal, slot, failure->label());
1252 :
1253 : // Load the value.
1254 0 : masm.loadTypedOrValue(slot, output);
1255 0 : return true;
1256 : }
1257 :
1258 : bool
1259 0 : IonCacheIRCompiler::emitLoadEnvironmentDynamicSlotResult()
1260 : {
1261 0 : AutoOutputRegister output(*this);
1262 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1263 0 : int32_t offset = int32StubField(reader.stubOffset());
1264 0 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
1265 :
1266 : FailurePath* failure;
1267 0 : if (!addFailurePath(&failure))
1268 : return false;
1269 :
1270 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
1271 :
1272 : // Check for uninitialized lexicals.
1273 0 : Address slot(scratch, offset);
1274 0 : masm.branchTestMagic(Assembler::Equal, slot, failure->label());
1275 :
1276 : // Load the value.
1277 0 : masm.loadTypedOrValue(slot, output);
1278 0 : return true;
1279 : }
1280 :
1281 :
1282 : bool
1283 0 : IonCacheIRCompiler::emitLoadStringResult()
1284 : {
1285 0 : MOZ_CRASH("not used in ion");
1286 : }
1287 :
1288 : typedef bool (*StringSplitHelperFn)(JSContext*, HandleString, HandleString, HandleObjectGroup,
1289 : uint32_t limit, MutableHandleValue);
1290 0 : static const VMFunction StringSplitHelperInfo =
1291 3 : FunctionInfo<StringSplitHelperFn>(StringSplitHelper, "StringSplitHelper");
1292 :
1293 : bool
1294 0 : IonCacheIRCompiler::emitCallStringSplitResult()
1295 : {
1296 0 : AutoSaveLiveRegisters save(*this);
1297 0 : AutoOutputRegister output(*this);
1298 :
1299 0 : Register str = allocator.useRegister(masm, reader.stringOperandId());
1300 0 : Register sep = allocator.useRegister(masm, reader.stringOperandId());
1301 0 : ObjectGroup* group = groupStubField(reader.stubOffset());
1302 :
1303 0 : allocator.discardStack(masm);
1304 :
1305 0 : prepareVMCall(masm);
1306 :
1307 0 : masm.Push(str);
1308 0 : masm.Push(sep);
1309 0 : masm.Push(ImmGCPtr(group));
1310 0 : masm.Push(Imm32(INT32_MAX));
1311 :
1312 0 : if (!callVM(masm, StringSplitHelperInfo))
1313 : return false;
1314 :
1315 0 : masm.storeCallResultValue(output);
1316 0 : return true;
1317 : }
1318 :
1319 : bool
1320 0 : IonCacheIRCompiler::emitCompareStringResult()
1321 : {
1322 0 : AutoOutputRegister output(*this);
1323 :
1324 0 : Register left = allocator.useRegister(masm, reader.stringOperandId());
1325 0 : Register right = allocator.useRegister(masm, reader.stringOperandId());
1326 0 : JSOp op = reader.jsop();
1327 :
1328 0 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
1329 :
1330 : FailurePath* failure;
1331 0 : if (!addFailurePath(&failure))
1332 : return false;
1333 :
1334 0 : allocator.discardStack(masm);
1335 :
1336 0 : Label slow, done;
1337 0 : masm.compareStrings(op, left, right, scratch, &slow);
1338 :
1339 0 : masm.jump(&done);
1340 0 : masm.bind(&slow);
1341 :
1342 0 : prepareVMCall(masm);
1343 0 : masm.Push(right);
1344 0 : masm.Push(left);
1345 :
1346 0 : if (!callVM(masm, (op == JSOP_EQ || op == JSOP_STRICTEQ) ?
1347 : StringsEqualInfo :
1348 : StringsNotEqualInfo))
1349 : {
1350 : return false;
1351 : }
1352 :
1353 0 : masm.storeCallBoolResult(scratch);
1354 0 : masm.bind(&done);
1355 :
1356 0 : masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, output.valueReg());
1357 0 : return true;
1358 : }
1359 :
1360 : static bool
1361 23 : GroupHasPropertyTypes(ObjectGroup* group, jsid* id, Value* v)
1362 : {
1363 46 : AutoUnsafeCallWithABI unsafe;
1364 0 : if (group->unknownPropertiesDontCheckGeneration())
1365 : return true;
1366 0 : HeapTypeSet* propTypes = group->maybeGetPropertyDontCheckGeneration(*id);
1367 0 : if (!propTypes)
1368 : return true;
1369 0 : if (!propTypes->nonConstantProperty())
1370 : return false;
1371 23 : return propTypes->hasType(TypeSet::GetValueType(*v));
1372 : }
1373 :
1374 : static void
1375 69 : EmitCheckPropertyTypes(MacroAssembler& masm, const PropertyTypeCheckInfo* typeCheckInfo,
1376 : Register obj, const ConstantOrRegister& val,
1377 : const LiveRegisterSet& liveRegs, Label* failures)
1378 : {
1379 : // Emit code to check |val| is part of the property's HeapTypeSet.
1380 :
1381 69 : if (!typeCheckInfo->isSet())
1382 19 : return;
1383 :
1384 0 : ObjectGroup* group = typeCheckInfo->group();
1385 0 : AutoSweepObjectGroup sweep(group);
1386 63 : if (group->unknownProperties(sweep))
1387 0 : return;
1388 :
1389 0 : jsid id = typeCheckInfo->id();
1390 0 : HeapTypeSet* propTypes = group->maybeGetProperty(sweep, id);
1391 102 : if (propTypes && propTypes->unknown())
1392 : return;
1393 :
1394 : // Use the object register as scratch, as we don't need it here.
1395 50 : masm.Push(obj);
1396 50 : Register scratch1 = obj;
1397 :
1398 : // We may also need a scratch register for guardTypeSet. Additionally,
1399 : // spectreRegToZero is the register that may be zeroed on speculatively
1400 : // executed paths.
1401 50 : Register objScratch = InvalidReg;
1402 50 : Register spectreRegToZero = InvalidReg;
1403 100 : if (propTypes && !propTypes->unknownObject() && propTypes->getObjectCount() > 0) {
1404 0 : AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
1405 0 : if (!val.constant()) {
1406 0 : TypedOrValueRegister valReg = val.reg();
1407 0 : if (valReg.hasValue()) {
1408 0 : regs.take(valReg.valueReg());
1409 0 : spectreRegToZero = valReg.valueReg().payloadOrValueReg();
1410 0 : } else if (!valReg.typedReg().isFloat()) {
1411 0 : regs.take(valReg.typedReg().gpr());
1412 0 : spectreRegToZero = valReg.typedReg().gpr();
1413 : }
1414 : }
1415 0 : regs.take(scratch1);
1416 25 : objScratch = regs.takeAny();
1417 25 : masm.Push(objScratch);
1418 : }
1419 :
1420 0 : bool checkTypeSet = true;
1421 100 : Label failedFastPath;
1422 :
1423 0 : if (propTypes && !propTypes->nonConstantProperty())
1424 0 : masm.jump(&failedFastPath);
1425 :
1426 0 : if (val.constant()) {
1427 : // If the input is a constant, don't bother with the inline check if the barrier will always fail.
1428 0 : if (!propTypes || !propTypes->hasType(TypeSet::GetValueType(val.value())))
1429 0 : masm.jump(&failedFastPath);
1430 : checkTypeSet = false;
1431 : } else {
1432 : // We can do the same trick as above for specialized registers that hold
1433 : // a primitive type.
1434 50 : TypedOrValueRegister reg = val.reg();
1435 50 : if (reg.hasTyped() && reg.type() != MIRType::Object) {
1436 0 : JSValueType valType = ValueTypeFromMIRType(reg.type());
1437 0 : if (!propTypes || !propTypes->hasType(TypeSet::PrimitiveType(valType)))
1438 0 : masm.jump(&failedFastPath);
1439 : checkTypeSet = false;
1440 : }
1441 : }
1442 :
1443 100 : Label done;
1444 50 : if (checkTypeSet) {
1445 50 : TypedOrValueRegister valReg = val.reg();
1446 0 : if (propTypes) {
1447 : // guardTypeSet can read from type sets without triggering read barriers.
1448 0 : TypeSet::readBarrier(propTypes);
1449 : masm.guardTypeSet(valReg, propTypes, BarrierKind::TypeSet, scratch1, objScratch,
1450 50 : spectreRegToZero, &failedFastPath);
1451 0 : masm.jump(&done);
1452 : } else {
1453 0 : masm.jump(&failedFastPath);
1454 : }
1455 : }
1456 :
1457 50 : if (failedFastPath.used()) {
1458 : // The inline type check failed. Do a callWithABI to check the current
1459 : // TypeSet in case the type was added after we generated this stub.
1460 0 : masm.bind(&failedFastPath);
1461 :
1462 200 : AllocatableRegisterSet regs(GeneralRegisterSet::Volatile(), liveRegs.fpus());
1463 0 : LiveRegisterSet save(regs.asLiveSet());
1464 50 : masm.PushRegsInMask(save);
1465 :
1466 0 : regs.takeUnchecked(scratch1);
1467 :
1468 : // Push |val| first to make sure everything is fine if |val| aliases
1469 : // scratch2.
1470 50 : Register scratch2 = regs.takeAnyGeneral();
1471 50 : masm.Push(val);
1472 50 : masm.moveStackPtrTo(scratch2);
1473 :
1474 0 : Register scratch3 = regs.takeAnyGeneral();
1475 0 : masm.Push(id, scratch3);
1476 50 : masm.moveStackPtrTo(scratch3);
1477 :
1478 0 : masm.setupUnalignedABICall(scratch1);
1479 0 : masm.movePtr(ImmGCPtr(group), scratch1);
1480 50 : masm.passABIArg(scratch1);
1481 0 : masm.passABIArg(scratch3);
1482 0 : masm.passABIArg(scratch2);
1483 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, GroupHasPropertyTypes));
1484 0 : masm.mov(ReturnReg, scratch1);
1485 :
1486 0 : masm.adjustStack(sizeof(Value) + sizeof(jsid));
1487 :
1488 50 : LiveRegisterSet ignore;
1489 0 : ignore.add(scratch1);
1490 100 : masm.PopRegsInMaskIgnore(save, ignore);
1491 :
1492 0 : masm.branchIfTrueBool(scratch1, &done);
1493 0 : if (objScratch != InvalidReg)
1494 25 : masm.pop(objScratch);
1495 0 : masm.pop(obj);
1496 0 : masm.jump(failures);
1497 : }
1498 :
1499 0 : masm.bind(&done);
1500 50 : if (objScratch != InvalidReg)
1501 25 : masm.Pop(objScratch);
1502 0 : masm.Pop(obj);
1503 : }
1504 :
1505 : bool
1506 0 : IonCacheIRCompiler::emitStoreFixedSlot()
1507 : {
1508 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1509 0 : int32_t offset = int32StubField(reader.stubOffset());
1510 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1511 :
1512 0 : Maybe<AutoScratchRegister> scratch;
1513 0 : if (needsPostBarrier())
1514 0 : scratch.emplace(allocator, masm);
1515 :
1516 0 : if (typeCheckInfo_->isSet()) {
1517 : FailurePath* failure;
1518 0 : if (!addFailurePath(&failure))
1519 0 : return false;
1520 :
1521 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1522 : }
1523 :
1524 0 : Address slot(obj, offset);
1525 0 : EmitPreBarrier(masm, slot, MIRType::Value);
1526 0 : masm.storeConstantOrRegister(val, slot);
1527 0 : if (needsPostBarrier())
1528 0 : emitPostBarrierSlot(obj, val, scratch.ref());
1529 : return true;
1530 : }
1531 :
1532 : bool
1533 0 : IonCacheIRCompiler::emitStoreDynamicSlot()
1534 : {
1535 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1536 0 : int32_t offset = int32StubField(reader.stubOffset());
1537 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1538 0 : AutoScratchRegister scratch(allocator, masm);
1539 :
1540 0 : if (typeCheckInfo_->isSet()) {
1541 : FailurePath* failure;
1542 0 : if (!addFailurePath(&failure))
1543 0 : return false;
1544 :
1545 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1546 : }
1547 :
1548 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
1549 0 : Address slot(scratch, offset);
1550 0 : EmitPreBarrier(masm, slot, MIRType::Value);
1551 0 : masm.storeConstantOrRegister(val, slot);
1552 0 : if (needsPostBarrier())
1553 0 : emitPostBarrierSlot(obj, val, scratch);
1554 : return true;
1555 : }
1556 :
1557 : bool
1558 57 : IonCacheIRCompiler::emitAddAndStoreSlotShared(CacheOp op)
1559 : {
1560 171 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1561 0 : int32_t offset = int32StubField(reader.stubOffset());
1562 114 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1563 :
1564 0 : AutoScratchRegister scratch1(allocator, masm);
1565 :
1566 114 : Maybe<AutoScratchRegister> scratch2;
1567 0 : if (op == CacheOp::AllocateAndStoreDynamicSlot)
1568 20 : scratch2.emplace(allocator, masm);
1569 :
1570 0 : bool changeGroup = reader.readBool();
1571 0 : ObjectGroup* newGroup = groupStubField(reader.stubOffset());
1572 114 : Shape* newShape = shapeStubField(reader.stubOffset());
1573 :
1574 : FailurePath* failure;
1575 0 : if (!addFailurePath(&failure))
1576 : return false;
1577 :
1578 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1579 :
1580 57 : if (op == CacheOp::AllocateAndStoreDynamicSlot) {
1581 : // We have to (re)allocate dynamic slots. Do this first, as it's the
1582 : // only fallible operation here. Note that growSlotsDontReportOOM is
1583 : // fallible but does not GC.
1584 40 : int32_t numNewSlots = int32StubField(reader.stubOffset());
1585 20 : MOZ_ASSERT(numNewSlots > 0);
1586 :
1587 0 : LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
1588 0 : masm.PushRegsInMask(save);
1589 :
1590 0 : masm.setupUnalignedABICall(scratch1);
1591 0 : masm.loadJSContext(scratch1);
1592 40 : masm.passABIArg(scratch1);
1593 0 : masm.passABIArg(obj);
1594 0 : masm.move32(Imm32(numNewSlots), scratch2.ref());
1595 0 : masm.passABIArg(scratch2.ref());
1596 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NativeObject::growSlotsDontReportOOM));
1597 0 : masm.mov(ReturnReg, scratch1);
1598 :
1599 0 : LiveRegisterSet ignore;
1600 0 : ignore.add(scratch1);
1601 40 : masm.PopRegsInMaskIgnore(save, ignore);
1602 :
1603 0 : masm.branchIfFalseBool(scratch1, failure->label());
1604 : }
1605 :
1606 0 : if (changeGroup) {
1607 : // Change the object's group from a partially to a fully initialized
1608 : // group, per the acquired-properties analysis. Only do so if the old
1609 : // group still has a newScript. This only applies to PlainObjects.
1610 4 : Label noGroupChange;
1611 2 : masm.branchIfObjGroupHasNoAddendum(obj, scratch1, &noGroupChange);
1612 :
1613 : // Update the object's group.
1614 : masm.storeObjGroup(newGroup, obj, [](MacroAssembler& masm, const Address& addr) {
1615 2 : EmitPreBarrier(masm, addr, MIRType::ObjectGroup);
1616 2 : });
1617 :
1618 0 : masm.bind(&noGroupChange);
1619 : }
1620 :
1621 : // Update the object's shape.
1622 : masm.storeObjShape(newShape, obj, [](MacroAssembler& masm, const Address& addr) {
1623 57 : EmitPreBarrier(masm, addr, MIRType::Shape);
1624 57 : });
1625 :
1626 : // Perform the store. No pre-barrier required since this is a new
1627 : // initialization.
1628 57 : if (op == CacheOp::AddAndStoreFixedSlot) {
1629 6 : Address slot(obj, offset);
1630 6 : masm.storeConstantOrRegister(val, slot);
1631 : } else {
1632 0 : MOZ_ASSERT(op == CacheOp::AddAndStoreDynamicSlot ||
1633 : op == CacheOp::AllocateAndStoreDynamicSlot);
1634 102 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch1);
1635 0 : Address slot(scratch1, offset);
1636 51 : masm.storeConstantOrRegister(val, slot);
1637 : }
1638 :
1639 0 : if (needsPostBarrier())
1640 36 : emitPostBarrierSlot(obj, val, scratch1);
1641 :
1642 : return true;
1643 : }
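// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] emitAddAndStoreSlotShared
// is the compiled fast path for defining a brand-new data property. Roughly
// the same sequence in plain C++, with hypothetical names (growSlots stands
// in for NativeObject::growSlotsDontReportOOM):

#include <cstdint>
#include <cstdlib>

struct MiniGroup;
struct MiniShape;

struct PlainMiniObject {
    MiniGroup* group;
    MiniShape* shape;
    uint64_t* slots;     // dynamic slot storage; may need to grow
    uint64_t fixed[4];   // fixed slot storage
};

static bool growSlots(PlainMiniObject* obj, int32_t numNewSlots) {
    void* p = realloc(obj->slots, size_t(numNewSlots) * sizeof(uint64_t));
    if (!p)
        return false;
    obj->slots = static_cast<uint64_t*>(p);
    return true;
}

static bool AddAndStoreSlot(PlainMiniObject* obj, bool allocate,
                            int32_t numNewSlots, bool changeGroup,
                            MiniGroup* newGroup, MiniShape* newShape,
                            size_t slotIndex, uint64_t val, bool fixedSlot)
{
    // The fallible (re)allocation comes first, before any state is mutated,
    // so a failure can safely fall back to the IC's failure path.
    if (allocate && !growSlots(obj, numNewSlots))
        return false;
    if (changeGroup)
        obj->group = newGroup;      // acquired-properties transition
    obj->shape = newShape;          // publish the new property
    if (fixedSlot)
        obj->fixed[slotIndex] = val;
    else
        obj->slots[slotIndex] = val;
    return true;                    // no pre-barrier: the slot is fresh
}
// ---------------------------------------------------------------------------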
1644 :
1645 : bool
1646 0 : IonCacheIRCompiler::emitAddAndStoreFixedSlot()
1647 : {
1648 6 : return emitAddAndStoreSlotShared(CacheOp::AddAndStoreFixedSlot);
1649 : }
1650 :
1651 : bool
1652 0 : IonCacheIRCompiler::emitAddAndStoreDynamicSlot()
1653 : {
1654 31 : return emitAddAndStoreSlotShared(CacheOp::AddAndStoreDynamicSlot);
1655 : }
1656 :
1657 : bool
1658 0 : IonCacheIRCompiler::emitAllocateAndStoreDynamicSlot()
1659 : {
1660 20 : return emitAddAndStoreSlotShared(CacheOp::AllocateAndStoreDynamicSlot);
1661 : }
1662 :
1663 : bool
1664 0 : IonCacheIRCompiler::emitStoreUnboxedProperty()
1665 : {
1666 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1667 0 : JSValueType fieldType = reader.valueType();
1668 0 : int32_t offset = int32StubField(reader.stubOffset());
1669 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1670 :
1671 0 : Maybe<AutoScratchRegister> scratch;
1672 0 : if (needsPostBarrier() && UnboxedTypeNeedsPostBarrier(fieldType))
1673 0 : scratch.emplace(allocator, masm);
1674 :
1675 0 : if (fieldType == JSVAL_TYPE_OBJECT && typeCheckInfo_->isSet()) {
1676 : FailurePath* failure;
1677 0 : if (!addFailurePath(&failure))
1678 0 : return false;
1679 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1680 : }
1681 :
1682 : // Note that the storeUnboxedProperty call here is infallible, as the
1683 : // IR emitter is responsible for guarding on |val|'s type.
1684 0 : Address fieldAddr(obj, offset);
1685 0 : EmitICUnboxedPreBarrier(masm, fieldAddr, fieldType);
1686 0 : masm.storeUnboxedProperty(fieldAddr, fieldType, val, /* failure = */ nullptr);
1687 0 : if (needsPostBarrier() && UnboxedTypeNeedsPostBarrier(fieldType))
1688 0 : emitPostBarrierSlot(obj, val, scratch.ref());
1689 : return true;
1690 : }
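// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The needsPostBarrier()
// / UnboxedTypeNeedsPostBarrier checks above implement the generational-GC
// write barrier: after a nursery pointer is written into a tenured object,
// that edge must be recorded so the next minor GC can trace it. Only fields
// that can hold object or Value pointers need this. A minimal model, with
// hypothetical names:

#include <vector>

struct Cell { bool inNursery; };

struct MiniStoreBuffer {
    std::vector<Cell**> edges;  // remembered set scanned by minor GC

    void postBarrier(Cell* owner, Cell** edge) {
        // Record only tenured -> nursery edges; everything else is already
        // reachable by the minor GC or irrelevant to it.
        if (!owner->inNursery && *edge && (*edge)->inNursery)
            edges.push_back(edge);
    }
};

// In the code above, stores of types that can never reference the nursery
// (e.g. the TYPE_STRING case in the next emitter) skip the barrier entirely.
// ---------------------------------------------------------------------------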
1691 :
1692 : bool
1693 0 : IonCacheIRCompiler::emitStoreTypedObjectReferenceProperty()
1694 : {
1695 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1696 0 : int32_t offset = int32StubField(reader.stubOffset());
1697 0 : TypedThingLayout layout = reader.typedThingLayout();
1698 0 : ReferenceTypeDescr::Type type = reader.referenceTypeDescrType();
1699 :
1700 0 : ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
1701 :
1702 0 : AutoScratchRegister scratch1(allocator, masm);
1703 0 : AutoScratchRegister scratch2(allocator, masm);
1704 :
1705 : // We don't need to check property types if the property is always a
1706 : // string.
1707 0 : if (type != ReferenceTypeDescr::TYPE_STRING) {
1708 : FailurePath* failure;
1709 0 : if (!addFailurePath(&failure))
1710 0 : return false;
1711 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, TypedOrValueRegister(val),
1712 0 : *liveRegs_, failure->label());
1713 : }
1714 :
1715 : // Compute the address being written to.
1716 0 : LoadTypedThingData(masm, layout, obj, scratch1);
1717 0 : Address dest(scratch1, offset);
1718 :
1719 0 : emitStoreTypedObjectReferenceProp(val, type, dest, scratch2);
1720 :
1721 0 : if (needsPostBarrier() && type != ReferenceTypeDescr::TYPE_STRING)
1722 0 : emitPostBarrierSlot(obj, val, scratch1);
1723 : return true;
1724 : }
1725 :
1726 : bool
1727 0 : IonCacheIRCompiler::emitStoreTypedObjectScalarProperty()
1728 : {
1729 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1730 0 : int32_t offset = int32StubField(reader.stubOffset());
1731 0 : TypedThingLayout layout = reader.typedThingLayout();
1732 0 : Scalar::Type type = reader.scalarType();
1733 0 : ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
1734 0 : AutoScratchRegister scratch1(allocator, masm);
1735 0 : AutoScratchRegister scratch2(allocator, masm);
1736 :
1737 : FailurePath* failure;
1738 0 : if (!addFailurePath(&failure))
1739 : return false;
1740 :
1741 : // Compute the address being written to.
1742 0 : LoadTypedThingData(masm, layout, obj, scratch1);
1743 0 : Address dest(scratch1, offset);
1744 :
1745 0 : StoreToTypedArray(cx_, masm, type, val, dest, scratch2, failure->label());
1746 0 : return true;
1747 : }
1748 :
1749 : static void
1750 12 : EmitStoreDenseElement(MacroAssembler& masm, const ConstantOrRegister& value,
1751 : Register elements, BaseObjectElementIndex target)
1752 : {
1753 : // If the ObjectElements::CONVERT_DOUBLE_ELEMENTS flag is set, int32 values
1754 : // have to be converted to double first. If the value is not int32, it can
1755 : // always be stored directly.
1756 :
1757 24 : Address elementsFlags(elements, ObjectElements::offsetOfFlags());
1758 12 : if (value.constant()) {
1759 0 : Value v = value.value();
1760 0 : Label done;
1761 0 : if (v.isInt32()) {
1762 0 : Label dontConvert;
1763 0 : masm.branchTest32(Assembler::Zero, elementsFlags,
1764 : Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
1765 0 : &dontConvert);
1766 0 : masm.storeValue(DoubleValue(v.toInt32()), target);
1767 0 : masm.jump(&done);
1768 0 : masm.bind(&dontConvert);
1769 : }
1770 0 : masm.storeValue(v, target);
1771 0 : masm.bind(&done);
1772 : return;
1773 : }
1774 :
1775 12 : TypedOrValueRegister reg = value.reg();
1776 12 : if (reg.hasTyped() && reg.type() != MIRType::Int32) {
1777 12 : masm.storeTypedOrValue(reg, target);
1778 0 : return;
1779 : }
1780 :
1781 0 : Label convert, storeValue, done;
1782 0 : masm.branchTest32(Assembler::NonZero, elementsFlags,
1783 : Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
1784 0 : &convert);
1785 0 : masm.bind(&storeValue);
1786 0 : masm.storeTypedOrValue(reg, target);
1787 0 : masm.jump(&done);
1788 :
1789 0 : masm.bind(&convert);
1790 0 : if (reg.hasValue()) {
1791 0 : masm.branchTestInt32(Assembler::NotEqual, reg.valueReg(), &storeValue);
1792 0 : masm.int32ValueToDouble(reg.valueReg(), ScratchDoubleReg);
1793 : masm.storeDouble(ScratchDoubleReg, target);
1794 : } else {
1795 0 : MOZ_ASSERT(reg.type() == MIRType::Int32);
1796 0 : masm.convertInt32ToDouble(reg.typedReg().gpr(), ScratchDoubleReg);
1797 : masm.storeDouble(ScratchDoubleReg, target);
1798 : }
1799 :
1800 0 : masm.bind(&done);
1801 : }
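// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] EmitStoreDenseElement
// encodes the CONVERT_DOUBLE_ELEMENTS policy: when the elements header
// carries that flag, int32 values are widened to double before being stored
// so the array stays uniformly double-typed. The decision table as
// stand-alone C++ (hypothetical names):

enum class StoreForm { AsIs, AsDouble };

inline StoreForm ClassifyDenseStore(bool convertDoubleElements,
                                    bool valueIsInt32)
{
    // Non-int32 values are always stored unchanged; int32 values get
    // converted only when the elements are flagged for doubles.
    return (convertDoubleElements && valueIsInt32) ? StoreForm::AsDouble
                                                   : StoreForm::AsIs;
}

// This is why both the constant path and the register path above test the
// flags word first, and only then pick between a plain store and a double
// store.
// ---------------------------------------------------------------------------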
1802 :
1803 : static void
1804 12 : EmitAssertNoCopyOnWriteElements(MacroAssembler& masm, Register elementsReg)
1805 : {
1806 : #ifdef DEBUG
1807 : // IonBuilder::initOrSetElemTryCache ensures we have no copy-on-write
1808 : // elements. Assert this in debug builds.
1809 24 : Address elementsFlags(elementsReg, ObjectElements::offsetOfFlags());
1810 24 : Label ok;
1811 24 : masm.branchTest32(Assembler::Zero, elementsFlags,
1812 : Imm32(ObjectElements::COPY_ON_WRITE),
1813 0 : &ok);
1814 0 : masm.assumeUnreachable("Unexpected copy-on-write elements in Ion IC!");
1815 12 : masm.bind(&ok);
1816 : #endif
1817 0 : }
1818 :
1819 : bool
1820 0 : IonCacheIRCompiler::emitStoreDenseElement()
1821 : {
1822 36 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1823 0 : Register index = allocator.useRegister(masm, reader.int32OperandId());
1824 24 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1825 :
1826 0 : AutoScratchRegister scratch1(allocator, masm);
1827 0 : AutoScratchRegister scratch2(allocator, masm);
1828 :
1829 : FailurePath* failure;
1830 0 : if (!addFailurePath(&failure))
1831 : return false;
1832 :
1833 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1834 :
1835 : // Load obj->elements in scratch1.
1836 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch1);
1837 :
1838 12 : EmitAssertNoCopyOnWriteElements(masm, scratch1);
1839 :
1840 : // Bounds check.
1841 0 : Address initLength(scratch1, ObjectElements::offsetOfInitializedLength());
1842 24 : masm.spectreBoundsCheck32(index, initLength, scratch2, failure->label());
1843 :
1844 : // Hole check.
1845 0 : BaseObjectElementIndex element(scratch1, index);
1846 36 : masm.branchTestMagic(Assembler::Equal, element, failure->label());
1847 :
1848 : // Check for frozen elements. We have to check this in the stub because
1849 : // we also attach it to non-extensible objects, and those can become
1850 : // frozen later without triggering a Shape change.
1851 24 : Address flags(scratch1, ObjectElements::offsetOfFlags());
1852 36 : masm.branchTest32(Assembler::NonZero, flags, Imm32(ObjectElements::FROZEN), failure->label());
1853 :
1854 0 : EmitPreBarrier(masm, element, MIRType::Value);
1855 0 : EmitStoreDenseElement(masm, val, scratch1, element);
1856 24 : if (needsPostBarrier())
1857 0 : emitPostBarrierElement(obj, val, scratch1, index);
1858 : return true;
1859 : }
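// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The emitter above
// performs three guards before writing a dense element. The same checks in
// plain C++ (hypothetical names; the real bounds check is the Spectre-safe
// spectreBoundsCheck32):

#include <cstdint>

struct MiniElementsHeader {
    uint32_t flags;              // FROZEN, COPY_ON_WRITE, ...
    uint32_t initializedLength;
};

constexpr uint32_t kFrozenFlag = 1u << 0;  // illustrative bit position

inline bool CanStoreDense(const MiniElementsHeader& hdr, uint32_t index,
                          bool elementIsHole)
{
    if (index >= hdr.initializedLength) return false;  // bounds check
    if (elementIsHole)                  return false;  // holes need the VM
    if (hdr.flags & kFrozenFlag)        return false;  // frozen check
    return true;
}

// The frozen test must live in the stub rather than being checked once at
// attach time: freezing an object does not change its Shape, so no shape
// guard would catch it.
// ---------------------------------------------------------------------------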
1860 :
1861 : bool
1862 0 : IonCacheIRCompiler::emitStoreDenseElementHole()
1863 : {
1864 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1865 0 : Register index = allocator.useRegister(masm, reader.int32OperandId());
1866 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1867 :
1868 : // The handleAdd boolean is only relevant for Baseline. Ion ICs can
1869 : // always handle adds, as we don't have to set any flags on the fallback
1870 : // stub to track them.
1871 0 : reader.readBool();
1872 :
1873 0 : AutoScratchRegister scratch1(allocator, masm);
1874 0 : AutoScratchRegister scratch2(allocator, masm);
1875 :
1876 : FailurePath* failure;
1877 0 : if (!addFailurePath(&failure))
1878 : return false;
1879 :
1880 0 : EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
1881 :
1882 : // Load obj->elements in scratch1.
1883 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch1);
1884 :
1885 0 : EmitAssertNoCopyOnWriteElements(masm, scratch1);
1886 :
1887 0 : Address initLength(scratch1, ObjectElements::offsetOfInitializedLength());
1888 0 : BaseObjectElementIndex element(scratch1, index);
1889 :
1890 0 : Label inBounds, outOfBounds;
1891 0 : Register spectreTemp = scratch2;
1892 0 : masm.spectreBoundsCheck32(index, initLength, spectreTemp, &outOfBounds);
1893 0 : masm.jump(&inBounds);
1894 :
1895 0 : masm.bind(&outOfBounds);
1896 0 : masm.branch32(Assembler::NotEqual, initLength, index, failure->label());
1897 :
1898 : // If index < capacity, we can add a dense element inline. If not, we
1899 : // need to allocate more elements.
1900 0 : Label capacityOk, allocElement;
1901 0 : Address capacity(scratch1, ObjectElements::offsetOfCapacity());
1902 0 : masm.spectreBoundsCheck32(index, capacity, spectreTemp, &allocElement);
1903 0 : masm.jump(&capacityOk);
1904 :
1905 : // Check for non-writable array length. We only have to do this if
1906 : // index >= capacity.
1907 0 : masm.bind(&allocElement);
1908 0 : Address elementsFlags(scratch1, ObjectElements::offsetOfFlags());
1909 0 : masm.branchTest32(Assembler::NonZero, elementsFlags,
1910 : Imm32(ObjectElements::NONWRITABLE_ARRAY_LENGTH),
1911 0 : failure->label());
1912 :
1913 0 : LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
1914 0 : save.takeUnchecked(scratch1);
1915 0 : masm.PushRegsInMask(save);
1916 :
1917 0 : masm.setupUnalignedABICall(scratch1);
1918 0 : masm.loadJSContext(scratch1);
1919 0 : masm.passABIArg(scratch1);
1920 0 : masm.passABIArg(obj);
1921 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NativeObject::addDenseElementDontReportOOM));
1922 0 : masm.mov(ReturnReg, scratch1);
1923 :
1924 0 : masm.PopRegsInMask(save);
1925 0 : masm.branchIfFalseBool(scratch1, failure->label());
1926 :
1927 : // Load the reallocated elements pointer.
1928 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch1);
1929 :
1930 0 : masm.bind(&capacityOk);
1931 :
1932 : // Increment initLength.
1933 0 : masm.add32(Imm32(1), initLength);
1934 :
1935 : // If length is now <= index, increment length too.
1936 0 : Label skipIncrementLength;
1937 0 : Address length(scratch1, ObjectElements::offsetOfLength());
1938 0 : masm.branch32(Assembler::Above, length, index, &skipIncrementLength);
1939 0 : masm.add32(Imm32(1), length);
1940 0 : masm.bind(&skipIncrementLength);
1941 :
1942 : // Skip EmitPreBarrier as the memory is uninitialized.
1943 0 : Label doStore;
1944 0 : masm.jump(&doStore);
1945 :
1946 0 : masm.bind(&inBounds);
1947 :
1948 0 : EmitPreBarrier(masm, element, MIRType::Value);
1949 :
1950 0 : masm.bind(&doStore);
1951 0 : EmitStoreDenseElement(masm, val, scratch1, element);
1952 0 : if (needsPostBarrier())
1953 0 : emitPostBarrierElement(obj, val, scratch1, index);
1954 : return true;
1955 : }
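// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The hole/add path
// above only handles a store at exactly index == initializedLength; any
// other out-of-bounds index bails to the VM. Its append logic, modelled in
// plain C++ with hypothetical names (GrowElements stands in for the ABI call
// to NativeObject::addDenseElementDontReportOOM):

#include <cstdint>
#include <cstdlib>

struct MiniDense {
    uint32_t initLength;
    uint32_t length;
    uint32_t capacity;
    uint64_t* elements;
};

static bool GrowElements(MiniDense& a) {
    uint32_t newCap = a.capacity ? a.capacity * 2 : 8;
    void* p = realloc(a.elements, newCap * sizeof(uint64_t));
    if (!p)
        return false;
    a.elements = static_cast<uint64_t*>(p);
    a.capacity = newCap;
    return true;
}

static bool AppendDense(MiniDense& a, uint32_t index, uint64_t value,
                        bool nonWritableLength)
{
    if (index != a.initLength)
        return false;                       // a real out-of-bounds store
    if (index >= a.capacity) {
        if (nonWritableLength || !GrowElements(a))
            return false;                   // fall back to the VM call
    }
    a.initLength++;
    if (a.length <= index)
        a.length = index + 1;               // keep length >= initLength
    a.elements[index] = value;              // fresh memory: no pre-barrier
    return true;
}
// ---------------------------------------------------------------------------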
1956 :
1957 : bool
1958 0 : IonCacheIRCompiler::emitArrayPush()
1959 : {
1960 0 : MOZ_ASSERT_UNREACHABLE("emitArrayPush not supported for IonCaches.");
1961 : return false;
1962 : }
1963 :
1964 : bool
1965 0 : IonCacheIRCompiler::emitStoreTypedElement()
1966 : {
1967 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1968 0 : Register index = allocator.useRegister(masm, reader.int32OperandId());
1969 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
1970 :
1971 0 : TypedThingLayout layout = reader.typedThingLayout();
1972 0 : Scalar::Type arrayType = reader.scalarType();
1973 0 : bool handleOOB = reader.readBool();
1974 :
1975 0 : AutoScratchRegister scratch1(allocator, masm);
1976 0 : AutoScratchRegister scratch2(allocator, masm);
1977 :
1978 : FailurePath* failure;
1979 0 : if (!addFailurePath(&failure))
1980 : return false;
1981 :
1982 : // Bounds check.
1983 0 : Label done;
1984 0 : LoadTypedThingLength(masm, layout, obj, scratch1);
1985 0 : masm.spectreBoundsCheck32(index, scratch1, scratch2, handleOOB ? &done : failure->label());
1986 :
1987 : // Load the elements vector.
1988 0 : LoadTypedThingData(masm, layout, obj, scratch1);
1989 :
1990 0 : BaseIndex dest(scratch1, index, ScaleFromElemWidth(Scalar::byteSize(arrayType)));
1991 :
1992 0 : FloatRegister maybeTempDouble = ic_->asSetPropertyIC()->maybeTempDouble();
1993 0 : FloatRegister maybeTempFloat32 = ic_->asSetPropertyIC()->maybeTempFloat32();
1994 0 : MOZ_ASSERT(maybeTempDouble != InvalidFloatReg);
1995 : MOZ_ASSERT_IF(jit::hasUnaliasedDouble(), maybeTempFloat32 != InvalidFloatReg);
1996 :
1997 0 : if (arrayType == Scalar::Float32) {
1998 0 : FloatRegister tempFloat = hasUnaliasedDouble() ? maybeTempFloat32 : maybeTempDouble;
1999 0 : if (!masm.convertConstantOrRegisterToFloat(cx_, val, tempFloat, failure->label()))
2000 0 : return false;
2001 0 : masm.storeToTypedFloatArray(arrayType, tempFloat, dest);
2002 0 : } else if (arrayType == Scalar::Float64) {
2003 0 : if (!masm.convertConstantOrRegisterToDouble(cx_, val, maybeTempDouble, failure->label()))
2004 : return false;
2005 0 : masm.storeToTypedFloatArray(arrayType, maybeTempDouble, dest);
2006 : } else {
2007 0 : Register valueToStore = scratch2;
2008 0 : if (arrayType == Scalar::Uint8Clamped) {
2009 0 : if (!masm.clampConstantOrRegisterToUint8(cx_, val, maybeTempDouble, valueToStore,
2010 : failure->label()))
2011 : {
2012 0 : return false;
2013 : }
2014 : } else {
2015 0 : if (!masm.truncateConstantOrRegisterToInt32(cx_, val, maybeTempDouble, valueToStore,
2016 : failure->label()))
2017 : {
2018 : return false;
2019 : }
2020 : }
2021 0 : masm.storeToTypedIntArray(arrayType, valueToStore, dest);
2022 : }
2023 :
2024 0 : masm.bind(&done);
2025 0 : return true;
2026 : }
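// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The typed-element
// store above converts the incoming value per the array's element type:
// float conversion for Float32/Float64, clamping for Uint8Clamped, and
// modulo-2^32 truncation for the other integer types. The two scalar
// conversions as stand-alone C++ (hypothetical names):

#include <cmath>
#include <cstdint>

// Uint8Clamped: round to nearest, ties to even, clamped to [0, 255];
// NaN and negative values become 0.
inline uint8_t ClampToUint8(double d) {
    if (!(d > 0))  return 0;
    if (d >= 255)  return 255;
    uint8_t n = static_cast<uint8_t>(d);
    double rem = d - n;
    if (rem < 0.5) return n;
    if (rem > 0.5) return static_cast<uint8_t>(n + 1);
    return (n % 2) ? static_cast<uint8_t>(n + 1) : n;  // tie: go to even
}

// ECMAScript ToInt32: truncate toward zero, then wrap modulo 2^32.
inline int32_t TruncateToInt32(double d) {
    if (!std::isfinite(d))
        return 0;
    double m = std::fmod(std::trunc(d), 4294967296.0);
    if (m < 0)
        m += 4294967296.0;
    return static_cast<int32_t>(static_cast<uint32_t>(m));
}
// ---------------------------------------------------------------------------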
2027 :
2028 : bool
2029 0 : IonCacheIRCompiler::emitCallNativeSetter()
2030 : {
2031 0 : AutoSaveLiveRegisters save(*this);
2032 :
2033 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2034 0 : JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
2035 0 : MOZ_ASSERT(target->isNative());
2036 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2037 :
2038 0 : AutoScratchRegister argJSContext(allocator, masm);
2039 0 : AutoScratchRegister argVp(allocator, masm);
2040 0 : AutoScratchRegister argUintN(allocator, masm);
2041 0 : AutoScratchRegister scratch(allocator, masm);
2042 :
2043 0 : allocator.discardStack(masm);
2044 :
2045 : // Set up the call:
2046 : // bool (*)(JSContext* cx, unsigned argc, Value* vp)
2047 : // vp[0] is callee/outparam
2048 : // vp[1] is |this|
2049 : // vp[2] is the value
2050 :
2051 : // Build vp and move the base into argVp.
2052 0 : masm.Push(val);
2053 0 : masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
2054 0 : masm.Push(ObjectValue(*target));
2055 0 : masm.moveStackPtrTo(argVp.get());
2056 :
2057 : // Preload other regs.
2058 0 : masm.loadJSContext(argJSContext);
2059 0 : masm.move32(Imm32(1), argUintN);
2060 :
2061 : // Push marking data for later use.
2062 0 : masm.Push(argUintN);
2063 0 : pushStubCodePointer();
2064 :
2065 0 : if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
2066 : return false;
2067 0 : masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLNative);
2068 :
2069 : // Make the call.
2070 0 : masm.setupUnalignedABICall(scratch);
2071 0 : masm.passABIArg(argJSContext);
2072 0 : masm.passABIArg(argUintN);
2073 0 : masm.passABIArg(argVp);
2074 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, target->native()), MoveOp::GENERAL,
2075 0 : CheckUnsafeCallWithABI::DontCheckHasExitFrame);
2076 :
2077 : // Test for failure.
2078 0 : masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
2079 :
2080 0 : masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
2081 0 : return true;
2082 : }
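// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The emitter above
// hand-rolls the JSNative calling convention: argc plus a vp array where
// vp[0] is the callee (and doubles as the return-value outparam), vp[1] is
// |this|, and vp[2..] are the arguments. A C++ model with hypothetical
// stand-in types:

#include <cstdint>

using MiniValue = uint64_t;  // stand-in for JS::Value
using MiniNative = bool (*)(void* cx, unsigned argc, MiniValue* vp);

inline bool CallNativeSetter(void* cx, MiniNative native, MiniValue callee,
                             MiniValue thisv, MiniValue arg)
{
    MiniValue vp[3] = { callee, thisv, arg };
    return native(cx, /* argc = */ 1, vp);
}

// The masm code builds this array on the stack (pushed in reverse, so vp[0]
// ends up at the lowest address) and passes its base address in argVp.
// ---------------------------------------------------------------------------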
2083 :
2084 : bool
2085 0 : IonCacheIRCompiler::emitCallScriptedSetter()
2086 : {
2087 0 : AutoSaveLiveRegisters save(*this);
2088 :
2089 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2090 0 : JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
2091 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2092 :
2093 0 : AutoScratchRegister scratch(allocator, masm);
2094 :
2095 0 : allocator.discardStack(masm);
2096 :
2097 0 : uint32_t framePushedBefore = masm.framePushed();
2098 :
2099 : // Construct IonICCallFrameLayout.
2100 0 : uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
2101 0 : IonICCallFrameLayout::Size());
2102 0 : pushStubCodePointer();
2103 0 : masm.Push(Imm32(descriptor));
2104 0 : masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));
2105 :
2106 : // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
2107 : // so we just have to make sure the stack is aligned after we push the
2108 : // |this| + argument Values.
2109 0 : size_t numArgs = Max<size_t>(1, target->nargs());
2110 0 : uint32_t argSize = (numArgs + 1) * sizeof(Value);
2111 0 : uint32_t padding = ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
2112 0 : MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
2113 0 : MOZ_ASSERT(padding < JitStackAlignment);
2114 0 : masm.reserveStack(padding);
2115 :
2116 0 : for (size_t i = 1; i < target->nargs(); i++)
2117 0 : masm.Push(UndefinedValue());
2118 0 : masm.Push(val);
2119 0 : masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
2120 :
2121 0 : masm.movePtr(ImmGCPtr(target), scratch);
2122 :
2123 0 : descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonICCall,
2124 0 : JitFrameLayout::Size());
2125 0 : masm.Push(Imm32(1)); // argc
2126 0 : masm.Push(scratch);
2127 0 : masm.Push(Imm32(descriptor));
2128 :
2129 : // Check stack alignment. Add sizeof(uintptr_t) for the return address.
2130 0 : MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);
2131 :
2132 : // The setter currently has a jit entry or a non-lazy script. We only
2133 : // relazify during a shrinking GC, and when that happens we also purge
2134 : // IC stubs.
2135 0 : MOZ_ASSERT(target->hasJitEntry());
2136 0 : masm.loadJitCodeRaw(scratch, scratch);
2137 0 : masm.callJit(scratch);
2138 :
2139 0 : masm.freeStack(masm.framePushed() - framePushedBefore);
2140 0 : return true;
2141 : }
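// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The padding computed
// above makes the frame a multiple of JitStackAlignment once |this| and the
// argument Values are pushed. The arithmetic, assuming ComputeByteAlignment
// has the usual "bytes needed to reach the next multiple" contract:

#include <cstdint>

inline uint32_t PaddingFor(uint32_t bytesPushed, uint32_t alignment) {
    return (alignment - (bytesPushed % alignment)) % alignment;
}

// Worked example: framePushed = 40 bytes, |this| + 1 argument = 2 Values =
// 16 bytes, JitStackAlignment = 16 => PaddingFor(40 + 16, 16) == 8, so 8
// bytes of padding are reserved before the Values are pushed.
// ---------------------------------------------------------------------------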
2142 :
2143 : typedef bool (*SetArrayLengthFn)(JSContext*, HandleObject, HandleValue, bool);
2144 1 : static const VMFunction SetArrayLengthInfo =
2145 3 : FunctionInfo<SetArrayLengthFn>(SetArrayLength, "SetArrayLength");
2146 :
2147 : bool
2148 0 : IonCacheIRCompiler::emitCallSetArrayLength()
2149 : {
2150 0 : AutoSaveLiveRegisters save(*this);
2151 :
2152 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2153 0 : bool strict = reader.readBool();
2154 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2155 :
2156 0 : allocator.discardStack(masm);
2157 0 : prepareVMCall(masm);
2158 :
2159 0 : masm.Push(Imm32(strict));
2160 0 : masm.Push(val);
2161 0 : masm.Push(obj);
2162 :
2163 0 : return callVM(masm, SetArrayLengthInfo);
2164 : }
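// ---------------------------------------------------------------------------
// [Editorial note -- not part of the original file.] In these callVM-style
// stubs, arguments are pushed in reverse declaration order: the last C++
// parameter of the VMFunction signature goes first. For
// SetArrayLengthFn(JSContext*, HandleObject, HandleValue, bool) the pushes
// above produce (the JSContext* is supplied by the VM wrapper itself):
//
//   higher addresses
//   [ strict ]   <- pushed first
//   [ val    ]
//   [ obj    ]   <- stack pointer after the pushes
//   lower addresses
//
// The same pattern repeats in the proxy and megamorphic emitters below.
// ---------------------------------------------------------------------------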
2165 :
2166 : typedef bool (*ProxySetPropertyFn)(JSContext*, HandleObject, HandleId, HandleValue, bool);
2167 1 : static const VMFunction ProxySetPropertyInfo =
2168 3 : FunctionInfo<ProxySetPropertyFn>(ProxySetProperty, "ProxySetProperty");
2169 :
2170 : bool
2171 0 : IonCacheIRCompiler::emitCallProxySet()
2172 : {
2173 0 : AutoSaveLiveRegisters save(*this);
2174 :
2175 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2176 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2177 0 : jsid id = idStubField(reader.stubOffset());
2178 0 : bool strict = reader.readBool();
2179 :
2180 0 : AutoScratchRegister scratch(allocator, masm);
2181 :
2182 0 : allocator.discardStack(masm);
2183 0 : prepareVMCall(masm);
2184 :
2185 0 : masm.Push(Imm32(strict));
2186 0 : masm.Push(val);
2187 0 : masm.Push(id, scratch);
2188 0 : masm.Push(obj);
2189 :
2190 0 : return callVM(masm, ProxySetPropertyInfo);
2191 : }
2192 :
2193 : typedef bool (*ProxySetPropertyByValueFn)(JSContext*, HandleObject, HandleValue, HandleValue, bool);
2194 1 : static const VMFunction ProxySetPropertyByValueInfo =
2195 3 : FunctionInfo<ProxySetPropertyByValueFn>(ProxySetPropertyByValue, "ProxySetPropertyByValue");
2196 :
2197 : bool
2198 0 : IonCacheIRCompiler::emitCallProxySetByValue()
2199 : {
2200 0 : AutoSaveLiveRegisters save(*this);
2201 :
2202 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2203 0 : ConstantOrRegister idVal = allocator.useConstantOrRegister(masm, reader.valOperandId());
2204 0 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2205 0 : bool strict = reader.readBool();
2206 :
2207 0 : allocator.discardStack(masm);
2208 0 : prepareVMCall(masm);
2209 :
2210 0 : masm.Push(Imm32(strict));
2211 0 : masm.Push(val);
2212 0 : masm.Push(idVal);
2213 0 : masm.Push(obj);
2214 :
2215 0 : return callVM(masm, ProxySetPropertyByValueInfo);
2216 : }
2217 :
2218 : bool
2219 7 : IonCacheIRCompiler::emitMegamorphicSetElement()
2220 : {
2221 14 : AutoSaveLiveRegisters save(*this);
2222 :
2223 21 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2224 0 : ConstantOrRegister idVal = allocator.useConstantOrRegister(masm, reader.valOperandId());
2225 14 : ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
2226 0 : bool strict = reader.readBool();
2227 :
2228 0 : allocator.discardStack(masm);
2229 0 : prepareVMCall(masm);
2230 :
2231 0 : masm.Push(Imm32(strict));
2232 0 : masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
2233 7 : masm.Push(val);
2234 0 : masm.Push(idVal);
2235 0 : masm.Push(obj);
2236 :
2237 0 : return callVM(masm, SetObjectElementInfo);
2238 : }
2239 :
2240 : bool
2241 0 : IonCacheIRCompiler::emitLoadTypedObjectResult()
2242 : {
2243 0 : AutoOutputRegister output(*this);
2244 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2245 0 : AutoScratchRegister scratch1(allocator, masm);
2246 0 : AutoScratchRegister scratch2(allocator, masm);
2247 :
2248 0 : TypedThingLayout layout = reader.typedThingLayout();
2249 0 : uint32_t typeDescr = reader.typeDescrKey();
2250 0 : uint32_t fieldOffset = int32StubField(reader.stubOffset());
2251 :
2252 : // Get the object's data pointer.
2253 0 : LoadTypedThingData(masm, layout, obj, scratch1);
2254 :
2255 0 : Address fieldAddr(scratch1, fieldOffset);
2256 0 : emitLoadTypedObjectResultShared(fieldAddr, scratch2, typeDescr, output);
2257 0 : return true;
2258 : }
2259 :
2260 : bool
2261 0 : IonCacheIRCompiler::emitTypeMonitorResult()
2262 : {
2263 321 : return emitReturnFromIC();
2264 : }
2265 :
2266 : bool
2267 549 : IonCacheIRCompiler::emitReturnFromIC()
2268 : {
2269 549 : if (!savedLiveRegs_)
2270 0 : allocator.restoreInputState(masm);
2271 :
2272 0 : RepatchLabel rejoin;
2273 0 : rejoinOffset_ = masm.jumpWithPatch(&rejoin);
2274 549 : masm.bind(&rejoin);
2275 0 : return true;
2276 : }
2277 :
2278 : bool
2279 0 : IonCacheIRCompiler::emitLoadStackValue()
2280 : {
2281 0 : MOZ_ASSERT_UNREACHABLE("emitLoadStackValue not supported for IonCaches.");
2282 : return false;
2283 : }
2284 :
2285 : bool
2286 0 : IonCacheIRCompiler::emitGuardAndGetIterator()
2287 : {
2288 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2289 :
2290 0 : AutoScratchRegister scratch1(allocator, masm);
2291 0 : AutoScratchRegister scratch2(allocator, masm);
2292 0 : AutoScratchRegister niScratch(allocator, masm);
2293 :
2294 : PropertyIteratorObject* iterobj =
2295 0 : &objectStubField(reader.stubOffset())->as<PropertyIteratorObject>();
2296 0 : NativeIterator** enumerators = rawWordStubField<NativeIterator**>(reader.stubOffset());
2297 :
2298 0 : Register output = allocator.defineRegister(masm, reader.objOperandId());
2299 :
2300 : FailurePath* failure;
2301 0 : if (!addFailurePath(&failure))
2302 : return false;
2303 :
2304 : // Load our PropertyIteratorObject* and its NativeIterator.
2305 0 : masm.movePtr(ImmGCPtr(iterobj), output);
2306 0 : masm.loadObjPrivate(output, JSObject::ITER_CLASS_NFIXED_SLOTS, niScratch);
2307 :
2308 : // Ensure the iterator is reusable: see NativeIterator::isReusable.
2309 0 : masm.branchIfNativeIteratorNotReusable(niScratch, failure->label());
2310 :
2311 : // Pre-write barrier for store to 'objectBeingIterated_'.
2312 0 : Address iterObjAddr(niScratch, NativeIterator::offsetOfObjectBeingIterated());
2313 0 : EmitPreBarrier(masm, iterObjAddr, MIRType::Object);
2314 :
2315 : // Mark iterator as active.
2316 0 : Address iterFlagsAddr(niScratch, NativeIterator::offsetOfFlags());
2317 0 : masm.storePtr(obj, iterObjAddr);
2318 0 : masm.or32(Imm32(NativeIterator::Flags::Active), iterFlagsAddr);
2319 :
2320 : // Post-write barrier for stores to 'objectBeingIterated_'.
2321 0 : emitPostBarrierSlot(output, TypedOrValueRegister(MIRType::Object, AnyRegister(obj)), scratch1);
2322 :
2323 : // Chain onto the active iterator stack.
2324 0 : masm.loadPtr(AbsoluteAddress(enumerators), scratch1);
2325 0 : emitRegisterEnumerator(scratch1, niScratch, scratch2);
2326 :
2327 0 : return true;
2328 : }
2329 :
2330 : bool
2331 0 : IonCacheIRCompiler::emitGuardDOMExpandoMissingOrGuardShape()
2332 : {
2333 0 : ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
2334 0 : Shape* shape = shapeStubField(reader.stubOffset());
2335 :
2336 0 : AutoScratchRegister objScratch(allocator, masm);
2337 :
2338 : FailurePath* failure;
2339 0 : if (!addFailurePath(&failure))
2340 : return false;
2341 :
2342 0 : Label done;
2343 0 : masm.branchTestUndefined(Assembler::Equal, val, &done);
2344 :
2345 0 : masm.debugAssertIsObject(val);
2346 0 : masm.unboxObject(val, objScratch);
2347 : // The expando object is not used in this case, so we don't need Spectre
2348 : // mitigations.
2349 0 : masm.branchTestObjShapeNoSpectreMitigations(Assembler::NotEqual, objScratch, shape,
2350 0 : failure->label());
2351 :
2352 0 : masm.bind(&done);
2353 : return true;
2354 : }
2355 :
2356 : bool
2357 0 : IonCacheIRCompiler::emitLoadDOMExpandoValueGuardGeneration()
2358 : {
2359 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2360 : ExpandoAndGeneration* expandoAndGeneration =
2361 0 : rawWordStubField<ExpandoAndGeneration*>(reader.stubOffset());
2362 0 : uint64_t* generationFieldPtr = expandoGenerationStubFieldPtr(reader.stubOffset());
2363 :
2364 0 : AutoScratchRegister scratch1(allocator, masm);
2365 0 : AutoScratchRegister scratch2(allocator, masm);
2366 0 : ValueOperand output = allocator.defineValueRegister(masm, reader.valOperandId());
2367 :
2368 : FailurePath* failure;
2369 0 : if (!addFailurePath(&failure))
2370 : return false;
2371 :
2372 0 : masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), scratch1);
2373 0 : Address expandoAddr(scratch1, detail::ProxyReservedSlots::offsetOfPrivateSlot());
2374 :
2375 : // Guard that the ExpandoAndGeneration* matches the proxy's ExpandoAndGeneration.
2376 0 : masm.loadValue(expandoAddr, output);
2377 0 : masm.branchTestValue(Assembler::NotEqual, output, PrivateValue(expandoAndGeneration),
2378 0 : failure->label());
2379 :
2380 : // Guard that expandoAndGeneration->generation matches the expected generation.
2381 0 : masm.movePtr(ImmPtr(expandoAndGeneration), output.scratchReg());
2382 0 : masm.movePtr(ImmPtr(generationFieldPtr), scratch1);
2383 0 : masm.branch64(Assembler::NotEqual,
2384 0 : Address(output.scratchReg(), ExpandoAndGeneration::offsetOfGeneration()),
2385 0 : Address(scratch1, 0),
2386 : scratch2,
2387 0 : failure->label());
2388 :
2389 : // Load expandoAndGeneration->expando into the output Value register.
2390 0 : masm.loadValue(Address(output.scratchReg(), ExpandoAndGeneration::offsetOfExpando()), output);
2391 0 : return true;
2392 : }
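// ---------------------------------------------------------------------------
// [Editorial sketch -- not part of the original file.] The emitter above is
// a classic generation guard: the stub baked in both the
// ExpandoAndGeneration* and the generation value it observed at attach time,
// and both must still match before the cached expando may be used. As plain
// C++ (hypothetical names; nullptr stands in for "guard failed, take the IC
// failure path"):

#include <cstdint>

struct MiniExpandoAndGeneration {
    uint64_t generation;
    void* expando;
};

inline void* LoadExpandoGuarded(const MiniExpandoAndGeneration* current,
                                const MiniExpandoAndGeneration* expected,
                                uint64_t expectedGeneration)
{
    if (current != expected)
        return nullptr;  // proxy now uses a different holder
    if (current->generation != expectedGeneration)
        return nullptr;  // holder was mutated since the stub attached
    return current->expando;
}
// ---------------------------------------------------------------------------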
2393 :
2394 : void
2395 549 : IonIC::attachCacheIRStub(JSContext* cx, const CacheIRWriter& writer, CacheKind kind,
2396 : IonScript* ionScript, bool* attached,
2397 : const PropertyTypeCheckInfo* typeCheckInfo)
2398 : {
2399 : // We shouldn't GC or report OOM (or any other exception) here.
2400 1098 : AutoAssertNoPendingException aanpe(cx);
2401 1098 : JS::AutoCheckCannotGC nogc;
2402 :
2403 0 : MOZ_ASSERT(!*attached);
2404 :
2405 : // SetProp/SetElem stubs must have non-null typeCheckInfo.
2406 0 : MOZ_ASSERT(!!typeCheckInfo == (kind == CacheKind::SetProp || kind == CacheKind::SetElem));
2407 :
2408 : // Do nothing if the IR generator failed or triggered a GC that invalidated
2409 : // the script.
2410 549 : if (writer.failed() || ionScript->invalidated())
2411 0 : return;
2412 :
2413 0 : JitZone* jitZone = cx->zone()->jitZone();
2414 0 : uint32_t stubDataOffset = sizeof(IonICStub);
2415 :
2416 : // Try to reuse a previously-allocated CacheIRStubInfo.
2417 : CacheIRStubKey::Lookup lookup(kind, ICStubEngine::IonIC,
2418 1098 : writer.codeStart(), writer.codeLength());
2419 549 : CacheIRStubInfo* stubInfo = jitZone->getIonCacheIRStubInfo(lookup);
2420 549 : if (!stubInfo) {
2421 : // Allocate the shared CacheIRStubInfo. Note that the
2422 : // putIonCacheIRStubInfo call below will transfer ownership to
2423 : // the stub info HashSet, so we don't have to worry about freeing
2424 : // it here.
2425 :
2426 : // For Ion ICs, we don't track/use the makesGCCalls flag, so just pass true.
2427 44 : bool makesGCCalls = true;
2428 44 : stubInfo = CacheIRStubInfo::New(kind, ICStubEngine::IonIC, makesGCCalls,
2429 : stubDataOffset, writer);
2430 0 : if (!stubInfo)
2431 0 : return;
2432 :
2433 0 : CacheIRStubKey key(stubInfo);
2434 0 : if (!jitZone->putIonCacheIRStubInfo(lookup, key))
2435 : return;
2436 : }
2437 :
2438 549 : MOZ_ASSERT(stubInfo);
2439 :
2440 : // Ensure we don't attach duplicate stubs. This can happen if a stub failed
2441 : // for some reason and the IR generator doesn't check for exactly the same
2442 : // conditions.
2443 1341 : for (IonICStub* stub = firstStub_; stub; stub = stub->next()) {
2444 792 : if (stub->stubInfo() != stubInfo)
2445 792 : continue;
2446 0 : bool updated = false;
2447 0 : if (!writer.stubDataEqualsMaybeUpdate(stub->stubDataStart(), &updated))
2448 : continue;
2449 0 : if (updated || (typeCheckInfo && typeCheckInfo->needsTypeBarrier())) {
2450 : // We updated a stub or have a stub that requires property type
2451 : // checks. In this case the stub will likely handle more cases in
2452 : // the future and we shouldn't deoptimize.
2453 0 : *attached = true;
2454 : }
2455 0 : return;
2456 : }
2457 :
2458 0 : size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
2459 :
2460 : // Allocate the IonICStub in the optimized stub space. Ion stubs and
2461 : // CacheIRStubInfo instances for Ion stubs can be purged on GC. That's okay
2462 : // because the stub code is rooted separately when we make a VM call, and
2463 : // stub code should never access the IonICStub after making a VM call. The
2464 : // IonICStub::poison method poisons the stub to catch bugs in this area.
2465 1098 : ICStubSpace* stubSpace = cx->zone()->jitZone()->optimizedStubSpace();
2466 549 : void* newStubMem = stubSpace->alloc(bytesNeeded);
2467 549 : if (!newStubMem)
2468 : return;
2469 :
2470 0 : IonICStub* newStub = new(newStubMem) IonICStub(fallbackLabel_.raw(), stubInfo);
2471 549 : writer.copyStubData(newStub->stubDataStart());
2472 :
2473 0 : JitContext jctx(cx, nullptr);
2474 0 : IonCacheIRCompiler compiler(cx, writer, this, ionScript, newStub, typeCheckInfo, stubDataOffset);
2475 549 : if (!compiler.init())
2476 0 : return;
2477 :
2478 0 : JitCode* code = compiler.compile();
2479 0 : if (!code)
2480 : return;
2481 :
2482 0 : attachStub(newStub, code);
2483 549 : *attached = true;
2484 : }
|