1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : * David Mandelin <dmandelin@mozilla.com>
26 : *
27 : * Alternatively, the contents of this file may be used under the terms of
28 : * either of the GNU General Public License Version 2 or later (the "GPL"),
29 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 : * in which case the provisions of the GPL or the LGPL are applicable instead
31 : * of those above. If you wish to allow use of your version of this file only
32 : * under the terms of either the GPL or the LGPL, and not to allow others to
33 : * use your version of this file under the terms of the MPL, indicate your
34 : * decision by deleting the provisions above and replace them with the notice
35 : * and other provisions required by the GPL or the LGPL. If you do not delete
36 : * the provisions above, a recipient may use your version of this file under
37 : * the terms of any one of the MPL, the GPL or the LGPL.
38 : *
39 : * ***** END LICENSE BLOCK ***** */
40 : #include "jsscope.h"
41 : #include "jsnum.h"
42 : #include "MonoIC.h"
43 : #include "StubCalls.h"
44 : #include "StubCalls-inl.h"
45 : #include "assembler/assembler/LinkBuffer.h"
46 : #include "assembler/assembler/MacroAssembler.h"
47 : #include "assembler/assembler/CodeLocation.h"
48 : #include "methodjit/CodeGenIncludes.h"
49 : #include "methodjit/Compiler.h"
50 : #include "methodjit/ICRepatcher.h"
51 : #include "methodjit/PolyIC.h"
52 : #include "InlineFrameAssembler.h"
53 : #include "jsobj.h"
54 :
55 : #include "builtin/RegExp.h"
56 :
57 : #include "jsinterpinlines.h"
58 : #include "jsobjinlines.h"
59 : #include "jsscopeinlines.h"
60 : #include "jsscriptinlines.h"
61 :
62 : using namespace js;
63 : using namespace js::mjit;
64 : using namespace js::mjit::ic;
65 :
66 : typedef JSC::MacroAssembler::RegisterID RegisterID;
67 : typedef JSC::MacroAssembler::Address Address;
68 : typedef JSC::MacroAssembler::Jump Jump;
69 : typedef JSC::MacroAssembler::Imm32 Imm32;
70 : typedef JSC::MacroAssembler::ImmPtr ImmPtr;
71 : typedef JSC::MacroAssembler::Call Call;
72 : typedef JSC::MacroAssembler::Label Label;
73 : typedef JSC::MacroAssembler::DataLabel32 DataLabel32;
74 : typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
75 :
76 : #if defined JS_MONOIC
77 :
78 : static void
79 22 : PatchGetFallback(VMFrame &f, ic::GetGlobalNameIC *ic)
80 : {
81 44 : Repatcher repatch(f.chunk());
82 22 : JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::Name));
83 22 : repatch.relink(ic->slowPathCall, fptr);
84 22 : }
85 :
/*
 * Out-of-line path for reading a global name through a GetGlobalNameIC.
 * On a cacheable lookup this patches the IC's inline shape guard and slot
 * load so later executions take the fast path; the actual load for this
 * execution is always performed via the generic stubs::Name stub.
 */
void JS_FASTCALL
ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    JSObject &obj = f.fp()->scopeChain().global();
    PropertyName *name = f.script()->getName(GET_UINT32_INDEX(f.pc()));

    /* The lookup below may trigger a recompilation of this chunk. */
    RecompilationMonitor monitor(f.cx);

    const Shape *shape = obj.nativeLookup(f.cx, js_CheckForStringIndex(ATOM_TO_JSID(name)));

    if (monitor.recompiled()) {
        /* The IC's code may have been destroyed; don't touch it. */
        stubs::Name(f);
        return;
    }

    if (!shape ||
        !shape->hasDefaultGetter() ||
        !shape->hasSlot())
    {
        /*
         * A missing property may still appear later, so keep the IC alive
         * in that case; a shape with a custom getter or no slot will never
         * become cacheable, so disable the IC for it.
         */
        if (shape)
            PatchGetFallback(f, ic);
        stubs::Name(f);
        return;
    }
    uint32_t slot = shape->slot();

    /* Patch shape guard. */
    Repatcher repatcher(f.chunk());
    repatcher.repatch(ic->fastPathStart.dataLabelPtrAtOffset(ic->shapeOffset), obj.lastProperty());

    /* Patch loads. */
    uint32_t index = obj.dynamicSlotIndex(slot);
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueLoad(label, index * sizeof(Value));

    /* Do load anyway... this time. */
    stubs::Name(f);
}
124 :
/*
 * Slow-path target installed once a SetGlobalName IC has been disabled:
 * always performs the assignment through the generic stub. Explicitly
 * instantiated for both strict and non-strict code so STRICT_VARIANT can
 * select the right variant at patch time.
 */
template <JSBool strict>
static void JS_FASTCALL
DisabledSetGlobal(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    stubs::SetGlobalName<strict>(f, f.script()->getName(GET_UINT32_INDEX(f.pc())));
}

template void JS_FASTCALL DisabledSetGlobal<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
template void JS_FASTCALL DisabledSetGlobal<false>(VMFrame &f, ic::SetGlobalNameIC *ic);
134 :
/*
 * Disable a SetGlobalName IC: repoint its slow-path call at the
 * strictness-appropriate DisabledSetGlobal stub so we stop trying to
 * update the IC on every miss.
 */
static void
PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    /* NOTE(review): |script| looks unused but is presumably read by the
     * STRICT_VARIANT macro expansion below — confirm before removing. */
    JSScript *script = f.script();
    Repatcher repatch(f.chunk());
    VoidStubSetGlobal stub = STRICT_VARIANT(DisabledSetGlobal);
    JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
    repatch.relink(ic->slowPathCall, fptr);
}
144 :
/*
 * Repatch the shape guard embedded in this IC's out-of-line extra stub.
 * Only valid once such a stub has been generated (hasExtraStub).
 */
void
SetGlobalNameIC::patchExtraShapeGuard(Repatcher &repatcher, const Shape *shape)
{
    JS_ASSERT(hasExtraStub);

    JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(extraStub.start()));
    repatcher.repatch(label.dataLabelPtrAtOffset(extraShapeGuard), shape);
}
153 :
154 : void
155 44923 : SetGlobalNameIC::patchInlineShapeGuard(Repatcher &repatcher, const Shape *shape)
156 : {
157 44923 : JSC::CodeLocationDataLabelPtr label = fastPathStart.dataLabelPtrAtOffset(shapeOffset);
158 44923 : repatcher.repatch(label, shape);
159 44923 : }
160 :
/*
 * Try to specialize a SetGlobalName IC for the shape of the looked-up
 * global property. Returns Lookup_Uncacheable when no specialization was
 * possible (the IC may have been disabled); never returns Lookup_Error
 * in this code path.
 */
static LookupStatus
UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape)
{
    /* Give globals a chance to appear. */
    if (!shape)
        return Lookup_Uncacheable;

    if (!shape->hasDefaultSetter() ||
        !shape->writable() ||
        !shape->hasSlot() ||
        obj->watched())
    {
        /* Disable the IC for weird shape attributes and watchpoints. */
        PatchSetFallback(f, ic);
        return Lookup_Uncacheable;
    }

    /* Object is not branded, so we can use the inline path. */
    Repatcher repatcher(f.chunk());
    ic->patchInlineShapeGuard(repatcher, obj->lastProperty());

    /* Point the inline store at the property's dynamic slot. */
    uint32_t index = obj->dynamicSlotIndex(shape->slot());
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueStore(label, index * sizeof(Value),
                                              ic->vr.isTypeKnown());

    return Lookup_Cacheable;
}
189 :
/*
 * Out-of-line path for assigning to a global name. Attempts to specialize
 * the IC for the current shape of the global, then always performs this
 * assignment through the generic stub.
 */
void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSObject &obj = f.fp()->scopeChain().global();
    JSScript *script = f.script();
    PropertyName *name = script->getName(GET_UINT32_INDEX(f.pc()));

    /* The lookup below may trigger a recompilation of this chunk. */
    RecompilationMonitor monitor(f.cx);

    const Shape *shape = obj.nativeLookup(f.cx, ATOM_TO_JSID(name));

    if (!monitor.recompiled()) {
        /* Only touch the IC if its code is still alive. */
        LookupStatus status = UpdateSetGlobalName(f, ic, &obj, shape);
        if (status == Lookup_Error)
            THROW();
    }

    STRICT_VARIANT(stubs::SetGlobalName)(f, name);
}
209 :
/*
 * Linker for lazily generated equality stubs. The executable pool backing
 * a stub is appended to the current chunk's pool list so its lifetime is
 * tied to (and released with) the chunk.
 */
class EqualityICLinker : public LinkerHelper
{
    VMFrame &f;

  public:
    EqualityICLinker(Assembler &masm, VMFrame &f)
      : LinkerHelper(masm, JSC::METHOD_CODE), f(f)
    { }

    /* Allocate the stub's pool; on failure reports OOM and returns false. */
    bool init(JSContext *cx) {
        JSC::ExecutablePool *pool = LinkerHelper::init(cx);
        if (!pool)
            return false;
        JS_ASSERT(!f.regs.inlined());
        if (!f.chunk()->execPools.append(pool)) {
            pool->release();
            js_ReportOutOfMemory(cx);
            return false;
        }
        return true;
    }
};
232 :
/*
 * Rough over-estimate of how much memory we need to unprotect.
 * NOTE(review): not referenced in this portion of the file; presumably
 * sized to cover an IC's inline path when unprotecting code for patching
 * — confirm against the rest of the file.
 */
static const uint32_t INLINE_PATH_LENGTH = 64;
235 :
/*
 * Lazily generates a specialized inline path for an equality comparison
 * (===/==-style IC) once the operand types are observed. Supports two
 * specializations: atomized-string pointer comparison and object identity
 * comparison. Any guard failure falls back to the generic stub.
 */
class EqualityCompiler : public BaseCompiler
{
    VMFrame &f;
    EqualityICInfo &ic;

    /* Guard-failure jumps that must be routed to the out-of-line stub. */
    Vector<Jump, 4, SystemAllocPolicy> jumpList;
    Jump trueJump;
    Jump falseJump;

  public:
    EqualityCompiler(VMFrame &f, EqualityICInfo &ic)
      : BaseCompiler(f.cx), f(f), ic(ic), jumpList(SystemAllocPolicy())
    {
    }

    /* Route a type/atomization guard failure to the generic stub. */
    void linkToStub(Jump j)
    {
        jumpList.append(j);
    }

    /* Record the jump taken when the comparison (per ic.cond) holds. */
    void linkTrue(Jump j)
    {
        trueJump = j;
    }

    /* Record the fallthrough jump taken when the comparison fails. */
    void linkFalse(Jump j)
    {
        falseJump = j;
    }

    /*
     * Emit a specialized path comparing two strings. Atoms are interned,
     * so two atomized strings are equal iff their pointers are equal;
     * non-atomized operands bail out to the generic stub.
     */
    void generateStringPath(Assembler &masm)
    {
        const ValueRemat &lvr = ic.lvr;
        const ValueRemat &rvr = ic.rvr;

        JS_ASSERT_IF(lvr.isConstant(), lvr.isType(JSVAL_TYPE_STRING));
        JS_ASSERT_IF(rvr.isConstant(), rvr.isType(JSVAL_TYPE_STRING));

        /* Guard that both operands are still strings. */
        if (!lvr.isType(JSVAL_TYPE_STRING)) {
            Jump lhsFail = masm.testString(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isType(JSVAL_TYPE_STRING)) {
            Jump rhsFail = masm.testString(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        RegisterID tmp = ic.tempReg;

        /* JSString::isAtom === (lengthAndFlags & ATOM_MASK == 0) */
        JS_STATIC_ASSERT(JSString::ATOM_FLAGS == 0);
        Imm32 atomMask(JSString::ATOM_MASK);

        masm.load32(Address(lvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
        Jump lhsNotAtomized = masm.branchTest32(Assembler::NonZero, tmp, atomMask);
        linkToStub(lhsNotAtomized);

        /* A constant rhs is asserted to be an atom below; skip the check. */
        if (!rvr.isConstant()) {
            masm.load32(Address(rvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
            Jump rhsNotAtomized = masm.branchTest32(Assembler::NonZero, tmp, atomMask);
            linkToStub(rhsNotAtomized);
        }

        /* Compare by pointer identity. */
        if (rvr.isConstant()) {
            JSString *str = rvr.value().toString();
            JS_ASSERT(str->isAtom());
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(str));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

    /*
     * Emit a specialized path comparing two objects by identity. Objects
     * whose class has an equality hook cannot be compared by pointer, so
     * they bail out to the generic stub.
     */
    void generateObjectPath(Assembler &masm)
    {
        ValueRemat &lvr = ic.lvr;
        ValueRemat &rvr = ic.rvr;

        /* Guard that both operands are still objects. */
        if (!lvr.isConstant() && !lvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump lhsFail = masm.testObject(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isConstant() && !rvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump rhsFail = masm.testObject(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        /* Bail if the lhs class defines a custom equality hook. */
        masm.loadObjClass(lvr.dataReg(), ic.tempReg);
        Jump lhsHasEq = masm.branchPtr(Assembler::NotEqual,
                                       Address(ic.tempReg, offsetof(Class, ext.equality)),
                                       ImmPtr(NULL));
        linkToStub(lhsHasEq);

        /* Compare by pointer identity. */
        if (rvr.isConstant()) {
            JSObject *obj = &rvr.value().toObject();
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(obj));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

    /*
     * Finalize the generated code and wire it into the IC: the original
     * IC call site is disabled (relinked to the generic stub) and the
     * inline jump is redirected to the new code.
     */
    bool linkForIC(Assembler &masm)
    {
        EqualityICLinker buffer(masm, f);
        if (!buffer.init(cx))
            return false;

        Repatcher repatcher(f.chunk());

        /* Overwrite the call to the IC with a call to the stub. */
        JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
        repatcher.relink(ic.stubCall, fptr);

        // Silently fail, the IC is disabled now.
        if (!buffer.verifyRange(f.chunk()))
            return true;

        /* Set the targets of all type test failures to go to the stub. */
        for (size_t i = 0; i < jumpList.length(); i++)
            buffer.link(jumpList[i], ic.stubEntry);
        jumpList.clear();

        /* Set the targets for the success and failure of the actual equality test. */
        buffer.link(trueJump, ic.target);
        buffer.link(falseJump, ic.fallThrough);

        CodeLocationLabel cs = buffer.finalize(f);

        /* Jump to the newly generated code instead of to the IC. */
        repatcher.relink(ic.jumpToStub, cs);

        return true;
    }

    /*
     * Generate the specialized path on first invocation if the observed
     * operand types support one; otherwise leave the IC untouched. Returns
     * false only on an unrecoverable (OOM) failure.
     */
    bool update()
    {
        if (!ic.generated) {
            Assembler masm;
            Value rval = f.regs.sp[-1];
            Value lval = f.regs.sp[-2];

            if (rval.isObject() && lval.isObject()) {
                generateObjectPath(masm);
                ic.generated = true;
            } else if (rval.isString() && lval.isString()) {
                generateStringPath(masm);
                ic.generated = true;
            } else {
                /* No specialization for this type combination. */
                return true;
            }

            return linkForIC(masm);
        }

        return true;
    }
};
404 :
/*
 * Out-of-line path for an equality IC: try to generate/patch a
 * specialized inline comparison, then compute this occurrence's result
 * through the IC's generic stub.
 */
JSBool JS_FASTCALL
ic::Equality(VMFrame &f, ic::EqualityICInfo *ic)
{
    EqualityCompiler cc(f, *ic);
    if (!cc.update())
        THROWV(JS_FALSE);

    return ic->stub(f);
}
414 :
/* Slow-path target installed once a call IC is disabled: plain call. */
static void * JS_FASTCALL
SlowCallFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowCall(f, ic->frameSize.getArgc(f));
    return NULL;
}
421 :
/* Slow-path target installed once a |new| call IC is disabled. */
static void * JS_FASTCALL
SlowNewFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowNew(f, ic->frameSize.staticArgc());
    return NULL;
}
428 :
/*
 * Finish allocation for a native-call stub and register it on the chunk
 * so its final jump can be repatched later and its pool is released with
 * the chunk. Returns false on OOM.
 */
bool
NativeStubLinker::init(JSContext *cx)
{
    JSC::ExecutablePool *pool = LinkerHelper::init(cx);
    if (!pool)
        return false;

    NativeCallStub stub;
    stub.pc = pc;
    stub.pool = pool;
    stub.jump = locationOf(done);
    if (!chunk->nativeCallStubs.append(stub)) {
        pool->release();
        return false;
    }

    return true;
}
447 :
448 : /*
449 : * Generate epilogue code to run after a stub ABI call to a native or getter.
450 : * This checks for an exception, and either type checks the result against the
451 : * observed types for the opcode or loads the result into a register pair
452 : * (it will go through a type barrier afterwards).
453 : */
bool
mjit::NativeStubEpilogue(VMFrame &f, Assembler &masm, NativeStubLinker::FinalJump *result,
                         int32_t initialFrameDepth, int32_t vpOffset,
                         MaybeRegisterID typeReg, MaybeRegisterID dataReg)
{
    /* Reload fp, which may have been clobbered by restoreStackBase(). */
    masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);

    /* A zero return from the native signals an exception (handled below). */
    Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg,
                                          Registers::ReturnReg);

    Address resultAddress(JSFrameReg, vpOffset);

    Vector<Jump> mismatches(f.cx);
    if (f.cx->typeInferenceEnabled() && !typeReg.isSet()) {
        /*
         * Test the result of this native against the known result type set for
         * the call. We don't assume knowledge about the types that natives can
         * return, except when generating specialized paths in FastBuiltins.
         */
        types::TypeSet *types = f.script()->analysis()->bytecodeTypes(f.pc());
        if (!masm.generateTypeCheck(f.cx, resultAddress, types, &mismatches))
            THROWV(false);
    }

    /*
     * Can no longer trigger recompilation in this stub, clear the stub rejoin
     * on the VMFrame.
     */
    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));

    /* Load the result into the requested register pair, if any. */
    if (typeReg.isSet())
        masm.loadValueAsComponents(resultAddress, typeReg.reg(), dataReg.reg());

    /*
     * The final jump is a indirect on x64, so that we'll always be able
     * to repatch it to the interpoline later.
     */
    Label finished = masm.label();
#ifdef JS_CPU_X64
    JSC::MacroAssembler::DataLabelPtr done = masm.moveWithPatch(ImmPtr(NULL), Registers::ValueReg);
    masm.jump(Registers::ValueReg);
#else
    Jump done = masm.jump();
#endif

    /* Generate a call for type check failures on the native result. */
    if (!mismatches.empty()) {
        for (unsigned i = 0; i < mismatches.length(); i++)
            mismatches[i].linkTo(masm.label(), &masm);
        masm.addPtr(Imm32(vpOffset), JSFrameReg, Registers::ArgReg1);
        masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::TypeBarrierReturn),
                            f.regs.pc, NULL, initialFrameDepth);
        masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
        masm.jump().linkTo(finished, &masm);
    }

    /* Move JaegerThrowpoline into register for very far jump on x64. */
    hasException.linkTo(masm.label(), &masm);
    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
    masm.throwInJIT();

    /* Hand the final jump back so the caller can link/patch it. */
    *result = done;
    return true;
}
519 :
520 : /*
521 : * Calls have an inline path and an out-of-line path. The inline path is used
522 : * in the fastest case: the method has JIT'd code, and |argc == nargs|.
523 : *
524 : * The inline path and OOL path are separated by a guard on the identity of
525 : * the callee object. This guard starts as NULL and always fails on the first
526 : * hit. On the OOL path, the callee is verified to be both a function and a
527 : * scripted function. If these conditions hold, |ic::Call| is invoked.
528 : *
529 : * |ic::Call| first ensures that the callee has JIT code. If it doesn't, the
530 : * call to |ic::Call| is patched to a slow path. If it does have JIT'd code,
531 : * the following cases can occur:
532 : *
533 : * 1) args != nargs: The call to |ic::Call| is patched with a dynamically
534 : * generated stub. This stub inlines a path that looks like:
535 : * ----
536 : * push frame
537 : * if (callee is not compiled) {
538 : * Compile(callee);
539 : * }
540 : * call callee->arityLabel
541 : *
542 : * The arity label is a special entry point for correcting frames for
543 : * arity mismatches.
544 : *
545 : * 2) args == nargs, and the inline call site was not patched yet.
546 : * The guard dividing the two paths is patched to guard on the given
547 : * function object identity, and the proceeding call is patched to
548 : * directly call the JIT code.
549 : *
550 : * 3) args == nargs, and the inline call site was patched already.
551 : * A small stub is created which extends the original guard to also
552 : * guard on the JSFunction lying underneath the function object.
553 : *
554 : * If the OOL path does not have a scripted function, but does have a
555 : * scripted native, then a small stub is generated which inlines the native
556 : * invocation.
557 : */
558 : namespace js {
559 : namespace mjit {
560 :
561 : class CallCompiler : public BaseCompiler
562 : {
563 : VMFrame &f;
564 : CallICInfo ⁣
565 : bool callingNew;
566 :
567 : public:
    /* |callingNew| selects |new|-call semantics vs. an ordinary call. */
    CallCompiler(VMFrame &f, CallICInfo &ic, bool callingNew)
      : BaseCompiler(f.cx), f(f), ic(ic), callingNew(callingNew)
    {
    }
572 :
    /*
     * Allocate the executable pool for a generated stub and record it in
     * the IC's pool slot for |index|. Returns NULL on allocation failure.
     */
    JSC::ExecutablePool *poolForSize(LinkerHelper &linker, CallICInfo::PoolIndex index)
    {
        JSC::ExecutablePool *ep = linker.init(f.cx);
        if (!ep)
            return NULL;
        JS_ASSERT(!ic.pools[index]);
        ic.pools[index] = ep;
        return ep;
    }
582 :
    /*
     * Give up on this call IC: relink its out-of-line call to go directly
     * to the appropriate slow path (SlowNewFromIC or SlowCallFromIC).
     */
    void disable()
    {
        JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
        Repatcher repatch(f.chunk());
        JSC::FunctionPtr fptr = callingNew
                                ? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
                                : JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
        repatch.relink(oolCall, fptr);
    }
592 :
    /*
     * Generate the arity-mismatch stub (case 1 in the file comment above):
     * builds the callee frame, compiles the callee if needed, and enters it
     * through its arity-check entry. Returns false only on OOM.
     */
    bool generateFullCallStub(JSScript *script, uint32_t flags)
    {
        /*
         * Create a stub that works with arity mismatches. Like the fast-path,
         * this allocates a frame on the caller side, but also performs extra
         * checks for compilability. Perhaps this should be a separate, shared
         * trampoline, but for now we generate it dynamically.
         */
        Assembler masm;
        InlineFrameAssembler inlFrame(masm, ic, flags);
        RegisterID t0 = inlFrame.tempRegs.takeAnyReg().reg();

        /* Generate the inline frame creation. */
        void *ncode = ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress();
        inlFrame.assemble(ncode, f.pc());

        /* funObjReg is still valid. Check if a compilation is needed. */
        Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
        masm.loadPtr(scriptAddr, t0);

        // Test that:
        // - script->jitHandle{Ctor,Normal}->value is neither NULL nor UNJITTABLE, and
        // - script->jitHandle{Ctor,Normal}->value->arityCheckEntry is not NULL.
        //
        size_t offset = JSScript::jitHandleOffset(callingNew);
        masm.loadPtr(Address(t0, offset), t0);
        Jump hasNoJitCode = masm.branchPtr(Assembler::BelowOrEqual, t0,
                                           ImmPtr(JSScript::JITScriptHandle::UNJITTABLE));

        masm.loadPtr(Address(t0, offsetof(JITScript, arityCheckEntry)), t0);

        Jump hasCode = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(0));

        hasNoJitCode.linkTo(masm.label(), &masm);

        /*
         * Write the rejoin state to indicate this is a compilation call made
         * from an IC (the recompiler cannot detect calls made from ICs
         * automatically).
         */
        masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
                      FrameAddress(offsetof(VMFrame, stubRejoin)));

        masm.bumpStubCount(f.script(), f.pc(), Registers::tempCallReg());

        /* Try and compile. On success we get back the nmap pointer. */
        void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
        DataLabelPtr inlined;
        if (ic.frameSize.isStatic()) {
            masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
            masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                compilePtr, f.regs.pc, &inlined, ic.frameSize.staticLocalSlots());
        } else {
            /* Dynamic argc (e.g. Function.prototype.apply): read it from the frame. */
            masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), Registers::ArgReg1);
            masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                compilePtr, f.regs.pc, &inlined, -1);
        }

        Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                              Registers::ReturnReg);
        masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);

        /* Compute the value of ncode to use at this call site. */
        ncode = (uint8_t *) f.chunk()->code.m_code.executableAddress() + ic.call->codeOffset;
        masm.storePtr(ImmPtr(ncode), Address(JSFrameReg, StackFrame::offsetOfNcode()));

        masm.jump(Registers::ReturnReg);

        hasCode.linkTo(masm.label(), &masm);

        /* Get nmap[ARITY], set argc, call. */
        if (ic.frameSize.isStatic())
            masm.move(Imm32(ic.frameSize.staticArgc()), JSParamReg_Argc);
        else
            masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
        masm.jump(t0);

        LinkerHelper linker(masm, JSC::METHOD_CODE);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ScriptStub);
        if (!ep)
            return false;

        if (!linker.verifyRange(f.chunk())) {
            /* Silently fail; the IC is disabled now. */
            disable();
            return true;
        }

        linker.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
        JSC::CodeLocationLabel cs = linker.finalize(f);

        JaegerSpew(JSpew_PICs, "generated CALL stub %p (%lu bytes)\n", cs.executableAddress(),
                   (unsigned long) masm.size());

        /* Record the inlined-frame site so recompilation can find it. */
        if (f.regs.inlined()) {
            JSC::LinkBuffer code((uint8_t *) cs.executableAddress(), masm.size(), JSC::METHOD_CODE);
            code.patch(inlined, f.regs.inlined());
        }

        /* Route future arity-mismatch misses through the new stub. */
        Repatcher repatch(f.chunk());
        JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
        repatch.relink(oolJump, cs);

        return true;
    }
697 :
    /*
     * Patch the inline call path (case 2 in the file comment above) to
     * guard on |obj|'s identity and jump straight into |script|'s JIT
     * code. Returns false if the jump cannot be relinked from here.
     */
    bool patchInlinePath(JSScript *script, JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());
        JITScript *jit = script->getJIT(callingNew);

        /* Very fast path. */
        Repatcher repatch(f.chunk());

        /*
         * Use the arguments check entry if this is a monitored call, we might
         * not have accounted for all possible argument types.
         */
        void *entry = ic.typeMonitored ? jit->argsCheckEntry : jit->fastEntry;

        if (!repatch.canRelink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                               JSC::CodeLocationLabel(entry))) {
            return false;
        }

        /* Link this IC into the callee's caller list for invalidation. */
        ic.fastGuardedObject = obj;
        JS_APPEND_LINK(&ic.links, &jit->callers);

        repatch.repatch(ic.funGuard, obj);
        repatch.relink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                       JSC::CodeLocationLabel(entry));

        JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n",
                   ic.funGuard.executableAddress(),
                   static_cast<void*>(ic.fastGuardedObject));

        return true;
    }
730 :
    /*
     * Generate the closure stub (case 3 in the file comment above): the
     * inline object-identity guard already failed once, so extend it with
     * a guard on the underlying JSFunction's script instead. Returns false
     * only on OOM.
     */
    bool generateStubForClosures(JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());

        /* Slightly less fast path - guard on fun->script() instead. */
        Assembler masm;

        Registers tempRegs(Registers::AvailRegs);
        tempRegs.takeReg(ic.funObjReg);

        RegisterID t0 = tempRegs.takeAnyReg().reg();

        /* Guard that it's actually a function object. */
        Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, t0, &FunctionClass);

        /* Guard that it's the same script. */
        Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
        Jump funGuard = masm.branchPtr(Assembler::NotEqual, scriptAddr,
                                       ImmPtr(obj->toFunction()->script()));
        Jump done = masm.jump();

        LinkerHelper linker(masm, JSC::METHOD_CODE);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
        if (!ep)
            return false;

        ic.hasJsFunCheck = true;

        if (!linker.verifyRange(f.chunk())) {
            /* Silently fail; the IC is disabled now. */
            disable();
            return true;
        }

        /* Guard failures fall back to the slow path; success rejoins the hot path. */
        linker.link(claspGuard, ic.slowPathStart);
        linker.link(funGuard, ic.slowPathStart);
        linker.link(done, ic.funGuard.labelAtOffset(ic.hotPathOffset));
        JSC::CodeLocationLabel cs = linker.finalize(f);

        JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%lu bytes)\n",
                   cs.executableAddress(), (unsigned long) masm.size());

        /* Redirect the inline guard's failure jump into the new stub. */
        Repatcher repatch(f.chunk());
        repatch.relink(ic.funJump, cs);

        return true;
    }
777 :
778 60489 : bool generateNativeStub()
779 : {
780 : /* Snapshot the frameDepth before SplatApplyArgs modifies it. */
781 60489 : unsigned initialFrameDepth = f.regs.sp - f.fp()->slots();
782 :
783 : /*
784 : * SplatApplyArgs has not been called, so we call it here before
785 : * potentially touching f.u.call.dynamicArgc.
786 : */
787 : CallArgs args;
788 60489 : if (ic.frameSize.isStatic()) {
789 54494 : JS_ASSERT(f.regs.sp - f.fp()->slots() == (int)ic.frameSize.staticLocalSlots());
790 54494 : args = CallArgsFromSp(ic.frameSize.staticArgc(), f.regs.sp);
791 : } else {
792 5995 : JS_ASSERT(!f.regs.inlined());
793 5995 : JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
794 5995 : if (!ic::SplatApplyArgs(f)) /* updates regs.sp */
795 0 : THROWV(true);
796 5995 : args = CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp);
797 : }
798 :
799 : JSFunction *fun;
800 60489 : if (!IsFunctionObject(args.calleev(), &fun))
801 3225 : return false;
802 :
803 57264 : if ((!callingNew && !fun->isNative()) || (callingNew && !fun->isNativeConstructor()))
804 3 : return false;
805 :
806 57261 : if (callingNew)
807 2035 : args.thisv().setMagic(JS_IS_CONSTRUCTING);
808 :
809 57261 : RecompilationMonitor monitor(cx);
810 :
811 57261 : if (!CallJSNative(cx, fun->native(), args))
812 2847 : THROWV(true);
813 :
814 54414 : types::TypeScript::Monitor(f.cx, f.script(), f.pc(), args.rval());
815 :
816 : /*
817 : * Native stubs are not generated for inline frames. The overhead of
818 : * bailing out from the IC is far greater than the time saved by
819 : * inlining the parent frame in the first place, so mark the immediate
820 : * caller as uninlineable.
821 : */
822 54414 : if (f.script()->function()) {
823 46032 : f.script()->uninlineable = true;
824 46032 : MarkTypeObjectFlags(cx, f.script()->function(), types::OBJECT_FLAG_UNINLINEABLE);
825 : }
826 :
827 : /* Don't touch the IC if the call triggered a recompilation. */
828 54414 : if (monitor.recompiled())
829 3563 : return true;
830 :
831 50851 : JS_ASSERT(!f.regs.inlined());
832 :
833 : /* Right now, take slow-path for IC misses or multiple stubs. */
834 50851 : if (ic.fastGuardedNative || ic.hasJsFunCheck)
835 8277 : return true;
836 :
837 : /* Native MIC needs to warm up first. */
838 42574 : if (!ic.hit) {
839 25495 : ic.hit = true;
840 25495 : return true;
841 : }
842 :
843 : /* Generate fast-path for calling this native. */
844 34158 : Assembler masm;
845 :
846 : /* Guard on the function object identity, for now. */
847 17079 : Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(fun));
848 :
849 : /*
850 : * Write the rejoin state for the recompiler to use if this call
851 : * triggers recompilation. Natives use a different stack address to
852 : * store the return value than FASTCALLs, and without additional
853 : * information we cannot tell which one is active on a VMFrame.
854 : */
855 17079 : masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), true)),
856 34158 : FrameAddress(offsetof(VMFrame, stubRejoin)));
857 :
858 : /* N.B. After this call, the frame will have a dynamic frame size. */
859 17079 : if (ic.frameSize.isDynamic()) {
860 170 : masm.bumpStubCount(f.script(), f.pc(), Registers::tempCallReg());
861 170 : masm.fallibleVMCall(cx->typeInferenceEnabled(),
862 : JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
863 340 : f.regs.pc, NULL, initialFrameDepth);
864 : }
865 :
866 17079 : Registers tempRegs = Registers::tempCallRegMask();
867 17079 : RegisterID t0 = tempRegs.takeAnyReg().reg();
868 17079 : masm.bumpStubCount(f.script(), f.pc(), t0);
869 :
870 17079 : int32_t storeFrameDepth = ic.frameSize.isStatic() ? initialFrameDepth : -1;
871 17079 : masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, storeFrameDepth);
872 :
873 : /* Grab cx. */
874 : #ifdef JS_CPU_X86
875 17079 : RegisterID cxReg = tempRegs.takeAnyReg().reg();
876 : #else
877 : RegisterID cxReg = Registers::ArgReg0;
878 : #endif
879 17079 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
880 :
881 : /*
882 : * Compute vp. This will always be at the same offset from fp for a
883 : * given callsite, regardless of any dynamically computed argc,
884 : * so get that offset from the active call.
885 : */
886 : #ifdef JS_CPU_X86
887 17079 : RegisterID vpReg = t0;
888 : #else
889 : RegisterID vpReg = Registers::ArgReg2;
890 : #endif
891 17079 : uint32_t vpOffset = (uint32_t) ((char *) args.base() - (char *) f.fp());
892 17079 : masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
893 :
894 : /* Compute argc. */
895 17079 : MaybeRegisterID argcReg;
896 17079 : if (!ic.frameSize.isStatic()) {
897 170 : argcReg = tempRegs.takeAnyReg().reg();
898 170 : masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), argcReg.reg());
899 : }
900 :
901 : /* Mark vp[1] as magic for |new|. */
902 17079 : if (callingNew)
903 323 : masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(vpReg, sizeof(Value)));
904 :
905 17079 : masm.restoreStackBase();
906 17079 : masm.setupABICall(Registers::NormalCall, 3);
907 17079 : masm.storeArg(2, vpReg);
908 17079 : if (ic.frameSize.isStatic())
909 16909 : masm.storeArg(1, ImmIntPtr(intptr_t(ic.frameSize.staticArgc())));
910 : else
911 170 : masm.storeArg(1, argcReg.reg());
912 17079 : masm.storeArg(0, cxReg);
913 :
914 17079 : js::Native native = fun->native();
915 :
916 : /*
917 : * Call RegExp.test instead of exec if the result will not be used or
918 : * will only be used to test for existence. Note that this will not
919 : * break inferred types for the call's result and any subsequent test,
920 : * as RegExp.exec has a type handler with unknown result.
921 : */
922 17079 : if (native == regexp_exec && !CallResultEscapes(f.pc()))
923 4375 : native = regexp_test;
924 :
925 17079 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, native), false);
926 :
927 17079 : NativeStubLinker::FinalJump done;
928 17079 : if (!NativeStubEpilogue(f, masm, &done, initialFrameDepth, vpOffset, MaybeRegisterID(), MaybeRegisterID()))
929 0 : return false;
930 34158 : NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
931 17079 : if (!linker.init(f.cx))
932 0 : THROWV(true);
933 :
934 17079 : if (!linker.verifyRange(f.chunk())) {
935 0 : disable();
936 0 : return true;
937 : }
938 :
939 17079 : linker.patchJump(ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
940 :
941 17079 : ic.fastGuardedNative = fun;
942 :
943 17079 : linker.link(funGuard, ic.slowPathStart);
944 17079 : JSC::CodeLocationLabel start = linker.finalize(f);
945 :
946 : JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%lu bytes)\n",
947 17079 : start.executableAddress(), (unsigned long) masm.size());
948 :
949 34158 : Repatcher repatch(f.chunk());
950 17079 : repatch.relink(ic.funJump, start);
951 :
952 17079 : return true;
953 : }
954 :
    /*
     * Entry point for the call IC. Runs the call through the uncached
     * helpers, then — once the site has warmed up — attaches a fast-path
     * stub for subsequent calls. Returns the code address to jump into,
     * or NULL to take the slow path; THROWV(NULL) on stub-generation OOM.
     */
    void *update()
    {
        /* Detect recompilation of the calling script across the helper call. */
        RecompilationMonitor monitor(cx);

        bool lowered = ic.frameSize.lowered(f.pc());
        JS_ASSERT_IF(lowered, !callingNew);

        /* Invoke the callee via the generic helpers (may run or compile it). */
        stubs::UncachedCallResult ucr;
        if (callingNew)
            stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
        else
            stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), lowered, &ucr);

        // Watch out in case the IC was invalidated by a recompilation on the calling
        // script. This can happen either if the callee is executed or if it compiles
        // and the compilation has a static overflow.
        if (monitor.recompiled())
            return ucr.codeAddr;

        // If the function cannot be jitted (generally unjittable or empty script),
        // patch this site to go to a slow path always.
        if (!ucr.codeAddr) {
            if (ucr.unjittable)
                disable();
            return NULL;
        }

        JSFunction *fun = ucr.fun;
        JS_ASSERT(fun);
        JSScript *script = fun->script();
        JS_ASSERT(script);

        /* Frames entered via |new| must be flagged as constructing. */
        uint32_t flags = callingNew ? StackFrame::CONSTRUCTING : 0;

        /* Let the site warm up before spending time generating a stub. */
        if (!ic.hit) {
            ic.hit = true;
            return ucr.codeAddr;
        }

        if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
            /* Dynamic argc or arity mismatch: need the full (arity-fixing) stub. */
            if (!generateFullCallStub(script, flags))
                THROWV(NULL);
        } else {
            if (!ic.fastGuardedObject && patchInlinePath(script, fun)) {
                // Nothing, done.
            } else if (ic.fastGuardedObject &&
                       !ic.hasJsFunCheck &&
                       !ic.fastGuardedNative &&
                       ic.fastGuardedObject->toFunction()->script() == fun->script()) {
                /*
                 * Note: Multiple "function guard" stubs are not yet
                 * supported, thus the fastGuardedNative check.
                 */
                if (!generateStubForClosures(fun))
                    THROWV(NULL);
            } else {
                if (!generateFullCallStub(script, flags))
                    THROWV(NULL);
            }
        }

        return ucr.codeAddr;
    }
1018 : };
1019 :
1020 : } // namespace mjit
1021 : } // namespace js
1022 :
1023 : void * JS_FASTCALL
1024 28713 : ic::Call(VMFrame &f, CallICInfo *ic)
1025 : {
1026 28713 : CallCompiler cc(f, *ic, false);
1027 28713 : return cc.update();
1028 : }
1029 :
1030 : void * JS_FASTCALL
1031 3815 : ic::New(VMFrame &f, CallICInfo *ic)
1032 : {
1033 3815 : CallCompiler cc(f, *ic, true);
1034 3815 : return cc.update();
1035 : }
1036 :
1037 : void * JS_FASTCALL
1038 58430 : ic::NativeCall(VMFrame &f, CallICInfo *ic)
1039 : {
1040 58430 : CallCompiler cc(f, *ic, false);
1041 58430 : if (!cc.generateNativeStub())
1042 3204 : stubs::SlowCall(f, ic->frameSize.getArgc(f));
1043 58430 : return NULL;
1044 : }
1045 :
1046 : void * JS_FASTCALL
1047 2059 : ic::NativeNew(VMFrame &f, CallICInfo *ic)
1048 : {
1049 2059 : CallCompiler cc(f, *ic, true);
1050 2059 : if (!cc.generateNativeStub())
1051 24 : stubs::SlowNew(f, ic->frameSize.staticArgc());
1052 2059 : return NULL;
1053 : }
1054 :
1055 : static JS_ALWAYS_INLINE bool
1056 297533 : BumpStack(VMFrame &f, unsigned inc)
1057 : {
1058 297533 : if (f.regs.sp + inc < f.stackLimit)
1059 297533 : return true;
1060 0 : return f.cx->stack.space().tryBumpLimit(f.cx, f.regs.sp, inc, &f.stackLimit);
1061 : }
1062 :
1063 : /*
1064 : * SplatApplyArgs is only called for expressions of the form |f.apply(x, y)|.
1065 : * Additionally, the callee has already been checked to be the native apply.
1066 : * All successful paths through SplatApplyArgs must set f.u.call.dynamicArgc
1067 : * and f.regs.sp.
1068 : */
JSBool JS_FASTCALL
ic::SplatApplyArgs(VMFrame &f)
{
    JSContext *cx = f.cx;
    /* apply() sites are never compiled at inlined call sites. */
    JS_ASSERT(!f.regs.inlined());
    JS_ASSERT(GET_ARGC(f.regs.pc) == 2);

    /*
     * The lazyArgsObj flag indicates an optimized call |f.apply(x, arguments)|
     * where the args obj has not been created or pushed on the stack. Thus,
     * if lazyArgsObj is set, the stack for |f.apply(x, arguments)| is:
     *
     *  | Function.prototype.apply | f | x |
     *
     * Otherwise, if !lazyArgsObj, the stack is a normal 2-argument apply:
     *
     *  | Function.prototype.apply | f | x | arguments |
     */
    if (f.u.call.lazyArgsObj) {
        /* Mirror isMagic(JS_OPTIMIZED_ARGUMENTS) case in js_fun_apply. */
        /* Steps 4-6. */
        unsigned length = f.regs.fp()->numActualArgs();
        JS_ASSERT(length <= StackSpace::ARGS_LENGTH_MAX);

        /* Make room on the stack for the splatted actual arguments. */
        if (!BumpStack(f, length))
            THROWV(false);

        /* Steps 7-8: copy the frame's actuals directly onto the stack. */
        f.regs.fp()->forEachCanonicalActualArg(CopyTo(f.regs.sp));

        f.regs.sp += length;
        f.u.call.dynamicArgc = length;
        return true;
    }

    /* vp points at |apply| itself: [ apply | f | x | arguments ]. */
    Value *vp = f.regs.sp - 4;
    JS_ASSERT(JS_CALLEE(cx, vp).toObject().toFunction()->native() == js_fun_apply);

    /*
     * This stub should mimic the steps taken by js_fun_apply. Step 1 and part
     * of Step 2 have already been taken care of by calling jit code.
     */

    /* Step 2 (part 2): null/undefined args array means a 0-argument call. */
    if (vp[3].isNullOrUndefined()) {
        /* Pop the (unused) args-array slot. */
        f.regs.sp--;
        f.u.call.dynamicArgc = 0;
        return true;
    }

    /* Step 3. */
    if (!vp[3].isObject()) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_APPLY_ARGS, js_apply_str);
        THROWV(false);
    }

    /* Steps 4-5. */
    JSObject *aobj = &vp[3].toObject();
    uint32_t length;
    if (!js_GetLengthProperty(cx, aobj, &length))
        THROWV(false);

    /* Step 6. */
    if (length > StackSpace::ARGS_LENGTH_MAX) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_FUN_APPLY_ARGS);
        THROWV(false);
    }

    /*
     * The args-array slot is reused for the first argument, so the stack
     * grows by length - 1 slots (and shrinks by one when length is 0,
     * matching the null/undefined case above).
     */
    int delta = length - 1;
    if (delta > 0 && !BumpStack(f, delta))
        THROWV(false);
    f.regs.sp += delta;

    /* Steps 7-8. */
    if (!GetElements(cx, aobj, length, f.regs.sp - length))
        THROWV(false);

    f.u.call.dynamicArgc = length;
    return true;
}
1150 :
/*
 * Compile a stub that checks the inferred types of |this| and each formal
 * argument of the active frame's script, then patch the JIT's argument-check
 * jump to run it. Values not covered by the recorded type sets jump to the
 * generic argsCheckStub; matching values fall through into the script body.
 * Failures here are non-fatal: on OOM the stub is simply not (re)installed.
 */
void
ic::GenerateArgumentCheckStub(VMFrame &f)
{
    JS_ASSERT(f.cx->typeInferenceEnabled());

    JITScript *jit = f.jit();
    StackFrame *fp = f.fp();
    JSFunction *fun = fp->fun();
    JSScript *script = fun->script();

    /* Only one argument-check stub is kept per JITScript; drop any old one. */
    if (jit->argsCheckPool)
        jit->resetArgsCheck();

    Assembler masm;
    Vector<Jump> mismatches(f.cx);

    /* Constructing frames get no |this| check here. */
    if (!f.fp()->isConstructing()) {
        types::TypeSet *types = types::TypeScript::ThisTypes(script);
        Address address(JSFrameReg, StackFrame::offsetOfThis(fun));
        if (!masm.generateTypeCheck(f.cx, address, types, &mismatches))
            return;
    }

    /* Emit one type check per formal argument slot. */
    for (unsigned i = 0; i < fun->nargs; i++) {
        types::TypeSet *types = types::TypeScript::ArgTypes(script, i);
        Address address(JSFrameReg, StackFrame::offsetOfFormalArg(fun, i));
        if (!masm.generateTypeCheck(f.cx, address, types, &mismatches))
            return;
    }

    /* All checks passed: jump over the generic check into the script. */
    Jump done = masm.jump();

    LinkerHelper linker(masm, JSC::METHOD_CODE);
    JSC::ExecutablePool *ep = linker.init(f.cx);
    if (!ep)
        return;
    jit->argsCheckPool = ep;

    /* Undo the pool installation if the code landed outside the chunk. */
    if (!linker.verifyRange(f.chunk())) {
        jit->resetArgsCheck();
        return;
    }

    /* Any type mismatch bails out to the generic argument check. */
    for (unsigned i = 0; i < mismatches.length(); i++)
        linker.link(mismatches[i], jit->argsCheckStub);
    linker.link(done, jit->argsCheckFallthrough);

    JSC::CodeLocationLabel cs = linker.finalize(f);

    JaegerSpew(JSpew_PICs, "generated ARGS CHECK stub %p (%lu bytes)\n",
               cs.executableAddress(), (unsigned long)masm.size());

    /* Redirect the script's argument-check jump to the freshly built stub. */
    Repatcher repatch(f.chunk());
    repatch.relink(jit->argsCheckJump, cs);
}
1206 :
1207 : void
1208 314 : JITScript::resetArgsCheck()
1209 : {
1210 314 : argsCheckPool->release();
1211 314 : argsCheckPool = NULL;
1212 :
1213 628 : Repatcher repatch(chunk(script->code));
1214 314 : repatch.relink(argsCheckJump, argsCheckStub);
1215 314 : }
1216 :
1217 : #endif /* JS_MONOIC */
1218 :
|