1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Mandelin <dmandelin@mozilla.com>
25 : *
26 : * Alternatively, the contents of this file may be used under the terms of
27 : * either of the GNU General Public License Version 2 or later (the "GPL"),
28 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 : * in which case the provisions of the GPL or the LGPL are applicable instead
30 : * of those above. If you wish to allow use of your version of this file only
31 : * under the terms of either the GPL or the LGPL, and not to allow others to
32 : * use your version of this file under the terms of the MPL, indicate your
33 : * decision by deleting the provisions above and replace them with the notice
34 : * and other provisions required by the GPL or the LGPL. If you do not delete
35 : * the provisions above, a recipient may use your version of this file under
36 : * the terms of any one of the MPL, the GPL or the LGPL.
37 : *
38 : * ***** END LICENSE BLOCK ***** */
39 : #include "PolyIC.h"
40 : #include "StubCalls.h"
41 : #include "CodeGenIncludes.h"
42 : #include "StubCalls-inl.h"
43 : #include "BaseCompiler.h"
44 : #include "assembler/assembler/LinkBuffer.h"
45 : #include "TypedArrayIC.h"
46 : #include "jsscope.h"
47 : #include "jsnum.h"
48 : #include "jstypedarray.h"
49 : #include "jsatominlines.h"
50 : #include "jsobjinlines.h"
51 : #include "jsscopeinlines.h"
52 : #include "jsinterpinlines.h"
53 : #include "jsautooplen.h"
54 :
55 : #include "vm/ScopeObject-inl.h"
56 : #include "vm/StringObject-inl.h"
57 :
58 : #if defined JS_POLYIC
59 :
60 : using namespace js;
61 : using namespace js::mjit;
62 : using namespace js::mjit::ic;
63 :
64 : typedef JSC::FunctionPtr FunctionPtr;
65 : typedef JSC::MacroAssembler::RegisterID RegisterID;
66 : typedef JSC::MacroAssembler::Jump Jump;
67 : typedef JSC::MacroAssembler::Imm32 Imm32;
68 :
69 : /* Rough over-estimate of how much memory we need to unprotect. */
70 : static const uint32_t INLINE_PATH_LENGTH = 64;
71 :
72 : // Helper class to simplify LinkBuffer usage in PIC stub generators.
73 : // This guarantees correct OOM and refcount handling for buffers while they
74 : // are instantiated and rooted.
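//
// Typical usage, as in the stub generators below (jump names illustrative):
//
//     PICLinker buffer(masm, pic);
//     if (!buffer.init(cx))
//         return error();
//     if (!buffer.verifyRange(f.chunk()))
//         return disable("code memory is out of range");
//     buffer.link(shapeGuard, pic.slowPathStart);
//     buffer.link(done, pic.fastPathRejoin);
//     CodeLocationLabel cs = buffer.finalize(f);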
75 : class PICLinker : public LinkerHelper
76 47565 : {
77 : ic::BasePolyIC &ic;
78 :
79 : public:
80 47565 : PICLinker(Assembler &masm, ic::BasePolyIC &ic)
81 47565 : : LinkerHelper(masm, JSC::METHOD_CODE), ic(ic)
82 47565 : { }
83 :
84 47565 : bool init(JSContext *cx) {
85 47565 : JSC::ExecutablePool *pool = LinkerHelper::init(cx);
86 47565 : if (!pool)
87 0 : return false;
88 47565 : if (!ic.addPool(cx, pool)) {
89 0 : pool->release();
90 0 : js_ReportOutOfMemory(cx);
91 0 : return false;
92 : }
93 47565 : return true;
94 : }
95 : };
96 :
97 : class PICStubCompiler : public BaseCompiler
98 : {
99 : protected:
100 : const char *type;
101 : VMFrame &f;
102 : JSScript *script;
103 : ic::PICInfo &pic;
104 : void *stub;
105 : uint64_t gcNumber;
106 :
107 : public:
108 : bool canCallHook;
109 :
110 62683 : PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
111 : : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub),
112 62683 : gcNumber(f.cx->runtime->gcNumber), canCallHook(pic.canCallHook)
113 62683 : { }
114 :
115 0 : LookupStatus error() {
116 : /*
117 : * N.B. Do not try to disable the IC here; we do not want to guard on
118 : * whether the IC has been recompiled when propagating errors.
119 : */
120 0 : return Lookup_Error;
121 : }
122 :
123 0 : LookupStatus error(JSContext *cx) {
124 0 : return error();
125 : }
126 :
127 9849 : LookupStatus disable(const char *reason) {
128 9849 : return disable(f.cx, reason);
129 : }
130 :
131 9929 : LookupStatus disable(JSContext *cx, const char *reason) {
132 9929 : return pic.disable(f, reason, stub);
133 : }
134 :
135 2547 : LookupStatus disable(VMFrame &f, const char *reason) {
136 2547 : return pic.disable(f, reason, stub);
137 : }
138 :
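/*
 * The property lookups performed while updating a PIC can trigger a GC.
 * Callers check hadGC() afterwards and return Lookup_Uncacheable rather
 * than baking possibly-stale pointers into a new stub.
 */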
139 27090 : bool hadGC() {
140 27090 : return gcNumber != f.cx->runtime->gcNumber;
141 : }
142 :
143 : protected:
144 30465 : void spew(const char *event, const char *op) {
145 : #ifdef JS_METHODJIT_SPEW
146 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
147 30465 : type, event, op, script->filename, CurrentLine(cx));
148 : #endif
149 30465 : }
150 : };
151 :
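/*
 * Emit guards that the prototype chain from |obj| up to (but not including)
 * |holder| has not been mutated. Only objects whose prototype can change
 * without a shape change (hasUncacheableProto) need an explicit check, on
 * either the type's |proto| field or the type pointer itself; failed guards
 * are appended to |mismatches| for the caller to link to its slow path.
 */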
152 : static bool
153 9237 : GeneratePrototypeGuards(JSContext *cx, Vector<JSC::MacroAssembler::Jump,8> &mismatches, Assembler &masm,
154 : JSObject *obj, JSObject *holder,
155 : JSC::MacroAssembler::RegisterID objReg,
156 : JSC::MacroAssembler::RegisterID scratchReg)
157 : {
158 : typedef JSC::MacroAssembler::Address Address;
159 : typedef JSC::MacroAssembler::AbsoluteAddress AbsoluteAddress;
160 : typedef JSC::MacroAssembler::ImmPtr ImmPtr;
161 : typedef JSC::MacroAssembler::Jump Jump;
162 :
163 9237 : if (obj->hasUncacheableProto()) {
164 4 : masm.loadPtr(Address(objReg, JSObject::offsetOfType()), scratchReg);
165 : Jump j = masm.branchPtr(Assembler::NotEqual,
166 : Address(scratchReg, offsetof(types::TypeObject, proto)),
167 4 : ImmPtr(obj->getProto()));
168 4 : if (!mismatches.append(j))
169 0 : return false;
170 : }
171 :
172 9237 : JSObject *pobj = obj->getProto();
173 22604 : while (pobj != holder) {
174 4130 : if (pobj->hasUncacheableProto()) {
175 1231 : Jump j;
176 1231 : if (pobj->hasSingletonType()) {
177 0 : types::TypeObject *type = pobj->getType(cx);
178 : j = masm.branchPtr(Assembler::NotEqual,
179 : AbsoluteAddress(&type->proto),
180 0 : ImmPtr(pobj->getProto()),
181 0 : scratchReg);
182 : } else {
183 : j = masm.branchPtr(Assembler::NotEqual,
184 1231 : AbsoluteAddress(pobj->addressOfType()),
185 1231 : ImmPtr(pobj->type()),
186 2462 : scratchReg);
187 : }
188 1231 : if (!mismatches.append(j))
189 0 : return false;
190 : }
191 4130 : pobj = pobj->getProto();
192 : }
193 :
194 9237 : return true;
195 : }
196 :
197 : class SetPropCompiler : public PICStubCompiler
198 : {
199 : JSObject *obj;
200 : PropertyName *name;
201 : int lastStubSecondShapeGuard;
202 :
203 : public:
204 4900 : SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
205 : VoidStubPIC stub)
206 : : PICStubCompiler("setprop", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
207 4900 : obj(obj), name(name), lastStubSecondShapeGuard(pic.secondShapeGuard)
208 4900 : { }
209 :
210 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
211 : {
212 : SetPropLabels &labels = pic.setPropLabels();
213 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr));
214 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
215 : NULL);
216 : repatcher.relink(labels.getInlineShapeJump(pic.fastPathStart.labelAtOffset(pic.shapeGuard)),
217 : pic.slowPathStart);
218 :
219 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
220 : repatcher.relink(pic.slowPathCall, target);
221 : }
222 :
223 1485 : LookupStatus patchInline(const Shape *shape)
224 : {
225 1485 : JS_ASSERT(!pic.inlinePathPatched);
226 1485 : JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
227 :
228 2970 : Repatcher repatcher(f.chunk());
229 1485 : SetPropLabels &labels = pic.setPropLabels();
230 :
231 : int32_t offset;
232 1485 : if (obj->isFixedSlot(shape->slot())) {
233 1069 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr);
234 1069 : repatcher.repatchLoadPtrToLEA(istr);
235 :
236 : //
237 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
238 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
239 : //
240 : // Because the offset is wrong, it's necessary to correct it
241 : // below.
242 : //
243 1069 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
244 1069 : int32_t(JSObject::offsetOfSlots());
245 1069 : JS_ASSERT(diff != 0);
246 1069 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
247 : } else {
248 416 : offset = obj->dynamicSlotIndex(shape->slot()) * sizeof(Value);
249 : }
250 :
251 1485 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
252 2970 : obj->lastProperty());
253 1485 : repatcher.patchAddressOffsetForValueStore(labels.getInlineValueStore(pic.fastPathRejoin),
254 2970 : offset, pic.u.vr.isTypeKnown());
255 :
256 1485 : pic.inlinePathPatched = true;
257 :
258 1485 : return Lookup_Cacheable;
259 : }
260 :
261 2582 : int getLastStubSecondShapeGuard() const {
262 2582 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
263 : }
264 :
265 2582 : void patchPreviousToHere(CodeLocationLabel cs)
266 : {
267 5164 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
268 2582 : CodeLocationLabel label = pic.lastPathStart();
269 :
270 : // Patch either the inline fast path or a generated stub. The stub
271 : // omits the prefix of the inline fast path that loads the shape, so
272 : // the offsets are different.
273 2582 : if (pic.stubsGenerated) {
274 499 : repatcher.relink(pic.setPropLabels().getStubShapeJump(label), cs);
275 : } else {
276 2083 : CodeLocationLabel shapeGuard = label.labelAtOffset(pic.shapeGuard);
277 2083 : repatcher.relink(pic.setPropLabels().getInlineShapeJump(shapeGuard), cs);
278 : }
279 2582 : if (int secondGuardOffset = getLastStubSecondShapeGuard())
280 294 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
281 2582 : }
282 :
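/*
 * Generate a shape-guarded stub for this SETPROP. When |adding|, the stub
 * also guards the prototype chain, stores the value into the new slot and
 * writes the object's new shape; otherwise it stores into the existing
 * slot, or into the frame/reserved slot for Call object args and vars.
 */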
283 2582 : LookupStatus generateStub(const Shape *initialShape, const Shape *shape, bool adding)
284 : {
285 2582 : if (hadGC())
286 0 : return Lookup_Uncacheable;
287 :
288 : /* Exits to the slow path. */
289 5164 : Vector<Jump, 8> slowExits(cx);
290 5164 : Vector<Jump, 8> otherGuards(cx);
291 :
292 5164 : Assembler masm;
293 :
294 : // Shape guard.
295 2582 : if (pic.shapeNeedsRemat()) {
296 230 : masm.loadShape(pic.objReg, pic.shapeReg);
297 230 : pic.shapeRegHasBaseShape = true;
298 : }
299 :
300 2582 : Label start = masm.label();
301 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
302 2582 : ImmPtr(initialShape));
303 :
304 2582 : Label stubShapeJumpLabel = masm.label();
305 :
306 2582 : pic.setPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
307 :
308 2582 : if (pic.typeMonitored) {
309 : /*
310 : * Inference does not know the type of the object being updated,
311 : * and we need to make sure that the updateMonitoredTypes() call
312 : * covers this stub, i.e. we will be writing to an object with the
313 : * same type. Add a type guard in addition to the shape guard.
314 : * Note: it is possible that this test gets a spurious hit if the
315 : * object has a lazy type, but in such cases no analyzed scripts
316 : * depend on the object and we will reconstruct its type from the
317 : * value being written here.
318 : */
319 : Jump typeGuard = masm.branchPtr(Assembler::NotEqual,
320 662 : Address(pic.objReg, JSObject::offsetOfType()),
321 1324 : ImmPtr(obj->getType(cx)));
322 662 : if (!otherGuards.append(typeGuard))
323 0 : return error();
324 : }
325 :
326 2582 : JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
327 :
328 2582 : MaybeJump skipOver;
329 :
330 2582 : if (adding) {
331 1244 : JS_ASSERT(shape->hasSlot());
332 1244 : pic.shapeRegHasBaseShape = false;
333 :
334 1244 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, NULL,
335 1244 : pic.objReg, pic.shapeReg)) {
336 0 : return error();
337 : }
338 :
339 : /* Emit shape guards for the object's prototype chain. */
340 1244 : JSObject *proto = obj->getProto();
341 1244 : RegisterID lastReg = pic.objReg;
342 4934 : while (proto) {
343 2446 : masm.loadPtr(Address(lastReg, JSObject::offsetOfType()), pic.shapeReg);
344 2446 : masm.loadPtr(Address(pic.shapeReg, offsetof(types::TypeObject, proto)), pic.shapeReg);
345 2446 : Jump protoGuard = masm.guardShape(pic.shapeReg, proto);
346 2446 : if (!otherGuards.append(protoGuard))
347 0 : return error();
348 :
349 2446 : proto = proto->getProto();
350 2446 : lastReg = pic.shapeReg;
351 : }
352 :
353 1244 : if (obj->isFixedSlot(shape->slot())) {
354 : Address address(pic.objReg,
355 1053 : JSObject::getFixedSlotOffset(shape->slot()));
356 1053 : masm.storeValue(pic.u.vr, address);
357 : } else {
358 : /*
359 : * Note: the guard on the initial shape determines the object's
360 : * number of fixed slots and slot span, which in turn determine
361 : * the number of dynamic slots allocated for the object.
362 : * We don't need to check capacity here.
363 : */
364 191 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfSlots()), pic.shapeReg);
365 191 : Address address(pic.shapeReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
366 191 : masm.storeValue(pic.u.vr, address);
367 : }
368 :
369 1244 : JS_ASSERT(shape == obj->lastProperty());
370 1244 : JS_ASSERT(shape != initialShape);
371 :
372 : /* Write the object's new shape. */
373 1244 : masm.storePtr(ImmPtr(shape), Address(pic.objReg, JSObject::offsetOfShape()));
374 1338 : } else if (shape->hasDefaultSetter()) {
375 827 : Address address = masm.objPropAddress(obj, pic.objReg, shape->slot());
376 827 : masm.storeValue(pic.u.vr, address);
377 : } else {
378 : // \ / In general, two function objects with different JSFunctions
379 : // # can have the same shape, thus we must not rely on the identity
380 : // >--+--< of 'fun' remaining the same. However, since:
381 : // ||| 1. the shape includes all arguments and locals and their setters
382 : // \\ V and getters, and
383 : // \===/ 2. arguments and locals have different getters
384 : // then we can rely on fun->nargs remaining invariant.
385 511 : JSFunction *fun = obj->asCall().getCalleeFunction();
386 511 : uint16_t slot = uint16_t(shape->shortid());
387 :
388 : /* Guard that the call object has a frame. */
389 511 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
390 511 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
391 :
392 : {
393 511 : Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
394 31 : ? StackFrame::offsetOfFormalArg(fun, slot)
395 542 : : StackFrame::offsetOfFixed(slot));
396 511 : masm.storeValue(pic.u.vr, addr);
397 511 : skipOver = masm.jump();
398 : }
399 :
400 511 : escapedFrame.linkTo(masm.label(), &masm);
401 : {
402 511 : if (shape->setterOp() == CallObject::setVarOp)
403 480 : slot += fun->nargs;
404 :
405 511 : slot += CallObject::RESERVED_SLOTS;
406 511 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
407 :
408 511 : masm.storeValue(pic.u.vr, address);
409 : }
410 :
411 511 : pic.shapeRegHasBaseShape = false;
412 : }
413 :
414 2582 : Jump done = masm.jump();
415 :
416 : // Common all secondary guards into one big exit.
417 2582 : MaybeJump slowExit;
418 2582 : if (otherGuards.length()) {
419 5617 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
420 3814 : pj->linkTo(masm.label(), &masm);
421 1803 : slowExit = masm.jump();
422 1803 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
423 : } else {
424 779 : pic.secondShapeGuard = 0;
425 : }
426 :
427 2582 : pic.updatePCCounters(f, masm);
428 :
429 5164 : PICLinker buffer(masm, pic);
430 2582 : if (!buffer.init(cx))
431 0 : return error();
432 :
433 5164 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
434 2582 : !buffer.verifyRange(f.chunk())) {
435 0 : return disable("code memory is out of range");
436 : }
437 :
438 2582 : buffer.link(shapeGuard, pic.slowPathStart);
439 2582 : if (slowExit.isSet())
440 1803 : buffer.link(slowExit.get(), pic.slowPathStart);
441 2582 : for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
442 0 : buffer.link(*pj, pic.slowPathStart);
443 2582 : buffer.link(done, pic.fastPathRejoin);
444 2582 : if (skipOver.isSet())
445 511 : buffer.link(skipOver.get(), pic.fastPathRejoin);
446 2582 : CodeLocationLabel cs = buffer.finalize(f);
447 : JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
448 : (void*)&pic,
449 : (void*)initialShape,
450 : pic.stubsGenerated,
451 2582 : cs.executableAddress());
452 :
453 : // This function can patch either the inline fast path or a generated
454 : // stub. The stub omits the prefix of the inline fast path that loads
455 : // the shape, so the offsets are different.
456 2582 : patchPreviousToHere(cs);
457 :
458 2582 : pic.stubsGenerated++;
459 2582 : pic.updateLastPath(buffer, start);
460 :
461 2582 : if (pic.stubsGenerated == MAX_PIC_STUBS)
462 6 : disable("max stubs reached");
463 :
464 2582 : return Lookup_Cacheable;
465 : }
466 :
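/*
 * For type-monitored writes, record that the RHS value's possible types
 * flow into the property's type set, so inference stays in sync with
 * stores made through this PIC.
 */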
467 515 : bool updateMonitoredTypes()
468 : {
469 515 : JS_ASSERT(pic.typeMonitored);
470 :
471 515 : RecompilationMonitor monitor(cx);
472 515 : jsid id = ATOM_TO_JSID(name);
473 :
474 515 : if (!obj->getType(cx)->unknownProperties()) {
475 946 : types::AutoEnterTypeInference enter(cx);
476 473 : types::TypeSet *types = obj->getType(cx)->getProperty(cx, types::MakeTypeId(cx, id), true);
477 473 : if (!types)
478 0 : return false;
479 946 : pic.rhsTypes->addSubset(cx, types);
480 : }
481 :
482 515 : return !monitor.recompiled();
483 : }
484 :
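/*
 * Decide how to cache this property write: bail out to the slow path for
 * hooks, watchpoints and other exotic cases, treat writable slot-ful
 * properties found only on a prototype as additions to |obj|, and
 * otherwise either patch the inline fast path or attach a new stub.
 */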
485 4900 : LookupStatus update()
486 : {
487 4900 : JS_ASSERT(pic.hit);
488 :
489 4900 : if (obj->isDenseArray())
490 67 : return disable("dense array");
491 4833 : if (!obj->isNative())
492 148 : return disable("non-native");
493 4685 : if (obj->watched())
494 47 : return disable("watchpoint");
495 :
496 4638 : Class *clasp = obj->getClass();
497 :
498 4638 : if (clasp->setProperty != JS_StrictPropertyStub)
499 0 : return disable("set property hook");
500 4638 : if (clasp->ops.lookupProperty)
501 4 : return disable("ops lookup property hook");
502 4634 : if (clasp->ops.setProperty)
503 0 : return disable("ops set property hook");
504 :
505 : JSObject *holder;
506 4634 : JSProperty *prop = NULL;
507 :
508 : /* lookupProperty can trigger recompilations. */
509 4634 : RecompilationMonitor monitor(cx);
510 4634 : if (!obj->lookupProperty(cx, name, &holder, &prop))
511 0 : return error();
512 4634 : if (monitor.recompiled())
513 0 : return Lookup_Uncacheable;
514 :
515 : /* If the property exists but is on a prototype, treat as addprop. */
516 4634 : if (prop && holder != obj) {
517 795 : const Shape *shape = (const Shape *) prop;
518 :
519 795 : if (!holder->isNative())
520 0 : return disable("non-native holder");
521 :
522 795 : if (!shape->writable())
523 0 : return disable("readonly");
524 795 : if (!shape->hasDefaultSetter() || !shape->hasDefaultGetter())
525 406 : return disable("getter/setter in prototype");
526 389 : if (shape->hasShortID())
527 0 : return disable("short ID in prototype");
528 389 : if (!shape->hasSlot())
529 0 : return disable("missing slot");
530 :
531 389 : prop = NULL;
532 : }
533 :
534 4228 : if (!prop) {
535 : /* Adding a property to the object. */
536 1368 : if (obj->isDelegate())
537 8 : return disable("delegate");
538 1360 : if (!obj->isExtensible())
539 0 : return disable("not extensible");
540 :
541 1360 : if (clasp->addProperty != JS_PropertyStub)
542 4 : return disable("add property hook");
543 1356 : if (clasp->ops.defineProperty)
544 0 : return disable("ops define property hook");
545 :
546 : /*
547 : * Don't add properties for SETNAME, which requires checks in
548 : * strict mode code.
549 : */
550 1356 : if (JSOp(*f.pc()) == JSOP_SETNAME)
551 2 : return disable("add property under SETNAME");
552 :
553 : /*
554 : * When adding a property we need to check shapes along the entire
555 : * prototype chain to watch for an added setter.
556 : */
557 1354 : JSObject *proto = obj;
558 6713 : while (proto) {
559 4009 : if (!proto->isNative())
560 4 : return disable("non-native proto");
561 4005 : proto = proto->getProto();
562 : }
563 :
564 1350 : const Shape *initialShape = obj->lastProperty();
565 1350 : uint32_t slots = obj->numDynamicSlots();
566 :
567 1350 : unsigned flags = 0;
568 1350 : PropertyOp getter = clasp->getProperty;
569 :
570 : /*
571 : * Define the property but do not set its value yet; the generated
572 : * stub (or the slow path, if we bail out below) performs the store.
574 : */
575 : const Shape *shape =
576 : obj->putProperty(cx, name, getter, clasp->setProperty,
577 1350 : SHAPE_INVALID_SLOT, JSPROP_ENUMERATE, flags, 0);
578 1350 : if (!shape)
579 0 : return error();
580 :
581 1350 : if (monitor.recompiled())
582 0 : return Lookup_Uncacheable;
583 :
584 : /*
585 : * Test after calling putProperty since it can switch obj into
586 : * dictionary mode, specifically if the shape tree ancestor line
587 : * exceeds PropertyTree::MAX_HEIGHT.
588 : */
589 1350 : if (obj->inDictionaryMode())
590 2 : return disable("dictionary");
591 :
592 1348 : if (!shape->hasDefaultSetter())
593 0 : return disable("adding non-default setter");
594 1348 : if (!shape->hasSlot())
595 0 : return disable("adding invalid slot");
596 :
597 : /*
598 : * Watch for cases where the object reallocated its slots when
599 : * adding the property, and disable the PIC. Otherwise we will
600 : * keep generating identical PICs as side exits are taken on the
601 : * capacity checks. Alternatively, we could avoid the disable
602 : * and just not generate a stub in case there are multiple shapes
603 : * that can flow here which don't all require reallocation.
604 : * Doing this would cause us to walk down this same update path
605 : * every time a reallocation is needed, however, which will
606 : * usually be a slowdown even if there *are* other shapes that
607 : * don't realloc.
608 : */
609 1348 : if (obj->numDynamicSlots() != slots)
610 104 : return disable("insufficient slot capacity");
611 :
612 1244 : if (pic.typeMonitored && !updateMonitoredTypes())
613 0 : return Lookup_Uncacheable;
614 :
615 1244 : return generateStub(initialShape, shape, true);
616 : }
617 :
618 2860 : const Shape *shape = (const Shape *) prop;
619 2860 : if (!shape->writable())
620 2 : return disable("readonly");
621 2858 : if (shape->hasDefaultSetter()) {
622 2312 : if (!shape->hasSlot())
623 0 : return disable("invalid slot");
624 2312 : if (pic.typeMonitored && !updateMonitoredTypes())
625 0 : return Lookup_Uncacheable;
626 : } else {
627 546 : if (shape->hasSetterValue())
628 11 : return disable("scripted setter");
629 1039 : if (shape->setterOp() != CallObject::setArgOp &&
630 504 : shape->setterOp() != CallObject::setVarOp) {
631 24 : return disable("setter");
632 : }
633 511 : JS_ASSERT(obj->isCall());
634 511 : if (pic.typeMonitored) {
635 : /*
636 : * Update the types of the locals/args in the script according
637 : * to the possible RHS types of the assignment. Note that the
638 : * shape guards we have performed do not by themselves
639 : * guarantee that future call objects hit will be for the same
640 : * script. We also depend on the fact that the scope chains hit
641 : * at the same bytecode are all isomorphic: the same scripts,
642 : * in the same order (though the properties on their call
643 : * objects may differ due to eval(), DEFFUN, etc.).
644 : */
645 147 : RecompilationMonitor monitor(cx);
646 147 : JSFunction *fun = obj->asCall().getCalleeFunction();
647 147 : JSScript *script = fun->script();
648 147 : uint16_t slot = uint16_t(shape->shortid());
649 147 : if (!script->ensureHasTypes(cx))
650 0 : return error();
651 : {
652 294 : types::AutoEnterTypeInference enter(cx);
653 147 : if (shape->setterOp() == CallObject::setArgOp)
654 9 : pic.rhsTypes->addSubset(cx, types::TypeScript::ArgTypes(script, slot));
655 : else
656 138 : pic.rhsTypes->addSubset(cx, types::TypeScript::LocalTypes(script, slot));
657 : }
658 147 : if (monitor.recompiled())
659 0 : return Lookup_Uncacheable;
660 : }
661 : }
662 :
663 2823 : JS_ASSERT(obj == holder);
664 8613 : if (!pic.inlinePathPatched &&
665 2408 : shape->hasDefaultSetter() &&
666 1897 : !pic.typeMonitored &&
667 1485 : !obj->isDenseArray()) {
668 1485 : return patchInline(shape);
669 : }
670 :
671 1338 : return generateStub(obj->lastProperty(), shape, false);
672 : }
673 : };
674 :
675 : static bool
676 54988 : IsCacheableProtoChain(JSObject *obj, JSObject *holder)
677 : {
678 119254 : while (obj != holder) {
679 : /*
680 : * We cannot assume that we find the holder object on the prototype
681 : * chain and must check for null proto. The prototype chain can be
682 : * altered during the lookupProperty call.
683 : */
684 9300 : JSObject *proto = obj->getProto();
685 9300 : if (!proto || !proto->isNative())
686 22 : return false;
687 9278 : obj = proto;
688 : }
689 54966 : return true;
690 : }
691 :
692 : template <typename IC>
693 : struct GetPropHelper {
694 : // These fields are set in the constructor and describe a property lookup.
695 : JSContext *cx;
696 : JSObject *obj;
697 : PropertyName *name;
698 : IC &ic;
699 : VMFrame &f;
700 :
701 : // These fields are set by |bind| and |lookup|. After a call to either
702 : // function, these are set exactly as they are in JSOP_GETPROP or JSOP_NAME.
703 : JSObject *aobj;
704 : JSObject *holder;
705 : JSProperty *prop;
706 :
707 : // This field is set by |bind| and |lookup| only if they returned
708 : // Lookup_Cacheable, otherwise it is NULL.
709 : const Shape *shape;
710 :
711 55982 : GetPropHelper(JSContext *cx, JSObject *obj, PropertyName *name, IC &ic, VMFrame &f)
712 55982 : : cx(cx), obj(obj), name(name), ic(ic), f(f), holder(NULL), prop(NULL), shape(NULL)
713 55982 : { }
714 :
715 : public:
716 22938 : LookupStatus bind() {
717 22938 : RecompilationMonitor monitor(cx);
718 22938 : JSObject *scopeChain = cx->stack.currentScriptedScopeChain();
719 22938 : if (js_CodeSpec[*f.pc()].format & JOF_GNAME)
720 0 : scopeChain = &scopeChain->global();
721 22938 : if (!FindProperty(cx, name, scopeChain, &obj, &holder, &prop))
722 0 : return ic.error(cx);
723 22938 : if (monitor.recompiled())
724 2 : return Lookup_Uncacheable;
725 22936 : if (!prop)
726 76 : return ic.disable(cx, "lookup failed");
727 22860 : if (!obj->isNative())
728 0 : return ic.disable(cx, "non-native");
729 22860 : if (!IsCacheableProtoChain(obj, holder))
730 4 : return ic.disable(cx, "non-native holder");
731 22856 : shape = (const Shape *)prop;
732 22856 : return Lookup_Cacheable;
733 : }
734 :
735 33044 : LookupStatus lookup() {
736 33044 : JSObject *aobj = js_GetProtoIfDenseArray(obj);
737 33044 : if (!aobj->isNative())
738 749 : return ic.disable(f, "non-native");
739 :
740 32295 : RecompilationMonitor monitor(cx);
741 32295 : if (!aobj->lookupProperty(cx, name, &holder, &prop))
742 0 : return ic.error(cx);
743 32295 : if (monitor.recompiled())
744 0 : return Lookup_Uncacheable;
745 :
746 32295 : if (!prop)
747 167 : return ic.disable(f, "lookup failed");
748 32128 : if (!IsCacheableProtoChain(obj, holder))
749 18 : return ic.disable(f, "non-native holder");
750 32110 : shape = (const Shape *)prop;
751 32110 : return Lookup_Cacheable;
752 : }
753 :
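// Check that the shape found by bind() or lookup() can be read from a
// stub: either a default getter with a slot, or a getter hook we are
// permitted to call (and not from an inlined frame).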
754 47127 : LookupStatus testForGet() {
755 47127 : if (!shape->hasDefaultGetter()) {
756 1808 : if (shape->hasGetterValue())
757 1194 : return ic.disable(f, "getter value shape");
758 614 : if (shape->hasSlot() && holder != obj)
759 0 : return ic.disable(f, "slotful getter hook through prototype");
760 614 : if (!ic.canCallHook)
761 450 : return ic.disable(f, "can't call getter hook");
762 164 : if (f.regs.inlined()) {
763 : /*
764 : * As with native stubs, getter hook stubs can't be
765 : * generated for inline frames. Mark the inner function
766 : * as uninlineable and recompile.
767 : */
768 0 : f.script()->uninlineable = true;
769 0 : MarkTypeObjectFlags(cx, f.script()->function(),
770 : types::OBJECT_FLAG_UNINLINEABLE);
771 0 : return Lookup_Uncacheable;
772 : }
773 45319 : } else if (!shape->hasSlot()) {
774 29 : return ic.disable(f, "no slot");
775 : }
776 :
777 45454 : return Lookup_Cacheable;
778 : }
779 :
780 32789 : LookupStatus lookupAndTest() {
781 32789 : LookupStatus status = lookup();
782 32789 : if (status != Lookup_Cacheable)
783 934 : return status;
784 31855 : return testForGet();
785 : }
786 : };
787 :
788 : class GetPropCompiler : public PICStubCompiler
789 : {
790 : JSObject *obj;
791 : PropertyName *name;
792 : int lastStubSecondShapeGuard;
793 :
794 : public:
795 32186 : GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
796 : VoidStubPIC stub)
797 : : PICStubCompiler("getprop", f, script, pic,
798 : JS_FUNC_TO_DATA_PTR(void *, stub)),
799 : obj(obj),
800 : name(name),
801 32186 : lastStubSecondShapeGuard(pic.secondShapeGuard)
802 32186 : { }
803 :
804 14849 : int getLastStubSecondShapeGuard() const {
805 14849 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
806 : }
807 :
808 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
809 : {
810 : GetPropLabels &labels = pic.getPropLabels();
811 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin));
812 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), NULL);
813 : repatcher.relink(labels.getInlineShapeJump(pic.getFastShapeGuard()), pic.slowPathStart);
814 :
815 : if (pic.hasTypeCheck()) {
816 : /* TODO: combine pic.u.get into ICLabels? */
817 : repatcher.relink(labels.getInlineTypeJump(pic.fastPathStart), pic.getSlowTypeCheck());
818 : }
819 :
820 : JS_ASSERT(pic.kind == ic::PICInfo::GET);
821 :
822 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::GetProp));
823 : repatcher.relink(pic.slowPathCall, target);
824 : }
825 :
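/*
 * Build a stub that reads array.length straight from the elements header.
 * Dense and slow arrays are both accepted; anything else, or a length that
 * does not fit in an int32 jsval, exits to the slow path.
 */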
826 5199 : LookupStatus generateArrayLengthStub()
827 : {
828 10398 : Assembler masm;
829 :
830 5199 : masm.loadObjClass(pic.objReg, pic.shapeReg);
831 5199 : Jump isDense = masm.testClass(Assembler::Equal, pic.shapeReg, &ArrayClass);
832 5199 : Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &SlowArrayClass);
833 :
834 5199 : isDense.linkTo(masm.label(), &masm);
835 5199 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfElements()), pic.objReg);
836 5199 : masm.load32(Address(pic.objReg, ObjectElements::offsetOfLength()), pic.objReg);
837 5199 : Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
838 5199 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
839 5199 : Jump done = masm.jump();
840 :
841 5199 : pic.updatePCCounters(f, masm);
842 :
843 10398 : PICLinker buffer(masm, pic);
844 5199 : if (!buffer.init(cx))
845 0 : return error();
846 :
847 10398 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
848 5199 : !buffer.verifyRange(f.chunk())) {
849 0 : return disable("code memory is out of range");
850 : }
851 :
852 5199 : buffer.link(notArray, pic.slowPathStart);
853 5199 : buffer.link(oob, pic.slowPathStart);
854 5199 : buffer.link(done, pic.fastPathRejoin);
855 :
856 5199 : CodeLocationLabel start = buffer.finalize(f);
857 : JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
858 5199 : start.executableAddress());
859 :
860 5199 : patchPreviousToHere(start);
861 :
862 5199 : disable("array length done");
863 :
864 5199 : return Lookup_Cacheable;
865 : }
866 :
867 69 : LookupStatus generateStringObjLengthStub()
868 : {
869 138 : Assembler masm;
870 :
871 69 : Jump notStringObj = masm.guardShape(pic.objReg, obj);
872 :
873 69 : masm.loadPayload(Address(pic.objReg, StringObject::getPrimitiveValueOffset()), pic.objReg);
874 69 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
875 69 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
876 69 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
877 69 : Jump done = masm.jump();
878 :
879 69 : pic.updatePCCounters(f, masm);
880 :
881 138 : PICLinker buffer(masm, pic);
882 69 : if (!buffer.init(cx))
883 0 : return error();
884 :
885 138 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
886 69 : !buffer.verifyRange(f.chunk())) {
887 0 : return disable("code memory is out of range");
888 : }
889 :
890 69 : buffer.link(notStringObj, pic.slowPathStart);
891 69 : buffer.link(done, pic.fastPathRejoin);
892 :
893 69 : CodeLocationLabel start = buffer.finalize(f);
894 : JaegerSpew(JSpew_PICs, "generate string object length stub at %p\n",
895 69 : start.executableAddress());
896 :
897 69 : patchPreviousToHere(start);
898 :
899 69 : disable("string object length done");
900 :
901 69 : return Lookup_Cacheable;
902 : }
903 :
904 879 : LookupStatus generateStringPropertyStub()
905 : {
906 879 : if (!f.fp()->script()->hasGlobal())
907 36 : return disable("String.prototype without compile-and-go global");
908 :
909 843 : RecompilationMonitor monitor(f.cx);
910 :
911 843 : JSObject *obj = f.fp()->scopeChain().global().getOrCreateStringPrototype(f.cx);
912 843 : if (!obj)
913 0 : return error();
914 :
915 843 : if (monitor.recompiled())
916 7 : return Lookup_Uncacheable;
917 :
918 836 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
919 836 : LookupStatus status = getprop.lookupAndTest();
920 836 : if (status != Lookup_Cacheable)
921 4 : return status;
922 832 : if (getprop.obj != getprop.holder)
923 2 : return disable("proto walk on String.prototype");
924 830 : if (!getprop.shape->hasDefaultGetter())
925 0 : return disable("getter hook on String.prototype");
926 830 : if (hadGC())
927 0 : return Lookup_Uncacheable;
928 :
929 1660 : Assembler masm;
930 :
931 : /* Only strings are allowed. */
932 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
933 830 : ImmType(JSVAL_TYPE_STRING));
934 :
935 : /*
936 : * Clobber objReg with String.prototype and do some PIC stuff. Well,
937 : * really this is now a MIC, except it won't ever be patched, so we
938 : * just disable the PIC at the end. :FIXME:? String.prototype probably
939 : * does not get random shape changes.
940 : */
941 830 : masm.move(ImmPtr(obj), pic.objReg);
942 830 : masm.loadShape(pic.objReg, pic.shapeReg);
943 : Jump shapeMismatch = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
944 830 : ImmPtr(obj->lastProperty()));
945 830 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
946 :
947 830 : Jump done = masm.jump();
948 :
949 830 : pic.updatePCCounters(f, masm);
950 :
951 1660 : PICLinker buffer(masm, pic);
952 830 : if (!buffer.init(cx))
953 0 : return error();
954 :
955 1660 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
956 830 : !buffer.verifyRange(f.chunk())) {
957 0 : return disable("code memory is out of range");
958 : }
959 :
960 830 : buffer.link(notString, pic.getSlowTypeCheck());
961 830 : buffer.link(shapeMismatch, pic.slowPathStart);
962 830 : buffer.link(done, pic.fastPathRejoin);
963 :
964 830 : CodeLocationLabel cs = buffer.finalize(f);
965 : JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
966 830 : cs.executableAddress());
967 :
968 : /* Patch the type check to jump here. */
969 830 : if (pic.hasTypeCheck()) {
970 1660 : Repatcher repatcher(f.chunk());
971 830 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), cs);
972 : }
973 :
974 : /* Disable the PIC so we don't keep generating stubs on the above shape mismatch. */
975 830 : disable("generated string call stub");
976 830 : return Lookup_Cacheable;
977 : }
978 :
979 121 : LookupStatus generateStringLengthStub()
980 : {
981 121 : JS_ASSERT(pic.hasTypeCheck());
982 :
983 242 : Assembler masm;
984 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
985 121 : ImmType(JSVAL_TYPE_STRING));
986 121 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
987 : // String length is guaranteed to be no more than 2**28, so the 32-bit operation is OK.
988 121 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
989 121 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
990 121 : Jump done = masm.jump();
991 :
992 121 : pic.updatePCCounters(f, masm);
993 :
994 242 : PICLinker buffer(masm, pic);
995 121 : if (!buffer.init(cx))
996 0 : return error();
997 :
998 242 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
999 121 : !buffer.verifyRange(f.chunk())) {
1000 0 : return disable("code memory is out of range");
1001 : }
1002 :
1003 121 : buffer.link(notString, pic.getSlowTypeCheck());
1004 121 : buffer.link(done, pic.fastPathRejoin);
1005 :
1006 121 : CodeLocationLabel start = buffer.finalize(f);
1007 : JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
1008 121 : start.executableAddress());
1009 :
1010 121 : if (pic.hasTypeCheck()) {
1011 242 : Repatcher repatcher(f.chunk());
1012 121 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), start);
1013 : }
1014 :
1015 121 : disable("generated string length stub");
1016 :
1017 121 : return Lookup_Cacheable;
1018 : }
1019 :
1020 14097 : LookupStatus patchInline(JSObject *holder, const Shape *shape)
1021 : {
1022 14097 : spew("patch", "inline");
1023 28194 : Repatcher repatcher(f.chunk());
1024 14097 : GetPropLabels &labels = pic.getPropLabels();
1025 :
1026 : int32_t offset;
1027 14097 : if (holder->isFixedSlot(shape->slot())) {
1028 8908 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin);
1029 8908 : repatcher.repatchLoadPtrToLEA(istr);
1030 :
1031 : //
1032 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
1033 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
1034 : //
1035 : // Because the offset is wrong, it's necessary to correct it
1036 : // below.
1037 : //
1038 8908 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
1039 8908 : int32_t(JSObject::offsetOfSlots());
1040 8908 : JS_ASSERT(diff != 0);
1041 8908 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
1042 : } else {
1043 5189 : offset = holder->dynamicSlotIndex(shape->slot()) * sizeof(Value);
1044 : }
1045 :
1046 14097 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), obj->lastProperty());
1047 14097 : repatcher.patchAddressOffsetForValueLoad(labels.getValueLoad(pic.fastPathRejoin), offset);
1048 :
1049 14097 : pic.inlinePathPatched = true;
1050 :
1051 14097 : return Lookup_Cacheable;
1052 : }
1053 :
1054 164 : void generateGetterStub(Assembler &masm, const Shape *shape,
1055 : Label start, Vector<Jump, 8> &shapeMismatches)
1056 : {
1057 : /*
1058 : * Getter hook needs to be called from the stub. The state is fully
1059 : * synced and no registers are live except the result registers.
1060 : */
1061 164 : JS_ASSERT(pic.canCallHook);
1062 164 : PropertyOp getter = shape->getterOp();
1063 :
1064 : masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER),
1065 164 : FrameAddress(offsetof(VMFrame, stubRejoin)));
1066 :
1067 164 : Registers tempRegs = Registers::tempCallRegMask();
1068 164 : if (tempRegs.hasReg(Registers::ClobberInCall))
1069 164 : tempRegs.takeReg(Registers::ClobberInCall);
1070 :
1071 : /* Get a register to hold obj while we set up the rest of the frame. */
1072 164 : RegisterID holdObjReg = pic.objReg;
1073 164 : if (tempRegs.hasReg(pic.objReg)) {
1074 142 : tempRegs.takeReg(pic.objReg);
1075 : } else {
1076 22 : holdObjReg = tempRegs.takeAnyReg().reg();
1077 22 : masm.move(pic.objReg, holdObjReg);
1078 : }
1079 :
1080 164 : RegisterID t0 = tempRegs.takeAnyReg().reg();
1081 164 : masm.bumpStubCount(f.script(), f.pc(), t0);
1082 :
1083 : /*
1084 : * Initialize vp, which is either a slot in the object (the holder,
1085 : * actually, which must equal the object here) or undefined.
1086 : * Use vp == sp (which for CALLPROP will actually be the original
1087 : * sp + 1), to avoid clobbering stack values.
1088 : */
1089 164 : int32_t vpOffset = (char *) f.regs.sp - (char *) f.fp();
1090 164 : if (shape->hasSlot()) {
1091 : masm.loadObjProp(obj, holdObjReg, shape,
1092 5 : Registers::ClobberInCall, t0);
1093 5 : masm.storeValueFromComponents(Registers::ClobberInCall, t0, Address(JSFrameReg, vpOffset));
1094 : } else {
1095 159 : masm.storeValue(UndefinedValue(), Address(JSFrameReg, vpOffset));
1096 : }
1097 :
1098 164 : int32_t initialFrameDepth = f.regs.sp - f.fp()->slots();
1099 164 : masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, initialFrameDepth);
1100 :
1101 : /* Grab cx. */
1102 : #ifdef JS_CPU_X86
1103 164 : RegisterID cxReg = tempRegs.takeAnyReg().reg();
1104 : #else
1105 : RegisterID cxReg = Registers::ArgReg0;
1106 : #endif
1107 164 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
1108 :
1109 : /* Grab vp. */
1110 164 : RegisterID vpReg = t0;
1111 164 : masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
1112 :
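/*
 * Invoke the PropertyOp as getter(cx, obj, id, vp); the arguments are
 * stored into the ABI call frame below in reverse order.
 */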
1113 164 : masm.restoreStackBase();
1114 164 : masm.setupABICall(Registers::NormalCall, 4);
1115 164 : masm.storeArg(3, vpReg);
1116 164 : masm.storeArg(2, ImmPtr((void *) JSID_BITS(shape->getUserId())));
1117 164 : masm.storeArg(1, holdObjReg);
1118 164 : masm.storeArg(0, cxReg);
1119 :
1120 164 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, getter), false);
1121 :
1122 164 : NativeStubLinker::FinalJump done;
1123 164 : if (!NativeStubEpilogue(f, masm, &done, 0, vpOffset, pic.shapeReg, pic.objReg))
1124 0 : return;
1125 328 : NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
1126 164 : if (!linker.init(f.cx))
1127 0 : THROW();
1128 :
1129 328 : if (!linker.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1130 164 : !linker.verifyRange(f.chunk())) {
1131 0 : disable("code memory is out of range");
1132 : return;
1133 : }
1134 :
1135 164 : linker.patchJump(pic.fastPathRejoin);
1136 :
1137 164 : linkerEpilogue(linker, start, shapeMismatches);
1138 : }
1139 :
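/*
 * Generate a property-read stub: guard the receiver's shape (dense arrays
 * are guarded on the shape field directly), add prototype and holder-shape
 * guards when the property lives on a prototype, then either call the
 * getter hook or load the slot's value.
 */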
1140 9581 : LookupStatus generateStub(JSObject *holder, const Shape *shape)
1141 : {
1142 19162 : Vector<Jump, 8> shapeMismatches(cx);
1143 :
1144 19162 : Assembler masm;
1145 :
1146 9581 : Label start;
1147 9581 : Jump shapeGuardJump;
1148 9581 : Jump argsLenGuard;
1149 :
1150 9581 : bool setStubShapeOffset = true;
1151 9581 : if (obj->isDenseArray()) {
1152 1760 : start = masm.label();
1153 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual,
1154 1760 : Address(pic.objReg, JSObject::offsetOfShape()),
1155 3520 : ImmPtr(obj->lastProperty()));
1156 :
1157 : /*
1158 : * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
1159 : * the IC is disabled after a dense array hit, so no patching can occur.
1160 : */
1161 : #ifndef JS_HAS_IC_LABELS
1162 1760 : setStubShapeOffset = false;
1163 : #endif
1164 : } else {
1165 7821 : if (pic.shapeNeedsRemat()) {
1166 1259 : masm.loadShape(pic.objReg, pic.shapeReg);
1167 1259 : pic.shapeRegHasBaseShape = true;
1168 : }
1169 :
1170 7821 : start = masm.label();
1171 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1172 7821 : ImmPtr(obj->lastProperty()));
1173 : }
1174 9581 : Label stubShapeJumpLabel = masm.label();
1175 :
1176 9581 : if (!shapeMismatches.append(shapeGuardJump))
1177 0 : return error();
1178 :
1179 9581 : RegisterID holderReg = pic.objReg;
1180 9581 : if (obj != holder) {
1181 7373 : if (!GeneratePrototypeGuards(cx, shapeMismatches, masm, obj, holder,
1182 7373 : pic.objReg, pic.shapeReg)) {
1183 0 : return error();
1184 : }
1185 :
1186 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
1187 7373 : holderReg = pic.shapeReg;
1188 7373 : masm.move(ImmPtr(holder), holderReg);
1189 7373 : pic.shapeRegHasBaseShape = false;
1190 :
1191 : // Guard on the holder's shape.
1192 7373 : Jump j = masm.guardShape(holderReg, holder);
1193 7373 : if (!shapeMismatches.append(j))
1194 0 : return error();
1195 :
1196 7373 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
1197 : } else {
1198 2208 : pic.secondShapeGuard = 0;
1199 : }
1200 :
1201 9581 : if (!shape->hasDefaultGetter()) {
1202 164 : generateGetterStub(masm, shape, start, shapeMismatches);
1203 164 : if (setStubShapeOffset)
1204 164 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1205 164 : return Lookup_Cacheable;
1206 : }
1207 :
1208 : /* Load the value out of the object. */
1209 9417 : masm.loadObjProp(holder, holderReg, shape, pic.shapeReg, pic.objReg);
1210 9417 : Jump done = masm.jump();
1211 :
1212 9417 : pic.updatePCCounters(f, masm);
1213 :
1214 18834 : PICLinker buffer(masm, pic);
1215 9417 : if (!buffer.init(cx))
1216 0 : return error();
1217 :
1218 18834 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1219 9417 : !buffer.verifyRange(f.chunk())) {
1220 0 : return disable("code memory is out of range");
1221 : }
1222 :
1223 : // The final exit jumps to the store-back in the inline stub.
1224 9417 : buffer.link(done, pic.fastPathRejoin);
1225 :
1226 9417 : linkerEpilogue(buffer, start, shapeMismatches);
1227 :
1228 9417 : if (setStubShapeOffset)
1229 7657 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1230 9417 : return Lookup_Cacheable;
1231 : }
1232 :
1233 9581 : void linkerEpilogue(LinkerHelper &buffer, Label start, Vector<Jump, 8> &shapeMismatches)
1234 : {
1235 : // The guard exits jump to the original slow case.
1236 26539 : for (Jump *pj = shapeMismatches.begin(); pj != shapeMismatches.end(); ++pj)
1237 16958 : buffer.link(*pj, pic.slowPathStart);
1238 :
1239 9581 : CodeLocationLabel cs = buffer.finalize(f);
1240 9581 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1241 :
1242 9581 : patchPreviousToHere(cs);
1243 :
1244 9581 : pic.stubsGenerated++;
1245 9581 : pic.updateLastPath(buffer, start);
1246 :
1247 9581 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1248 27 : disable("max stubs reached");
1249 9581 : if (obj->isDenseArray())
1250 1760 : disable("dense array");
1251 9581 : }
1252 :
1253 14849 : void patchPreviousToHere(CodeLocationLabel cs)
1254 : {
1255 29698 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1256 14849 : CodeLocationLabel label = pic.lastPathStart();
1257 :
1258 : // Patch either the inline fast path or a generated stub. The stub
1259 : // omits the prefix of the inline fast path that loads the shape, so
1260 : // the offsets are different.
1261 : int shapeGuardJumpOffset;
1262 14849 : if (pic.stubsGenerated)
1263 1900 : shapeGuardJumpOffset = pic.getPropLabels().getStubShapeJumpOffset();
1264 : else
1265 12949 : shapeGuardJumpOffset = pic.shapeGuard + pic.getPropLabels().getInlineShapeJumpOffset();
1266 14849 : int secondGuardOffset = getLastStubSecondShapeGuard();
1267 :
1268 : JaegerSpew(JSpew_PICs, "Patching previous (%d stubs) (start %p) (offset %d) (second %d)\n",
1269 : (int) pic.stubsGenerated, label.executableAddress(),
1270 14849 : shapeGuardJumpOffset, secondGuardOffset);
1271 :
1272 14849 : repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
1273 14849 : if (secondGuardOffset)
1274 1259 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
1275 14849 : }
1276 :
1277 25918 : LookupStatus update()
1278 : {
1279 25918 : JS_ASSERT(pic.hit);
1280 :
1281 25918 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
1282 25918 : LookupStatus status = getprop.lookupAndTest();
1283 25918 : if (status != Lookup_Cacheable)
1284 2240 : return status;
1285 23678 : if (hadGC())
1286 0 : return Lookup_Uncacheable;
1287 :
1288 56124 : if (obj == getprop.holder &&
1289 16305 : getprop.shape->hasDefaultGetter() &&
1290 16141 : !pic.inlinePathPatched) {
1291 14097 : return patchInline(getprop.holder, getprop.shape);
1292 : }
1293 :
1294 9581 : return generateStub(getprop.holder, getprop.shape);
1295 : }
1296 : };
1297 :
1298 : class ScopeNameCompiler : public PICStubCompiler
1299 : {
1300 : private:
1301 : typedef Vector<Jump, 8> JumpList;
1302 :
1303 : JSObject *scopeChain;
1304 : PropertyName *name;
1305 : GetPropHelper<ScopeNameCompiler> getprop;
1306 23193 : ScopeNameCompiler *thisFromCtor() { return this; }
1307 :
1308 22022 : void patchPreviousToHere(CodeLocationLabel cs)
1309 : {
1310 22022 : ScopeNameLabels & labels = pic.scopeNameLabels();
1311 44044 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1312 22022 : CodeLocationLabel start = pic.lastPathStart();
1313 22022 : JSC::CodeLocationJump jump;
1314 :
1315 : // Patch either the inline fast path or a generated stub.
1316 22022 : if (pic.stubsGenerated)
1317 689 : jump = labels.getStubJump(start);
1318 : else
1319 21333 : jump = labels.getInlineJump(start);
1320 22022 : repatcher.relink(jump, cs);
1321 22022 : }
1322 :
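/*
 * Emit a shape guard for each scope object between the current scope chain
 * and the holder, following enclosing-scope links as we go; failed guards
 * are collected in |fails|. At runtime the object register ends up holding
 * the expected holder, which the callers then null-check and shape-guard.
 */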
1323 22132 : LookupStatus walkScopeChain(Assembler &masm, JumpList &fails)
1324 : {
1325 : /* Walk the scope chain. */
1326 22132 : JSObject *tobj = scopeChain;
1327 :
1328 : /* For GETXPROP, we'll never enter this loop. */
1329 22132 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, tobj && tobj == getprop.holder);
1330 22132 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, getprop.obj == tobj);
1331 :
1332 69119 : while (tobj && tobj != getprop.holder) {
1333 24965 : if (!IsCacheableNonGlobalScope(tobj))
1334 110 : return disable("non-cacheable scope chain object");
1335 24855 : JS_ASSERT(tobj->isNative());
1336 :
1337 : /* Guard on intervening shapes. */
1338 24855 : masm.loadShape(pic.objReg, pic.shapeReg);
1339 : Jump j = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1340 24855 : ImmPtr(tobj->lastProperty()));
1341 24855 : if (!fails.append(j))
1342 0 : return error();
1343 :
1344 : /* Load the next link in the scope chain. */
1345 24855 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1346 24855 : masm.loadPayload(parent, pic.objReg);
1347 :
1348 24855 : tobj = &tobj->asScope().enclosingScope();
1349 : }
1350 :
1351 22022 : if (tobj != getprop.holder)
1352 0 : return disable("scope chain walk terminated early");
1353 :
1354 22022 : return Lookup_Cacheable;
1355 : }
1356 :
1357 : public:
1358 23193 : ScopeNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1359 : PropertyName *name, VoidStubPIC stub)
1360 : : PICStubCompiler("name", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1361 : scopeChain(scopeChain), name(name),
1362 23193 : getprop(f.cx, NULL, name, *thisFromCtor(), f)
1363 23193 : { }
1364 :
1365 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1366 : {
1367 : ScopeNameLabels &labels = pic.scopeNameLabels();
1368 :
1369 : /* Link the inline path back to the slow path. */
1370 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.fastPathStart);
1371 : repatcher.relink(inlineJump, pic.slowPathStart);
1372 :
1373 : VoidStubPIC stub;
1374 : switch (pic.kind) {
1375 : case ic::PICInfo::NAME:
1376 : stub = ic::Name;
1377 : break;
1378 : case ic::PICInfo::XNAME:
1379 : stub = ic::XName;
1380 : break;
1381 : default:
1382 : JS_NOT_REACHED("Invalid pic kind in ScopeNameCompiler::reset");
1383 : return;
1384 : }
1385 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
1386 : repatcher.relink(pic.slowPathCall, target);
1387 : }
1388 :
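/*
 * Generate a stub for a name that resolves on the global object: walk and
 * guard the scope chain, null-check and shape-guard the global, then load
 * the property from its slot.
 */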
1389 14762 : LookupStatus generateGlobalStub(JSObject *obj)
1390 : {
1391 29524 : Assembler masm;
1392 29524 : JumpList fails(cx);
1393 14762 : ScopeNameLabels &labels = pic.scopeNameLabels();
1394 :
1395 : /* For GETXPROP, the object is already in objReg. */
1396 14762 : if (pic.kind == ic::PICInfo::NAME)
1397 14756 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1398 :
1399 14762 : JS_ASSERT(obj == getprop.holder);
1400 14762 : JS_ASSERT(getprop.holder == &scopeChain->global());
1401 :
1402 14762 : LookupStatus status = walkScopeChain(masm, fails);
1403 14762 : if (status != Lookup_Cacheable)
1404 64 : return status;
1405 :
1406 : /* If a scope chain walk was required, the final object needs a NULL test. */
1407 14698 : MaybeJump finalNull;
1408 14698 : if (pic.kind == ic::PICInfo::NAME)
1409 14692 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1410 14698 : masm.loadShape(pic.objReg, pic.shapeReg);
1411 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1412 14698 : ImmPtr(getprop.holder->lastProperty()));
1413 :
1414 14698 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
1415 :
1416 14698 : Jump done = masm.jump();
1417 :
1418 : /* All failures flow to here, so there is a common point to patch. */
1419 29396 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1420 14698 : pj->linkTo(masm.label(), &masm);
1421 14698 : if (finalNull.isSet())
1422 14692 : finalNull.get().linkTo(masm.label(), &masm);
1423 14698 : finalShape.linkTo(masm.label(), &masm);
1424 14698 : Label failLabel = masm.label();
1425 14698 : Jump failJump = masm.jump();
1426 :
1427 14698 : pic.updatePCCounters(f, masm);
1428 :
1429 29396 : PICLinker buffer(masm, pic);
1430 14698 : if (!buffer.init(cx))
1431 0 : return error();
1432 :
1433 29396 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1434 14698 : !buffer.verifyRange(f.chunk())) {
1435 0 : return disable("code memory is out of range");
1436 : }
1437 :
1438 14698 : buffer.link(failJump, pic.slowPathStart);
1439 14698 : buffer.link(done, pic.fastPathRejoin);
1440 14698 : CodeLocationLabel cs = buffer.finalize(f);
1441 14698 : JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
1442 14698 : spew("NAME stub", "global");
1443 :
1444 14698 : patchPreviousToHere(cs);
1445 :
1446 14698 : pic.stubsGenerated++;
1447 14698 : pic.updateLastPath(buffer, failLabel);
1448 14698 : labels.setStubJump(masm, failLabel, failJump);
1449 :
1450 14698 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1451 13 : disable("max stubs reached");
1452 :
1453 14698 : return Lookup_Cacheable;
1454 : }
1455 :
1456 : enum CallObjPropKind {
1457 : ARG,
1458 : VAR
1459 : };
1460 :
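/*
 * Generate a stub for reading an argument or variable from a Call object.
 * While the call's stack frame is still alive (non-null private slot) the
 * value is read from the frame; once the frame has finished, it is read
 * from the call object's reserved slots.
 */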
1461 7795 : LookupStatus generateCallStub(JSObject *obj)
1462 : {
1463 15590 : Assembler masm;
1464 15590 : Vector<Jump, 8> fails(cx);
1465 7795 : ScopeNameLabels &labels = pic.scopeNameLabels();
1466 :
1467 : /* For GETXPROP, the object is already in objReg. */
1468 7795 : if (pic.kind == ic::PICInfo::NAME)
1469 7554 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1470 :
1471 7795 : JS_ASSERT(obj == getprop.holder);
1472 7795 : JS_ASSERT(getprop.holder != &scopeChain->global());
1473 :
1474 : CallObjPropKind kind;
1475 7795 : const Shape *shape = getprop.shape;
1476 7795 : if (shape->getterOp() == CallObject::getArgOp) {
1477 1409 : kind = ARG;
1478 6386 : } else if (shape->getterOp() == CallObject::getVarOp) {
1479 5961 : kind = VAR;
1480 : } else {
1481 425 : return disable("unhandled callobj sprop getter");
1482 : }
1483 :
1484 7370 : LookupStatus status = walkScopeChain(masm, fails);
1485 7370 : if (status != Lookup_Cacheable)
1486 46 : return status;
1487 :
1488 : /* If a scope chain walk was required, the final object needs a NULL test. */
1489 7324 : MaybeJump finalNull;
1490 7324 : if (pic.kind == ic::PICInfo::NAME)
1491 7083 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1492 7324 : masm.loadShape(pic.objReg, pic.shapeReg);
1493 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1494 7324 : ImmPtr(getprop.holder->lastProperty()));
1495 :
1496 : /* Get callobj's stack frame. */
1497 7324 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
1498 :
1499 7324 : JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
1500 7324 : uint16_t slot = uint16_t(shape->shortid());
1501 :
1502 7324 : Jump skipOver;
1503 7324 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
1504 :
1505 : /* Not-escaped case. */
1506 : {
1507 1395 : Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
1508 8719 : : StackFrame::offsetOfFixed(slot));
1509 7324 : masm.loadPayload(addr, pic.objReg);
1510 7324 : masm.loadTypeTag(addr, pic.shapeReg);
1511 7324 : skipOver = masm.jump();
1512 : }
1513 :
1514 7324 : escapedFrame.linkTo(masm.label(), &masm);
1515 :
1516 : {
1517 7324 : if (kind == VAR)
1518 5929 : slot += fun->nargs;
1519 :
1520 7324 : slot += CallObject::RESERVED_SLOTS;
1521 7324 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
1522 :
1523 : /* Safe because type is loaded first. */
1524 7324 : masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
1525 : }
1526 :
1527 7324 : skipOver.linkTo(masm.label(), &masm);
1528 7324 : Jump done = masm.jump();
1529 :
1530 : // All failures flow to here, so there is a common point to patch.
1531 17473 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1532 10149 : pj->linkTo(masm.label(), &masm);
1533 7324 : if (finalNull.isSet())
1534 7083 : finalNull.get().linkTo(masm.label(), &masm);
1535 7324 : finalShape.linkTo(masm.label(), &masm);
1536 7324 : Label failLabel = masm.label();
1537 7324 : Jump failJump = masm.jump();
1538 :
1539 7324 : pic.updatePCCounters(f, masm);
1540 :
1541 14648 : PICLinker buffer(masm, pic);
1542 7324 : if (!buffer.init(cx))
1543 0 : return error();
1544 :
1545 14648 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1546 7324 : !buffer.verifyRange(f.chunk())) {
1547 0 : return disable("code memory is out of range");
1548 : }
1549 :
1550 7324 : buffer.link(failJump, pic.slowPathStart);
1551 7324 : buffer.link(done, pic.fastPathRejoin);
1552 7324 : CodeLocationLabel cs = buffer.finalize(f);
1553 7324 : JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
1554 :
1555 7324 : patchPreviousToHere(cs);
1556 :
1557 7324 : pic.stubsGenerated++;
1558 7324 : pic.updateLastPath(buffer, failLabel);
1559 7324 : labels.setStubJump(masm, failLabel, failJump);
1560 :
1561 7324 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1562 0 : disable("max stubs reached");
1563 :
1564 7324 : return Lookup_Cacheable;
1565 : }
1566 :
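     : /* NAME: bind the identifier on the scope chain, then attach a stub for the holder. */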
1567 22938 : LookupStatus updateForName()
1568 : {
1569 : // |getprop.obj| is filled by bind()
1570 22938 : LookupStatus status = getprop.bind();
1571 22938 : if (status != Lookup_Cacheable)
1572 82 : return status;
1573 :
1574 22856 : return update(getprop.obj);
1575 : }
1576 :
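     : /* GETXPROP: the object to search is the head of the given scope chain. */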
1577 255 : LookupStatus updateForXName()
1578 : {
1579 : // |obj| and |getprop.obj| are NULL, but should be the given scopeChain.
1580 255 : getprop.obj = scopeChain;
1581 255 : LookupStatus status = getprop.lookup();
1582 255 : if (status != Lookup_Cacheable)
1583 0 : return status;
1584 :
1585 255 : return update(getprop.obj);
1586 : }
1587 :
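     : /* Attach a stub for the holder: Call objects and the global are the cacheable cases. */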
1588 23111 : LookupStatus update(JSObject *obj)
1589 : {
1590 23111 : if (obj != getprop.holder)
1591 44 : return disable("property is on proto of a scope object");
1592 :
1593 23067 : if (obj->isCall())
1594 7795 : return generateCallStub(obj);
1595 :
1596 15272 : LookupStatus status = getprop.testForGet();
1597 15272 : if (status != Lookup_Cacheable)
1598 303 : return status;
1599 :
1600 14969 : if (obj->isGlobal())
1601 14762 : return generateGlobalStub(obj);
1602 :
1603 207 : return disable("scope object not handled yet");
1604 : }
1605 :
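     : /*
     :  * Produce the value for the completed lookup. A missing name reports an
     :  * error unless it only feeds a JSOP_TYPEOF; uncacheable lookups fall
     :  * back to a full property fetch.
     :  */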
1606 23193 : bool retrieve(Value *vp, PICInfo::Kind kind)
1607 : {
1608 23193 : JSObject *obj = getprop.obj;
1609 23193 : JSObject *holder = getprop.holder;
1610 23193 : const JSProperty *prop = getprop.prop;
1611 :
1612 23193 : if (!prop) {
1613 : /* Kludge to allow (typeof foo == "undefined") tests. */
1614 78 : if (kind == ic::PICInfo::NAME) {
1615 78 : JSOp op2 = JSOp(f.pc()[JSOP_NAME_LENGTH]);
1616 78 : if (op2 == JSOP_TYPEOF) {
1617 2 : vp->setUndefined();
1618 2 : return true;
1619 : }
1620 : }
1621 76 : ReportAtomNotDefined(cx, name);
1622 76 : return false;
1623 : }
1624 :
1625 : // If the property was found, but we decided not to cache it, then
1626 : // take a slow path and do a full property fetch.
1627 23115 : if (!getprop.shape) {
1628 4 : if (!obj->getProperty(cx, name, vp))
1629 0 : return false;
1630 4 : return true;
1631 : }
1632 :
1633 23111 : const Shape *shape = getprop.shape;
1634 23111 : JSObject *normalized = obj;
1635 23111 : if (obj->isWith() && !shape->hasDefaultGetter())
1636 8 : normalized = &obj->asWith().object();
1637 23111 : NATIVE_GET(cx, normalized, holder, shape, 0, vp, return false);
1638 23102 : return true;
1639 : }
1640 : };
1641 :
1642 : class BindNameCompiler : public PICStubCompiler
1643 : {
1644 : JSObject *scopeChain;
1645 : PropertyName *name;
1646 :
1647 : public:
1648 2404 : BindNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1649 : PropertyName *name, VoidStubPIC stub)
1650 : : PICStubCompiler("bind", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1651 2404 : scopeChain(scopeChain), name(name)
1652 2404 : { }
1653 :
1654 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1655 : {
1656 : BindNameLabels &labels = pic.bindNameLabels();
1657 :
1658 : /* Link the inline jump back to the slow path. */
1659 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.getFastShapeGuard());
1660 : repatcher.relink(inlineJump, pic.slowPathStart);
1661 :
1662 : /* Link the slow path to call the IC entry point. */
1663 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
1664 : repatcher.relink(pic.slowPathCall, target);
1665 : }
1666 :
1667 565 : void patchPreviousToHere(CodeLocationLabel cs)
1668 : {
1669 565 : BindNameLabels &labels = pic.bindNameLabels();
1670 1130 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1671 565 : JSC::CodeLocationJump jump;
1672 :
1673 : /* Patch either the inline fast path or a generated stub. */
1674 565 : if (pic.stubsGenerated)
1675 8 : jump = labels.getStubJump(pic.lastPathStart());
1676 : else
1677 557 : jump = labels.getInlineJump(pic.getFastShapeGuard());
1678 565 : repatcher.relink(jump, cs);
1679 565 : }
1680 :
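     : /*
     :  * Generate a stub that guards on the shape of each scope object from the
     :  * head of the scope chain down to |obj|, the object the name binds to.
     :  */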
1681 732 : LookupStatus generateStub(JSObject *obj)
1682 : {
1683 1464 : Assembler masm;
1684 1464 : Vector<Jump, 8> fails(cx);
1685 :
1686 732 : BindNameLabels &labels = pic.bindNameLabels();
1687 :
1688 732 : if (!IsCacheableNonGlobalScope(scopeChain))
1689 9 : return disable("non-cacheable obj at start of scope chain");
1690 :
1691 : /* Guard on the shape of the scope chain. */
1692 723 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1693 723 : masm.loadShape(pic.objReg, pic.shapeReg);
1694 : Jump firstShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1695 723 : ImmPtr(scopeChain->lastProperty()));
1696 :
1697 723 : if (scopeChain != obj) {
1698 : /* Walk up the scope chain. */
1699 224 : JSObject *tobj = &scopeChain->asScope().enclosingScope();
1700 224 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1701 494 : while (tobj) {
1702 270 : if (!IsCacheableNonGlobalScope(tobj))
1703 158 : return disable("non-cacheable obj in scope chain");
1704 112 : masm.loadPayload(parent, pic.objReg);
1705 112 : masm.loadShape(pic.objReg, pic.shapeReg);
1706 : Jump shapeTest = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1707 112 : ImmPtr(tobj->lastProperty()));
1708 112 : if (!fails.append(shapeTest))
1709 0 : return error();
1710 112 : if (tobj == obj)
1711 66 : break;
1712 46 : tobj = &tobj->asScope().enclosingScope();
1713 : }
1714 66 : if (tobj != obj)
1715 0 : return disable("indirect hit");
1716 : }
1717 :
1718 565 : Jump done = masm.jump();
1719 :
1720 : // All failures flow to here, so there is a common point to patch.
1721 645 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1722 80 : pj->linkTo(masm.label(), &masm);
1723 565 : firstShape.linkTo(masm.label(), &masm);
1724 565 : Label failLabel = masm.label();
1725 565 : Jump failJump = masm.jump();
1726 :
1727 565 : pic.updatePCCounters(f, masm);
1728 :
1729 1130 : PICLinker buffer(masm, pic);
1730 565 : if (!buffer.init(cx))
1731 0 : return error();
1732 :
1733 1130 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1734 565 : !buffer.verifyRange(f.chunk())) {
1735 0 : return disable("code memory is out of range");
1736 : }
1737 :
1738 565 : buffer.link(failJump, pic.slowPathStart);
1739 565 : buffer.link(done, pic.fastPathRejoin);
1740 565 : CodeLocationLabel cs = buffer.finalize(f);
1741 565 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1742 :
1743 565 : patchPreviousToHere(cs);
1744 :
1745 565 : pic.stubsGenerated++;
1746 565 : pic.updateLastPath(buffer, failLabel);
1747 565 : labels.setStubJump(masm, failLabel, failJump);
1748 :
1749 565 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1750 0 : disable("max stubs reached");
1751 :
1752 565 : return Lookup_Cacheable;
1753 : }
1754 :
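     : /* Find the object the name binds to; attach a stub after the first hit. */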
1755 2404 : JSObject *update()
1756 : {
1757 2404 : RecompilationMonitor monitor(cx);
1758 :
1759 2404 : JSObject *obj = FindIdentifierBase(cx, scopeChain, name);
1760 2404 : if (!obj || monitor.recompiled())
1761 2 : return obj;
1762 :
1763 2402 : if (!pic.hit) {
1764 1670 : spew("first hit", "nop");
1765 1670 : pic.hit = true;
1766 1670 : return obj;
1767 : }
1768 :
1769 732 : LookupStatus status = generateStub(obj);
1770 732 : if (status == Lookup_Error)
1771 0 : return NULL;
1772 :
1773 732 : return obj;
1774 : }
1775 : };
1776 :
1777 : static void JS_FASTCALL
1778 187080 : DisabledGetPropIC(VMFrame &f, ic::PICInfo *pic)
1779 : {
1780 187080 : stubs::GetProp(f, pic->name);
1781 187080 : }
1782 :
1783 : static void JS_FASTCALL
1784 0 : DisabledGetPropNoCacheIC(VMFrame &f, ic::PICInfo *pic)
1785 : {
1786 0 : stubs::GetPropNoCache(f, pic->name);
1787 0 : }
1788 :
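     : /*
     :  * Common body for ic::GetProp and ic::GetPropNoCache: handle the
     :  * 'length' fast paths (lazy arguments, arrays, strings and String
     :  * objects), attach a property stub when possible, then perform the
     :  * actual property fetch.
     :  */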
1789 : static inline void
1790 68278 : GetPropMaybeCached(VMFrame &f, ic::PICInfo *pic, bool cached)
1791 : {
1792 68278 : VoidStubPIC stub = cached ? DisabledGetPropIC : DisabledGetPropNoCacheIC;
1793 :
1794 68278 : JSScript *script = f.fp()->script();
1795 :
1796 68278 : PropertyName *name = pic->name;
1797 68278 : if (name == f.cx->runtime->atomState.lengthAtom) {
1798 6656 : if (f.regs.sp[-1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
1799 20 : f.regs.sp[-1].setInt32(f.regs.fp()->numActualArgs());
1800 20 : return;
1801 : }
1802 6636 : if (!f.regs.sp[-1].isPrimitive()) {
1803 6503 : JSObject *obj = &f.regs.sp[-1].toObject();
1804 6503 : if (obj->isArray() || obj->isString()) {
1805 5268 : GetPropCompiler cc(f, script, obj, *pic, NULL, stub);
1806 5268 : if (obj->isArray()) {
1807 5199 : LookupStatus status = cc.generateArrayLengthStub();
1808 5199 : if (status == Lookup_Error)
1809 0 : THROW();
1810 5199 : f.regs.sp[-1].setNumber(obj->getArrayLength());
1811 : } else {
1812 69 : LookupStatus status = cc.generateStringObjLengthStub();
1813 69 : if (status == Lookup_Error)
1814 0 : THROW();
1815 69 : JSString *str = obj->asString().unbox();
1816 69 : f.regs.sp[-1].setInt32(str->length());
1817 : }
1818 5268 : return;
1819 : }
1820 : }
1821 : }
1822 :
1823 62990 : if (f.regs.sp[-1].isString()) {
1824 1000 : GetPropCompiler cc(f, script, NULL, *pic, name, stub);
1825 1000 : if (name == f.cx->runtime->atomState.lengthAtom) {
1826 121 : LookupStatus status = cc.generateStringLengthStub();
1827 121 : if (status == Lookup_Error)
1828 0 : THROW();
1829 121 : JSString *str = f.regs.sp[-1].toString();
1830 121 : f.regs.sp[-1].setInt32(str->length());
1831 : } else {
1832 879 : LookupStatus status = cc.generateStringPropertyStub();
1833 879 : if (status == Lookup_Error)
1834 0 : THROW();
1835 879 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1836 879 : if (!obj)
1837 0 : THROW();
1838 879 : if (!obj->getProperty(f.cx, name, &f.regs.sp[-1]))
1839 0 : THROW();
1840 : }
1841 1000 : return;
1842 : }
1843 :
1844 61990 : RecompilationMonitor monitor(f.cx);
1845 :
1846 61990 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1847 61990 : if (!obj)
1848 18 : THROW();
1849 :
1850 61972 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1851 25918 : GetPropCompiler cc(f, script, obj, *pic, name, stub);
1852 25918 : if (!cc.update())
1853 0 : THROW();
1854 : }
1855 :
1856 : Value v;
1857 61972 : if (cached) {
1858 60697 : if (!GetPropertyOperation(f.cx, f.pc(), f.regs.sp[-1], &v))
1859 23 : THROW();
1860 : } else {
1861 1275 : if (!obj->getProperty(f.cx, name, &v))
1862 0 : THROW();
1863 : }
1864 :
1865 61949 : f.regs.sp[-1] = v;
1866 : }
1867 :
1868 : void JS_FASTCALL
1869 67003 : ic::GetProp(VMFrame &f, ic::PICInfo *pic)
1870 : {
1871 67003 : GetPropMaybeCached(f, pic, /* cached = */ true);
1872 67003 : }
1873 :
1874 : void JS_FASTCALL
1875 1275 : ic::GetPropNoCache(VMFrame &f, ic::PICInfo *pic)
1876 : {
1877 1275 : GetPropMaybeCached(f, pic, /* cached = */ false);
1878 1275 : }
1879 :
1880 : template <JSBool strict>
1881 : static void JS_FASTCALL
1882 15267 : DisabledSetPropIC(VMFrame &f, ic::PICInfo *pic)
1883 : {
1884 15267 : stubs::SetName<strict>(f, pic->name);
1885 15267 : }
1886 :
1887 : void JS_FASTCALL
1888 13180 : ic::SetProp(VMFrame &f, ic::PICInfo *pic)
1889 : {
1890 13180 : JSScript *script = f.fp()->script();
1891 13180 : JS_ASSERT(pic->isSet());
1892 :
1893 13180 : VoidStubPIC stub = STRICT_VARIANT(DisabledSetPropIC);
1894 :
1895 : // Save this in case the compiler triggers a recompilation of this script.
1896 13180 : PropertyName *name = pic->name;
1897 13180 : VoidStubName nstub = STRICT_VARIANT(stubs::SetName);
1898 :
1899 13180 : RecompilationMonitor monitor(f.cx);
1900 :
1901 13180 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-2]);
1902 13180 : if (!obj)
1903 0 : THROW();
1904 :
1905 : // Note, we can't use SetName for PROPINC PICs because the property
1906 : // cache can't handle a GET and SET from the same scripted PC.
1907 13180 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1908 4900 : SetPropCompiler cc(f, script, obj, *pic, name, stub);
1909 4900 : LookupStatus status = cc.update();
1910 4900 : if (status == Lookup_Error)
1911 0 : THROW();
1912 : }
1913 :
1914 13180 : nstub(f, name);
1915 : }
1916 :
1917 : static void JS_FASTCALL
1918 6858 : DisabledNameIC(VMFrame &f, ic::PICInfo *pic)
1919 : {
1920 6858 : stubs::Name(f);
1921 6858 : }
1922 :
1923 : static void JS_FASTCALL
1924 0 : DisabledXNameIC(VMFrame &f, ic::PICInfo *pic)
1925 : {
1926 0 : stubs::GetProp(f, pic->name);
1927 0 : }
1928 :
1929 : void JS_FASTCALL
1930 255 : ic::XName(VMFrame &f, ic::PICInfo *pic)
1931 : {
1932 255 : JSScript *script = f.fp()->script();
1933 :
1934 : /* GETXPROP is guaranteed to have an object. */
1935 255 : JSObject *obj = &f.regs.sp[-1].toObject();
1936 :
1937 255 : ScopeNameCompiler cc(f, script, obj, *pic, pic->name, DisabledXNameIC);
1938 :
1939 255 : LookupStatus status = cc.updateForXName();
1940 255 : if (status == Lookup_Error)
1941 0 : THROW();
1942 :
1943 : Value rval;
1944 255 : if (!cc.retrieve(&rval, PICInfo::XNAME))
1945 0 : THROW();
1946 255 : f.regs.sp[-1] = rval;
1947 : }
1948 :
1949 : void JS_FASTCALL
1950 22938 : ic::Name(VMFrame &f, ic::PICInfo *pic)
1951 : {
1952 22938 : JSScript *script = f.fp()->script();
1953 :
1954 22938 : ScopeNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, DisabledNameIC);
1955 :
1956 22938 : LookupStatus status = cc.updateForName();
1957 22938 : if (status == Lookup_Error)
1958 0 : THROW();
1959 :
1960 : Value rval;
1961 22938 : if (!cc.retrieve(&rval, PICInfo::NAME))
1962 85 : THROW();
1963 22853 : f.regs.sp[0] = rval;
1964 : }
1965 :
1966 : static void JS_FASTCALL
1967 1180 : DisabledBindNameIC(VMFrame &f, ic::PICInfo *pic)
1968 : {
1969 1180 : stubs::BindName(f, pic->name);
1970 1180 : }
1971 :
1972 : void JS_FASTCALL
1973 2404 : ic::BindName(VMFrame &f, ic::PICInfo *pic)
1974 : {
1975 2404 : JSScript *script = f.fp()->script();
1976 :
1977 2404 : VoidStubPIC stub = DisabledBindNameIC;
1978 2404 : BindNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, stub);
1979 :
1980 2404 : JSObject *obj = cc.update();
1981 2404 : if (!obj)
1982 0 : THROW();
1983 :
1984 2404 : f.regs.sp[0].setObject(*obj);
1985 : }
1986 :
1987 : void
1988 68728 : BaseIC::spew(JSContext *cx, const char *event, const char *message)
1989 : {
1990 : #ifdef JS_METHODJIT_SPEW
1991 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
1992 68728 : js_CodeName[op], event, message, cx->fp()->script()->filename, CurrentLine(cx));
1993 : #endif
1994 68728 : }
1995 :
1996 : /* Index into the chunk's pcLengths array: total length of scripts preceding the innermost frame, plus the offset of the current pc. */
1997 0 : inline uint32_t frameCountersOffset(VMFrame &f)
1998 : {
1999 0 : JSContext *cx = f.cx;
2000 :
2001 0 : uint32_t offset = 0;
2002 0 : if (cx->regs().inlined()) {
2003 0 : offset += cx->fp()->script()->length;
2004 0 : uint32_t index = cx->regs().inlined()->inlineIndex;
2005 0 : InlineFrame *frames = f.chunk()->inlineFrames();
2006 0 : for (unsigned i = 0; i < index; i++)
2007 0 : offset += frames[i].fun->script()->length;
2008 : }
2009 :
2010 : jsbytecode *pc;
2011 0 : JSScript *script = cx->stack.currentScript(&pc);
2012 0 : offset += pc - script->code;
2013 :
2014 0 : return offset;
2015 : }
2016 :
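     : /* Disable this IC: repatch its slow-path call to go directly to |stub|. */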
2017 : LookupStatus
2018 17296 : BaseIC::disable(VMFrame &f, const char *reason, void *stub)
2019 : {
2020 17296 : if (f.chunk()->pcLengths) {
2021 0 : uint32_t offset = frameCountersOffset(f);
2022 0 : f.chunk()->pcLengths[offset].picsLength = 0;
2023 : }
2024 :
2025 17296 : spew(f.cx, "disabled", reason);
2026 34592 : Repatcher repatcher(f.chunk());
2027 17296 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2028 17296 : return Lookup_Uncacheable;
2029 : }
2030 :
2031 : void
2032 47565 : BaseIC::updatePCCounters(VMFrame &f, Assembler &masm)
2033 : {
2034 47565 : if (f.chunk()->pcLengths) {
2035 0 : uint32_t offset = frameCountersOffset(f);
2036 0 : f.chunk()->pcLengths[offset].picsLength += masm.size();
2037 : }
2038 47565 : }
2039 :
2040 : bool
2041 75152 : BaseIC::shouldUpdate(JSContext *cx)
2042 : {
2043 75152 : if (!hit) {
2044 44334 : hit = true;
2045 44334 : spew(cx, "ignored", "first hit");
2046 44334 : return false;
2047 : }
2048 30818 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2049 30818 : return true;
2050 : }
2051 :
2052 : static void JS_FASTCALL
2053 820639 : DisabledGetElem(VMFrame &f, ic::GetElementIC *ic)
2054 : {
2055 820639 : stubs::GetElem(f);
2056 820639 : }
2057 :
2058 : bool
2059 13414 : GetElementIC::shouldUpdate(JSContext *cx)
2060 : {
2061 13414 : if (!hit) {
2062 4826 : hit = true;
2063 4826 : spew(cx, "ignored", "first hit");
2064 4826 : return false;
2065 : }
2066 8588 : JS_ASSERT(stubsGenerated < MAX_GETELEM_IC_STUBS);
2067 8588 : return true;
2068 : }
2069 :
2070 : LookupStatus
2071 2918 : GetElementIC::disable(VMFrame &f, const char *reason)
2072 : {
2073 2918 : slowCallPatched = true;
2074 2918 : void *stub = JS_FUNC_TO_DATA_PTR(void *, DisabledGetElem);
2075 2918 : BaseIC::disable(f, reason, stub);
2076 2918 : return Lookup_Uncacheable;
2077 : }
2078 :
2079 : LookupStatus
2080 0 : GetElementIC::error(JSContext *cx)
2081 : {
2082 0 : return Lookup_Error;
2083 : }
2084 :
2085 : void
2086 0 : GetElementIC::purge(Repatcher &repatcher)
2087 : {
2088 : // Repatch the inline jumps.
2089 0 : if (inlineTypeGuardPatched)
2090 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
2091 0 : if (inlineShapeGuardPatched)
2092 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2093 :
2094 0 : if (slowCallPatched) {
2095 : repatcher.relink(slowPathCall,
2096 0 : FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
2097 : }
2098 :
2099 0 : reset();
2100 0 : }
2101 :
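     : /*
     :  * Attach a GETELEM stub for an object property accessed with a string
     :  * (atom) key. The stub guards on the key's identity and the object's
     :  * shape (plus the holder's shape when the property lives on the
     :  * prototype chain), then loads the property directly.
     :  */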
2102 : LookupStatus
2103 6035 : GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyName *name,
2104 : Value *vp)
2105 : {
2106 6035 : JS_ASSERT(v.isString());
2107 6035 : JSContext *cx = f.cx;
2108 :
2109 6035 : GetPropHelper<GetElementIC> getprop(cx, obj, name, *this, f);
2110 6035 : LookupStatus status = getprop.lookupAndTest();
2111 6035 : if (status != Lookup_Cacheable)
2112 60 : return status;
2113 :
2114 : // With TI enabled, string property stubs can only be added to an opcode if
2115 : // the value read will go through a type barrier afterwards. TI only
2116 : // accounts for integer-valued properties accessed by GETELEM/CALLELEM.
2117 5975 : if (cx->typeInferenceEnabled() && !forcedTypeBarrier)
2118 31 : return disable(f, "string element access may not have type barrier");
2119 :
2120 11888 : Assembler masm;
2121 :
2122 : // Guard on the string's type and identity.
2123 5944 : MaybeJump atomTypeGuard;
2124 5944 : if (hasInlineTypeGuard() && !inlineTypeGuardPatched) {
2125 : // We link all string-key dependent stubs together, and store the
2126 : // first set of guards in the IC separately from int-key dependent
2127 : // stubs. As long as we guarantee that the first string-key dependent
2128 : // stub guards on the key type, then all other string-key stubs can
2129 : // omit the guard.
2130 767 : JS_ASSERT(!idRemat.isTypeKnown());
2131 767 : atomTypeGuard = masm.testString(Assembler::NotEqual, typeReg);
2132 : } else {
2133 : // If there was no inline type guard, then a string type is guaranteed.
2134 : // Otherwise, we are guaranteed the type has already been checked, via
2135 : // the comment above.
2136 5177 : JS_ASSERT_IF(!hasInlineTypeGuard(), idRemat.knownType() == JSVAL_TYPE_STRING);
2137 : }
2138 :
2139 : // Reify the shape before guards that could flow into shape guarding stubs.
2140 5944 : if (!obj->isDenseArray() && !typeRegHasBaseShape) {
2141 1453 : masm.loadShape(objReg, typeReg);
2142 1453 : typeRegHasBaseShape = true;
2143 : }
2144 :
2145 5944 : MaybeJump atomIdGuard;
2146 5944 : if (!idRemat.isConstant())
2147 5930 : atomIdGuard = masm.branchPtr(Assembler::NotEqual, idRemat.dataReg(), ImmPtr(v.toString()));
2148 :
2149 : // Guard on the base shape.
2150 5944 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, typeReg, ImmPtr(obj->lastProperty()));
2151 :
2152 11888 : Vector<Jump, 8> otherGuards(cx);
2153 :
2154 : // Guard on the prototype, if applicable.
2155 5944 : MaybeJump protoGuard;
2156 5944 : JSObject *holder = getprop.holder;
2157 5944 : RegisterID holderReg = objReg;
2158 5944 : if (obj != holder) {
2159 81 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, holder, objReg, typeReg))
2160 0 : return error(cx);
2161 :
2162 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
2163 81 : holderReg = typeReg;
2164 81 : masm.move(ImmPtr(holder), holderReg);
2165 81 : typeRegHasBaseShape = false;
2166 :
2167 : // Guard on the holder's shape.
2168 81 : protoGuard = masm.guardShape(holderReg, holder);
2169 : }
2170 :
2171 5944 : if (op == JSOP_CALLELEM) {
2172 : // Emit a write of |obj| to the top of the stack, before we lose it.
2173 135 : Value *thisVp = &cx->regs().sp[-1];
2174 135 : Address thisSlot(JSFrameReg, StackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
2175 135 : masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot);
2176 : }
2177 :
2178 : // Load the value.
2179 5944 : const Shape *shape = getprop.shape;
2180 5944 : masm.loadObjProp(holder, holderReg, shape, typeReg, objReg);
2181 :
2182 5944 : Jump done = masm.jump();
2183 :
2184 5944 : updatePCCounters(f, masm);
2185 :
2186 11888 : PICLinker buffer(masm, *this);
2187 5944 : if (!buffer.init(cx))
2188 0 : return error(cx);
2189 :
2190 5944 : if (hasLastStringStub && !buffer.verifyRange(lastStringStub))
2191 0 : return disable(f, "code memory is out of range");
2192 5944 : if (!buffer.verifyRange(f.chunk()))
2193 0 : return disable(f, "code memory is out of range");
2194 :
2195 : // Patch all guards.
2196 5944 : buffer.maybeLink(atomIdGuard, slowPathStart);
2197 5944 : buffer.maybeLink(atomTypeGuard, slowPathStart);
2198 5944 : buffer.link(shapeGuard, slowPathStart);
2199 5944 : buffer.maybeLink(protoGuard, slowPathStart);
2200 5944 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
2201 0 : buffer.link(*pj, slowPathStart);
2202 5944 : buffer.link(done, fastPathRejoin);
2203 :
2204 5944 : CodeLocationLabel cs = buffer.finalize(f);
2205 : #ifdef DEBUG
2206 5944 : char *chars = DeflateString(cx, v.toString()->getChars(cx), v.toString()->length());
2207 : JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom %p (\"%s\") shape %p (%s: %d)\n",
2208 : js_CodeName[op], cs.executableAddress(), (void*)name, chars,
2209 5944 : (void*)holder->lastProperty(), cx->fp()->script()->filename, CurrentLine(cx));
2210 5944 : cx->free_(chars);
2211 : #endif
2212 :
2213 : // Update the inline guards, if needed.
2214 5944 : if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalShapeGuard()) {
2215 2872 : Repatcher repatcher(f.chunk());
2216 :
2217 1436 : if (shouldPatchInlineTypeGuard()) {
2218 : // A type guard is present in the inline path, and this is the
2219 : // first string stub, so patch it now.
2220 767 : JS_ASSERT(!inlineTypeGuardPatched);
2221 767 : JS_ASSERT(atomTypeGuard.isSet());
2222 :
2223 767 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), cs);
2224 767 : inlineTypeGuardPatched = true;
2225 : }
2226 :
2227 1436 : if (shouldPatchUnconditionalShapeGuard()) {
2228 : // The shape guard is unconditional, meaning there is no type
2229 : // check. This is the first stub, so it has to be patched. Note
2230 : // that it is wrong to patch the inline shape guard otherwise,
2231 : // because it follows an integer-id guard.
2232 669 : JS_ASSERT(!hasInlineTypeGuard());
2233 :
2234 669 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2235 669 : inlineShapeGuardPatched = true;
2236 : }
2237 : }
2238 :
2239 : // If there were previous stub guards, patch them now.
2240 5944 : if (hasLastStringStub) {
2241 9016 : Repatcher repatcher(lastStringStub);
2242 4508 : CodeLocationLabel stub(lastStringStub.start());
2243 4508 : if (atomGuard)
2244 4508 : repatcher.relink(stub.jumpAtOffset(atomGuard), cs);
2245 4508 : repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
2246 4508 : if (secondShapeGuard)
2247 57 : repatcher.relink(stub.jumpAtOffset(secondShapeGuard), cs);
2248 : }
2249 :
2250 : // Update state.
2251 5944 : hasLastStringStub = true;
2252 5944 : lastStringStub = JITCode(cs.executableAddress(), buffer.size());
2253 5944 : if (atomIdGuard.isSet()) {
2254 5930 : atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
2255 5930 : JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
2256 5930 : JS_ASSERT(atomGuard);
2257 : } else {
2258 14 : atomGuard = 0;
2259 : }
2260 5944 : if (protoGuard.isSet()) {
2261 81 : secondShapeGuard = buffer.locationOf(protoGuard.get()) - cs;
2262 81 : JS_ASSERT(secondShapeGuard == buffer.locationOf(protoGuard.get()) - cs);
2263 81 : JS_ASSERT(secondShapeGuard);
2264 : } else {
2265 5863 : secondShapeGuard = 0;
2266 : }
2267 5944 : firstShapeGuard = buffer.locationOf(shapeGuard) - cs;
2268 5944 : JS_ASSERT(firstShapeGuard == buffer.locationOf(shapeGuard) - cs);
2269 5944 : JS_ASSERT(firstShapeGuard);
2270 :
2271 5944 : stubsGenerated++;
2272 :
2273 5944 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2274 38 : disable(f, "max stubs reached");
2275 :
2276 : // Finally, fetch the value to avoid redoing the property lookup.
2277 5944 : *vp = holder->getSlot(shape->slot());
2278 :
2279 5944 : return Lookup_Cacheable;
2280 : }
2281 :
2282 : #if defined JS_METHODJIT_TYPED_ARRAY
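     : /*
     :  * Attach a stub for an int32-keyed load from a typed array: guard on the
     :  * array's shape, bounds-check the index, and read from the data vector.
     :  * Only one such stub is generated, after which the IC disables itself.
     :  */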
2283 : LookupStatus
2284 816 : GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2285 : {
2286 816 : JSContext *cx = f.cx;
2287 :
2288 816 : if (!v.isInt32())
2289 0 : return disable(f, "typed array with string key");
2290 :
2291 816 : if (op == JSOP_CALLELEM)
2292 0 : return disable(f, "typed array with call");
2293 :
2294 : // The fast-path guarantees that after the dense shape guard, the type is
2295 : // known to be int32, either via type inference or the inline type check.
2296 816 : JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
2297 :
2298 1632 : Assembler masm;
2299 :
2300 : // Guard on this typed array's shape/class.
2301 816 : Jump shapeGuard = masm.guardShape(objReg, obj);
2302 :
2303 : // Bounds check.
2304 816 : Jump outOfBounds;
2305 816 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2306 816 : if (idRemat.isConstant()) {
2307 643 : JS_ASSERT(idRemat.value().toInt32() == v.toInt32());
2308 643 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(v.toInt32()));
2309 : } else {
2310 173 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, idRemat.dataReg());
2311 : }
2312 :
2313 : // Load the array's packed data vector.
2314 816 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2315 :
2316 816 : Int32Key key = idRemat.isConstant()
2317 643 : ? Int32Key::FromConstant(v.toInt32())
2318 1459 : : Int32Key::FromRegister(idRemat.dataReg());
2319 :
2320 816 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2321 816 : if (!masm.supportsFloatingPoint() &&
2322 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2323 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64 ||
2324 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_UINT32))
2325 : {
2326 0 : return disable(f, "fpu not supported");
2327 : }
2328 :
2329 816 : MaybeRegisterID tempReg;
2330 816 : masm.loadFromTypedArray(TypedArray::getType(tarray), objReg, key, typeReg, objReg, tempReg);
2331 :
2332 816 : Jump done = masm.jump();
2333 :
2334 816 : updatePCCounters(f, masm);
2335 :
2336 1632 : PICLinker buffer(masm, *this);
2337 816 : if (!buffer.init(cx))
2338 0 : return error(cx);
2339 :
2340 816 : if (!buffer.verifyRange(f.chunk()))
2341 0 : return disable(f, "code memory is out of range");
2342 :
2343 816 : buffer.link(shapeGuard, slowPathStart);
2344 816 : buffer.link(outOfBounds, slowPathStart);
2345 816 : buffer.link(done, fastPathRejoin);
2346 :
2347 816 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2348 816 : JaegerSpew(JSpew_PICs, "generated getelem typed array stub at %p\n", cs.executableAddress());
2349 :
2350 : // If we can generate a typed array stub, the shape guard is conditional.
2351 : // Also, we only support one typed array.
2352 816 : JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
2353 816 : JS_ASSERT(!inlineShapeGuardPatched);
2354 :
2355 1632 : Repatcher repatcher(f.chunk());
2356 816 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2357 816 : inlineShapeGuardPatched = true;
2358 :
2359 816 : stubsGenerated++;
2360 :
2361 : // In the future, it might make sense to attach multiple typed array stubs.
2362 : // For simplicity, they are currently monomorphic.
2363 816 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2364 0 : disable(f, "max stubs reached");
2365 :
2366 816 : disable(f, "generated typed array stub");
2367 :
2368 : // Fetch the value as expected of Lookup_Cacheable for GetElement.
2369 816 : if (!obj->getGeneric(cx, id, vp))
2370 0 : return Lookup_Error;
2371 :
2372 816 : return Lookup_Cacheable;
2373 : }
2374 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2375 :
2376 : LookupStatus
2377 8588 : GetElementIC::update(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2378 : {
2379 : /*
2380 : * Only treat this as a GETPROP for non-numeric string identifiers. The
2381 : * GETPROP IC assumes the id has already gone through filtering for string
2382 : * indexes in the emitter, i.e. js_GetProtoIfDenseArray is only valid to
2383 : * use when looking up non-integer identifiers.
2384 : */
2385 : uint32_t dummy;
2386 8588 : if (v.isString() && JSID_IS_ATOM(id) && !JSID_TO_ATOM(id)->isIndex(&dummy))
2387 6035 : return attachGetProp(f, obj, v, JSID_TO_ATOM(id)->asPropertyName(), vp);
2388 :
2389 : #if defined JS_METHODJIT_TYPED_ARRAY
2390 : /*
2391 : * Typed array ICs can make stub calls, and need to know which registers
2392 : * are in use and need to be restored after the call. If type inference is
2393 : * enabled then we don't necessarily know the full set of such registers
2394 : * when generating the IC (loop-carried registers may be allocated later),
2395 : * and additionally the push/pop instructions used to save/restore in the
2396 : * IC are not compatible with carrying entries in floating point registers.
2397 : * Since we can use type information to generate inline paths for typed
2398 : * arrays, just don't generate these ICs with inference enabled.
2399 : */
2400 2553 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2401 816 : return attachTypedArray(f, obj, v, id, vp);
2402 : #endif
2403 :
2404 1737 : return disable(f, "unhandled object and key type");
2405 : }
2406 :
2407 : void JS_FASTCALL
2408 13650 : ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
2409 : {
2410 13650 : JSContext *cx = f.cx;
2411 :
2412 : // Right now, we don't optimize for strings or lazy arguments.
2413 13650 : if (!f.regs.sp[-2].isObject()) {
2414 220 : ic->disable(f, "non-object");
2415 220 : stubs::GetElem(f);
2416 220 : return;
2417 : }
2418 :
2419 13430 : Value idval = f.regs.sp[-1];
2420 :
2421 13430 : RecompilationMonitor monitor(cx);
2422 :
2423 13430 : JSObject *obj = ValueToObject(cx, f.regs.sp[-2]);
2424 13430 : if (!obj)
2425 0 : THROW();
2426 :
2427 : #if JS_HAS_XML_SUPPORT
2428 : // Some XML properties behave differently when accessed in a call vs. normal
2429 : // context, so we fall back to stubs::GetElem.
2430 13430 : if (obj->isXML()) {
2431 16 : ic->disable(f, "XML object");
2432 16 : stubs::GetElem(f);
2433 16 : return;
2434 : }
2435 : #endif
2436 :
2437 : jsid id;
2438 13414 : if (idval.isInt32() && INT_FITS_IN_JSID(idval.toInt32())) {
2439 4887 : id = INT_TO_JSID(idval.toInt32());
2440 : } else {
2441 8527 : if (!js_InternNonIntElementId(cx, obj, idval, &id))
2442 0 : THROW();
2443 : }
2444 :
2445 13414 : if (!monitor.recompiled() && ic->shouldUpdate(cx)) {
2446 : #ifdef DEBUG
2447 8588 : f.regs.sp[-2] = MagicValue(JS_GENERIC_MAGIC);
2448 : #endif
2449 8588 : LookupStatus status = ic->update(f, obj, idval, id, &f.regs.sp[-2]);
2450 8588 : if (status != Lookup_Uncacheable) {
2451 6760 : if (status == Lookup_Error)
2452 0 : THROW();
2453 :
2454 : // If the result can be cached, the value was already retrieved.
2455 6760 : JS_ASSERT(!f.regs.sp[-2].isMagic());
2456 6760 : return;
2457 : }
2458 : }
2459 :
2460 6654 : if (!obj->getGeneric(cx, id, &f.regs.sp[-2]))
2461 0 : THROW();
2462 :
2463 : #if JS_HAS_NO_SUCH_METHOD
2464 6654 : if (*f.pc() == JSOP_CALLELEM && JS_UNLIKELY(f.regs.sp[-2].isPrimitive())) {
2465 13 : if (!OnUnknownMethod(cx, obj, idval, &f.regs.sp[-2]))
2466 0 : THROW();
2467 : }
2468 : #endif
2469 : }
2470 :
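     : /* Select the strict or non-strict instantiation of a stub template. */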
2471 : #define APPLY_STRICTNESS(f, s) \
2472 : (FunctionTemplateConditional(s, f<true>, f<false>))
2473 :
2474 : LookupStatus
2475 1902 : SetElementIC::disable(VMFrame &f, const char *reason)
2476 : {
2477 1902 : slowCallPatched = true;
2478 1902 : VoidStub stub = APPLY_STRICTNESS(stubs::SetElem, strictMode);
2479 1902 : BaseIC::disable(f, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
2480 1902 : return Lookup_Uncacheable;
2481 : }
2482 :
2483 : LookupStatus
2484 0 : SetElementIC::error(JSContext *cx)
2485 : {
2486 0 : return Lookup_Error;
2487 : }
2488 :
2489 : void
2490 0 : SetElementIC::purge(Repatcher &repatcher)
2491 : {
2492 : // Repatch the inline jumps.
2493 0 : if (inlineShapeGuardPatched)
2494 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2495 0 : if (inlineHoleGuardPatched)
2496 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
2497 :
2498 0 : if (slowCallPatched) {
2499 0 : void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
2500 0 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2501 : }
2502 :
2503 0 : reset();
2504 0 : }
2505 :
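     : /*
     :  * Attach a stub for a write exactly at a dense array's initialized
     :  * length: check that no prototype has indexed properties and that the
     :  * index is within capacity, then bump the initialized length (and the
     :  * length, if needed) and store the value.
     :  */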
2506 : LookupStatus
2507 547 : SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
2508 : {
2509 547 : JSContext *cx = f.cx;
2510 :
2511 547 : if (keyval < 0)
2512 4 : return disable(f, "negative key index");
2513 :
2514 : // We may have failed a capacity check instead of a dense array check.
2515 : // However, we should still build the IC in this case, since it could
2516 : // be in a loop that is filling in the array.
2517 :
2518 543 : if (js_PrototypeHasIndexedProperties(cx, obj))
2519 4 : return disable(f, "prototype has indexed properties");
2520 :
2521 1078 : Assembler masm;
2522 :
2523 1078 : Vector<Jump, 8> fails(cx);
2524 :
2525 539 : if (!GeneratePrototypeGuards(cx, fails, masm, obj, NULL, objReg, objReg))
2526 0 : return error(cx);
2527 :
2528 : // Test for indexed properties in Array.prototype. We test each shape
2529 : // along the proto chain. This affords us two optimizations:
2530 : // 1) Loading the prototype can be avoided because the shape would change;
2531 : // instead we can bake in their identities.
2532 : // 2) We only have to test the shape, rather than INDEXED.
2533 1613 : for (JSObject *pobj = obj->getProto(); pobj; pobj = pobj->getProto()) {
2534 1074 : if (!pobj->isNative())
2535 0 : return disable(f, "non-native array prototype");
2536 1074 : masm.move(ImmPtr(pobj), objReg);
2537 1074 : Jump j = masm.guardShape(objReg, pobj);
2538 1074 : if (!fails.append(j))
2539 0 : return error(cx);
2540 : }
2541 :
2542 : // Restore |obj|.
2543 539 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2544 :
2545 : // Load the elements.
2546 539 : masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), objReg);
2547 :
2548 539 : Int32Key key = hasConstantKey ? Int32Key::FromConstant(keyValue) : Int32Key::FromRegister(keyReg);
2549 :
2550 : // Guard that the initialized length is being updated exactly.
2551 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
2552 539 : objReg, key, Assembler::NotEqual));
2553 :
2554 : // Check the array capacity.
2555 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfCapacity(),
2556 539 : objReg, key, Assembler::BelowOrEqual));
2557 :
2558 539 : masm.bumpKey(key, 1);
2559 :
2560 : // Update the length and initialized length.
2561 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfInitializedLength()));
2562 : Jump lengthGuard = masm.guardArrayExtent(ObjectElements::offsetOfLength(),
2563 539 : objReg, key, Assembler::AboveOrEqual);
2564 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfLength()));
2565 539 : lengthGuard.linkTo(masm.label(), &masm);
2566 :
2567 539 : masm.bumpKey(key, -1);
2568 :
2569 : // Store the value back.
2570 539 : if (hasConstantKey) {
2571 55 : Address slot(objReg, keyValue * sizeof(Value));
2572 55 : masm.storeValue(vr, slot);
2573 : } else {
2574 484 : BaseIndex slot(objReg, keyReg, Assembler::JSVAL_SCALE);
2575 484 : masm.storeValue(vr, slot);
2576 : }
2577 :
2578 539 : Jump done = masm.jump();
2579 :
2580 539 : JS_ASSERT(!execPool);
2581 539 : JS_ASSERT(!inlineHoleGuardPatched);
2582 :
2583 1078 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2584 539 : execPool = buffer.init(cx);
2585 539 : if (!execPool)
2586 0 : return error(cx);
2587 :
2588 539 : if (!buffer.verifyRange(f.chunk()))
2589 0 : return disable(f, "code memory is out of range");
2590 :
2591 : // Patch all guards.
2592 3216 : for (size_t i = 0; i < fails.length(); i++)
2593 2677 : buffer.link(fails[i], slowPathStart);
2594 539 : buffer.link(done, fastPathRejoin);
2595 :
2596 539 : CodeLocationLabel cs = buffer.finalize(f);
2597 539 : JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
2598 :
2599 1078 : Repatcher repatcher(f.chunk());
2600 539 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
2601 539 : inlineHoleGuardPatched = true;
2602 :
2603 539 : disable(f, "generated dense array hole stub");
2604 :
2605 539 : return Lookup_Cacheable;
2606 : }
2607 :
2608 : #if defined JS_METHODJIT_TYPED_ARRAY
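     : /*
     :  * Attach a stub for an int32-keyed store to a typed array: guard on the
     :  * array's shape, bounds-check the index, and write through the data
     :  * vector. Out-of-bounds stores are simply ignored.
     :  */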
2609 : LookupStatus
2610 640 : SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
2611 : {
2612 : // Right now, only one shape guard extension is supported.
2613 640 : JS_ASSERT(!inlineShapeGuardPatched);
2614 :
2615 1280 : Assembler masm;
2616 640 : JSContext *cx = f.cx;
2617 :
2618 : // Restore |obj|.
2619 640 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2620 :
2621 : // Guard on this typed array's shape.
2622 640 : Jump shapeGuard = masm.guardShape(objReg, obj);
2623 :
2624 : // Bounds check.
2625 640 : Jump outOfBounds;
2626 640 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2627 640 : if (hasConstantKey)
2628 149 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(keyValue));
2629 : else
2630 491 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, keyReg);
2631 :
2632 : // Load the array's packed data vector.
2633 640 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2634 :
2635 640 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2636 640 : if (!masm.supportsFloatingPoint() &&
2637 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2638 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64))
2639 : {
2640 0 : return disable(f, "fpu not supported");
2641 : }
2642 :
2643 640 : int shift = js::TypedArray::slotWidth(obj);
2644 640 : if (hasConstantKey) {
2645 149 : Address addr(objReg, keyValue * shift);
2646 149 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2647 0 : return error(cx);
2648 : } else {
2649 491 : Assembler::Scale scale = Assembler::TimesOne;
2650 491 : switch (shift) {
2651 : case 2:
2652 104 : scale = Assembler::TimesTwo;
2653 104 : break;
2654 : case 4:
2655 148 : scale = Assembler::TimesFour;
2656 148 : break;
2657 : case 8:
2658 32 : scale = Assembler::TimesEight;
2659 32 : break;
2660 : }
2661 491 : BaseIndex addr(objReg, keyReg, scale);
2662 491 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2663 0 : return error(cx);
2664 : }
2665 :
2666 640 : Jump done = masm.jump();
2667 :
2668 : // The stub does not rely on any pointers or numbers that could be ruined
2669 : // by a GC or a shape-regenerating GC. We let this stub live for the lifetime
2670 : // of the script.
2671 640 : JS_ASSERT(!execPool);
2672 1280 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2673 640 : execPool = buffer.init(cx);
2674 640 : if (!execPool)
2675 0 : return error(cx);
2676 :
2677 640 : if (!buffer.verifyRange(f.chunk()))
2678 0 : return disable(f, "code memory is out of range");
2679 :
2680 : // Note that the out-of-bounds path simply does nothing.
2681 640 : buffer.link(shapeGuard, slowPathStart);
2682 640 : buffer.link(outOfBounds, fastPathRejoin);
2683 640 : buffer.link(done, fastPathRejoin);
2684 640 : masm.finalize(buffer);
2685 :
2686 640 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2687 640 : JaegerSpew(JSpew_PICs, "generated setelem typed array stub at %p\n", cs.executableAddress());
2688 :
2689 1280 : Repatcher repatcher(f.chunk());
2690 640 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2691 640 : inlineShapeGuardPatched = true;
2692 :
2693 640 : stubsGenerated++;
2694 :
2695 : // In the future, it might make sense to attach multiple typed array stubs.
2696 : // For simplicity, they are currently monomorphic.
2697 640 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2698 0 : disable(f, "max stubs reached");
2699 :
2700 640 : disable(f, "generated typed array stub");
2701 :
2702 640 : return Lookup_Cacheable;
2703 : }
2704 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2705 :
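     : /* Choose a SETELEM stub: dense array hole append or typed array store. */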
2706 : LookupStatus
2707 1902 : SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
2708 : {
2709 1902 : if (!objval.isObject())
2710 0 : return disable(f, "primitive lval");
2711 1902 : if (!idval.isInt32())
2712 183 : return disable(f, "non-int32 key");
2713 :
2714 1719 : JSObject *obj = &objval.toObject();
2715 1719 : int32_t key = idval.toInt32();
2716 :
2717 1719 : if (obj->isDenseArray())
2718 547 : return attachHoleStub(f, obj, key);
2719 :
2720 : #if defined JS_METHODJIT_TYPED_ARRAY
2721 : /* Not attaching typed array stubs with linear scan allocator, see GetElementIC. */
2722 1172 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2723 640 : return attachTypedArray(f, obj, key);
2724 : #endif
2725 :
2726 532 : return disable(f, "unsupported object type");
2727 : }
2728 :
2729 : bool
2730 4174 : SetElementIC::shouldUpdate(JSContext *cx)
2731 : {
2732 4174 : if (!hit) {
2733 2272 : hit = true;
2734 2272 : spew(cx, "ignored", "first hit");
2735 2272 : return false;
2736 : }
2737 : #ifdef JSGC_INCREMENTAL_MJ
2738 1902 : JS_ASSERT(!cx->compartment->needsBarrier());
2739 : #endif
2740 1902 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2741 1902 : return true;
2742 : }
2743 :
2744 : template<JSBool strict>
2745 : void JS_FASTCALL
2746 : ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
2747 : {
2748 4174 : JSContext *cx = f.cx;
2749 :
2750 4174 : if (ic->shouldUpdate(cx)) {
2751 1902 : LookupStatus status = ic->update(f, f.regs.sp[-3], f.regs.sp[-2]);
2752 1902 : if (status == Lookup_Error)
2753 0 : THROW();
2754 : }
2755 :
2756 4174 : stubs::SetElem<strict>(f);
2757 : }
2758 :
2759 : template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
2760 : template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
2761 :
2762 : #endif /* JS_POLYIC */
2763 :
|