1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : * David Mandelin <dmandelin@mozilla.com>
26 : * Jan de Mooij <jandemooij@gmail.com>
27 : *
28 : * Alternatively, the contents of this file may be used under the terms of
29 : * either of the GNU General Public License Version 2 or later (the "GPL"),
30 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 : * in which case the provisions of the GPL or the LGPL are applicable instead
32 : * of those above. If you wish to allow use of your version of this file only
33 : * under the terms of either the GPL or the LGPL, and not to allow others to
34 : * use your version of this file under the terms of the MPL, indicate your
35 : * decision by deleting the provisions above and replace them with the notice
36 : * and other provisions required by the GPL or the LGPL. If you do not delete
37 : * the provisions above, a recipient may use your version of this file under
38 : * the terms of any one of the MPL, the GPL or the LGPL.
39 : *
40 : * ***** END LICENSE BLOCK ***** */
41 :
42 : #include "MethodJIT.h"
43 : #include "jsnum.h"
44 : #include "jsbool.h"
45 : #include "jsiter.h"
46 : #include "Compiler.h"
47 : #include "StubCalls.h"
48 : #include "MonoIC.h"
49 : #include "PolyIC.h"
50 : #include "ICChecker.h"
51 : #include "Retcon.h"
52 : #include "assembler/jit/ExecutableAllocator.h"
53 : #include "assembler/assembler/LinkBuffer.h"
54 : #include "FrameState-inl.h"
55 : #include "jsobjinlines.h"
56 : #include "jsscriptinlines.h"
57 : #include "InlineFrameAssembler.h"
58 : #include "jscompartment.h"
59 : #include "jsopcodeinlines.h"
60 :
61 : #include "builtin/RegExp.h"
62 : #include "frontend/BytecodeEmitter.h"
63 : #include "vm/RegExpStatics.h"
64 : #include "vm/RegExpObject.h"
65 :
66 : #include "jsautooplen.h"
67 : #include "jstypedarrayinlines.h"
68 : #include "vm/RegExpObject-inl.h"
69 :
70 : using namespace js;
71 : using namespace js::mjit;
72 : #if defined(JS_POLYIC) || defined(JS_MONOIC)
73 : using namespace js::mjit::ic;
74 : #endif
75 : using namespace js::analyze;
76 :
77 : #define RETURN_IF_OOM(retval) \
78 : JS_BEGIN_MACRO \
79 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
80 : return retval; \
81 : JS_END_MACRO
82 :
83 : /*
84 : * Number of times a script must be called or had a backedge before we try to
85 : * inline its calls.
86 : */
87 : static const size_t USES_BEFORE_INLINING = 10000;
88 :
89 134254 : mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript,
90 : unsigned chunkIndex, bool isConstructing)
91 : : BaseCompiler(cx),
92 : outerScript(outerScript),
93 : chunkIndex(chunkIndex),
94 : isConstructing(isConstructing),
95 134254 : outerChunk(outerJIT()->chunkDescriptor(chunkIndex)),
96 : ssa(cx, outerScript),
97 134254 : globalObj(outerScript->hasGlobal() ? outerScript->global() : NULL),
98 91814 : globalSlots(globalObj ? globalObj->getRawSlots() : NULL),
99 134254 : frame(cx, *thisFromCtor(), masm, stubcc),
100 : a(NULL), outer(NULL), script(NULL), PC(NULL), loop(NULL),
101 134254 : inlineFrames(CompilerAllocPolicy(cx, *thisFromCtor())),
102 134254 : branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
103 : #if defined JS_MONOIC
104 134254 : getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
105 134254 : setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
106 134254 : callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
107 134254 : equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
108 : #endif
109 : #if defined JS_POLYIC
110 134254 : pics(CompilerAllocPolicy(cx, *thisFromCtor())),
111 134254 : getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
112 134254 : setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
113 : #endif
114 134254 : callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
115 134254 : callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
116 134254 : doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
117 134254 : fixedIntToDoubleEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
118 134254 : fixedDoubleToAnyEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
119 134254 : jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
120 134254 : jumpTableEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
121 134254 : loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
122 134254 : chunkEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
123 134254 : stubcc(cx, *thisFromCtor(), frame),
124 134254 : debugMode_(cx->compartment->debugMode()),
125 : inlining_(false),
126 : hasGlobalReallocation(false),
127 : oomInVector(false),
128 : overflowICSpace(false),
129 : gcNumber(cx->runtime->gcNumber),
130 : applyTricks(NoApplyTricks),
131 3313910 : pcLengths(NULL)
132 : {
133 : /* Once a script starts getting really hot we will inline calls in it. */
134 186686 : if (!debugMode() && cx->typeInferenceEnabled() && globalObj &&
135 26315 : (outerScript->getUseCount() >= USES_BEFORE_INLINING ||
136 26117 : cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS))) {
137 24976 : inlining_ = true;
138 : }
139 134254 : }
140 :
/*
 * Top-level compile entry point. Runs performCompilation() and, on any
 * failure other than Compile_Retry, marks the script permanently unjittable
 * (for this constructing/normal mode) and flags its function's type object
 * as uninlineable so other compilations stop trying to inline it.
 */
141 : CompileStatus
142 134254 : mjit::Compiler::compile()
143 : {
144 134254 : JS_ASSERT(!outerChunkRef().chunk);
145 :
146 : void **checkAddr = isConstructing
147 : ? &outerScript->jitArityCheckCtor
148 134254 : : &outerScript->jitArityCheckNormal;
149 :
150 134254 : CompileStatus status = performCompilation();
151 134254 : if (status != Compile_Okay && status != Compile_Retry) {
152 5205 : *checkAddr = JS_UNJITTABLE_SCRIPT; // don't attempt to jit this script again
153 5205 : if (outerScript->function()) {
154 4560 : outerScript->uninlineable = true;
155 4560 : types::MarkTypeObjectFlags(cx, outerScript->function(),
156 4560 : types::OBJECT_FLAG_UNINLINEABLE);
157 : }
158 : }
159 :
160 134254 : return status;
161 : }
162 :
/*
 * Verify a script is analyzable/compileable before (re)compiling it.
 * Returns Compile_Abort for scripts we will never compile (cleared global,
 * uncompileable opcodes, failed analysis), Compile_Error on OOM-style
 * failures from running the analysis or inference, Compile_Okay otherwise.
 */
163 : CompileStatus
164 141672 : mjit::Compiler::checkAnalysis(JSScript *script)
165 : {
166 141672 : if (script->hasClearedGlobal()) {
167 0 : JaegerSpew(JSpew_Abort, "script has a cleared global\n");
168 0 : return Compile_Abort;
169 : }
170 :
171 141672 : if (!script->ensureRanAnalysis(cx, NULL))
172 0 : return Compile_Error;
173 :
174 141672 : if (!script->analysis()->compileable()) {
175 5205 : JaegerSpew(JSpew_Abort, "script has uncompileable opcodes\n");
176 5205 : return Compile_Abort;
177 : }
178 :
179 136467 : if (cx->typeInferenceEnabled() && !script->ensureRanInference(cx))
180 0 : return Compile_Error;
181 :
182 136467 : ScriptAnalysis *analysis = script->analysis();
183 136467 : analysis->assertMatchingDebugMode();
184 136467 : if (analysis->failed()) {
185 0 : JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
186 0 : return Compile_Abort;
187 : }
188 :
189 136467 : return Compile_Okay;
190 : }
191 :
/*
 * Register 'script' as an inline frame in the cross-script SSA, rooted at
 * call site 'parentpc' in frame 'parent', then recursively scan the new
 * frame for further calls to inline. 'depth' is the stack depth at which
 * the inlined frame's slots begin.
 */
192 : CompileStatus
193 2894 : mjit::Compiler::addInlineFrame(JSScript *script, uint32_t depth,
194 : uint32_t parent, jsbytecode *parentpc)
195 : {
196 2894 : JS_ASSERT(inlining());
197 :
198 2894 : CompileStatus status = checkAnalysis(script);
199 2894 : if (status != Compile_Okay)
200 0 : return status;
201 :
202 2894 : if (!ssa.addInlineFrame(script, depth, parent, parentpc))
203 0 : return Compile_Error;
204 :
205 2894 : uint32_t index = ssa.iterFrame(ssa.numFrames() - 1).index;
206 2894 : return scanInlineCalls(index, depth); // scan the newly added frame too
207 : }
208 :
/*
 * Walk the bytecode of SSA frame 'index' (restricted to the current chunk
 * for the outer frame) looking for JSOP_CALL sites whose full callee set is
 * known from type inference and where every callee can be inlined. For each
 * such site, freeze the callee type set and add each callee as an inline
 * frame via addInlineFrame(). 'depth' is the stack depth of this frame's
 * slots; it bounds how deep nested inlining may grow.
 *
 * A site is rejected (the loop 'continue's or sets okay=false) when:
 *  - it is monitored or has type barriers,
 *  - the callee set is not all objects, or is too large (INLINE_SITE_LIMIT),
 *  - a callee is a type object, non-function, or not interpreted,
 *  - a callee script is unanalyzed, differently-scoped, differently-strict,
 *    recursive with a frame already on the inline chain, too deep for the
 *    stack limit, not inlineable for this argc, marked UNINLINEABLE, or
 *    uses a 'this' value that may need wrapping.
 */
209 : CompileStatus
210 27463 : mjit::Compiler::scanInlineCalls(uint32_t index, uint32_t depth)
211 : {
212 : /* Maximum number of calls we will inline at the same site. */
213 : static const uint32_t INLINE_SITE_LIMIT = 5;
214 :
215 27463 : JS_ASSERT(inlining() && globalObj);
216 :
217 : /* Not inlining yet from 'new' scripts. */
218 27463 : if (isConstructing)
219 293 : return Compile_Okay;
220 :
221 27170 : JSScript *script = ssa.getFrame(index).script;
222 27170 : ScriptAnalysis *analysis = script->analysis();
223 :
224 : /* Don't inline from functions which could have a non-global scope object. */
225 141591 : if (!script->hasGlobal() ||
226 27169 : script->global() != globalObj ||
227 43626 : (script->function() && script->function()->getParent() != globalObj) ||
228 43626 : (script->function() && script->function()->isHeavyweight()) ||
229 : script->isActiveEval) {
230 4312 : return Compile_Okay;
231 : }
232 :
233 22858 : uint32_t nextOffset = 0;
234 22858 : uint32_t lastOffset = script->length;
235 :
236 22858 : if (index == CrossScriptSSA::OUTER_FRAME) {
237 : /* Only scan the chunk being compiled, not the whole outer script. */
238 19965 : nextOffset = outerChunk.begin;
239 19965 : lastOffset = outerChunk.end;
240 : }
241 :
242 1009531 : while (nextOffset < lastOffset) {
243 963832 : uint32_t offset = nextOffset;
244 963832 : jsbytecode *pc = script->code + offset;
245 963832 : nextOffset = offset + GetBytecodeLength(pc);
246 :
247 963832 : Bytecode *code = analysis->maybeCode(pc);
248 963832 : if (!code) // skip unreachable bytecode
249 7606 : continue;
250 :
251 : /* :XXX: Not yet inlining 'new' calls. */
252 956226 : if (JSOp(*pc) != JSOP_CALL)
253 905615 : continue;
254 :
255 : /* Not inlining at monitored call sites or those with type barriers. */
256 50611 : if (code->monitoredTypes || code->monitoredTypesReturn || analysis->typeBarriers(cx, pc) != NULL)
257 21486 : continue;
258 :
259 29125 : uint32_t argc = GET_ARGC(pc);
260 29125 : types::TypeSet *calleeTypes = analysis->poppedTypes(pc, argc + 1);
261 :
262 29125 : if (calleeTypes->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT)
263 23065 : continue;
264 :
265 6060 : if (calleeTypes->getObjectCount() >= INLINE_SITE_LIMIT)
266 18 : continue;
267 :
268 : /*
269 : * Compute the maximum height we can grow the stack for inlined frames.
270 : * We always reserve space for loop temporaries, for an extra stack
271 : * frame pushed when making a call from the deepest inlined frame, and
272 : * for the temporary slot used by type barriers.
273 : */
274 : uint32_t stackLimit = outerScript->nslots + StackSpace::STACK_JIT_EXTRA
275 6042 : - VALUES_PER_STACK_FRAME - FrameState::TEMPORARY_LIMIT - 1;
276 :
277 : /* Compute the depth of any frames inlined at this site. */
278 6042 : uint32_t nextDepth = depth + VALUES_PER_STACK_FRAME + script->nfixed + code->stackDepth;
279 :
280 : /*
281 : * Scan each of the possible callees for other conditions precluding
282 : * inlining. We only inline at a call site if all callees are inlineable.
283 : */
284 6042 : unsigned count = calleeTypes->getObjectCount();
285 6042 : bool okay = true;
286 8958 : for (unsigned i = 0; i < count; i++) {
287 6228 : if (calleeTypes->getTypeObject(i) != NULL) {
288 837 : okay = false; // callee known only as a type object, not a singleton
289 837 : break;
290 : }
291 :
292 5391 : JSObject *obj = calleeTypes->getSingleObject(i);
293 5391 : if (!obj)
294 0 : continue;
295 :
296 5391 : if (!obj->isFunction()) {
297 3 : okay = false;
298 3 : break;
299 : }
300 :
301 5388 : JSFunction *fun = obj->toFunction();
302 5388 : if (!fun->isInterpreted()) { // can't inline natives
303 0 : okay = false;
304 0 : break;
305 : }
306 5388 : JSScript *script = fun->script();
307 :
308 : /*
309 : * Don't inline calls to scripts which haven't been analyzed.
310 : * We need to analyze the inlined scripts to compile them, and
311 : * doing so can change type information we have queried already
312 : * in making inlining decisions.
313 : */
314 5388 : if (!script->hasAnalysis() || !script->analysis()->ranInference()) {
315 319 : okay = false;
316 319 : break;
317 : }
318 :
319 : /*
320 : * The outer and inner scripts must have the same scope. This only
321 : * allows us to inline calls between non-inner functions. Also
322 : * check for consistent strictness between the functions.
323 : */
324 10138 : if (!globalObj ||
325 5069 : fun->getParent() != globalObj ||
326 : outerScript->strictModeCode != script->strictModeCode) {
327 79 : okay = false;
328 79 : break;
329 : }
330 :
331 : /* We can't cope with inlining recursive functions yet. */
332 4990 : uint32_t nindex = index;
333 17642 : while (nindex != CrossScriptSSA::INVALID_FRAME) {
334 7662 : if (ssa.getFrame(nindex).script == script)
335 466 : okay = false;
336 7662 : nindex = ssa.getFrame(nindex).parent;
337 : }
338 4990 : if (!okay)
339 466 : break;
340 :
341 : /* Watch for excessively deep nesting of inlined frames. */
342 4524 : if (nextDepth + script->nslots >= stackLimit) {
343 0 : okay = false;
344 0 : break;
345 : }
346 :
347 4524 : if (!script->types || !script->types->hasScope()) {
348 0 : okay = false;
349 0 : break;
350 : }
351 :
352 4524 : CompileStatus status = checkAnalysis(script);
353 4524 : if (status != Compile_Okay)
354 17 : return status; // hard failure, not just "don't inline here"
355 :
356 4507 : if (!script->analysis()->inlineable(argc)) {
357 1436 : okay = false;
358 1436 : break;
359 : }
360 :
361 3071 : if (types::TypeSet::HasObjectFlags(cx, fun->getType(cx),
362 3071 : types::OBJECT_FLAG_UNINLINEABLE)) {
363 149 : okay = false;
364 149 : break;
365 : }
366 :
367 : /*
368 : * Don't inline scripts which use 'this' if it is possible they
369 : * could be called with a 'this' value requiring wrapping. During
370 : * inlining we do not want to modify frame entries belonging to the
371 : * caller.
372 : */
373 3978 : if (script->analysis()->usesThisValue() &&
374 1056 : types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT) {
375 6 : okay = false;
376 6 : break;
377 : }
378 : }
379 6025 : if (!okay)
380 3295 : continue;
381 :
382 : /* All callees passed; depend on this exact callee set from now on. */
383 2730 : calleeTypes->addFreeze(cx);
384 :
385 : /*
386 : * Add the inline frames to the cross script SSA. We will pick these
387 : * back up when compiling the call site.
388 : */
389 5624 : for (unsigned i = 0; i < count; i++) {
390 2894 : JSObject *obj = calleeTypes->getSingleObject(i);
391 2894 : if (!obj)
392 0 : continue;
393 :
394 2894 : JSFunction *fun = obj->toFunction();
395 2894 : JSScript *script = fun->script();
396 :
397 2894 : CompileStatus status = addInlineFrame(script, nextDepth, index, pc);
398 2894 : if (status != Compile_Okay)
399 0 : return status;
400 : }
401 : }
402 :
403 22841 : return Compile_Okay;
404 : }
403 :
/*
 * Begin compiling 'script' as a new active frame (the outer frame when no
 * frame is active yet, otherwise an inline frame at the current PC).
 * Allocates the ActiveFrame, its jump map, and (with type inference) the
 * per-slot variable types; pushes the frame onto the FrameState; and points
 * this->script/analysis/PC/a at the new frame. Returns Compile_Error on OOM.
 */
404 : CompileStatus
405 131933 : mjit::Compiler::pushActiveFrame(JSScript *script, uint32_t argc)
406 : {
407 131933 : if (cx->runtime->profilingScripts && !script->pcCounters)
408 0 : script->initCounts(cx);
409 :
410 131933 : ActiveFrame *newa = OffTheBooks::new_<ActiveFrame>(cx);
411 131933 : if (!newa) {
412 0 : js_ReportOutOfMemory(cx);
413 0 : return Compile_Error;
414 : }
415 :
416 131933 : newa->parent = a;
417 131933 : if (a)
418 2884 : newa->parentPC = PC; // call site in the caller, for inline frames
419 131933 : newa->script = script;
420 131933 : newa->mainCodeStart = masm.size();
421 131933 : newa->stubCodeStart = stubcc.size();
422 :
423 131933 : if (outer) {
424 2884 : newa->inlineIndex = uint32_t(inlineFrames.length());
425 2884 : inlineFrames.append(newa);
426 : } else {
427 129049 : newa->inlineIndex = CrossScriptSSA::OUTER_FRAME;
428 129049 : outer = newa;
429 : }
430 131933 : JS_ASSERT(ssa.getFrame(newa->inlineIndex).script == script);
431 :
432 131933 : newa->inlinePCOffset = ssa.frameLength(newa->inlineIndex);
433 :
434 131933 : ScriptAnalysis *newAnalysis = script->analysis();
435 :
436 : #ifdef JS_METHODJIT_SPEW
437 : /* Dump liveness of non-escaping args/locals for register allocation spew. */
438 131933 : if (cx->typeInferenceEnabled() && IsJaegerSpewChannelActive(JSpew_Regalloc)) {
439 0 : unsigned nargs = script->function() ? script->function()->nargs : 0;
440 0 : for (unsigned i = 0; i < nargs; i++) {
441 0 : uint32_t slot = ArgSlot(i);
442 0 : if (!newAnalysis->slotEscapes(slot)) {
443 0 : JaegerSpew(JSpew_Regalloc, "Argument %u:", i);
444 0 : newAnalysis->liveness(slot).print();
445 : }
446 : }
447 0 : for (unsigned i = 0; i < script->nfixed; i++) {
448 0 : uint32_t slot = LocalSlot(script, i);
449 0 : if (!newAnalysis->slotEscapes(slot)) {
450 0 : JaegerSpew(JSpew_Regalloc, "Local %u:", i);
451 0 : newAnalysis->liveness(slot).print();
452 : }
453 : }
454 : }
455 : #endif
456 :
457 131933 : if (!frame.pushActiveFrame(script, argc)) {
458 0 : js_ReportOutOfMemory(cx);
459 0 : return Compile_Error;
460 : }
461 :
462 131933 : newa->jumpMap = (Label *)OffTheBooks::malloc_(sizeof(Label) * script->length);
463 131933 : if (!newa->jumpMap) {
464 0 : js_ReportOutOfMemory(cx);
465 0 : return Compile_Error;
466 : }
467 : #ifdef DEBUG
468 : /* Poison the jump map so uninitialized entries are detectable. */
469 33544939 : for (uint32_t i = 0; i < script->length; i++)
470 33413006 : newa->jumpMap[i] = Label();
471 : #endif
472 :
473 131933 : if (cx->typeInferenceEnabled()) {
474 59745 : CompileStatus status = prepareInferenceTypes(script, newa);
475 59745 : if (status != Compile_Okay)
476 0 : return status;
477 : }
478 :
479 131933 : this->script = script;
480 131933 : this->analysis = newAnalysis;
481 131933 : this->PC = script->code;
482 131933 : this->a = newa;
483 :
484 131933 : return Compile_Okay;
485 : }
484 :
/*
 * Finish compiling an inline frame: record its code-size end points and
 * restore the compiler's script/analysis/PC/a to the parent frame.
 * Only valid for inline frames (asserts a->parent).
 */
485 : void
486 2884 : mjit::Compiler::popActiveFrame()
487 : {
488 2884 : JS_ASSERT(a->parent);
489 2884 : a->mainCodeEnd = masm.size();
490 2884 : a->stubCodeEnd = stubcc.size();
491 2884 : this->PC = a->parentPC;
492 2884 : this->a = (ActiveFrame *) a->parent;
493 2884 : this->script = a->script; // note: 'a' was just switched to the parent
494 2884 : this->analysis = this->script->analysis();
495 :
496 2884 : frame.popActiveFrame();
497 2884 : }
498 :
499 : #define CHECK_STATUS(expr) \
500 : JS_BEGIN_MACRO \
501 : CompileStatus status_ = (expr); \
502 : if (status_ != Compile_Okay) { \
503 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
504 : js_ReportOutOfMemory(cx); \
505 : return status_; \
506 : } \
507 : JS_END_MACRO
508 :
/*
 * Drive the whole compilation: check analysis, scan for inlineable calls,
 * push the outer frame, then generate the prologue (chunk 0 only), the
 * method body, the epilogue (last chunk only), and finally link everything
 * with finishThisUp(). CHECK_STATUS propagates failures, reporting OOM when
 * an assembler or vector ran out of memory.
 */
509 : CompileStatus
510 134254 : mjit::Compiler::performCompilation()
511 : {
512 : JaegerSpew(JSpew_Scripts,
513 : "compiling script (file \"%s\") (line \"%d\") (length \"%d\") (chunk \"%d\")\n",
514 134254 : outerScript->filename, outerScript->lineno, outerScript->length, chunkIndex);
515 :
516 134254 : if (inlining()) {
517 : JaegerSpew(JSpew_Inlining,
518 : "inlining calls in script (file \"%s\") (line \"%d\")\n",
519 24976 : outerScript->filename, outerScript->lineno);
520 : }
521 :
522 : #ifdef JS_METHODJIT_SPEW
523 : Profiler prof;
524 134254 : prof.start();
525 : #endif
526 :
527 : #ifdef JS_METHODJIT
528 134254 : outerScript->debugMode = debugMode();
529 : #endif
530 :
531 134254 : JS_ASSERT(cx->compartment->activeInference);
532 :
533 : {
534 : /* Keep type information stable for the duration of the compilation. */
535 268508 : types::AutoEnterCompilation enter(cx, outerScript, isConstructing, chunkIndex);
536 :
537 134254 : CHECK_STATUS(checkAnalysis(outerScript));
538 129066 : if (inlining())
539 24569 : CHECK_STATUS(scanInlineCalls(CrossScriptSSA::OUTER_FRAME, 0));
540 129049 : CHECK_STATUS(pushActiveFrame(outerScript, 0));
541 :
542 : /* Allocate per-pc native-length records when profiling/probing. */
543 129049 : if (outerScript->pcCounters || Probes::wantNativeAddressInfo(cx)) {
544 0 : size_t length = ssa.frameLength(ssa.numFrames() - 1);
545 0 : pcLengths = (PCLengthEntry *) OffTheBooks::calloc_(sizeof(pcLengths[0]) * length);
546 0 : if (!pcLengths)
547 0 : return Compile_Error;
548 : }
549 :
550 129049 : if (chunkIndex == 0)
551 127892 : CHECK_STATUS(generatePrologue());
552 129049 : CHECK_STATUS(generateMethod());
553 129015 : if (outerJIT() && chunkIndex == outerJIT()->nchunks - 1)
554 127765 : CHECK_STATUS(generateEpilogue());
555 129015 : CHECK_STATUS(finishThisUp());
556 : }
557 :
558 : #ifdef JS_METHODJIT_SPEW
559 129015 : prof.stop();
560 129015 : JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
561 : #endif
562 :
563 : JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%u\")\n",
564 129015 : outerChunkRef().chunk->code.m_code.executableAddress(),
565 258030 : unsigned(outerChunkRef().chunk->code.m_size));
566 :
567 129015 : return Compile_Okay;
568 : }
567 :
568 : #undef CHECK_STATUS
569 :
/* Default-construct a JSActiveFrame with no parent/script and an invalid inline index. */
570 131933 : mjit::JSActiveFrame::JSActiveFrame()
571 131933 : : parent(NULL), parentPC(NULL), script(NULL), inlineIndex(UINT32_MAX)
572 : {
573 131933 : }
574 :
/* Default-construct an ActiveFrame; jumpMap/varTypes are allocated later in pushActiveFrame. */
575 131933 : mjit::Compiler::ActiveFrame::ActiveFrame(JSContext *cx)
576 : : jumpMap(NULL),
577 : varTypes(NULL), needReturnValue(false),
578 : syncReturnValue(false), returnValueDouble(false), returnSet(false),
579 131933 : returnEntry(NULL), returnJumps(NULL), exitState(NULL)
580 131933 : {}
581 :
/* Release the jump map and (if allocated) the per-slot inference types. */
582 131933 : mjit::Compiler::ActiveFrame::~ActiveFrame()
583 : {
584 131933 : js::Foreground::free_(jumpMap);
585 131933 : if (varTypes)
586 59745 : js::Foreground::free_(varTypes);
587 131933 : }
588 :
/* Destroy the outer frame, all inline frames, and any remaining loop states. */
589 268508 : mjit::Compiler::~Compiler()
590 : {
591 134254 : if (outer)
592 129049 : cx->delete_(outer);
593 137138 : for (unsigned i = 0; i < inlineFrames.length(); i++)
594 2884 : cx->delete_(inlineFrames[i]);
595 268520 : while (loop) { // walk and free the chain of nested LoopStates
596 12 : LoopState *nloop = loop->outer;
597 12 : cx->delete_(loop);
598 12 : loop = nloop;
599 : }
600 134254 : }
601 :
/*
 * Allocate and seed a->varTypes with the inferred type set of every arg and
 * local slot of 'script', so the compiler can track each variable's SSA
 * type as it walks the bytecode (see the invariant described below).
 * Returns Compile_Error on OOM.
 */
602 : CompileStatus
603 59745 : mjit::Compiler::prepareInferenceTypes(JSScript *script, ActiveFrame *a)
604 : {
605 : /*
606 : * During our walk of the script, we need to preserve the invariant that at
607 : * join points the in memory type tag is always in sync with the known type
608 : * tag of the variable's SSA value at that join point. In particular, SSA
609 : * values inferred as (int|double) must in fact be doubles, stored either
610 : * in floating point registers or in memory. There is an exception for
611 : * locals whose value is currently dead, whose type might not be synced.
612 : *
613 : * To ensure this, we need to know the SSA values for each variable at each
614 : * join point, which the SSA analysis does not store explicitly. These can
615 : * be recovered, though. During the forward walk, the SSA value of a var
616 : * (and its associated type set) change only when we see an explicit assign
617 : * to the var or get to a join point with a phi node for that var. So we
618 : * can duplicate the effects of that walk here by watching for writes to
619 : * vars (updateVarTypes) and new phi nodes at join points.
620 : *
621 : * When we get to a branch and need to know a variable's value at the
622 : * branch target, we know it will either be a phi node at the target or
623 : * the variable's current value, as no phi node is created at the target
624 : * only if a variable has the same value on all incoming edges.
625 : */
626 :
627 : a->varTypes = (VarType *)
628 59745 : OffTheBooks::calloc_(TotalSlots(script) * sizeof(VarType));
629 59745 : if (!a->varTypes) {
630 0 : js_ReportOutOfMemory(cx);
631 0 : return Compile_Error;
632 : }
633 :
634 : /* Seed each slot with the types inference computed for it. */
635 229525 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
636 169780 : VarType &vt = a->varTypes[slot];
637 169780 : vt.setTypes(types::TypeScript::SlotTypes(script, slot));
638 : }
639 :
640 59745 : return Compile_Okay;
641 : }
641 :
642 : /*
643 : * Number of times a script must be called or have back edges taken before we
644 : * run it in the methodjit. We wait longer if type inference is enabled, to
645 : * allow more gathering of type information and less recompilation.
646 : */
647 : static const size_t USES_BEFORE_COMPILE = 16;
648 : static const size_t INFER_USES_BEFORE_COMPILE = 40;
649 :
650 : /* Target maximum size, in bytecode length, for a compiled chunk of a script. */
651 : static uint32_t CHUNK_LIMIT = 1500;
652 :
/* Override the target chunk size (bytecode length); a zero limit is ignored. */
653 : void
654 27 : mjit::SetChunkLimit(uint32_t limit)
655 : {
656 27 : if (limit)
657 27 : CHUNK_LIMIT = limit;
658 27 : }
659 :
/*
 * Allocate and initialize a JITScript for 'script' (normal or 'new' path).
 * Short scripts (length < CHUNK_LIMIT) or scripts without type inference
 * get a single chunk covering everything; longer scripts are partitioned
 * into chunks along loop back edges and size limits, recording every
 * control-flow edge that crosses a chunk boundary. For each cross-chunk
 * edge a shim stub is generated that calls stubs::CrossChunkShim, so that
 * a jump into a not-yet-compiled chunk can trigger its compilation.
 * Returns NULL on OOM; on success also stores the JITScript into the
 * script's jitNormal/jitCtor slot.
 */
660 : JITScript *
661 114059 : MakeJITScript(JSContext *cx, JSScript *script, bool construct)
662 : {
663 114059 : if (!script->ensureRanAnalysis(cx, NULL))
664 0 : return NULL;
665 :
666 114059 : ScriptAnalysis *analysis = script->analysis();
667 :
668 114059 : JITScript *&location = construct ? script->jitCtor : script->jitNormal;
669 :
670 228118 : Vector<ChunkDescriptor> chunks(cx);
671 228118 : Vector<CrossChunkEdge> edges(cx);
672 :
673 114059 : if (script->length < CHUNK_LIMIT || !cx->typeInferenceEnabled()) {
674 : /* Single chunk covering the entire script. */
675 113825 : ChunkDescriptor desc;
676 113825 : desc.begin = 0;
677 113825 : desc.end = script->length;
678 113825 : if (!chunks.append(desc))
679 0 : return NULL;
680 : } else {
681 234 : if (!script->ensureRanInference(cx))
682 0 : return NULL;
683 :
684 : /* Outgoing edges within the current chunk. */
685 468 : Vector<CrossChunkEdge> currentEdges(cx);
686 234 : uint32_t chunkStart = 0;
687 :
688 234 : unsigned offset, nextOffset = 0;
689 1225536 : while (nextOffset < script->length) {
690 1225068 : offset = nextOffset;
691 :
692 1225068 : jsbytecode *pc = script->code + offset;
693 1225068 : JSOp op = JSOp(*pc);
694 :
695 1225068 : nextOffset = offset + GetBytecodeLength(pc);
696 :
697 1225068 : Bytecode *code = analysis->maybeCode(offset);
698 1225068 : if (!code) // skip unreachable bytecode
699 14352 : continue;
700 :
701 : /* Whether this should be the last opcode in the chunk. */
702 1210716 : bool finishChunk = false;
703 :
704 : /* Keep going, override finishChunk. */
705 1210716 : bool preserveChunk = false;
706 :
707 : /*
708 : * Add an edge for opcodes which perform a branch. Skip LABEL ops,
709 : * which do not actually branch. XXX LABEL should not be JOF_JUMP.
710 : */
711 1210716 : uint32_t type = JOF_TYPE(js_CodeSpec[op].format);
712 1210716 : if (type == JOF_JUMP && op != JSOP_LABEL) {
713 30006 : CrossChunkEdge edge;
714 30006 : edge.source = offset;
715 30006 : edge.target = FollowBranch(cx, script, pc - script->code);
716 30006 : if (edge.target < offset) {
717 : /* Always end chunks after loop back edges. */
718 545 : finishChunk = true;
719 545 : if (edge.target < chunkStart) {
720 : /* Back edge crosses a chunk boundary already emitted. */
721 69 : analysis->getCode(edge.target).safePoint = true;
722 69 : if (!edges.append(edge))
723 0 : return NULL;
724 : }
725 29461 : } else if (edge.target == nextOffset) {
726 : /*
727 : * Override finishChunk for bytecodes which directly
728 : * jump to their fallthrough opcode ('if (x) {}'). This
729 : * creates two CFG edges with the same source/target, which
730 : * will confuse the compiler's edge patching code.
731 : */
732 12 : preserveChunk = true;
733 : } else {
734 29449 : if (!currentEdges.append(edge))
735 0 : return NULL;
736 : }
737 : }
738 :
739 1210716 : if (op == JSOP_TABLESWITCH) {
740 : /* Record the default target and every case target as edges. */
741 8 : jsbytecode *pc2 = pc;
742 8 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
743 8 : pc2 += JUMP_OFFSET_LEN;
744 8 : int32_t low = GET_JUMP_OFFSET(pc2);
745 8 : pc2 += JUMP_OFFSET_LEN;
746 8 : int32_t high = GET_JUMP_OFFSET(pc2);
747 8 : pc2 += JUMP_OFFSET_LEN;
748 :
749 8 : CrossChunkEdge edge;
750 8 : edge.source = offset;
751 8 : edge.target = defaultOffset;
752 8 : if (!currentEdges.append(edge))
753 0 : return NULL;
754 :
755 28 : for (int32_t i = low; i <= high; i++) {
756 20 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
757 20 : if (targetOffset != offset) {
758 : /*
759 : * This can end up inserting duplicate edges, all but
760 : * the first of which will be ignored.
761 : */
762 20 : CrossChunkEdge edge;
763 20 : edge.source = offset;
764 20 : edge.target = targetOffset;
765 20 : if (!currentEdges.append(edge))
766 0 : return NULL;
767 : }
768 20 : pc2 += JUMP_OFFSET_LEN;
769 : }
770 : }
771 :
772 1210716 : if (op == JSOP_LOOKUPSWITCH) {
773 : /* Same for lookup switches: default plus each pair's target. */
774 4 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
775 4 : jsbytecode *pc2 = pc + JUMP_OFFSET_LEN;
776 4 : unsigned npairs = GET_UINT16(pc2);
777 4 : pc2 += UINT16_LEN;
778 :
779 4 : CrossChunkEdge edge;
780 4 : edge.source = offset;
781 4 : edge.target = defaultOffset;
782 4 : if (!currentEdges.append(edge))
783 0 : return NULL;
784 :
785 18 : while (npairs) {
786 10 : pc2 += UINT32_INDEX_LEN;
787 10 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
788 10 : CrossChunkEdge edge;
789 10 : edge.source = offset;
790 10 : edge.target = targetOffset;
791 10 : if (!currentEdges.append(edge))
792 0 : return NULL;
793 10 : pc2 += JUMP_OFFSET_LEN;
794 10 : npairs--;
795 : }
796 : }
797 :
798 1210716 : if (unsigned(offset - chunkStart) > CHUNK_LIMIT)
799 2376 : finishChunk = true;
800 :
801 1210716 : if (nextOffset >= script->length || !analysis->maybeCode(nextOffset)) {
802 : /* Ensure that chunks do not start on unreachable opcodes. */
803 14554 : preserveChunk = true;
804 : } else {
805 : /*
806 : * Start new chunks at the opcode before each loop head.
807 : * This ensures that the initial goto for loops is included in
808 : * the same chunk as the loop itself.
809 : */
810 1196162 : jsbytecode *nextpc = script->code + nextOffset;
811 :
812 : /*
813 : * Don't insert a chunk boundary in the middle of two opcodes
814 : * which may be fused together.
815 : */
816 1196162 : switch (JSOp(*nextpc)) {
817 : case JSOP_POP:
818 : case JSOP_IFNE:
819 : case JSOP_IFEQ:
820 116079 : preserveChunk = true;
821 116079 : break;
822 : default:
823 1080083 : break;
824 : }
825 :
826 1196162 : uint32_t afterOffset = nextOffset + GetBytecodeLength(nextpc);
827 1196162 : if (afterOffset < script->length) {
828 2378107 : if (analysis->maybeCode(afterOffset) &&
829 1181608 : JSOp(script->code[afterOffset]) == JSOP_LOOPHEAD &&
830 543 : analysis->getLoop(afterOffset))
831 : {
832 541 : finishChunk = true;
833 : }
834 : }
835 : }
836 :
837 1210716 : if (finishChunk && !preserveChunk) {
838 3200 : ChunkDescriptor desc;
839 3200 : desc.begin = chunkStart;
840 3200 : desc.end = nextOffset;
841 3200 : if (!chunks.append(desc))
842 0 : return NULL;
843 :
844 : /* Add an edge for fallthrough from this chunk to the next one. */
845 3200 : if (!BytecodeNoFallThrough(op)) {
846 3142 : CrossChunkEdge edge;
847 3142 : edge.source = offset;
848 3142 : edge.target = nextOffset;
849 3142 : analysis->getCode(edge.target).safePoint = true;
850 3142 : if (!edges.append(edge))
851 0 : return NULL;
852 : }
853 :
854 3200 : chunkStart = nextOffset;
855 : /* Keep only edges that leave the finished chunk. */
856 32655 : for (unsigned i = 0; i < currentEdges.length(); i++) {
857 29455 : const CrossChunkEdge &edge = currentEdges[i];
858 29455 : if (edge.target >= nextOffset) {
859 301 : analysis->getCode(edge.target).safePoint = true;
860 301 : if (!edges.append(edge))
861 0 : return NULL;
862 : }
863 : }
864 3200 : currentEdges.clear();
865 : }
866 : }
867 :
868 234 : if (chunkStart != script->length) {
869 : /* Final chunk for the remaining tail of the script. */
870 234 : ChunkDescriptor desc;
871 234 : desc.begin = chunkStart;
872 234 : desc.end = script->length;
873 234 : if (!chunks.append(desc))
874 0 : return NULL;
875 : }
876 : }
877 :
878 : /* The JITScript, its chunk descriptors and edges live in one allocation. */
879 : size_t dataSize = sizeof(JITScript)
880 114059 : + (chunks.length() * sizeof(ChunkDescriptor))
881 114059 : + (edges.length() * sizeof(CrossChunkEdge));
882 114059 : uint8_t *cursor = (uint8_t *) OffTheBooks::calloc_(dataSize);
883 114059 : if (!cursor)
884 0 : return NULL;
885 :
886 114059 : JITScript *jit = (JITScript *) cursor;
887 114059 : cursor += sizeof(JITScript);
888 :
889 114059 : jit->script = script;
890 114059 : JS_INIT_CLIST(&jit->callers);
891 :
892 114059 : jit->nchunks = chunks.length();
893 231318 : for (unsigned i = 0; i < chunks.length(); i++) {
894 117259 : const ChunkDescriptor &a = chunks[i];
895 117259 : ChunkDescriptor &b = jit->chunkDescriptor(i);
896 117259 : b.begin = a.begin;
897 117259 : b.end = a.end;
898 :
899 117259 : if (chunks.length() == 1) {
900 : /* Seed the chunk's count so it is immediately compiled. */
901 113825 : b.counter = INFER_USES_BEFORE_COMPILE;
902 : }
903 : }
904 :
905 114059 : if (edges.empty()) {
906 113825 : location = jit;
907 113825 : return jit;
908 : }
909 :
910 234 : jit->nedges = edges.length();
911 234 : CrossChunkEdge *jitEdges = jit->edges();
912 3746 : for (unsigned i = 0; i < edges.length(); i++) {
913 3512 : const CrossChunkEdge &a = edges[i];
914 3512 : CrossChunkEdge &b = jitEdges[i];
915 3512 : b.source = a.source;
916 3512 : b.target = a.target;
917 : }
918 :
919 : /* Generate a pool with all cross chunk shims, and set shimLabel for each edge. */
920 468 : Assembler masm;
921 3746 : for (unsigned i = 0; i < jit->nedges; i++) {
922 3512 : jsbytecode *pc = script->code + jitEdges[i].target;
923 3512 : jitEdges[i].shimLabel = (void *) masm.distanceOf(masm.label());
924 3512 : masm.move(JSC::MacroAssembler::ImmPtr(&jitEdges[i]), Registers::ArgReg1);
925 : masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::CrossChunkShim),
926 3512 : pc, NULL, script->nfixed + analysis->getCode(pc).stackDepth);
927 : }
928 468 : LinkerHelper linker(masm, JSC::METHOD_CODE);
929 234 : JSC::ExecutablePool *ep = linker.init(cx);
930 234 : if (!ep)
931 0 : return NULL;
932 234 : jit->shimPool = ep;
933 :
934 234 : masm.finalize(linker);
935 234 : uint8_t *shimCode = (uint8_t *) linker.finalizeCodeAddendum().executableAddress();
936 :
937 234 : JS_ALWAYS_TRUE(linker.verifyRange(JSC::JITCode(shimCode, masm.size())));
938 :
939 : JaegerSpew(JSpew_PICs, "generated SHIM POOL stub %p (%lu bytes)\n",
940 234 : shimCode, (unsigned long)masm.size());
941 :
942 : /* Convert the recorded offsets into absolute addresses in the shim pool. */
943 3746 : for (unsigned i = 0; i < jit->nedges; i++) {
944 3512 : CrossChunkEdge &edge = jitEdges[i];
945 3512 : edge.shimLabel = shimCode + (size_t) edge.shimLabel;
946 : }
947 :
948 234 : location = jit;
949 234 : return jit;
950 : }
943 :
          : /*
          :  * Gatekeeper for method JIT execution. Decides whether the interpreter
          :  * should enter jitcode for |script| at |pc|, compiling the containing
          :  * chunk on demand. Returns:
          :  *   Compile_Abort   - method JIT disabled, or script marked unjittable.
          :  *   Compile_Skipped - use-count warm-up thresholds not yet reached.
          :  *   Compile_Okay    - jitcode for the chunk exists (or was just built).
          :  *   Compile_Error   - OOM or other compilation failure.
          :  */
 944 : CompileStatus
 945 30143241 : mjit::CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
 946 : bool construct, CompileRequest request)
 947 : {
          :     /* A successful chunk compile jumps back here to re-check state (see below). */
 948 : restart:
 949 30143241 : if (!cx->methodJitEnabled)
 950 10158064 : return Compile_Abort;
 951 :
          :     /* JS_UNJITTABLE_SCRIPT is a sentinel value meaning this script must not be jitted. */
 952 19985177 : void *addr = construct ? script->jitArityCheckCtor : script->jitArityCheckNormal;
 953 19985177 : if (addr == JS_UNJITTABLE_SCRIPT)
 954 1576872 : return Compile_Abort;
 955 :
 956 18408305 : JITScript *jit = script->getJIT(construct);
 957 :
          :     /*
          :      * Interpreter-requested compiles wait until the script is warm; the
          :      * threshold differs with type inference on/off. JSOPTION_METHODJIT_ALWAYS
          :      * bypasses the warm-up entirely.
          :      */
 958 57068042 : if (request == CompileRequest_Interpreter &&
 959 18408305 : !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
 960 10125716 : (cx->typeInferenceEnabled()
 961 4259978 : ? script->incUseCount() <= INFER_USES_BEFORE_COMPILE
 962 5865738 : : script->incUseCount() <= USES_BEFORE_COMPILE))
 963 : {
 964 2039643 : return Compile_Skipped;
 965 : }
 966 :
 967 16368662 : if (!cx->compartment->ensureJaegerCompartmentExists(cx))
 968 0 : return Compile_Error;
 969 :
 970 : // Ensure that constructors have at least one slot.
 971 16368662 : if (construct && !script->nslots)
 972 216 : script->nslots++;
 973 :
          :     /* Lazily build the JITScript (chunk descriptors + cross-chunk edges). */
 974 16368662 : if (!jit) {
 975 114059 : jit = MakeJITScript(cx, script, construct);
 976 114059 : if (!jit)
 977 0 : return Compile_Error;
 978 : }
          :     /* Compilation is per chunk: find the chunk covering |pc|. */
 979 16368662 : unsigned chunkIndex = jit->chunkIndex(pc);
 980 16368662 : ChunkDescriptor &desc = jit->chunkDescriptor(chunkIndex);
 981 :
 982 16368662 : if (desc.chunk)
 983 16190655 : return Compile_Okay;
 984 :
          :     /* Each chunk also has its own warm-up counter before it is compiled. */
 985 356014 : if (request == CompileRequest_Interpreter &&
 986 178007 : !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
 987 : ++desc.counter <= INFER_USES_BEFORE_COMPILE)
 988 : {
 989 43753 : return Compile_Skipped;
 990 : }
 991 :
 992 : CompileStatus status;
 993 : {
          :         /* Compile the chunk with type inference state entered for its duration. */
 994 268508 : types::AutoEnterTypeInference enter(cx, true);
 995 :
 996 268508 : Compiler cc(cx, script, chunkIndex, construct);
 997 134254 : status = cc.compile();
 998 : }
 999 :
1000 134254 : if (status == Compile_Okay) {
1001 : /*
1002 : * Compiling a script can occasionally trigger its own recompilation,
1003 : * so go back through the compilation logic.
1004 : */
1005 129015 : goto restart;
1006 : }
1007 :
1008 : /* Non-OOM errors should have an associated exception. */
1009 0 : JS_ASSERT_IF(status == Compile_Error,
1010 5239 : cx->isExceptionPending() || cx->runtime->hadOutOfMemory);
1011 :
1012 5239 : return status;
1013 : }
1014 :
          : /*
          :  * Emit the method prologue: the scripted/arity/args-check entry points,
          :  * the stack-limit guard, undefined-initialization of locals, the function
          :  * frame prologue (scope chain / nesting), and the debug-mode or probe
          :  * prologue stubs. Returns Compile_Error only if constructThis() fails.
          :  */
1015 : CompileStatus
1016 127892 : mjit::Compiler::generatePrologue()
1017 : {
          :     /* Entry point #1: invoked with a fully initialized frame (e.g. JaegerShot). */
1018 127892 : invokeLabel = masm.label();
1019 :
1020 : /*
1021 : * If there is no function, then this can only be called via JaegerShot(),
1022 : * which expects an existing frame to be initialized like the interpreter.
1023 : */
1024 127892 : if (script->function()) {
          :         /* Jump over the function-entry setup; linked to the main body below. */
1025 81763 : Jump j = masm.jump();
1026 :
1027 : /*
1028 : * Entry point #2: The caller has partially constructed a frame, and
1029 : * either argc >= nargs or the arity check has corrected the frame.
1030 : */
1031 81763 : invokeLabel = masm.label();
1032 :
1033 81763 : Label fastPath = masm.label();
1034 :
1035 : /* Store this early on so slow paths can access it. */
1036 81763 : masm.storePtr(ImmPtr(script->function()),
1037 163526 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1038 :
1039 : {
1040 : /*
1041 : * Entry point #3: The caller has partially constructed a frame,
1042 : * but argc might be != nargs, so an arity check might be called.
1043 : *
1044 : * This loops back to entry point #2.
1045 : */
1046 81763 : arityLabel = stubcc.masm.label();
1047 :
1048 : Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
1049 81763 : Imm32(script->function()->nargs));
1050 :
1051 : if (JSParamReg_Argc != Registers::ArgReg1)
1052 81763 : stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
1053 :
1054 : /* Slow path - call the arity check function. Returns new fp. */
1055 81763 : stubcc.masm.storePtr(ImmPtr(script->function()),
1056 163526 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1057 81763 : OOL_STUBCALL(stubs::FixupArity, REJOIN_NONE);
1058 81763 : stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
1059 81763 : argMatch.linkTo(stubcc.masm.label(), &stubcc.masm);
1060 :
1061 81763 : argsCheckLabel = stubcc.masm.label();
1062 :
1063 : /* Type check the arguments as well. */
1064 81763 : if (cx->typeInferenceEnabled()) {
1065 : #ifdef JS_MONOIC
          :                 /* Record labels so the args-check IC can be patched later. */
1066 31155 : this->argsCheckJump = stubcc.masm.jump();
1067 31155 : this->argsCheckStub = stubcc.masm.label();
1068 31155 : this->argsCheckJump.linkTo(this->argsCheckStub, &stubcc.masm);
1069 : #endif
1070 31155 : stubcc.masm.storePtr(ImmPtr(script->function()),
1071 62310 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1072 31155 : OOL_STUBCALL(stubs::CheckArgumentTypes, REJOIN_CHECK_ARGUMENTS);
1073 : #ifdef JS_MONOIC
1074 31155 : this->argsCheckFallthrough = stubcc.masm.label();
1075 : #endif
1076 : }
1077 :
1078 81763 : stubcc.crossJump(stubcc.masm.jump(), fastPath);
1079 : }
1080 :
1081 : /*
1082 : * Guard that there is enough stack space. Note we reserve space for
1083 : * any inline frames we end up generating, or a callee's stack frame
1084 : * we write to before the callee checks the stack.
1085 : */
1086 81763 : uint32_t nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
1087 81763 : masm.addPtr(Imm32(nvals * sizeof(Value)), JSFrameReg, Registers::ReturnReg);
1088 : Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
1089 81763 : FrameAddress(offsetof(VMFrame, stackLimit)));
1090 :
1091 : /*
1092 : * If the stack check fails then we need to either commit more of the
1093 : * reserved stack space or throw an error. Specify that the number of
1094 : * local slots is 0 (instead of the default script->nfixed) since the
1095 : * range [fp->slots(), fp->base()) may not be commited. (The calling
1096 : * contract requires only that the caller has reserved space for fp.)
1097 : */
1098 : {
1099 81763 : stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
1100 81763 : OOL_STUBCALL(stubs::HitStackQuota, REJOIN_NONE);
1101 81763 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
1102 : }
1103 :
          :         /* Initialize fixed locals to undefined (see markUndefinedLocals). */
1104 81763 : markUndefinedLocals();
1105 :
1106 81763 : types::TypeScriptNesting *nesting = script->nesting();
1107 :
1108 : /*
1109 : * Run the function prologue if necessary. This is always done in a
1110 : * stub for heavyweight functions (including nesting outer functions).
1111 : */
1112 81763 : JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
1113 81763 : if (script->function()->isHeavyweight()) {
1114 5431 : prepareStubCall(Uses(0));
1115 5431 : INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
1116 : } else {
1117 : /*
1118 : * Load the scope chain into the frame if it will be needed by NAME
1119 : * opcodes or by the nesting prologue below. The scope chain is
1120 : * always set for global and eval frames, and will have been set by
1121 : * CreateFunCallObject for heavyweight function frames.
1122 : */
1123 76332 : if (analysis->usesScopeChain() || nesting) {
1124 47701 : RegisterID t0 = Registers::ReturnReg;
1125 : Jump hasScope = masm.branchTest32(Assembler::NonZero,
1126 47701 : FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
1127 47701 : masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
1128 47701 : masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
1129 47701 : masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
1130 47701 : hasScope.linkTo(masm.label(), &masm);
1131 : }
1132 :
1133 76332 : if (nesting) {
1134 : /*
1135 : * Inline the common case for the nesting prologue: the
1136 : * function is a non-heavyweight inner function with no
1137 : * children of its own. We ensure during inference that the
1138 : * outer function does not add scope objects for 'let' or
1139 : * 'with', so that the frame's scope chain will be
1140 : * the parent's call object, and if it differs from the
1141 : * parent's current activation then the parent is reentrant.
1142 : */
1143 2460 : JSScript *parent = nesting->parent;
1144 2460 : JS_ASSERT(parent);
1145 7380 : JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
1146 7380 : !parent->analysis()->addsScopeObjects());
1147 :
1148 2460 : RegisterID t0 = Registers::ReturnReg;
1149 2460 : masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
1150 2460 : masm.loadPtr(Address(t0), t0);
1151 :
1152 2460 : Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
1153 2460 : Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
1154 2460 : masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
1155 :
          :             /* Mismatch (reentrant parent): fall back to the full prologue stub. */
1156 2460 : stubcc.linkExitDirect(mismatch, stubcc.masm.label());
1157 2460 : OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
1158 2460 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
1159 : }
1160 : }
1161 :
1162 81763 : if (outerScript->usesArguments && !script->function()->isHeavyweight()) {
1163 : /*
1164 : * Make sure that fp->u.nactual is always coherent. This may be
1165 : * inspected directly by JIT code, and is not guaranteed to be
1166 : * correct if the UNDERFLOW and OVERFLOW flags are not set.
1167 : */
1168 : Jump hasArgs = masm.branchTest32(Assembler::NonZero, FrameFlagsAddress(),
1169 : Imm32(StackFrame::UNDERFLOW_ARGS |
1170 : StackFrame::OVERFLOW_ARGS |
1171 1780 : StackFrame::HAS_ARGS_OBJ));
1172 1780 : masm.storePtr(ImmPtr((void *)(size_t) script->function()->nargs),
1173 3560 : Address(JSFrameReg, StackFrame::offsetOfNumActual()));
1174 1780 : hasArgs.linkTo(masm.label(), &masm);
1175 : }
1176 :
          :         /* Link entry point #1's jump to the start of the main body. */
1177 81763 : j.linkTo(masm.label(), &masm);
1178 : }
1179 :
1180 127892 : if (cx->typeInferenceEnabled()) {
1181 : #ifdef DEBUG
          :         /* Debug builds double-check inferred argument types at entry. */
1182 55704 : if (script->function()) {
1183 31155 : prepareStubCall(Uses(0));
1184 31155 : INLINE_STUBCALL(stubs::AssertArgumentTypes, REJOIN_NONE);
1185 : }
1186 : #endif
1187 55704 : ensureDoubleArguments();
1188 : }
1189 :
          :     /* Constructors must materialize |this| before the body runs. */
1190 127892 : if (isConstructing) {
1191 1774 : if (!constructThis())
1192 0 : return Compile_Error;
1193 : }
1194 :
          :     /* Debugger hooks take precedence over probe-only tracking. */
1195 127892 : if (debugMode()) {
1196 69740 : prepareStubCall(Uses(0));
1197 69740 : INLINE_STUBCALL(stubs::ScriptDebugPrologue, REJOIN_RESUME);
1198 58152 : } else if (Probes::callTrackingActive(cx)) {
1199 0 : prepareStubCall(Uses(0));
1200 0 : INLINE_STUBCALL(stubs::ScriptProbeOnlyPrologue, REJOIN_RESUME);
1201 : }
1202 :
1203 127892 : recompileCheckHelper();
1204 :
1205 127892 : return Compile_Okay;
1206 : }
1207 :
          : /*
          :  * For each tracked argument slot whose inferred var type is double,
          :  * emit code converting the (possibly integer) incoming value to a double
          :  * representation in the frame. No-op for non-function scripts.
          :  *
          :  * NOTE(review): script->function() is re-evaluated in the loop condition
          :  * on every iteration; hoisting it into a guard before the loop looks
          :  * equivalent — confirm nothing relies on the recheck.
          :  */
1208 : void
1209 58588 : mjit::Compiler::ensureDoubleArguments()
1210 : {
1211 : /* Convert integer arguments which were inferred as (int|double) to doubles. */
1212 126786 : for (uint32_t i = 0; script->function() && i < script->function()->nargs; i++) {
1213 68198 : uint32_t slot = ArgSlot(i);
1214 68198 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE && analysis->trackSlot(slot))
1215 602 : frame.ensureDouble(frame.getArg(i));
1216 : }
1217 58588 : }
1218 :
          : /*
          :  * Emit a store of |undefined| into local slot |i| of the frame at the
          :  * current inline depth. When type inference is enabled and the slot is
          :  * tracked by liveness analysis, the store is emitted only if the local
          :  * is live at bytecode |offset| — dead locals need no initialization.
          :  */
1219 : void
1220 151585 : mjit::Compiler::markUndefinedLocal(uint32_t offset, uint32_t i)
1221 : {
1222 151585 : uint32_t depth = ssa.getFrame(a->inlineIndex).depth;
1223 151585 : uint32_t slot = LocalSlot(script, i);
          :     /* Address of the local: past the frame header, at this frame's depth. */
1224 151585 : Address local(JSFrameReg, sizeof(StackFrame) + (depth + i) * sizeof(Value));
1225 151585 : if (!cx->typeInferenceEnabled() || !analysis->trackSlot(slot)) {
1226 130910 : masm.storeValue(UndefinedValue(), local);
1227 : } else {
1228 20675 : Lifetime *lifetime = analysis->liveness(slot).live(offset);
1229 20675 : if (lifetime)
1230 4278 : masm.storeValue(UndefinedValue(), local);
1231 : }
1232 151585 : }
1233 :
          : /*
          :  * Initialize every fixed local of the script to undefined, using offset 0
          :  * (function entry) for the per-slot liveness query in markUndefinedLocal.
          :  */
1234 : void
1235 84647 : mjit::Compiler::markUndefinedLocals()
1236 : {
1237 : /*
1238 : * Set locals to undefined, as in initCallFrameLatePrologue.
1239 : * Skip locals which aren't closed and are known to be defined before used,
1240 : */
1241 233618 : for (uint32_t i = 0; i < script->nfixed; i++)
1242 148971 : markUndefinedLocal(0, i);
1243 84647 : }
1244 :
          : /* Emit the method epilogue. Currently emits no code; always succeeds. */
1245 : CompileStatus
1246 127765 : mjit::Compiler::generateEpilogue()
1247 : {
1248 127765 : return Compile_Okay;
1249 : }
1250 :
1251 : CompileStatus
1252 129015 : mjit::Compiler::finishThisUp()
1253 : {
1254 : #ifdef JS_CPU_X64
1255 : /* Generate trampolines to ensure that cross chunk edges are patchable. */
1256 : for (unsigned i = 0; i < chunkEdges.length(); i++) {
1257 : chunkEdges[i].sourceTrampoline = stubcc.masm.label();
1258 : stubcc.masm.move(ImmPtr(NULL), Registers::ScratchReg);
1259 : stubcc.masm.jump(Registers::ScratchReg);
1260 : }
1261 : #endif
1262 :
1263 129015 : RETURN_IF_OOM(Compile_Error);
1264 :
1265 : /*
1266 : * Watch for reallocation of the global slots while we were in the middle
1267 : * of compiling due to, e.g. standard class initialization.
1268 : */
1269 129015 : if (globalSlots && globalObj->getRawSlots() != globalSlots)
1270 0 : return Compile_Retry;
1271 :
1272 : /*
1273 : * Watch for GCs which occurred during compilation. These may have
1274 : * renumbered shapes baked into the jitcode.
1275 : */
1276 129015 : if (cx->runtime->gcNumber != gcNumber)
1277 0 : return Compile_Retry;
1278 :
1279 : /* The JIT will not have been cleared if no GC has occurred. */
1280 129015 : JITScript *jit = outerJIT();
1281 129015 : JS_ASSERT(jit != NULL);
1282 :
1283 129015 : if (overflowICSpace) {
1284 0 : JaegerSpew(JSpew_Scripts, "dumped a constant pool while generating an IC\n");
1285 0 : return Compile_Abort;
1286 : }
1287 :
1288 129015 : a->mainCodeEnd = masm.size();
1289 129015 : a->stubCodeEnd = stubcc.size();
1290 :
1291 327909 : for (size_t i = 0; i < branchPatches.length(); i++) {
1292 198894 : Label label = labelOf(branchPatches[i].pc, branchPatches[i].inlineIndex);
1293 198894 : branchPatches[i].jump.linkTo(label, &masm);
1294 : }
1295 :
1296 : #ifdef JS_CPU_ARM
1297 : masm.forceFlushConstantPool();
1298 : stubcc.masm.forceFlushConstantPool();
1299 : #endif
1300 : JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %lu, Slow code (stubcc) size = %lu.\n",
1301 129015 : (unsigned long) masm.size(), (unsigned long) stubcc.size());
1302 :
1303 : /* To make inlineDoubles and oolDoubles aligned to sizeof(double) bytes,
1304 : MIPS adds extra sizeof(double) bytes to codeSize. */
1305 129015 : size_t codeSize = masm.size() +
1306 : #if defined(JS_CPU_MIPS)
1307 : stubcc.size() + sizeof(double) +
1308 : #else
1309 129015 : stubcc.size() +
1310 : #endif
1311 129015 : (masm.numDoubles() * sizeof(double)) +
1312 129015 : (stubcc.masm.numDoubles() * sizeof(double)) +
1313 258030 : jumpTableEdges.length() * sizeof(void *);
1314 :
1315 258030 : Vector<ChunkJumpTableEdge> chunkJumps(cx);
1316 129015 : if (!chunkJumps.reserve(jumpTableEdges.length()))
1317 0 : return Compile_Error;
1318 :
1319 : JSC::ExecutablePool *execPool;
1320 : uint8_t *result = (uint8_t *)script->compartment()->jaegerCompartment()->execAlloc()->
1321 129015 : alloc(codeSize, &execPool, JSC::METHOD_CODE);
1322 129015 : if (!result) {
1323 0 : js_ReportOutOfMemory(cx);
1324 0 : return Compile_Error;
1325 : }
1326 129015 : JS_ASSERT(execPool);
1327 129015 : JSC::ExecutableAllocator::makeWritable(result, codeSize);
1328 129015 : masm.executableCopy(result);
1329 129015 : stubcc.masm.executableCopy(result + masm.size());
1330 :
1331 258030 : JSC::LinkBuffer fullCode(result, codeSize, JSC::METHOD_CODE);
1332 258030 : JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::METHOD_CODE);
1333 :
1334 129015 : JS_ASSERT(!loop);
1335 :
1336 129015 : size_t nNmapLive = loopEntries.length();
1337 22143897 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1338 22014882 : Bytecode *opinfo = analysis->maybeCode(i);
1339 22014882 : if (opinfo && opinfo->safePoint)
1340 287964 : nNmapLive++;
1341 : }
1342 :
1343 : /* Please keep in sync with JITChunk::sizeOfIncludingThis! */
1344 : size_t dataSize = sizeof(JITChunk) +
1345 : sizeof(NativeMapEntry) * nNmapLive +
1346 129015 : sizeof(InlineFrame) * inlineFrames.length() +
1347 129015 : sizeof(CallSite) * callSites.length() +
1348 : #if defined JS_MONOIC
1349 129015 : sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
1350 129015 : sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
1351 129015 : sizeof(ic::CallICInfo) * callICs.length() +
1352 129015 : sizeof(ic::EqualityICInfo) * equalityICs.length() +
1353 : #endif
1354 : #if defined JS_POLYIC
1355 129015 : sizeof(ic::PICInfo) * pics.length() +
1356 129015 : sizeof(ic::GetElementIC) * getElemICs.length() +
1357 129015 : sizeof(ic::SetElementIC) * setElemICs.length() +
1358 : #endif
1359 1161135 : 0;
1360 :
1361 129015 : uint8_t *cursor = (uint8_t *)OffTheBooks::calloc_(dataSize);
1362 129015 : if (!cursor) {
1363 0 : execPool->release();
1364 0 : js_ReportOutOfMemory(cx);
1365 0 : return Compile_Error;
1366 : }
1367 :
1368 129015 : JITChunk *chunk = new(cursor) JITChunk;
1369 129015 : cursor += sizeof(JITChunk);
1370 :
1371 129015 : JS_ASSERT(outerScript == script);
1372 :
1373 129015 : chunk->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
1374 129015 : chunk->pcLengths = pcLengths;
1375 :
1376 129015 : if (chunkIndex == 0) {
1377 127858 : jit->invokeEntry = result;
1378 127858 : if (script->function()) {
1379 81739 : jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
1380 81739 : jit->argsCheckEntry = stubCode.locationOf(argsCheckLabel).executableAddress();
1381 81739 : jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
1382 81739 : void *&addr = isConstructing ? script->jitArityCheckCtor : script->jitArityCheckNormal;
1383 81739 : addr = jit->arityCheckEntry;
1384 : }
1385 : }
1386 :
1387 : /*
1388 : * WARNING: mics(), callICs() et al depend on the ordering of these
1389 : * variable-length sections. See JITChunk's declaration for details.
1390 : */
1391 :
1392 : /* ICs can only refer to bytecodes in the outermost script, not inlined calls. */
1393 129015 : Label *jumpMap = a->jumpMap;
1394 :
1395 : /* Build the pc -> ncode mapping. */
1396 129015 : NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
1397 129015 : chunk->nNmapPairs = nNmapLive;
1398 129015 : cursor += sizeof(NativeMapEntry) * chunk->nNmapPairs;
1399 129015 : size_t ix = 0;
1400 129015 : if (chunk->nNmapPairs > 0) {
1401 19168973 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1402 19102486 : Bytecode *opinfo = analysis->maybeCode(i);
1403 19102486 : if (opinfo && opinfo->safePoint) {
1404 287964 : Label L = jumpMap[i];
1405 287964 : JS_ASSERT(L.isSet());
1406 287964 : jitNmap[ix].bcOff = i;
1407 287964 : jitNmap[ix].ncode = (uint8_t *)(result + masm.distanceOf(L));
1408 287964 : ix++;
1409 : }
1410 : }
1411 100093 : for (size_t i = 0; i < loopEntries.length(); i++) {
1412 : /* Insert the entry at the right position. */
1413 33606 : const LoopEntry &entry = loopEntries[i];
1414 : size_t j;
1415 83665 : for (j = 0; j < ix; j++) {
1416 54175 : if (jitNmap[j].bcOff > entry.pcOffset) {
1417 4116 : memmove(jitNmap + j + 1, jitNmap + j, (ix - j) * sizeof(NativeMapEntry));
1418 4116 : break;
1419 : }
1420 : }
1421 33606 : jitNmap[j].bcOff = entry.pcOffset;
1422 33606 : jitNmap[j].ncode = (uint8_t *) stubCode.locationOf(entry.label).executableAddress();
1423 33606 : ix++;
1424 : }
1425 : }
1426 129015 : JS_ASSERT(ix == chunk->nNmapPairs);
1427 :
1428 : /* Build the table of inlined frames. */
1429 129015 : InlineFrame *jitInlineFrames = (InlineFrame *)cursor;
1430 129015 : chunk->nInlineFrames = inlineFrames.length();
1431 129015 : cursor += sizeof(InlineFrame) * chunk->nInlineFrames;
1432 131899 : for (size_t i = 0; i < chunk->nInlineFrames; i++) {
1433 2884 : InlineFrame &to = jitInlineFrames[i];
1434 2884 : ActiveFrame *from = inlineFrames[i];
1435 2884 : if (from->parent != outer)
1436 1198 : to.parent = &jitInlineFrames[from->parent->inlineIndex];
1437 : else
1438 1686 : to.parent = NULL;
1439 2884 : to.parentpc = from->parentPC;
1440 2884 : to.fun = from->script->function();
1441 2884 : to.depth = ssa.getFrame(from->inlineIndex).depth;
1442 : }
1443 :
1444 : /* Build the table of call sites. */
1445 129015 : CallSite *jitCallSites = (CallSite *)cursor;
1446 129015 : chunk->nCallSites = callSites.length();
1447 129015 : cursor += sizeof(CallSite) * chunk->nCallSites;
1448 5197109 : for (size_t i = 0; i < chunk->nCallSites; i++) {
1449 5068094 : CallSite &to = jitCallSites[i];
1450 5068094 : InternalCallSite &from = callSites[i];
1451 :
1452 : /* Patch stores of f.regs.inlined for stubs called from within inline frames. */
1453 5068094 : if (cx->typeInferenceEnabled() &&
1454 : from.rejoin != REJOIN_TRAP &&
1455 : from.rejoin != REJOIN_SCRIPTED &&
1456 : from.inlineIndex != UINT32_MAX) {
1457 24154 : if (from.ool)
1458 23664 : stubCode.patch(from.inlinePatch, &to);
1459 : else
1460 490 : fullCode.patch(from.inlinePatch, &to);
1461 : }
1462 :
1463 : JSScript *script =
1464 5068094 : (from.inlineIndex == UINT32_MAX) ? outerScript : inlineFrames[from.inlineIndex]->script;
1465 : uint32_t codeOffset = from.ool
1466 3673923 : ? masm.size() + from.returnOffset
1467 8742017 : : from.returnOffset;
1468 5068094 : to.initialize(codeOffset, from.inlineIndex, from.inlinepc - script->code, from.rejoin);
1469 :
1470 : /*
1471 : * Patch stores of the base call's return address for InvariantFailure
1472 : * calls. InvariantFailure will patch its own return address to this
1473 : * pointer before triggering recompilation.
1474 : */
1475 5068094 : if (from.loopPatch.hasPatch)
1476 8244 : stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
1477 : }
1478 :
1479 : #if defined JS_MONOIC
1480 129015 : if (chunkIndex == 0 && script->function()) {
1481 81739 : JS_ASSERT(jit->argsCheckPool == NULL);
1482 81739 : if (cx->typeInferenceEnabled()) {
1483 31131 : jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
1484 31131 : jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
1485 31131 : jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
1486 : }
1487 : }
1488 :
1489 129015 : ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
1490 129015 : chunk->nGetGlobalNames = getGlobalNames.length();
1491 129015 : cursor += sizeof(ic::GetGlobalNameIC) * chunk->nGetGlobalNames;
1492 507161 : for (size_t i = 0; i < chunk->nGetGlobalNames; i++) {
1493 378146 : ic::GetGlobalNameIC &to = getGlobalNames_[i];
1494 378146 : GetGlobalNameICInfo &from = getGlobalNames[i];
1495 378146 : from.copyTo(to, fullCode, stubCode);
1496 :
1497 378146 : int offset = fullCode.locationOf(from.load) - to.fastPathStart;
1498 378146 : to.loadStoreOffset = offset;
1499 378146 : JS_ASSERT(to.loadStoreOffset == offset);
1500 :
1501 378146 : stubCode.patch(from.addrLabel, &to);
1502 : }
1503 :
1504 129015 : ic::SetGlobalNameIC *setGlobalNames_ = (ic::SetGlobalNameIC *)cursor;
1505 129015 : chunk->nSetGlobalNames = setGlobalNames.length();
1506 129015 : cursor += sizeof(ic::SetGlobalNameIC) * chunk->nSetGlobalNames;
1507 159774 : for (size_t i = 0; i < chunk->nSetGlobalNames; i++) {
1508 30759 : ic::SetGlobalNameIC &to = setGlobalNames_[i];
1509 30759 : SetGlobalNameICInfo &from = setGlobalNames[i];
1510 30759 : from.copyTo(to, fullCode, stubCode);
1511 30759 : to.slowPathStart = stubCode.locationOf(from.slowPathStart);
1512 :
1513 30759 : int offset = fullCode.locationOf(from.store).labelAtOffset(0) -
1514 61518 : to.fastPathStart;
1515 30759 : to.loadStoreOffset = offset;
1516 30759 : JS_ASSERT(to.loadStoreOffset == offset);
1517 :
1518 30759 : to.hasExtraStub = 0;
1519 30759 : to.objConst = from.objConst;
1520 30759 : to.shapeReg = from.shapeReg;
1521 30759 : to.objReg = from.objReg;
1522 30759 : to.vr = from.vr;
1523 :
1524 : offset = fullCode.locationOf(from.shapeGuardJump) -
1525 30759 : to.fastPathStart;
1526 30759 : to.inlineShapeJump = offset;
1527 30759 : JS_ASSERT(to.inlineShapeJump == offset);
1528 :
1529 : offset = fullCode.locationOf(from.fastPathRejoin) -
1530 30759 : to.fastPathStart;
1531 30759 : to.fastRejoinOffset = offset;
1532 30759 : JS_ASSERT(to.fastRejoinOffset == offset);
1533 :
1534 30759 : stubCode.patch(from.addrLabel, &to);
1535 : }
1536 :
1537 129015 : ic::CallICInfo *jitCallICs = (ic::CallICInfo *)cursor;
1538 129015 : chunk->nCallICs = callICs.length();
1539 129015 : cursor += sizeof(ic::CallICInfo) * chunk->nCallICs;
1540 276721 : for (size_t i = 0; i < chunk->nCallICs; i++) {
1541 147706 : jitCallICs[i].reset();
1542 147706 : jitCallICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
1543 147706 : jitCallICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
1544 147706 : jitCallICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
1545 147706 : jitCallICs[i].typeMonitored = callICs[i].typeMonitored;
1546 :
1547 : /* Compute the hot call offset. */
1548 147706 : uint32_t offset = fullCode.locationOf(callICs[i].hotJump) -
1549 295412 : fullCode.locationOf(callICs[i].funGuard);
1550 147706 : jitCallICs[i].hotJumpOffset = offset;
1551 147706 : JS_ASSERT(jitCallICs[i].hotJumpOffset == offset);
1552 :
1553 : /* Compute the join point offset. */
1554 147706 : offset = fullCode.locationOf(callICs[i].joinPoint) -
1555 295412 : fullCode.locationOf(callICs[i].funGuard);
1556 147706 : jitCallICs[i].joinPointOffset = offset;
1557 147706 : JS_ASSERT(jitCallICs[i].joinPointOffset == offset);
1558 :
1559 : /* Compute the OOL call offset. */
1560 147706 : offset = stubCode.locationOf(callICs[i].oolCall) -
1561 295412 : stubCode.locationOf(callICs[i].slowPathStart);
1562 147706 : jitCallICs[i].oolCallOffset = offset;
1563 147706 : JS_ASSERT(jitCallICs[i].oolCallOffset == offset);
1564 :
1565 : /* Compute the OOL jump offset. */
1566 147706 : offset = stubCode.locationOf(callICs[i].oolJump) -
1567 295412 : stubCode.locationOf(callICs[i].slowPathStart);
1568 147706 : jitCallICs[i].oolJumpOffset = offset;
1569 147706 : JS_ASSERT(jitCallICs[i].oolJumpOffset == offset);
1570 :
1571 : /* Compute the start of the OOL IC call. */
1572 147706 : offset = stubCode.locationOf(callICs[i].icCall) -
1573 295412 : stubCode.locationOf(callICs[i].slowPathStart);
1574 147706 : jitCallICs[i].icCallOffset = offset;
1575 147706 : JS_ASSERT(jitCallICs[i].icCallOffset == offset);
1576 :
1577 : /* Compute the slow join point offset. */
1578 147706 : offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
1579 295412 : stubCode.locationOf(callICs[i].slowPathStart);
1580 147706 : jitCallICs[i].slowJoinOffset = offset;
1581 147706 : JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);
1582 :
1583 : /* Compute the join point offset for continuing on the hot path. */
1584 147706 : offset = stubCode.locationOf(callICs[i].hotPathLabel) -
1585 295412 : stubCode.locationOf(callICs[i].funGuard);
1586 147706 : jitCallICs[i].hotPathOffset = offset;
1587 147706 : JS_ASSERT(jitCallICs[i].hotPathOffset == offset);
1588 :
1589 147706 : jitCallICs[i].call = &jitCallSites[callICs[i].callIndex];
1590 147706 : jitCallICs[i].frameSize = callICs[i].frameSize;
1591 147706 : jitCallICs[i].funObjReg = callICs[i].funObjReg;
1592 147706 : stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
1593 147706 : stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
1594 : }
1595 :
1596 129015 : ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
1597 129015 : chunk->nEqualityICs = equalityICs.length();
1598 129015 : cursor += sizeof(ic::EqualityICInfo) * chunk->nEqualityICs;
1599 141935 : for (size_t i = 0; i < chunk->nEqualityICs; i++) {
1600 12920 : if (equalityICs[i].trampoline) {
1601 310 : jitEqualityICs[i].target = stubCode.locationOf(equalityICs[i].trampolineStart);
1602 : } else {
1603 12610 : uint32_t offs = uint32_t(equalityICs[i].jumpTarget - script->code);
1604 12610 : JS_ASSERT(jumpMap[offs].isSet());
1605 12610 : jitEqualityICs[i].target = fullCode.locationOf(jumpMap[offs]);
1606 : }
1607 12920 : jitEqualityICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
1608 12920 : jitEqualityICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
1609 12920 : jitEqualityICs[i].stub = equalityICs[i].stub;
1610 12920 : jitEqualityICs[i].lvr = equalityICs[i].lvr;
1611 12920 : jitEqualityICs[i].rvr = equalityICs[i].rvr;
1612 12920 : jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
1613 12920 : jitEqualityICs[i].cond = equalityICs[i].cond;
1614 12920 : if (equalityICs[i].jumpToStub.isSet())
1615 11175 : jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
1616 12920 : jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
1617 :
1618 12920 : stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
1619 : }
1620 : #endif /* JS_MONOIC */
1621 :
1622 424032 : for (size_t i = 0; i < callPatches.length(); i++) {
1623 295017 : CallPatchInfo &patch = callPatches[i];
1624 :
1625 : CodeLocationLabel joinPoint = patch.joinSlow
1626 : ? stubCode.locationOf(patch.joinPoint)
1627 295017 : : fullCode.locationOf(patch.joinPoint);
1628 :
1629 295017 : if (patch.hasFastNcode)
1630 291915 : fullCode.patch(patch.fastNcodePatch, joinPoint);
1631 295017 : if (patch.hasSlowNcode)
1632 147706 : stubCode.patch(patch.slowNcodePatch, joinPoint);
1633 : }
1634 :
1635 : #ifdef JS_POLYIC
1636 129015 : ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
1637 129015 : chunk->nGetElems = getElemICs.length();
1638 129015 : cursor += sizeof(ic::GetElementIC) * chunk->nGetElems;
1639 150755 : for (size_t i = 0; i < chunk->nGetElems; i++) {
1640 21740 : ic::GetElementIC &to = jitGetElems[i];
1641 21740 : GetElementICInfo &from = getElemICs[i];
1642 :
1643 21740 : new (&to) ic::GetElementIC();
1644 21740 : from.copyTo(to, fullCode, stubCode);
1645 :
1646 21740 : to.typeReg = from.typeReg;
1647 21740 : to.objReg = from.objReg;
1648 21740 : to.idRemat = from.id;
1649 :
1650 21740 : if (from.typeGuard.isSet()) {
1651 8262 : int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
1652 16524 : fullCode.locationOf(from.fastPathStart);
1653 8262 : to.inlineTypeGuard = inlineTypeGuard;
1654 8262 : JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
1655 : }
1656 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1657 21740 : fullCode.locationOf(from.fastPathStart);
1658 21740 : to.inlineShapeGuard = inlineShapeGuard;
1659 21740 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1660 :
1661 21740 : stubCode.patch(from.paramAddr, &to);
1662 : }
1663 :
1664 129015 : ic::SetElementIC *jitSetElems = (ic::SetElementIC *)cursor;
1665 129015 : chunk->nSetElems = setElemICs.length();
1666 129015 : cursor += sizeof(ic::SetElementIC) * chunk->nSetElems;
1667 134722 : for (size_t i = 0; i < chunk->nSetElems; i++) {
1668 5707 : ic::SetElementIC &to = jitSetElems[i];
1669 5707 : SetElementICInfo &from = setElemICs[i];
1670 :
1671 5707 : new (&to) ic::SetElementIC();
1672 5707 : from.copyTo(to, fullCode, stubCode);
1673 :
1674 5707 : to.strictMode = script->strictModeCode;
1675 5707 : to.vr = from.vr;
1676 5707 : to.objReg = from.objReg;
1677 5707 : to.objRemat = from.objRemat.toInt32();
1678 5707 : JS_ASSERT(to.objRemat == from.objRemat.toInt32());
1679 :
1680 5707 : to.hasConstantKey = from.key.isConstant();
1681 5707 : if (from.key.isConstant())
1682 1777 : to.keyValue = from.key.index();
1683 : else
1684 3930 : to.keyReg = from.key.reg();
1685 :
1686 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1687 5707 : fullCode.locationOf(from.fastPathStart);
1688 5707 : to.inlineShapeGuard = inlineShapeGuard;
1689 5707 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1690 :
1691 : int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
1692 5707 : fullCode.locationOf(from.fastPathStart);
1693 5707 : to.inlineHoleGuard = inlineHoleGuard;
1694 5707 : JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);
1695 :
1696 5707 : CheckIsStubCall(to.slowPathCall.labelAtOffset(0));
1697 :
1698 5707 : to.volatileMask = from.volatileMask;
1699 5707 : JS_ASSERT(to.volatileMask == from.volatileMask);
1700 :
1701 5707 : stubCode.patch(from.paramAddr, &to);
1702 : }
1703 :
1704 129015 : ic::PICInfo *jitPics = (ic::PICInfo *)cursor;
1705 129015 : chunk->nPICs = pics.length();
1706 129015 : cursor += sizeof(ic::PICInfo) * chunk->nPICs;
1707 956201 : for (size_t i = 0; i < chunk->nPICs; i++) {
1708 827186 : new (&jitPics[i]) ic::PICInfo();
1709 827186 : pics[i].copyTo(jitPics[i], fullCode, stubCode);
1710 827186 : pics[i].copySimpleMembersTo(jitPics[i]);
1711 :
1712 827186 : jitPics[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
1713 827186 : masm.distanceOf(pics[i].fastPathStart);
1714 1654372 : JS_ASSERT(jitPics[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
1715 1654372 : masm.distanceOf(pics[i].fastPathStart));
1716 827186 : jitPics[i].shapeRegHasBaseShape = true;
1717 827186 : jitPics[i].pc = pics[i].pc;
1718 :
1719 1623724 : if (pics[i].kind == ic::PICInfo::SET ||
1720 796538 : pics[i].kind == ic::PICInfo::SETMETHOD) {
1721 33215 : jitPics[i].u.vr = pics[i].vr;
1722 793971 : } else if (pics[i].kind != ic::PICInfo::NAME) {
1723 453428 : if (pics[i].hasTypeCheck) {
1724 392148 : int32_t distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
1725 392148 : stubcc.masm.distanceOf(pics[i].slowPathStart);
1726 392148 : JS_ASSERT(distance <= 0);
1727 392148 : jitPics[i].u.get.typeCheckOffset = distance;
1728 : }
1729 : }
1730 827186 : stubCode.patch(pics[i].paramAddr, &jitPics[i]);
1731 : }
1732 : #endif
1733 :
1734 129015 : JS_ASSERT(size_t(cursor - (uint8_t*)chunk) == dataSize);
1735 : /* Use the computed size here -- we don't want slop bytes to be counted. */
1736 129015 : JS_ASSERT(chunk->computedSizeOfIncludingThis() == dataSize);
1737 :
1738 : /* Link fast and slow paths together. */
1739 129015 : stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
1740 :
1741 : #if defined(JS_CPU_MIPS)
1742 : /* Make sure doubleOffset is aligned to sizeof(double) bytes. */
1743 : size_t doubleOffset = (((size_t)result + masm.size() + stubcc.size() +
1744 : sizeof(double) - 1) & (~(sizeof(double) - 1))) -
1745 : (size_t)result;
1746 : JS_ASSERT((((size_t)result + doubleOffset) & 7) == 0);
1747 : #else
1748 129015 : size_t doubleOffset = masm.size() + stubcc.size();
1749 : #endif
1750 :
1751 129015 : double *inlineDoubles = (double *) (result + doubleOffset);
1752 : double *oolDoubles = (double*) (result + doubleOffset +
1753 129015 : masm.numDoubles() * sizeof(double));
1754 :
1755 : /* Generate jump tables. */
1756 129015 : void **jumpVec = (void **)(oolDoubles + stubcc.masm.numDoubles());
1757 :
1758 130414 : for (size_t i = 0; i < jumpTableEdges.length(); i++) {
1759 1399 : JumpTableEdge edge = jumpTableEdges[i];
1760 1399 : if (bytecodeInChunk(script->code + edge.target)) {
1761 1389 : JS_ASSERT(jumpMap[edge.target].isSet());
1762 1389 : jumpVec[i] = (void *)(result + masm.distanceOf(jumpMap[edge.target]));
1763 : } else {
1764 : ChunkJumpTableEdge nedge;
1765 10 : nedge.edge = edge;
1766 10 : nedge.jumpTableEntry = &jumpVec[i];
1767 10 : chunkJumps.infallibleAppend(nedge);
1768 10 : jumpVec[i] = NULL;
1769 : }
1770 : }
1771 :
1772 : /* Patch jump table references. */
1773 129289 : for (size_t i = 0; i < jumpTables.length(); i++) {
1774 274 : JumpTable &jumpTable = jumpTables[i];
1775 274 : fullCode.patch(jumpTable.label, &jumpVec[jumpTable.offsetIndex]);
1776 : }
1777 :
1778 : /* Patch all outgoing calls. */
1779 129015 : masm.finalize(fullCode, inlineDoubles);
1780 129015 : stubcc.masm.finalize(stubCode, oolDoubles);
1781 :
1782 129015 : JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
1783 129015 : JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
1784 :
1785 : Probes::registerMJITCode(cx, jit,
1786 : a,
1787 129015 : (JSActiveFrame**) inlineFrames.begin(),
1788 : result, masm.size(),
1789 258030 : result + masm.size(), stubcc.size());
1790 :
1791 129015 : outerChunkRef().chunk = chunk;
1792 :
1793 : /* Patch all incoming and outgoing cross-chunk jumps. */
1794 129015 : CrossChunkEdge *crossEdges = jit->edges();
1795 152696 : for (unsigned i = 0; i < jit->nedges; i++) {
1796 23681 : CrossChunkEdge &edge = crossEdges[i];
1797 23681 : if (bytecodeInChunk(outerScript->code + edge.source)) {
1798 1794 : JS_ASSERT(!edge.sourceJump1 && !edge.sourceJump2);
1799 1794 : void *label = edge.targetLabel ? edge.targetLabel : edge.shimLabel;
1800 1794 : CodeLocationLabel targetLabel(label);
1801 1794 : JSOp op = JSOp(script->code[edge.source]);
1802 1794 : if (op == JSOP_TABLESWITCH) {
1803 16 : if (edge.jumpTableEntries)
1804 0 : cx->free_(edge.jumpTableEntries);
1805 16 : CrossChunkEdge::JumpTableEntryVector *jumpTableEntries = NULL;
1806 16 : bool failed = false;
1807 50 : for (unsigned j = 0; j < chunkJumps.length(); j++) {
1808 34 : ChunkJumpTableEdge nedge = chunkJumps[j];
1809 34 : if (nedge.edge.source == edge.source && nedge.edge.target == edge.target) {
1810 10 : if (!jumpTableEntries) {
1811 10 : jumpTableEntries = OffTheBooks::new_<CrossChunkEdge::JumpTableEntryVector>();
1812 10 : if (!jumpTableEntries)
1813 0 : failed = true;
1814 : }
1815 10 : if (!jumpTableEntries->append(nedge.jumpTableEntry))
1816 0 : failed = true;
1817 10 : *nedge.jumpTableEntry = label;
1818 : }
1819 : }
1820 16 : if (failed) {
1821 0 : execPool->release();
1822 0 : cx->free_(chunk);
1823 0 : js_ReportOutOfMemory(cx);
1824 0 : return Compile_Error;
1825 : }
1826 16 : edge.jumpTableEntries = jumpTableEntries;
1827 : }
1828 2366 : for (unsigned j = 0; j < chunkEdges.length(); j++) {
1829 2348 : const OutgoingChunkEdge &oedge = chunkEdges[j];
1830 2348 : if (oedge.source == edge.source && oedge.target == edge.target) {
1831 : /*
1832 : * Only a single edge needs to be patched; we ensured while
1833 : * generating chunks that no two cross chunk edges can have
1834 : * the same source and target. Note that there may not be
1835 : * an edge to patch, if constant folding determined the
1836 : * jump is never taken.
1837 : */
1838 1776 : edge.sourceJump1 = fullCode.locationOf(oedge.fastJump).executableAddress();
1839 1776 : if (oedge.slowJump.isSet()) {
1840 : edge.sourceJump2 =
1841 81 : stubCode.locationOf(oedge.slowJump.get()).executableAddress();
1842 : }
1843 : #ifdef JS_CPU_X64
1844 : edge.sourceTrampoline =
1845 : stubCode.locationOf(oedge.sourceTrampoline).executableAddress();
1846 : #endif
1847 1776 : jit->patchEdge(edge, label);
1848 1776 : break;
1849 : }
1850 : }
1851 21887 : } else if (bytecodeInChunk(outerScript->code + edge.target)) {
1852 1659 : JS_ASSERT(!edge.targetLabel);
1853 1659 : JS_ASSERT(jumpMap[edge.target].isSet());
1854 1659 : edge.targetLabel = fullCode.locationOf(jumpMap[edge.target]).executableAddress();
1855 1659 : jit->patchEdge(edge, edge.targetLabel);
1856 : }
1857 : }
1858 :
1859 129015 : return Compile_Okay;
1860 : }
1861 :
// Debug-only tracing helper for the bytecode compilation loop: when the
// JSpew_JSOps spew channel is active, disassemble the opcode at PC
// (js_Disassemble1) and print it along with the current simulated stack
// depth. Expands to nothing in release builds.
1862 : #ifdef DEBUG
1863 : #define SPEW_OPCODE() \
1864 : JS_BEGIN_MACRO \
1865 : if (IsJaegerSpewChannelActive(JSpew_JSOps)) { \
1866 : Sprinter sprinter(cx); \
1867 : sprinter.init(); \
1868 : js_Disassemble1(cx, script, PC, PC - script->code, \
1869 : JS_TRUE, &sprinter); \
1870 : JaegerSpew(JSpew_JSOps, " %2d %s", \
1871 : frame.stackDepth(), sprinter.string()); \
1872 : } \
1873 : JS_END_MACRO;
1874 : #else
1875 : #define SPEW_OPCODE()
1876 : #endif /* DEBUG */
1877 :
// Case helpers for the big opcode switch in generateMethod():
// BEGIN_CASE opens the case label for an opcode; END_CASE advances PC by
// that opcode's fixed length (name##_LENGTH) and breaks out of the switch.
// Opcodes with variable or fused lengths advance PC manually instead.
1878 : #define BEGIN_CASE(name) case name:
1879 : #define END_CASE(name) \
1880 : JS_BEGIN_MACRO \
1881 : PC += name##_LENGTH; \
1882 : JS_END_MACRO; \
1883 : break;
1884 :
1885 : static inline void
1886 : FixDouble(Value &val)
1887 : {
1888 : if (val.isInt32())
1889 : val.setDouble((double)val.toInt32());
1890 : }
1891 :
1892 : inline bool
1893 199955 : mjit::Compiler::shouldStartLoop(jsbytecode *head)
1894 : {
1895 : /*
1896 : * Don't do loop based optimizations or register allocation for loops which
1897 : * span multiple chunks.
1898 : */
1899 199955 : if (*head == JSOP_LOOPHEAD && analysis->getLoop(head)) {
1900 67681 : uint32_t backedge = analysis->getLoop(head)->backedge;
1901 67681 : if (!bytecodeInChunk(script->code + backedge))
1902 429 : return false;
1903 67252 : return true;
1904 : }
1905 132274 : return false;
1906 : }
1907 :
1908 : CompileStatus
1909 131933 : mjit::Compiler::generateMethod()
1910 : {
1911 131933 : SrcNoteLineScanner scanner(script->notes(), script->lineno);
1912 :
1913 : /* For join points, whether there was fallthrough from the previous opcode. */
1914 131933 : bool fallthrough = true;
1915 :
1916 : /* Last bytecode processed. */
1917 131933 : jsbytecode *lastPC = NULL;
1918 :
1919 131933 : if (!outerJIT())
1920 0 : return Compile_Retry;
1921 :
1922 131933 : uint32_t chunkBegin = 0, chunkEnd = script->length;
1923 131933 : if (!a->parent) {
1924 : const ChunkDescriptor &desc =
1925 129049 : outerJIT()->chunkDescriptor(chunkIndex);
1926 129049 : chunkBegin = desc.begin;
1927 129049 : chunkEnd = desc.end;
1928 :
1929 2174088 : while (PC != script->code + chunkBegin) {
1930 1915990 : Bytecode *opinfo = analysis->maybeCode(PC);
1931 1915990 : if (opinfo) {
1932 1908714 : if (opinfo->jumpTarget) {
1933 : /* Update variable types for all new values at this bytecode. */
1934 20769 : const SlotValue *newv = analysis->newValues(PC);
1935 20769 : if (newv) {
1936 23943 : while (newv->slot) {
1937 16711 : if (newv->slot < TotalSlots(script)) {
1938 10704 : VarType &vt = a->varTypes[newv->slot];
1939 10704 : vt.setTypes(analysis->getValueTypes(newv->value));
1940 : }
1941 16711 : newv++;
1942 : }
1943 : }
1944 : }
1945 1908714 : if (analyze::BytecodeUpdatesSlot(JSOp(*PC))) {
1946 208962 : uint32_t slot = GetBytecodeSlot(script, PC);
1947 208962 : if (analysis->trackSlot(slot)) {
1948 100208 : VarType &vt = a->varTypes[slot];
1949 100208 : vt.setTypes(analysis->pushedTypes(PC, 0));
1950 : }
1951 : }
1952 : }
1953 :
1954 1915990 : PC += GetBytecodeLength(PC);
1955 : }
1956 :
1957 129049 : if (chunkIndex != 0) {
1958 1157 : uint32_t depth = analysis->getCode(PC).stackDepth;
1959 42287 : for (uint32_t i = 0; i < depth; i++)
1960 41130 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
1961 : }
1962 : }
1963 :
1964 8776222 : for (;;) {
1965 8908155 : JSOp op = JSOp(*PC);
1966 8908155 : int trap = stubs::JSTRAP_NONE;
1967 :
1968 8908155 : if (script->hasBreakpointsAt(PC))
1969 339 : trap |= stubs::JSTRAP_TRAP;
1970 :
1971 8908155 : Bytecode *opinfo = analysis->maybeCode(PC);
1972 :
1973 8908155 : if (!opinfo) {
1974 65548 : if (op == JSOP_STOP)
1975 40488 : break;
1976 25060 : if (js_CodeSpec[op].length != -1)
1977 25060 : PC += js_CodeSpec[op].length;
1978 : else
1979 0 : PC += js_GetVariableBytecodeLength(PC);
1980 25060 : continue;
1981 : }
1982 :
1983 8842607 : if (PC >= script->code + script->length)
1984 0 : break;
1985 :
1986 8842607 : scanner.advanceTo(PC - script->code);
1987 8842748 : if (script->stepModeEnabled() &&
1988 141 : (scanner.isLineHeader() || opinfo->jumpTarget))
1989 : {
1990 54 : trap |= stubs::JSTRAP_SINGLESTEP;
1991 : }
1992 :
1993 8842607 : frame.setPC(PC);
1994 8842607 : frame.setInTryBlock(opinfo->inTryBlock);
1995 :
1996 8842607 : if (fallthrough) {
1997 : /*
1998 : * If there is fallthrough from the previous opcode and we changed
1999 : * any entries into doubles for a branch at that previous op,
2000 : * revert those entries into integers. Similarly, if we forgot that
2001 : * an entry is a double then make it a double again, as the frame
2002 : * may have assigned it a normal register.
2003 : */
2004 8686106 : for (unsigned i = 0; i < fixedIntToDoubleEntries.length(); i++) {
2005 4 : FrameEntry *fe = frame.getSlotEntry(fixedIntToDoubleEntries[i]);
2006 4 : frame.ensureInteger(fe);
2007 : }
2008 8686136 : for (unsigned i = 0; i < fixedDoubleToAnyEntries.length(); i++) {
2009 34 : FrameEntry *fe = frame.getSlotEntry(fixedDoubleToAnyEntries[i]);
2010 34 : frame.syncAndForgetFe(fe);
2011 : }
2012 : }
2013 8842607 : fixedIntToDoubleEntries.clear();
2014 8842607 : fixedDoubleToAnyEntries.clear();
2015 :
2016 8842607 : if (PC >= script->code + chunkEnd) {
2017 1250 : if (fallthrough) {
2018 1250 : if (opinfo->jumpTarget)
2019 496 : fixDoubleTypes(PC);
2020 1250 : frame.syncAndForgetEverything();
2021 1250 : jsbytecode *curPC = PC;
2022 3624 : do {
2023 3624 : PC--;
2024 3624 : } while (!analysis->maybeCode(PC));
2025 1250 : if (!jumpAndRun(masm.jump(), curPC, NULL, NULL, /* fallthrough = */ true))
2026 0 : return Compile_Error;
2027 1250 : PC = curPC;
2028 : }
2029 1250 : break;
2030 : }
2031 :
2032 8841357 : if (opinfo->jumpTarget || trap) {
2033 455910 : if (fallthrough) {
2034 299405 : fixDoubleTypes(PC);
2035 299405 : fixedIntToDoubleEntries.clear();
2036 299405 : fixedDoubleToAnyEntries.clear();
2037 :
2038 : /*
2039 : * Watch for fallthrough to the head of a 'do while' loop.
2040 : * We don't know what register state we will be using at the head
2041 : * of the loop so sync, branch, and fix it up after the loop
2042 : * has been processed.
2043 : */
2044 299405 : if (cx->typeInferenceEnabled() && shouldStartLoop(PC)) {
2045 68 : frame.syncAndForgetEverything();
2046 68 : Jump j = masm.jump();
2047 68 : if (!startLoop(PC, j, PC))
2048 0 : return Compile_Error;
2049 : } else {
2050 299337 : Label start = masm.label();
2051 299337 : if (!frame.syncForBranch(PC, Uses(0)))
2052 0 : return Compile_Error;
2053 299337 : if (pcLengths && lastPC) {
2054 : /* Track this sync code for the previous op. */
2055 0 : size_t length = masm.size() - masm.distanceOf(start);
2056 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
2057 0 : pcLengths[offset].codeLength += length;
2058 : }
2059 299337 : JS_ASSERT(frame.consistentRegisters(PC));
2060 : }
2061 : }
2062 :
2063 455910 : if (!frame.discardForJoin(analysis->getAllocation(PC), opinfo->stackDepth))
2064 0 : return Compile_Error;
2065 455910 : updateJoinVarTypes();
2066 455910 : fallthrough = true;
2067 :
2068 455910 : if (!cx->typeInferenceEnabled()) {
2069 : /* All join points have synced state if we aren't doing cross-branch regalloc. */
2070 282314 : opinfo->safePoint = true;
2071 455910 : }
2072 8385447 : } else if (opinfo->safePoint) {
2073 1030 : frame.syncAndForgetEverything();
2074 : }
2075 8841357 : frame.assertValidRegisterState();
2076 8841357 : a->jumpMap[uint32_t(PC - script->code)] = masm.label();
2077 :
2078 : // Now that we have the PC's register allocation, make sure it gets
2079 : // explicitly updated if this is the loop entry and new loop registers
2080 : // are allocated later on.
2081 8841357 : if (loop && !a->parent)
2082 926980 : loop->setOuterPC(PC);
2083 :
2084 8841357 : SPEW_OPCODE();
2085 8841357 : JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
2086 :
2087 8841357 : if (op == JSOP_LOOPHEAD && analysis->getLoop(PC)) {
2088 34055 : jsbytecode *backedge = script->code + analysis->getLoop(PC)->backedge;
2089 34055 : if (!bytecodeInChunk(backedge)){
2090 15438 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
2091 15009 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
2092 4 : FrameEntry *fe = frame.getSlotEntry(slot);
2093 4 : masm.ensureInMemoryDouble(frame.addressOf(fe));
2094 : }
2095 : }
2096 : }
2097 : }
2098 :
2099 : // If this is an exception entry point, then jsl_InternalThrow has set
2100 : // VMFrame::fp to the correct fp for the entry point. We need to copy
2101 : // that value here to FpReg so that FpReg also has the correct sp.
2102 : // Otherwise, we would simply be using a stale FpReg value.
2103 8841357 : if (op == JSOP_ENTERBLOCK && analysis->getCode(PC).exceptionEntry)
2104 19253 : masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
2105 :
2106 8841357 : if (trap) {
2107 393 : prepareStubCall(Uses(0));
2108 393 : masm.move(Imm32(trap), Registers::ArgReg1);
2109 393 : Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap), NULL);
2110 : InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
2111 393 : REJOIN_TRAP, false);
2112 393 : addCallSite(site);
2113 : }
2114 :
2115 : /* Don't compile fat opcodes, run the decomposed version instead. */
2116 8841357 : if (js_CodeSpec[op].format & JOF_DECOMPOSE) {
2117 205997 : PC += js_CodeSpec[op].length;
2118 205997 : continue;
2119 : }
2120 :
2121 8635360 : Label codeStart = masm.label();
2122 8635360 : bool countersUpdated = false;
2123 8635360 : bool arithUpdated = false;
2124 :
2125 8635360 : JSValueType arithFirstUseType = JSVAL_TYPE_UNKNOWN;
2126 8635360 : JSValueType arithSecondUseType = JSVAL_TYPE_UNKNOWN;
2127 8635360 : if (script->pcCounters && !!(js_CodeSpec[op].format & JOF_ARITH)) {
2128 0 : if (GetUseCount(script, PC - script->code) == 1) {
2129 0 : FrameEntry *use = frame.peek(-1);
2130 : /*
2131 : * Pretend it's a binary operation and the second operand has
2132 : * the same type as the first one.
2133 : */
2134 0 : if (use->isTypeKnown())
2135 0 : arithFirstUseType = arithSecondUseType = use->getKnownType();
2136 : } else {
2137 0 : FrameEntry *use = frame.peek(-1);
2138 0 : if (use->isTypeKnown())
2139 0 : arithFirstUseType = use->getKnownType();
2140 0 : use = frame.peek(-2);
2141 0 : if (use->isTypeKnown())
2142 0 : arithSecondUseType = use->getKnownType();
2143 : }
2144 : }
2145 :
2146 : /*
2147 : * Update PC counters for jump opcodes at their start, so that we don't
2148 : * miss them when taking the jump. This is delayed for other opcodes,
2149 : * as we want to skip updating for ops we didn't generate any code for.
2150 : */
2151 8635360 : if (script->pcCounters && JOF_OPTYPE(op) == JOF_JUMP)
2152 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2153 :
2154 : /**********************
2155 : * BEGIN COMPILER OPS *
2156 : **********************/
2157 :
2158 8635360 : lastPC = PC;
2159 :
2160 8635360 : switch (op) {
2161 : BEGIN_CASE(JSOP_NOP)
2162 24099 : END_CASE(JSOP_NOP)
2163 :
2164 : BEGIN_CASE(JSOP_UNDEFINED)
2165 142770 : frame.push(UndefinedValue());
2166 142770 : END_CASE(JSOP_UNDEFINED)
2167 :
2168 : BEGIN_CASE(JSOP_POPV)
2169 : BEGIN_CASE(JSOP_SETRVAL)
2170 : {
2171 17971 : RegisterID reg = frame.allocReg();
2172 17971 : masm.load32(FrameFlagsAddress(), reg);
2173 17971 : masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
2174 17971 : masm.store32(reg, FrameFlagsAddress());
2175 17971 : frame.freeReg(reg);
2176 :
2177 : /* Scripts which write to the frame's return slot aren't inlined. */
2178 17971 : JS_ASSERT(a == outer);
2179 :
2180 17971 : FrameEntry *fe = frame.peek(-1);
2181 17971 : frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
2182 17971 : frame.pop();
2183 : }
2184 17971 : END_CASE(JSOP_POPV)
2185 :
2186 : BEGIN_CASE(JSOP_RETURN)
2187 55772 : if (script->pcCounters)
2188 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2189 55772 : emitReturn(frame.peek(-1));
2190 55772 : fallthrough = false;
2191 55772 : END_CASE(JSOP_RETURN)
2192 :
2193 : BEGIN_CASE(JSOP_GOTO)
2194 : BEGIN_CASE(JSOP_DEFAULT)
2195 : {
2196 122855 : unsigned targetOffset = FollowBranch(cx, script, PC - script->code);
2197 122855 : jsbytecode *target = script->code + targetOffset;
2198 :
2199 122855 : fixDoubleTypes(target);
2200 :
2201 : /*
2202 : * Watch for gotos which are entering a 'for' or 'while' loop.
2203 : * These jump to the loop condition test and are immediately
2204 : * followed by the head of the loop.
2205 : */
2206 122855 : jsbytecode *next = PC + js_CodeSpec[op].length;
2207 218157 : if (cx->typeInferenceEnabled() &&
2208 48689 : analysis->maybeCode(next) &&
2209 46613 : shouldStartLoop(next))
2210 : {
2211 33558 : frame.syncAndForgetEverything();
2212 33558 : Jump j = masm.jump();
2213 33558 : if (!startLoop(next, j, target))
2214 0 : return Compile_Error;
2215 : } else {
2216 89297 : if (!frame.syncForBranch(target, Uses(0)))
2217 0 : return Compile_Error;
2218 89297 : Jump j = masm.jump();
2219 89297 : if (!jumpAndRun(j, target))
2220 0 : return Compile_Error;
2221 : }
2222 122855 : fallthrough = false;
2223 122855 : PC += js_CodeSpec[op].length;
2224 122855 : break;
2225 : }
2226 : END_CASE(JSOP_GOTO)
2227 :
2228 : BEGIN_CASE(JSOP_IFEQ)
2229 : BEGIN_CASE(JSOP_IFNE)
2230 : {
2231 69016 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2232 69016 : fixDoubleTypes(target);
2233 69016 : if (!jsop_ifneq(op, target))
2234 0 : return Compile_Error;
2235 69016 : PC += js_CodeSpec[op].length;
2236 69016 : break;
2237 : }
2238 : END_CASE(JSOP_IFNE)
2239 :
2240 : BEGIN_CASE(JSOP_ARGUMENTS)
2241 : /*
2242 : * For calls of the form 'f.apply(x, arguments)' we can avoid
2243 : * creating an args object by having ic::SplatApplyArgs pull
2244 : * directly from the stack. To do this, we speculate here that
2245 : * 'apply' actually refers to js_fun_apply. If this is not true,
2246 : * the slow path in JSOP_FUNAPPLY will create the args object.
2247 : */
2248 2898 : if (canUseApplyTricks()) {
2249 : /*
2250 : * Check for interrupts at the JSOP_ARGUMENTS when using
2251 : * apply tricks, see inlineCallHelper().
2252 : */
2253 252 : interruptCheckHelper();
2254 :
2255 252 : applyTricks = LazyArgsObj;
2256 252 : pushSyncedEntry(0);
2257 4082 : } else if (cx->typeInferenceEnabled() && !script->strictModeCode &&
2258 1436 : !types::TypeSet::HasObjectFlags(cx, script->function()->getType(cx),
2259 2872 : types::OBJECT_FLAG_CREATED_ARGUMENTS)) {
2260 279 : frame.push(MagicValue(JS_LAZY_ARGUMENTS));
2261 : } else {
2262 2367 : jsop_arguments(REJOIN_FALLTHROUGH);
2263 2367 : pushSyncedEntry(0);
2264 : }
2265 2898 : END_CASE(JSOP_ARGUMENTS)
2266 :
2267 : BEGIN_CASE(JSOP_ITERNEXT)
2268 4645 : iterNext(GET_INT8(PC));
2269 4645 : END_CASE(JSOP_ITERNEXT)
2270 :
2271 : BEGIN_CASE(JSOP_DUP)
2272 363092 : frame.dup();
2273 363092 : END_CASE(JSOP_DUP)
2274 :
2275 : BEGIN_CASE(JSOP_DUP2)
2276 195033 : frame.dup2();
2277 195033 : END_CASE(JSOP_DUP2)
2278 :
2279 : BEGIN_CASE(JSOP_SWAP)
2280 164740 : frame.dup2();
2281 164740 : frame.shift(-3);
2282 164740 : frame.shift(-1);
2283 164740 : END_CASE(JSOP_SWAP)
2284 :
2285 : BEGIN_CASE(JSOP_PICK)
2286 : {
2287 589479 : uint32_t amt = GET_UINT8(PC);
2288 :
2289 : // Push -(amt + 1), say amt == 2
2290 : // Stack before: X3 X2 X1
2291 : // Stack after: X3 X2 X1 X3
2292 589479 : frame.dupAt(-int32_t(amt + 1));
2293 :
2294 : // For each item X[i...1] push it then move it down.
2295 : // The above would transition like so:
2296 : // X3 X2 X1 X3 X2 (dupAt)
2297 : // X2 X2 X1 X3 (shift)
2298 : // X2 X2 X1 X3 X1 (dupAt)
2299 : // X2 X1 X1 X3 (shift)
2300 2155831 : for (int32_t i = -int32_t(amt); i < 0; i++) {
2301 1566352 : frame.dupAt(i - 1);
2302 1566352 : frame.shift(i - 2);
2303 : }
2304 :
2305 : // The stack looks like:
2306 : // Xn ... X1 X1 X{n+1}
2307 : // So shimmy the last value down.
2308 589479 : frame.shimmy(1);
2309 : }
2310 589479 : END_CASE(JSOP_PICK)
2311 :
2312 : BEGIN_CASE(JSOP_BITOR)
2313 : BEGIN_CASE(JSOP_BITXOR)
2314 : BEGIN_CASE(JSOP_BITAND)
2315 7274 : jsop_bitop(op);
2316 7274 : END_CASE(JSOP_BITAND)
2317 :
2318 : BEGIN_CASE(JSOP_LT)
2319 : BEGIN_CASE(JSOP_LE)
2320 : BEGIN_CASE(JSOP_GT)
2321 : BEGIN_CASE(JSOP_GE)
2322 : BEGIN_CASE(JSOP_EQ)
2323 : BEGIN_CASE(JSOP_NE)
2324 : {
2325 94777 : if (script->pcCounters) {
2326 0 : updateArithCounters(PC, NULL, arithFirstUseType, arithSecondUseType);
2327 0 : arithUpdated = true;
2328 : }
2329 :
2330 : /* Detect fusions. */
2331 94777 : jsbytecode *next = &PC[JSOP_GE_LENGTH];
2332 94777 : JSOp fused = JSOp(*next);
2333 94777 : if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
2334 18094 : fused = JSOP_NOP;
2335 :
2336 : /* Get jump target, if any. */
2337 94777 : jsbytecode *target = NULL;
2338 94777 : if (fused != JSOP_NOP) {
2339 76683 : if (script->pcCounters)
2340 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2341 76683 : target = next + GET_JUMP_OFFSET(next);
2342 76683 : fixDoubleTypes(target);
2343 : }
2344 :
2345 94777 : BoolStub stub = NULL;
2346 94777 : switch (op) {
2347 : case JSOP_LT:
2348 53487 : stub = stubs::LessThan;
2349 53487 : break;
2350 : case JSOP_LE:
2351 4657 : stub = stubs::LessEqual;
2352 4657 : break;
2353 : case JSOP_GT:
2354 5607 : stub = stubs::GreaterThan;
2355 5607 : break;
2356 : case JSOP_GE:
2357 7443 : stub = stubs::GreaterEqual;
2358 7443 : break;
2359 : case JSOP_EQ:
2360 16927 : stub = stubs::Equal;
2361 16927 : break;
2362 : case JSOP_NE:
2363 6656 : stub = stubs::NotEqual;
2364 6656 : break;
2365 : default:
2366 0 : JS_NOT_REACHED("WAT");
2367 : break;
2368 : }
2369 :
2370 : /*
2371 : * We need to ensure in the target case that we always rejoin
2372 : * before the rval test. In the non-target case we will rejoin
2373 : * correctly after the op finishes.
2374 : */
2375 :
2376 94777 : FrameEntry *rhs = frame.peek(-1);
2377 94777 : FrameEntry *lhs = frame.peek(-2);
2378 :
2379 : /* Check for easy cases that the parser does not constant fold. */
2380 94777 : if (lhs->isConstant() && rhs->isConstant()) {
2381 : /* Primitives can be trivially constant folded. */
2382 249 : const Value &lv = lhs->getValue();
2383 249 : const Value &rv = rhs->getValue();
2384 :
2385 249 : if (lv.isPrimitive() && rv.isPrimitive()) {
2386 249 : bool result = compareTwoValues(cx, op, lv, rv);
2387 :
2388 249 : frame.pop();
2389 249 : frame.pop();
2390 :
2391 249 : if (!target) {
2392 130 : frame.push(Value(BooleanValue(result)));
2393 : } else {
2394 119 : if (fused == JSOP_IFEQ)
2395 119 : result = !result;
2396 119 : if (!constantFoldBranch(target, result))
2397 0 : return Compile_Error;
2398 : }
2399 : } else {
2400 0 : if (!emitStubCmpOp(stub, target, fused))
2401 0 : return Compile_Error;
2402 : }
2403 : } else {
2404 : /* Anything else should go through the fast path generator. */
2405 94528 : if (!jsop_relational(op, stub, target, fused))
2406 0 : return Compile_Error;
2407 : }
2408 :
2409 : /* Advance PC manually. */
2410 : JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
2411 : JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
2412 : JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
2413 : JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
2414 : JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
2415 :
2416 94777 : PC += JSOP_GE_LENGTH;
2417 94777 : if (fused != JSOP_NOP) {
2418 76683 : SPEW_OPCODE();
2419 76683 : PC += JSOP_IFNE_LENGTH;
2420 : }
2421 94777 : break;
2422 : }
2423 : END_CASE(JSOP_GE)
2424 :
2425 : BEGIN_CASE(JSOP_LSH)
2426 2128 : jsop_bitop(op);
2427 2128 : END_CASE(JSOP_LSH)
2428 :
2429 : BEGIN_CASE(JSOP_RSH)
2430 2846 : jsop_bitop(op);
2431 2846 : END_CASE(JSOP_RSH)
2432 :
2433 : BEGIN_CASE(JSOP_URSH)
2434 836 : jsop_bitop(op);
2435 836 : END_CASE(JSOP_URSH)
2436 :
2437 : BEGIN_CASE(JSOP_ADD)
2438 607709 : if (!jsop_binary(op, stubs::Add, knownPushedType(0), pushedTypeSet(0)))
2439 3 : return Compile_Retry;
2440 607706 : END_CASE(JSOP_ADD)
2441 :
2442 : BEGIN_CASE(JSOP_SUB)
2443 10672 : if (!jsop_binary(op, stubs::Sub, knownPushedType(0), pushedTypeSet(0)))
2444 0 : return Compile_Retry;
2445 10672 : END_CASE(JSOP_SUB)
2446 :
2447 : BEGIN_CASE(JSOP_MUL)
2448 6221 : if (!jsop_binary(op, stubs::Mul, knownPushedType(0), pushedTypeSet(0)))
2449 0 : return Compile_Retry;
2450 6221 : END_CASE(JSOP_MUL)
2451 :
2452 : BEGIN_CASE(JSOP_DIV)
2453 8349 : if (!jsop_binary(op, stubs::Div, knownPushedType(0), pushedTypeSet(0)))
2454 4 : return Compile_Retry;
2455 8345 : END_CASE(JSOP_DIV)
2456 :
2457 : BEGIN_CASE(JSOP_MOD)
2458 3903 : if (!jsop_mod())
2459 2 : return Compile_Retry;
2460 3901 : END_CASE(JSOP_MOD)
2461 :
2462 : BEGIN_CASE(JSOP_NOT)
2463 33078 : jsop_not();
2464 33078 : END_CASE(JSOP_NOT)
2465 :
2466 : BEGIN_CASE(JSOP_BITNOT)
2467 : {
2468 167 : FrameEntry *top = frame.peek(-1);
2469 167 : if (top->isConstant() && top->getValue().isPrimitive()) {
2470 : int32_t i;
2471 0 : JS_ALWAYS_TRUE(ToInt32(cx, top->getValue(), &i));
2472 0 : i = ~i;
2473 0 : frame.pop();
2474 0 : frame.push(Int32Value(i));
2475 : } else {
2476 167 : jsop_bitnot();
2477 : }
2478 : }
2479 167 : END_CASE(JSOP_BITNOT)
2480 :
2481 : BEGIN_CASE(JSOP_NEG)
2482 : {
2483 4197 : FrameEntry *top = frame.peek(-1);
2484 4197 : if (top->isConstant() && top->getValue().isPrimitive()) {
2485 : double d;
2486 429 : JS_ALWAYS_TRUE(ToNumber(cx, top->getValue(), &d));
2487 429 : d = -d;
2488 429 : Value v = NumberValue(d);
2489 :
2490 : /* Watch for overflow in constant propagation. */
2491 429 : types::TypeSet *pushed = pushedTypeSet(0);
2492 429 : if (!v.isInt32() && pushed && !pushed->hasType(types::Type::DoubleType())) {
2493 24 : types::TypeScript::MonitorOverflow(cx, script, PC);
2494 24 : return Compile_Retry;
2495 : }
2496 :
2497 405 : frame.pop();
2498 405 : frame.push(v);
2499 : } else {
2500 3768 : jsop_neg();
2501 : }
2502 : }
2503 4173 : END_CASE(JSOP_NEG)
2504 :
2505 : BEGIN_CASE(JSOP_POS)
2506 206193 : jsop_pos();
2507 206193 : END_CASE(JSOP_POS)
2508 :
2509 : BEGIN_CASE(JSOP_DELNAME)
2510 : {
2511 410 : uint32_t index = GET_UINT32_INDEX(PC);
2512 410 : PropertyName *name = script->getName(index);
2513 :
2514 410 : prepareStubCall(Uses(0));
2515 410 : masm.move(ImmPtr(name), Registers::ArgReg1);
2516 410 : INLINE_STUBCALL(stubs::DelName, REJOIN_FALLTHROUGH);
2517 410 : pushSyncedEntry(0);
2518 : }
2519 410 : END_CASE(JSOP_DELNAME)
2520 :
2521 : BEGIN_CASE(JSOP_DELPROP)
2522 : {
2523 271 : uint32_t index = GET_UINT32_INDEX(PC);
2524 271 : PropertyName *name = script->getName(index);
2525 :
2526 271 : prepareStubCall(Uses(1));
2527 271 : masm.move(ImmPtr(name), Registers::ArgReg1);
2528 271 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp), REJOIN_FALLTHROUGH);
2529 271 : frame.pop();
2530 271 : pushSyncedEntry(0);
2531 : }
2532 271 : END_CASE(JSOP_DELPROP)
2533 :
2534 : BEGIN_CASE(JSOP_DELELEM)
2535 : {
2536 452 : prepareStubCall(Uses(2));
2537 452 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem), REJOIN_FALLTHROUGH);
2538 452 : frame.popn(2);
2539 452 : pushSyncedEntry(0);
2540 : }
2541 452 : END_CASE(JSOP_DELELEM)
2542 :
2543 : BEGIN_CASE(JSOP_TYPEOF)
2544 : BEGIN_CASE(JSOP_TYPEOFEXPR)
2545 2566 : jsop_typeof();
2546 2566 : END_CASE(JSOP_TYPEOF)
2547 :
2548 : BEGIN_CASE(JSOP_VOID)
2549 524 : frame.pop();
2550 524 : frame.push(UndefinedValue());
2551 524 : END_CASE(JSOP_VOID)
2552 :
2553 : BEGIN_CASE(JSOP_GETPROP)
2554 : BEGIN_CASE(JSOP_CALLPROP)
2555 : BEGIN_CASE(JSOP_LENGTH)
2556 505008 : if (!jsop_getprop(script->getName(GET_UINT32_INDEX(PC)), knownPushedType(0)))
2557 0 : return Compile_Error;
2558 505008 : END_CASE(JSOP_GETPROP)
2559 :
2560 : BEGIN_CASE(JSOP_GETELEM)
2561 : BEGIN_CASE(JSOP_CALLELEM)
2562 253711 : if (script->pcCounters)
2563 0 : updateElemCounters(PC, frame.peek(-2), frame.peek(-1));
2564 253711 : if (!jsop_getelem())
2565 0 : return Compile_Error;
2566 253711 : END_CASE(JSOP_GETELEM)
2567 :
2568 : BEGIN_CASE(JSOP_TOID)
2569 193990 : jsop_toid();
2570 193990 : END_CASE(JSOP_TOID)
2571 :
2572 : BEGIN_CASE(JSOP_SETELEM)
2573 : {
2574 211981 : if (script->pcCounters)
2575 0 : updateElemCounters(PC, frame.peek(-3), frame.peek(-2));
2576 211981 : jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
2577 211981 : bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
2578 211981 : if (!jsop_setelem(pop))
2579 0 : return Compile_Error;
2580 : }
2581 211981 : END_CASE(JSOP_SETELEM);
2582 :
2583 : BEGIN_CASE(JSOP_EVAL)
2584 : {
2585 1983 : JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
2586 1983 : emitEval(GET_ARGC(PC));
2587 1983 : JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
2588 : }
2589 1983 : END_CASE(JSOP_EVAL)
2590 :
2591 : BEGIN_CASE(JSOP_CALL)
2592 : BEGIN_CASE(JSOP_NEW)
2593 : BEGIN_CASE(JSOP_FUNAPPLY)
2594 : BEGIN_CASE(JSOP_FUNCALL)
2595 : {
2596 298177 : bool callingNew = (op == JSOP_NEW);
2597 :
2598 298177 : bool done = false;
2599 298177 : if ((op == JSOP_CALL || op == JSOP_NEW) && !monitored(PC)) {
2600 288457 : CompileStatus status = inlineNativeFunction(GET_ARGC(PC), callingNew);
2601 288457 : if (status == Compile_Okay)
2602 3233 : done = true;
2603 285224 : else if (status != Compile_InlineAbort)
2604 0 : return status;
2605 : }
2606 298177 : if (!done && inlining()) {
2607 59429 : CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), callingNew);
2608 59429 : if (status == Compile_Okay)
2609 2720 : done = true;
2610 56709 : else if (status != Compile_InlineAbort)
2611 0 : return status;
2612 59429 : if (script->pcCounters) {
2613 : /* Code generated while inlining has been accounted for. */
2614 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2615 : }
2616 : }
2617 :
2618 : FrameSize frameSize;
2619 298177 : frameSize.initStatic(frame.totalDepth(), GET_ARGC(PC));
2620 :
2621 298177 : if (!done) {
2622 292224 : JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
2623 292224 : if (!inlineCallHelper(GET_ARGC(PC), callingNew, frameSize))
2624 0 : return Compile_Error;
2625 292224 : JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
2626 : }
2627 : }
2628 298177 : END_CASE(JSOP_CALL)
2629 :
2630 : BEGIN_CASE(JSOP_NAME)
2631 : BEGIN_CASE(JSOP_CALLNAME)
2632 : {
2633 368466 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
2634 368466 : jsop_name(name, knownPushedType(0));
2635 368466 : frame.extra(frame.peek(-1)).name = name;
2636 : }
2637 368466 : END_CASE(JSOP_NAME)
2638 :
2639 : BEGIN_CASE(JSOP_IMPLICITTHIS)
2640 : {
2641 34093 : prepareStubCall(Uses(0));
2642 34093 : masm.move(ImmPtr(script->getName(GET_UINT32_INDEX(PC))), Registers::ArgReg1);
2643 34093 : INLINE_STUBCALL(stubs::ImplicitThis, REJOIN_FALLTHROUGH);
2644 34093 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
2645 : }
2646 34093 : END_CASE(JSOP_IMPLICITTHIS)
2647 :
2648 : BEGIN_CASE(JSOP_DOUBLE)
2649 : {
2650 7476 : double d = script->getConst(GET_UINT32_INDEX(PC)).toDouble();
2651 7476 : frame.push(Value(DoubleValue(d)));
2652 : }
2653 7476 : END_CASE(JSOP_DOUBLE)
2654 :
2655 : BEGIN_CASE(JSOP_STRING)
2656 213674 : frame.push(StringValue(script->getAtom(GET_UINT32_INDEX(PC))));
2657 213674 : END_CASE(JSOP_STRING)
2658 :
2659 : BEGIN_CASE(JSOP_ZERO)
2660 111654 : frame.push(JSVAL_ZERO);
2661 111654 : END_CASE(JSOP_ZERO)
2662 :
2663 : BEGIN_CASE(JSOP_ONE)
2664 262473 : frame.push(JSVAL_ONE);
2665 262473 : END_CASE(JSOP_ONE)
2666 :
2667 : BEGIN_CASE(JSOP_NULL)
2668 13105 : frame.push(NullValue());
2669 13105 : END_CASE(JSOP_NULL)
2670 :
2671 : BEGIN_CASE(JSOP_THIS)
2672 72204 : jsop_this();
2673 72204 : END_CASE(JSOP_THIS)
2674 :
2675 : BEGIN_CASE(JSOP_FALSE)
2676 11885 : frame.push(Value(BooleanValue(false)));
2677 11885 : END_CASE(JSOP_FALSE)
2678 :
2679 : BEGIN_CASE(JSOP_TRUE)
2680 16170 : frame.push(Value(BooleanValue(true)));
2681 16170 : END_CASE(JSOP_TRUE)
2682 :
2683 : BEGIN_CASE(JSOP_OR)
2684 : BEGIN_CASE(JSOP_AND)
2685 : {
2686 18839 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2687 18839 : fixDoubleTypes(target);
2688 18839 : if (!jsop_andor(op, target))
2689 0 : return Compile_Error;
2690 : }
2691 18839 : END_CASE(JSOP_AND)
2692 :
2693 : BEGIN_CASE(JSOP_TABLESWITCH)
2694 : /*
2695 : * Note: there is no need to syncForBranch for the various targets of
2696 : * switch statement. The liveness analysis has already marked these as
2697 : * allocated with no registers in use. There is also no need to fix
2698 : * double types, as we don't track types of slots in scripts with
2699 : * switch statements (could be fixed).
2700 : */
2701 310 : if (script->pcCounters)
2702 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2703 : #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
2704 : frame.syncAndKillEverything();
2705 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2706 :
2707 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2708 : INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
2709 : frame.pop();
2710 :
2711 : masm.jump(Registers::ReturnReg);
2712 : #else
2713 310 : if (!jsop_tableswitch(PC))
2714 0 : return Compile_Error;
2715 : #endif
2716 310 : PC += js_GetVariableBytecodeLength(PC);
2717 310 : break;
2718 : END_CASE(JSOP_TABLESWITCH)
2719 :
2720 : BEGIN_CASE(JSOP_LOOKUPSWITCH)
2721 191 : if (script->pcCounters)
2722 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2723 191 : frame.syncAndForgetEverything();
2724 191 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2725 :
2726 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2727 191 : INLINE_STUBCALL(stubs::LookupSwitch, REJOIN_NONE);
2728 191 : frame.pop();
2729 :
2730 191 : masm.jump(Registers::ReturnReg);
2731 191 : PC += js_GetVariableBytecodeLength(PC);
2732 191 : break;
2733 : END_CASE(JSOP_LOOKUPSWITCH)
2734 :
2735 : BEGIN_CASE(JSOP_CASE)
2736 : // X Y
2737 :
2738 1816 : frame.dupAt(-2);
2739 : // X Y X
2740 :
2741 1816 : jsop_stricteq(JSOP_STRICTEQ);
2742 : // X cond
2743 :
2744 1816 : if (!jsop_ifneq(JSOP_IFNE, PC + GET_JUMP_OFFSET(PC)))
2745 0 : return Compile_Error;
2746 1816 : END_CASE(JSOP_CASE)
2747 :
2748 : BEGIN_CASE(JSOP_STRICTEQ)
2749 : BEGIN_CASE(JSOP_STRICTNE)
2750 13257 : if (script->pcCounters) {
2751 0 : updateArithCounters(PC, NULL, arithFirstUseType, arithSecondUseType);
2752 0 : arithUpdated = true;
2753 : }
2754 13257 : jsop_stricteq(op);
2755 13257 : END_CASE(JSOP_STRICTEQ)
2756 :
2757 : BEGIN_CASE(JSOP_ITER)
2758 4405 : if (!iter(GET_UINT8(PC)))
2759 0 : return Compile_Error;
2760 4405 : END_CASE(JSOP_ITER)
2761 :
2762 : BEGIN_CASE(JSOP_MOREITER)
2763 : {
2764 : /* At the byte level, this is always fused with IFNE or IFNEX. */
2765 4286 : if (script->pcCounters)
2766 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2767 4286 : jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
2768 4286 : JSOp next = JSOp(*target);
2769 4286 : JS_ASSERT(next == JSOP_IFNE);
2770 :
2771 4286 : target += GET_JUMP_OFFSET(target);
2772 :
2773 4286 : fixDoubleTypes(target);
2774 4286 : if (!iterMore(target))
2775 0 : return Compile_Error;
2776 4286 : PC += JSOP_MOREITER_LENGTH;
2777 4286 : PC += js_CodeSpec[next].length;
2778 4286 : break;
2779 : }
2780 : END_CASE(JSOP_MOREITER)
2781 :
2782 : BEGIN_CASE(JSOP_ENDITER)
2783 4453 : iterEnd();
2784 4453 : END_CASE(JSOP_ENDITER)
2785 :
2786 : BEGIN_CASE(JSOP_POP)
2787 792199 : frame.pop();
2788 792199 : END_CASE(JSOP_POP)
2789 :
2790 : BEGIN_CASE(JSOP_GETARG)
2791 : BEGIN_CASE(JSOP_CALLARG)
2792 : {
2793 194484 : restoreVarType();
2794 194484 : uint32_t arg = GET_SLOTNO(PC);
2795 194484 : if (JSObject *singleton = pushedSingleton(0))
2796 724 : frame.push(ObjectValue(*singleton));
2797 : else
2798 193760 : frame.pushArg(arg);
2799 : }
2800 194484 : END_CASE(JSOP_GETARG)
2801 :
2802 : BEGIN_CASE(JSOP_BINDGNAME)
2803 72915 : jsop_bindgname();
2804 72915 : END_CASE(JSOP_BINDGNAME)
2805 :
2806 : BEGIN_CASE(JSOP_SETARG)
2807 : {
2808 3658 : jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
2809 3658 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2810 3658 : frame.storeArg(GET_SLOTNO(PC), pop);
2811 3658 : updateVarType();
2812 :
2813 3658 : if (pop) {
2814 3573 : frame.pop();
2815 3573 : PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
2816 3573 : break;
2817 : }
2818 : }
2819 85 : END_CASE(JSOP_SETARG)
2820 :
2821 : BEGIN_CASE(JSOP_GETLOCAL)
2822 : BEGIN_CASE(JSOP_CALLLOCAL)
2823 : {
2824 : /*
2825 : * Update the var type unless we are about to pop the variable.
2826 : * Sync is not guaranteed for types of dead locals, and GETLOCAL
2827 : * followed by POP is not regarded as a use of the variable.
2828 : */
2829 394138 : jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
2830 394138 : if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
2831 344630 : restoreVarType();
2832 394138 : uint32_t slot = GET_SLOTNO(PC);
2833 394138 : if (JSObject *singleton = pushedSingleton(0))
2834 36 : frame.push(ObjectValue(*singleton));
2835 : else
2836 394102 : frame.pushLocal(slot);
2837 : }
2838 394138 : END_CASE(JSOP_GETLOCAL)
2839 :
2840 : BEGIN_CASE(JSOP_SETLOCAL)
2841 : {
2842 227274 : jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
2843 227274 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2844 227274 : frame.storeLocal(GET_SLOTNO(PC), pop);
2845 227274 : updateVarType();
2846 :
2847 227274 : if (pop) {
2848 226746 : frame.pop();
2849 226746 : PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
2850 226746 : break;
2851 : }
2852 : }
2853 528 : END_CASE(JSOP_SETLOCAL)
2854 :
2855 : BEGIN_CASE(JSOP_SETLOCALPOP)
2856 : {
2857 19564 : uint32_t slot = GET_SLOTNO(PC);
2858 19564 : frame.storeLocal(slot, true);
2859 19564 : frame.pop();
2860 19564 : updateVarType();
2861 : }
2862 19564 : END_CASE(JSOP_SETLOCALPOP)
2863 :
2864 : BEGIN_CASE(JSOP_UINT16)
2865 188147 : frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
2866 188147 : END_CASE(JSOP_UINT16)
2867 :
2868 : BEGIN_CASE(JSOP_NEWINIT)
2869 2775 : if (!jsop_newinit())
2870 0 : return Compile_Error;
2871 2775 : END_CASE(JSOP_NEWINIT)
2872 :
2873 : BEGIN_CASE(JSOP_NEWARRAY)
2874 13541 : if (!jsop_newinit())
2875 0 : return Compile_Error;
2876 13541 : END_CASE(JSOP_NEWARRAY)
2877 :
2878 : BEGIN_CASE(JSOP_NEWOBJECT)
2879 5847 : if (!jsop_newinit())
2880 0 : return Compile_Error;
2881 5847 : END_CASE(JSOP_NEWOBJECT)
2882 :
2883 : BEGIN_CASE(JSOP_ENDINIT)
2884 22146 : END_CASE(JSOP_ENDINIT)
2885 :
2886 : BEGIN_CASE(JSOP_INITMETHOD)
2887 1455 : jsop_initmethod();
2888 1455 : frame.pop();
2889 1455 : END_CASE(JSOP_INITMETHOD)
2890 :
2891 : BEGIN_CASE(JSOP_INITPROP)
2892 7240 : jsop_initprop();
2893 7240 : frame.pop();
2894 7240 : END_CASE(JSOP_INITPROP)
2895 :
2896 : BEGIN_CASE(JSOP_INITELEM)
2897 41789 : jsop_initelem();
2898 41789 : frame.popn(2);
2899 41789 : END_CASE(JSOP_INITELEM)
2900 :
2901 : BEGIN_CASE(JSOP_INCARG)
2902 : BEGIN_CASE(JSOP_DECARG)
2903 : BEGIN_CASE(JSOP_ARGINC)
2904 : BEGIN_CASE(JSOP_ARGDEC)
2905 444 : if (script->pcCounters) {
2906 0 : restoreVarType();
2907 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2908 0 : if (fe->isTypeKnown())
2909 0 : arithFirstUseType = fe->getKnownType();
2910 : }
2911 :
2912 444 : if (!jsop_arginc(op, GET_SLOTNO(PC)))
2913 0 : return Compile_Retry;
2914 :
2915 444 : if (script->pcCounters) {
2916 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2917 0 : updateArithCounters(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2918 0 : arithUpdated = true;
2919 : }
2920 444 : END_CASE(JSOP_ARGDEC)
2921 :
2922 : BEGIN_CASE(JSOP_INCLOCAL)
2923 : BEGIN_CASE(JSOP_DECLOCAL)
2924 : BEGIN_CASE(JSOP_LOCALINC)
2925 : BEGIN_CASE(JSOP_LOCALDEC)
2926 38652 : if (script->pcCounters) {
2927 0 : restoreVarType();
2928 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2929 0 : if (fe->isTypeKnown())
2930 0 : arithFirstUseType = fe->getKnownType();
2931 : }
2932 :
2933 38652 : if (!jsop_localinc(op, GET_SLOTNO(PC)))
2934 1 : return Compile_Retry;
2935 :
2936 38651 : if (script->pcCounters) {
2937 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2938 0 : updateArithCounters(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2939 0 : arithUpdated = true;
2940 : }
2941 38651 : END_CASE(JSOP_LOCALDEC)
2942 :
2943 : BEGIN_CASE(JSOP_BINDNAME)
2944 7676 : jsop_bindname(script->getName(GET_UINT32_INDEX(PC)));
2945 7676 : END_CASE(JSOP_BINDNAME)
2946 :
2947 : BEGIN_CASE(JSOP_SETPROP)
2948 : {
2949 25480 : jsbytecode *next = &PC[JSOP_SETPROP_LENGTH];
2950 25480 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2951 25480 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2952 0 : return Compile_Error;
2953 : }
2954 25480 : END_CASE(JSOP_SETPROP)
2955 :
2956 : BEGIN_CASE(JSOP_SETNAME)
2957 : BEGIN_CASE(JSOP_SETMETHOD)
2958 : {
2959 10252 : jsbytecode *next = &PC[JSOP_SETNAME_LENGTH];
2960 10252 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2961 10252 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2962 0 : return Compile_Error;
2963 : }
2964 10252 : END_CASE(JSOP_SETNAME)
2965 :
2966 : BEGIN_CASE(JSOP_THROW)
2967 16842 : prepareStubCall(Uses(1));
2968 16842 : INLINE_STUBCALL(stubs::Throw, REJOIN_NONE);
2969 16842 : frame.pop();
2970 16842 : fallthrough = false;
2971 16842 : END_CASE(JSOP_THROW)
2972 :
2973 : BEGIN_CASE(JSOP_IN)
2974 : {
2975 18350 : jsop_in();
2976 : }
2977 18350 : END_CASE(JSOP_IN)
2978 :
2979 : BEGIN_CASE(JSOP_INSTANCEOF)
2980 2412 : if (!jsop_instanceof())
2981 0 : return Compile_Error;
2982 2412 : END_CASE(JSOP_INSTANCEOF)
2983 :
2984 : BEGIN_CASE(JSOP_EXCEPTION)
2985 : {
2986 19253 : prepareStubCall(Uses(0));
2987 19253 : INLINE_STUBCALL(stubs::Exception, REJOIN_FALLTHROUGH);
2988 19253 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
2989 : }
2990 19253 : END_CASE(JSOP_EXCEPTION)
2991 :
2992 : BEGIN_CASE(JSOP_LINENO)
2993 1983 : END_CASE(JSOP_LINENO)
2994 :
2995 : BEGIN_CASE(JSOP_ENUMELEM)
2996 : // Normally, SETELEM transforms the stack
2997 : // from: OBJ ID VALUE
2998 : // to: VALUE
2999 : //
3000 : // Here, the stack transition is
3001 : // from: VALUE OBJ ID
3002 : // to:
3003 : // So we make the stack look like a SETELEM, and re-use it.
3004 :
3005 : // Before: VALUE OBJ ID
3006 : // After: VALUE OBJ ID VALUE
3007 0 : frame.dupAt(-3);
3008 :
3009 : // Before: VALUE OBJ ID VALUE
3010 : // After: VALUE VALUE
3011 0 : if (!jsop_setelem(true))
3012 0 : return Compile_Error;
3013 :
3014 : // Before: VALUE VALUE
3015 : // After:
3016 0 : frame.popn(2);
3017 0 : END_CASE(JSOP_ENUMELEM)
3018 :
3019 : BEGIN_CASE(JSOP_CONDSWITCH)
3020 : /* No-op for the decompiler. */
3021 484 : END_CASE(JSOP_CONDSWITCH)
3022 :
3023 : BEGIN_CASE(JSOP_LABEL)
3024 108 : END_CASE(JSOP_LABEL)
3025 :
3026 : BEGIN_CASE(JSOP_DEFFUN)
3027 : {
3028 1507 : JSFunction *innerFun = script->getFunction(GET_UINT32_INDEX(PC));
3029 :
3030 1507 : prepareStubCall(Uses(0));
3031 1507 : masm.move(ImmPtr(innerFun), Registers::ArgReg1);
3032 1507 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun), REJOIN_FALLTHROUGH);
3033 : }
3034 1507 : END_CASE(JSOP_DEFFUN)
3035 :
3036 : BEGIN_CASE(JSOP_DEFVAR)
3037 : BEGIN_CASE(JSOP_DEFCONST)
3038 : {
3039 19257 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3040 :
3041 19257 : prepareStubCall(Uses(0));
3042 19257 : masm.move(ImmPtr(name), Registers::ArgReg1);
3043 19257 : INLINE_STUBCALL(stubs::DefVarOrConst, REJOIN_FALLTHROUGH);
3044 : }
3045 19257 : END_CASE(JSOP_DEFVAR)
3046 :
3047 : BEGIN_CASE(JSOP_SETCONST)
3048 : {
3049 16592 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3050 :
3051 16592 : prepareStubCall(Uses(1));
3052 16592 : masm.move(ImmPtr(name), Registers::ArgReg1);
3053 16592 : INLINE_STUBCALL(stubs::SetConst, REJOIN_FALLTHROUGH);
3054 : }
3055 16592 : END_CASE(JSOP_SETCONST)
3056 :
3057 : BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
3058 : {
3059 161 : uint32_t slot = GET_SLOTNO(PC);
3060 161 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC + SLOTNO_LEN));
3061 :
3062 : /* See JSOP_DEFLOCALFUN. */
3063 161 : markUndefinedLocal(PC - script->code, slot);
3064 :
3065 161 : prepareStubCall(Uses(frame.frameSlots()));
3066 161 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3067 161 : INLINE_STUBCALL(stubs::DefLocalFun_FC, REJOIN_DEFLOCALFUN);
3068 161 : frame.takeReg(Registers::ReturnReg);
3069 161 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3070 161 : frame.storeLocal(slot, true);
3071 161 : frame.pop();
3072 161 : updateVarType();
3073 : }
3074 161 : END_CASE(JSOP_DEFLOCALFUN_FC)
3075 :
3076 : BEGIN_CASE(JSOP_LAMBDA)
3077 : {
3078 44763 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC));
3079 :
3080 44763 : JSObjStubFun stub = stubs::Lambda;
3081 44763 : uint32_t uses = 0;
3082 :
3083 44763 : jsbytecode *pc2 = NULL;
3084 44763 : if (fun->joinable()) {
3085 33108 : pc2 = PC + JSOP_LAMBDA_LENGTH;
3086 33108 : JSOp next = JSOp(*pc2);
3087 :
3088 33108 : if (next == JSOP_INITMETHOD) {
3089 1455 : stub = stubs::LambdaJoinableForInit;
3090 31653 : } else if (next == JSOP_SETMETHOD) {
3091 2576 : stub = stubs::LambdaJoinableForSet;
3092 2576 : uses = 1;
3093 29077 : } else if (next == JSOP_CALL) {
3094 1618 : int iargc = GET_ARGC(pc2);
3095 1618 : if (iargc == 1 || iargc == 2) {
3096 1590 : stub = stubs::LambdaJoinableForCall;
3097 1590 : uses = frame.frameSlots();
3098 : }
3099 27459 : } else if (next == JSOP_NULL) {
3100 0 : pc2 += JSOP_NULL_LENGTH;
3101 0 : if (JSOp(*pc2) == JSOP_CALL && GET_ARGC(pc2) == 0)
3102 0 : stub = stubs::LambdaJoinableForNull;
3103 : }
3104 : }
3105 :
3106 44763 : prepareStubCall(Uses(uses));
3107 44763 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3108 :
3109 44763 : INLINE_STUBCALL(stub, REJOIN_PUSH_OBJECT);
3110 :
3111 44763 : frame.takeReg(Registers::ReturnReg);
3112 44763 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3113 : }
3114 44763 : END_CASE(JSOP_LAMBDA)
3115 :
3116 : BEGIN_CASE(JSOP_TRY)
3117 19253 : frame.syncAndForgetEverything();
3118 19253 : END_CASE(JSOP_TRY)
3119 :
3120 : BEGIN_CASE(JSOP_GETFCSLOT)
3121 : BEGIN_CASE(JSOP_CALLFCSLOT)
3122 : {
3123 2117 : unsigned index = GET_UINT16(PC);
3124 :
3125 : // Load the callee's payload into a register.
3126 2117 : frame.pushCallee();
3127 2117 : RegisterID reg = frame.copyDataIntoReg(frame.peek(-1));
3128 2117 : frame.pop();
3129 :
3130 : // obj->getFlatClosureUpvars()
3131 2117 : Address upvarAddress(reg, JSFunction::getFlatClosureUpvarsOffset());
3132 2117 : masm.loadPrivate(upvarAddress, reg);
3133 : // push ((Value *) reg)[index]
3134 :
3135 : BarrierState barrier = pushAddressMaybeBarrier(Address(reg, index * sizeof(Value)),
3136 2117 : knownPushedType(0), true);
3137 2117 : finishBarrier(barrier, REJOIN_GETTER, 0);
3138 : }
3139 2117 : END_CASE(JSOP_CALLFCSLOT)
3140 :
3141 : BEGIN_CASE(JSOP_DEFLOCALFUN)
3142 : {
3143 2453 : uint32_t slot = GET_SLOTNO(PC);
3144 2453 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC + SLOTNO_LEN));
3145 :
3146 : /*
3147 : * The liveness analysis will report that the value in |slot| is
3148 : * defined at the start of this opcode. However, we don't actually
3149 : * fill it in until the stub returns. This will cause a problem if
3150 : * we GC inside the stub. So we write a safe value here so that the
3151 : * GC won't crash.
3152 : */
3153 2453 : markUndefinedLocal(PC - script->code, slot);
3154 :
3155 2453 : prepareStubCall(Uses(0));
3156 2453 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3157 2453 : INLINE_STUBCALL(stubs::DefLocalFun, REJOIN_DEFLOCALFUN);
3158 2453 : frame.takeReg(Registers::ReturnReg);
3159 2453 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3160 2453 : frame.storeLocal(slot, true);
3161 2453 : frame.pop();
3162 2453 : updateVarType();
3163 : }
3164 2453 : END_CASE(JSOP_DEFLOCALFUN)
3165 :
3166 : BEGIN_CASE(JSOP_RETRVAL)
3167 1524 : emitReturn(NULL);
3168 1524 : fallthrough = false;
3169 1524 : END_CASE(JSOP_RETRVAL)
3170 :
3171 : BEGIN_CASE(JSOP_GETGNAME)
3172 : BEGIN_CASE(JSOP_CALLGNAME)
3173 : {
3174 491935 : uint32_t index = GET_UINT32_INDEX(PC);
3175 491935 : jsop_getgname(index);
3176 491935 : frame.extra(frame.peek(-1)).name = script->getName(index);
3177 : }
3178 491935 : END_CASE(JSOP_GETGNAME)
3179 :
3180 : BEGIN_CASE(JSOP_SETGNAME)
3181 : {
3182 72901 : jsbytecode *next = &PC[JSOP_SETGNAME_LENGTH];
3183 72901 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
3184 72901 : jsop_setgname(script->getName(GET_UINT32_INDEX(PC)), pop);
3185 : }
3186 72901 : END_CASE(JSOP_SETGNAME)
3187 :
3188 : BEGIN_CASE(JSOP_REGEXP)
3189 17804 : if (!jsop_regexp())
3190 0 : return Compile_Error;
3191 17804 : END_CASE(JSOP_REGEXP)
3192 :
3193 : BEGIN_CASE(JSOP_OBJECT)
3194 : {
3195 2700 : JSObject *object = script->getObject(GET_UINT32_INDEX(PC));
3196 2700 : RegisterID reg = frame.allocReg();
3197 2700 : masm.move(ImmPtr(object), reg);
3198 2700 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3199 : }
3200 2700 : END_CASE(JSOP_OBJECT)
3201 :
3202 : BEGIN_CASE(JSOP_UINT24)
3203 2472 : frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
3204 2472 : END_CASE(JSOP_UINT24)
3205 :
3206 : BEGIN_CASE(JSOP_STOP)
3207 90161 : if (script->pcCounters)
3208 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
3209 90161 : emitReturn(NULL);
3210 90161 : goto done;
3211 : END_CASE(JSOP_STOP)
3212 :
3213 : BEGIN_CASE(JSOP_GETXPROP)
3214 506 : if (!jsop_xname(script->getName(GET_UINT32_INDEX(PC))))
3215 0 : return Compile_Error;
3216 506 : END_CASE(JSOP_GETXPROP)
3217 :
3218 : BEGIN_CASE(JSOP_ENTERBLOCK)
3219 : BEGIN_CASE(JSOP_ENTERLET0)
3220 : BEGIN_CASE(JSOP_ENTERLET1)
3221 28566 : enterBlock(&script->getObject(GET_UINT32_INDEX(PC))->asStaticBlock());
3222 28566 : END_CASE(JSOP_ENTERBLOCK);
3223 :
3224 : BEGIN_CASE(JSOP_LEAVEBLOCK)
3225 28995 : leaveBlock();
3226 28995 : END_CASE(JSOP_LEAVEBLOCK)
3227 :
3228 : BEGIN_CASE(JSOP_INT8)
3229 165665 : frame.push(Value(Int32Value(GET_INT8(PC))));
3230 165665 : END_CASE(JSOP_INT8)
3231 :
3232 : BEGIN_CASE(JSOP_INT32)
3233 2486 : frame.push(Value(Int32Value(GET_INT32(PC))));
3234 2486 : END_CASE(JSOP_INT32)
3235 :
3236 : BEGIN_CASE(JSOP_HOLE)
3237 2188 : frame.push(MagicValue(JS_ARRAY_HOLE));
3238 2188 : END_CASE(JSOP_HOLE)
3239 :
3240 : BEGIN_CASE(JSOP_LAMBDA_FC)
3241 : {
3242 1265 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC));
3243 1265 : prepareStubCall(Uses(frame.frameSlots()));
3244 1265 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3245 1265 : INLINE_STUBCALL(stubs::FlatLambda, REJOIN_PUSH_OBJECT);
3246 1265 : frame.takeReg(Registers::ReturnReg);
3247 1265 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3248 : }
3249 1265 : END_CASE(JSOP_LAMBDA_FC)
3250 :
3251 : BEGIN_CASE(JSOP_LOOPHEAD)
3252 : {
3253 60367 : if (analysis->jumpTarget(PC)) {
3254 60307 : interruptCheckHelper();
3255 60307 : recompileCheckHelper();
3256 : }
3257 : }
3258 60367 : END_CASE(JSOP_LOOPHEAD)
3259 :
3260 : BEGIN_CASE(JSOP_LOOPENTRY)
3261 59981 : END_CASE(JSOP_LOOPENTRY)
3262 :
3263 : BEGIN_CASE(JSOP_DEBUGGER)
3264 : {
3265 2083 : prepareStubCall(Uses(0));
3266 2083 : masm.move(ImmPtr(PC), Registers::ArgReg1);
3267 2083 : INLINE_STUBCALL(stubs::DebuggerStatement, REJOIN_FALLTHROUGH);
3268 : }
3269 2083 : END_CASE(JSOP_DEBUGGER)
3270 :
3271 : default:
3272 0 : JS_NOT_REACHED("Opcode not implemented");
3273 : }
3274 :
3275 : /**********************
3276 : * END COMPILER OPS *
3277 : **********************/
3278 :
3279 8545165 : if (cx->typeInferenceEnabled() && PC == lastPC + GetBytecodeLength(lastPC)) {
3280 : /*
3281 : * Inform the frame of the type sets for values just pushed. Skip
3282 : * this if we did any opcode fusions, we don't keep track of the
3283 : * associated type sets in such cases.
3284 : */
3285 1995912 : unsigned nuses = GetUseCount(script, lastPC - script->code);
3286 1995912 : unsigned ndefs = GetDefCount(script, lastPC - script->code);
3287 3735770 : for (unsigned i = 0; i < ndefs; i++) {
3288 1739858 : FrameEntry *fe = frame.getStack(opinfo->stackDepth - nuses + i);
3289 1739858 : if (fe) {
3290 : /* fe may be NULL for conditionally pushed entries, e.g. JSOP_AND */
3291 1736405 : frame.extra(fe).types = analysis->pushedTypes(lastPC - script->code, i);
3292 : }
3293 : }
3294 : }
3295 :
3296 8545165 : if (script->pcCounters) {
3297 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3298 0 : bool typesUpdated = false;
3299 :
3300 : /* Update information about the type of value pushed by arithmetic ops. */
3301 0 : if ((js_CodeSpec[op].format & JOF_ARITH) && !arithUpdated) {
3302 0 : FrameEntry *pushed = NULL;
3303 0 : if (PC == lastPC + GetBytecodeLength(lastPC))
3304 0 : pushed = frame.peek(-1);
3305 0 : updateArithCounters(lastPC, pushed, arithFirstUseType, arithSecondUseType);
3306 0 : typesUpdated = true;
3307 : }
3308 :
3309 : /* Update information about the result type of access operations. */
3310 0 : if (OpcodeCounts::accessOp(op) &&
3311 : op != JSOP_SETPROP && op != JSOP_SETMETHOD && op != JSOP_SETELEM) {
3312 0 : FrameEntry *fe = (GetDefCount(script, lastPC - script->code) == 1)
3313 0 : ? frame.peek(-1)
3314 0 : : frame.peek(-2);
3315 0 : updatePCTypes(lastPC, fe);
3316 0 : typesUpdated = true;
3317 : }
3318 :
3319 0 : if (countersUpdated || typesUpdated || length != 0) {
3320 0 : if (!countersUpdated)
3321 0 : updatePCCounters(lastPC, &codeStart, &countersUpdated);
3322 :
3323 0 : if (pcLengths) {
3324 : /* Fill in the amount of inline code generated for the op. */
3325 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3326 0 : pcLengths[offset].codeLength += length;
3327 : }
3328 : }
3329 8545165 : } else if (pcLengths) {
3330 : /* Fill in the amount of inline code generated for the op. */
3331 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3332 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3333 0 : pcLengths[offset].codeLength += length;
3334 : }
3335 :
3336 8545165 : frame.assertValidRegisterState();
3337 : }
3338 :
3339 : done:
3340 131899 : return Compile_Okay;
3341 : }
3342 :
3343 : #undef END_CASE
3344 : #undef BEGIN_CASE
3345 :
/*
 * Emit instrumentation (separate from the op's own code) that bumps the
 * METHODJIT execution counter for the opcode at |pc| and folds the
 * accumulated inline-code and PIC code lengths into the op's totals.
 * On return |*start| is advanced past the instrumentation so length
 * accounting only measures the op itself, and |*updated| is set so the
 * caller does not instrument this op twice.
 */
3346 : void
3347 0 : mjit::Compiler::updatePCCounters(jsbytecode *pc, Label *start, bool *updated)
3348 : {
3349 0 : JS_ASSERT(script->pcCounters);
3350 :
3351 : /*
3352 : * Bump the METHODJIT count for the opcode, read the METHODJIT_CODE_LENGTH
3353 : * and METHODJIT_PICS_LENGTH counts, indicating the amounts of inline path
3354 : * code and generated code, respectively, and add them to the accumulated
3355 : * total for the op.
3356 : */
/* Offset of this pc in the flattened (inline-frame-aware) script view. */
3357 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + pc - script->code;
3358 :
3359 : /*
3360 : * Base register for addresses, we can't use AbsoluteAddress in all places.
3361 : * This may hold a live value, so write it out to the top of the stack
3362 : * first. This cannot overflow the stack, as space is always reserved for
3363 : * an extra callee frame.
3364 : */
3365 0 : RegisterID reg = Registers::ReturnReg;
3366 0 : masm.storePtr(reg, frame.addressOfTop());
3367 :
3368 0 : OpcodeCounts counts = script->getCounts(pc);
3369 :
/* counts.get(...) += pcLengths[offset].codeLength, via |reg| as scratch. */
3370 0 : double *code = &counts.get(OpcodeCounts::BASE_METHODJIT_CODE);
3371 0 : double *codeLength = &pcLengths[offset].codeLength;
3372 0 : masm.addCounter(codeLength, code, reg);
3373 :
3374 0 : double *pics = &counts.get(OpcodeCounts::BASE_METHODJIT_PICS);
3375 0 : double *picsLength = &pcLengths[offset].picsLength;
3376 0 : masm.addCounter(picsLength, pics, reg);
3377 :
3378 0 : double *counter = &counts.get(OpcodeCounts::BASE_METHODJIT);
3379 0 : masm.bumpCounter(counter, reg);
3380 :
3381 : /* Reload the base register's original value. */
3382 0 : masm.loadPtr(frame.addressOfTop(), reg);
3383 :
3384 : /* The start label should reflect the code for the op, not instrumentation. */
3385 0 : *start = masm.label();
3386 0 : *updated = true;
3387 0 : }
3388 :
/*
 * Whether the observed type set is narrow enough that a pushed value should
 * be counted as "dimorphic" rather than "polymorphic" (see updatePCTypes):
 * after ignoring object and string types, the remaining flags must be
 * exactly one of undefined, null or boolean. Unknown sets, and sets that
 * mix objects with strings, do not qualify.
 */
3389 : static inline bool
3390 0 : HasPayloadType(types::TypeSet *types)
3391 : {
3392 0 : if (types->unknown())
3393 0 : return false;
3394 :
3395 0 : types::TypeFlags flags = types->baseFlags();
/* "Objects present" covers both the any-object flag and concrete type objects. */
3396 0 : bool objects = !!(flags & types::TYPE_FLAG_ANYOBJECT) || !!types->getObjectCount();
3397 :
3398 0 : if (objects && !!(flags & types::TYPE_FLAG_STRING))
3399 0 : return false;
3400 :
/* Mask off the tolerated types; exactly one primitive flag may remain. */
3401 0 : flags = flags & ~(types::TYPE_FLAG_ANYOBJECT | types::TYPE_FLAG_STRING);
3402 :
3403 : return (flags == types::TYPE_FLAG_UNDEFINED)
3404 : || (flags == types::TYPE_FLAG_NULL)
3405 0 : || (flags == types::TYPE_FLAG_BOOLEAN);
3406 : }
3407 :
/*
 * Emit instrumentation classifying the type of the value |fe| pushed by the
 * access op at |pc|: bumps one of the monomorphic/dimorphic/polymorphic
 * counters, a per-type-tag counter (undefined/null/boolean/int32/double/
 * string/object), and — for JOF_TYPESET ops — a barrier/no-barrier counter.
 * The scratch register is pushed on entry and popped on exit, so frame
 * register state is preserved.
 */
3408 : void
3409 0 : mjit::Compiler::updatePCTypes(jsbytecode *pc, FrameEntry *fe)
3410 : {
3411 0 : JS_ASSERT(script->pcCounters);
3412 :
3413 : /*
3414 : * Get a temporary register, as for updatePCCounters. Don't overlap with
3415 : * the backing store for the entry's type tag, if there is one.
3416 : */
3417 0 : RegisterID reg = Registers::ReturnReg;
3418 0 : if (frame.peekTypeInRegister(fe) && reg == frame.tempRegForType(fe)) {
3419 : JS_STATIC_ASSERT(Registers::ReturnReg != Registers::ArgReg1);
3420 0 : reg = Registers::ArgReg1;
3421 : }
3422 0 : masm.push(reg);
3423 :
3424 0 : OpcodeCounts counts = script->getCounts(pc);
3425 :
3426 : /* Update the counters for pushed type tags and possible access types. */
3427 0 : if (fe->isTypeKnown()) {
/* Compile-time-known type: one monomorphic bump plus the matching tag counter. */
3428 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_MONOMORPHIC), reg);
3429 0 : OpcodeCounts::AccessCounts counter = OpcodeCounts::ACCESS_OBJECT;
3430 0 : switch (fe->getKnownType()) {
3431 0 : case JSVAL_TYPE_UNDEFINED: counter = OpcodeCounts::ACCESS_UNDEFINED; break;
3432 0 : case JSVAL_TYPE_NULL: counter = OpcodeCounts::ACCESS_NULL; break;
3433 0 : case JSVAL_TYPE_BOOLEAN: counter = OpcodeCounts::ACCESS_BOOLEAN; break;
3434 0 : case JSVAL_TYPE_INT32: counter = OpcodeCounts::ACCESS_INT32; break;
3435 0 : case JSVAL_TYPE_DOUBLE: counter = OpcodeCounts::ACCESS_DOUBLE; break;
3436 0 : case JSVAL_TYPE_STRING: counter = OpcodeCounts::ACCESS_STRING; break;
3437 0 : case JSVAL_TYPE_OBJECT: counter = OpcodeCounts::ACCESS_OBJECT; break;
3438 : default:;
3439 : }
/* NOTE(review): |counter| is pre-set to ACCESS_OBJECT, so this guard only
   skips the bump if that enum value happens to be 0 — confirm intended. */
3440 0 : if (counter)
3441 0 : masm.bumpCounter(&counts.get(counter), reg);
3442 : } else {
/* Type not known at compile time: classify from inferred type-set width. */
3443 0 : types::TypeSet *types = frame.extra(fe).types;
3444 0 : if (types && HasPayloadType(types))
3445 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_DIMORPHIC), reg);
3446 : else
3447 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_POLYMORPHIC), reg);
3448 :
/*
 * Runtime type dispatch: for each possible tag, test and conditionally bump.
 * |reg| doubles as scratch for bumpCounter, so the type tag is reloaded
 * before every subsequent test.
 */
3449 0 : frame.loadTypeIntoReg(fe, reg);
3450 :
3451 0 : Jump j = masm.testUndefined(Assembler::NotEqual, reg);
3452 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_UNDEFINED), reg);
3453 0 : frame.loadTypeIntoReg(fe, reg);
3454 0 : j.linkTo(masm.label(), &masm);
3455 :
3456 0 : j = masm.testNull(Assembler::NotEqual, reg);
3457 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_NULL), reg);
3458 0 : frame.loadTypeIntoReg(fe, reg);
3459 0 : j.linkTo(masm.label(), &masm);
3460 :
3461 0 : j = masm.testBoolean(Assembler::NotEqual, reg);
3462 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_BOOLEAN), reg);
3463 0 : frame.loadTypeIntoReg(fe, reg);
3464 0 : j.linkTo(masm.label(), &masm);
3465 :
3466 0 : j = masm.testInt32(Assembler::NotEqual, reg);
3467 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_INT32), reg);
3468 0 : frame.loadTypeIntoReg(fe, reg);
3469 0 : j.linkTo(masm.label(), &masm);
3470 :
3471 0 : j = masm.testDouble(Assembler::NotEqual, reg);
3472 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_DOUBLE), reg);
3473 0 : frame.loadTypeIntoReg(fe, reg);
3474 0 : j.linkTo(masm.label(), &masm);
3475 :
3476 0 : j = masm.testString(Assembler::NotEqual, reg);
3477 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_STRING), reg);
3478 0 : frame.loadTypeIntoReg(fe, reg);
3479 0 : j.linkTo(masm.label(), &masm);
3480 :
3481 0 : j = masm.testObject(Assembler::NotEqual, reg);
3482 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_OBJECT), reg);
3483 0 : frame.loadTypeIntoReg(fe, reg);
3484 0 : j.linkTo(masm.label(), &masm);
3485 : }
3486 :
3487 : /* Update the counter for accesses with type barriers. */
3488 0 : if (js_CodeSpec[*pc].format & JOF_TYPESET) {
3489 0 : double *counter = &counts.get(hasTypeBarriers(pc)
3490 : ? OpcodeCounts::ACCESS_BARRIER
3491 0 : : OpcodeCounts::ACCESS_NOBARRIER);
3492 0 : masm.bumpCounter(counter, reg);
3493 : }
3494 :
3495 : /* Reload the base register's original value. */
3496 0 : masm.pop(reg);
3497 0 : }
3498 :
// Emit code that bumps the OpcodeCounts arith counter best describing the
// operand types of the arithmetic op at |pc|.  |fe| is the pushed result
// entry (may be NULL).  ReturnReg is pushed/popped around the counter store
// so live register state is untouched.
3499 : void
3500 0 : mjit::Compiler::updateArithCounters(jsbytecode *pc, FrameEntry *fe,
3501 : JSValueType firstUseType, JSValueType secondUseType)
3502 : {
3503 0 : JS_ASSERT(script->pcCounters);
3504 :
3505 0 : RegisterID reg = Registers::ReturnReg;
3506 0 : masm.push(reg); // reg serves as scratch for bumpCounter below
3507 :
3508 : /*
3509 : * What counter we bump for arithmetic expressions depend on the
3510 : * known types of its operands.
3511 : *
3512 : * ARITH_INT: operands are known ints, result is int
3513 : * ARITH_OVERFLOW: operands are known ints, result is double
3514 : * ARITH_DOUBLE: either operand is a known double, result is double
3515 : * ARITH_OTHER: operands are monomorphic but not int or double
3516 : * ARITH_UNKNOWN: operands are polymorphic
3517 : */
3518 :
// NOTE(review): ARITH_OVERFLOW is listed above but never selected here —
// presumably bumped on the overflow path elsewhere; confirm.
3519 : OpcodeCounts::ArithCounts counter;
3520 0 : if (firstUseType == JSVAL_TYPE_INT32 && secondUseType == JSVAL_TYPE_INT32 &&
3521 0 : (!fe || fe->isNotType(JSVAL_TYPE_DOUBLE))) {
3522 0 : counter = OpcodeCounts::ARITH_INT;
3523 0 : } else if (firstUseType == JSVAL_TYPE_INT32 || firstUseType == JSVAL_TYPE_DOUBLE ||
3524 : secondUseType == JSVAL_TYPE_INT32 || secondUseType == JSVAL_TYPE_DOUBLE) {
3525 0 : counter = OpcodeCounts::ARITH_DOUBLE;
3526 0 : } else if (firstUseType != JSVAL_TYPE_UNKNOWN && secondUseType != JSVAL_TYPE_UNKNOWN &&
3527 0 : (!fe || fe->isTypeKnown())) {
3528 0 : counter = OpcodeCounts::ARITH_OTHER;
3529 : } else {
3530 0 : counter = OpcodeCounts::ARITH_UNKNOWN;
3531 : }
3532 :
3533 0 : masm.bumpCounter(&script->getCounts(pc).get(counter), reg);
3534 0 : masm.pop(reg); // restore caller's ReturnReg value
3535 0 : }
3536 :
// Emit code that bumps two element-access counters for the op at |pc|:
// one classifying the id operand (int/double/other/unknown) and one
// classifying the object operand (typed array / packed / dense / other).
3537 : void
3538 0 : mjit::Compiler::updateElemCounters(jsbytecode *pc, FrameEntry *obj, FrameEntry *id)
3539 : {
3540 0 : JS_ASSERT(script->pcCounters);
3541 :
3542 0 : RegisterID reg = Registers::ReturnReg;
3543 0 : masm.push(reg); // reg serves as scratch for bumpCounter; restored below
3544 :
3545 0 : OpcodeCounts counts = script->getCounts(pc);
3546 :
// Classify the index operand by its statically-known type.
3547 : OpcodeCounts::ElementCounts counter;
3548 0 : if (id->isTypeKnown()) {
3549 0 : switch (id->getKnownType()) {
3550 0 : case JSVAL_TYPE_INT32: counter = OpcodeCounts::ELEM_ID_INT; break;
3551 0 : case JSVAL_TYPE_DOUBLE: counter = OpcodeCounts::ELEM_ID_DOUBLE; break;
3552 0 : default: counter = OpcodeCounts::ELEM_ID_OTHER; break;
3553 : }
3554 : } else {
3555 0 : counter = OpcodeCounts::ELEM_ID_UNKNOWN;
3556 : }
3557 0 : masm.bumpCounter(&counts.get(counter), reg);
3558 :
// Classify the base object using inferred type-set flags, from most to
// least specific: typed array, then packed/dense array, then other.
3559 0 : if (obj->mightBeType(JSVAL_TYPE_OBJECT)) {
3560 0 : types::TypeSet *types = frame.extra(obj).types;
3561 0 : if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY) &&
3562 0 : types->getTypedArrayType(cx) != TypedArray::TYPE_MAX) {
3563 0 : counter = OpcodeCounts::ELEM_OBJECT_TYPED;
3564 0 : } else if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
3565 0 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY))
3566 0 : counter = OpcodeCounts::ELEM_OBJECT_PACKED;
3567 : else
3568 0 : counter = OpcodeCounts::ELEM_OBJECT_DENSE;
3569 : } else {
3570 0 : counter = OpcodeCounts::ELEM_OBJECT_OTHER;
3571 : }
3572 0 : masm.bumpCounter(&counts.get(counter), reg);
3573 : } else {
// Base can't be an object at all: still recorded as "other".
3574 0 : masm.bumpCounter(&counts.get(OpcodeCounts::ELEM_OBJECT_OTHER), reg);
3575 : }
3576 :
3577 0 : masm.pop(reg);
3578 0 : }
3579 :
// Emit code bumping a single property-access counter for the op at |pc|.
// |counter| is an OpcodeCounts index chosen by the caller.
3580 : void
3581 0 : mjit::Compiler::bumpPropCounter(jsbytecode *pc, int counter)
3582 : {
3583 : /* Don't accumulate counts for property ops fused with other ops. */
3584 0 : if (!(js_CodeSpec[*pc].format & JOF_PROP))
3585 0 : return;
3586 0 : RegisterID reg = Registers::ReturnReg;
3587 0 : masm.push(reg); // preserve ReturnReg; used as scratch by bumpCounter
3588 0 : masm.bumpCounter(&script->getCounts(pc).get(counter), reg);
3589 0 : masm.pop(reg);
3590 : }
3591 :
// Return the already-recorded assembler label for |pc| in the frame
// identified by |inlineIndex| (UINT32_MAX selects the outermost frame).
// The label must have been set in that frame's jumpMap.
3592 : JSC::MacroAssembler::Label
3593 403365 : mjit::Compiler::labelOf(jsbytecode *pc, uint32_t inlineIndex)
3594 : {
3595 403365 : ActiveFrame *a = (inlineIndex == UINT32_MAX) ? outer : inlineFrames[inlineIndex];
3596 403365 : JS_ASSERT(uint32_t(pc - a->script->code) < a->script->length);
3597 :
3598 403365 : uint32_t offs = uint32_t(pc - a->script->code);
3599 403365 : JS_ASSERT(a->jumpMap[offs].isSet());
3600 403365 : return a->jumpMap[offs];
3601 : }
3602 :
// A jump target is "known" iff it precedes the op currently being compiled
// (a backward jump), since its label has already been emitted.
3603 : bool
3604 204473 : mjit::Compiler::knownJump(jsbytecode *pc)
3605 : {
3606 204473 : return pc < PC;
3607 : }
3608 :
// Link jump |j| to bytecode target |pc| inside the current script.
// Backward targets are linked immediately via the jumpMap; forward targets
// are queued as a BranchPatch to be resolved once the target is emitted.
// Returns false only on a failed branchPatches append (OOM).
3609 : bool
3610 288981 : mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
3611 : {
3612 288981 : JS_ASSERT(pc >= script->code && uint32_t(pc - script->code) < script->length);
3613 :
3614 288981 : if (pc < PC) {
3615 90087 : j.linkTo(a->jumpMap[uint32_t(pc - script->code)], &masm);
3616 90087 : return true;
3617 : }
3618 198894 : return branchPatches.append(BranchPatch(j, pc, a->inlineIndex));
3619 : }
3620 :
// Emit the epilogue jump: load the return address (ncode) saved in the
// StackFrame and jump to it, returning control to the caller's code.
3621 : void
3622 236743 : mjit::Compiler::emitFinalReturn(Assembler &masm)
3623 : {
3624 236743 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg);
3625 236743 : masm.jump(Registers::ReturnReg);
3626 236743 : }
3627 :
3628 : // Emits code to load a return value of the frame into the scripted-ABI
3629 : // type & data register pair. If the return value is in fp->rval, then |fe|
3630 : // is NULL. Otherwise, |fe| contains the return value.
3631 : //
3632 : // If reading from fp->rval, |undefined| is loaded optimistically, before
3633 : // checking if fp->rval is set in the frame flags and loading that instead.
3634 : //
3635 : // Otherwise, if |masm| is the inline path, it is loaded as efficiently as
3636 : // the FrameState can manage. If |masm| is the OOL path, the value is simply
3637 : // loaded from its slot in the frame, since the caller has guaranteed it's
3638 : // been synced.
3639 : //
3640 : void
3641 235181 : mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
3642 : {
3643 235181 : RegisterID typeReg = JSReturnReg_Type;
3644 235181 : RegisterID dataReg = JSReturnReg_Data;
3645 :
3646 235181 : if (fe) {
3647 : // If using the OOL assembler, the caller signifies that the |fe| is
3648 : // synced, but not to rely on its register state.
3649 44031 : if (masm != &this->masm) {
3650 21652 : if (fe->isConstant()) {
3651 5808 : stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
3652 : } else {
// Non-constant: read the synced value from its frame slot.
3653 15844 : Address rval(frame.addressOf(fe));
3654 15844 : if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE)) {
// Known (non-double) type: load payload only, materialize the
// type tag as an immediate.
3655 3283 : stubcc.masm.loadPayload(rval, dataReg);
3656 3283 : stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
3657 : } else {
3658 12561 : stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
3659 : }
3660 : }
3661 : } else {
// Inline path: let the FrameState pick the cheapest load.
3662 22379 : frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
3663 : }
3664 : } else {
3665 : // Load a return value from POPV or SETRVAL into the return registers,
3666 : // otherwise return undefined.
3667 191150 : masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
3668 191150 : if (analysis->usesReturnValue()) {
// fp->rval is only valid when HAS_RVAL is set; skip the load
// (keeping the undefined loaded above) when the flag is clear.
3669 : Jump rvalClear = masm->branchTest32(Assembler::Zero,
3670 : FrameFlagsAddress(),
3671 137355 : Imm32(StackFrame::HAS_RVAL));
3672 137355 : Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
3673 137355 : masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
3674 137355 : rvalClear.linkTo(masm->label(), masm);
3675 : }
3676 : }
3677 235181 : }
3678 :
3679 : // This ensures that constructor return values are an object. If a non-object
3680 : // is returned, either explicitly or implicitly, the newly created object is
3681 : // loaded out of the frame. Otherwise, the explicitly returned object is kept.
3682 : //
3683 : void
3684 3413 : mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
3685 : {
3686 3413 : JS_ASSERT(isConstructing);
3687 :
3688 3413 : bool ool = (masm != &this->masm); // true when emitting the out-of-line path
3689 3413 : Address thisv(JSFrameReg, StackFrame::offsetOfThis(script->function()));
3690 :
3691 : // We can just load |thisv| if either of the following is true:
3692 : // (1) There is no explicit return value, AND fp->rval is not used.
3693 : // (2) There is an explicit return value, and it's known to be primitive.
3694 3575 : if ((!fe && !analysis->usesReturnValue()) ||
3695 162 : (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
3696 : {
3697 1562 : if (ool)
3698 758 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3699 : else
3700 804 : frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
3701 1562 : return;
3702 : }
3703 :
3704 : // If the type is known to be an object, just load the return value as normal.
3705 1851 : if (fe && fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
3706 6 : loadReturnValue(masm, fe);
3707 6 : return;
3708 : }
3709 :
3710 : // There's a return value, and its type is unknown. Test the type and load
3711 : // |thisv| if necessary. Sync the 'this' entry before doing so, as it may
3712 : // be stored in registers if we constructed it inline.
3713 1845 : frame.syncThis();
3714 1845 : loadReturnValue(masm, fe);
3715 1845 : Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
3716 1845 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3717 1845 : j.linkTo(masm->label(), masm); // object return keeps its own value
3718 : }
3719 :
3720 : // Loads the return value into the scripted ABI register pair, such that JS
3721 : // semantics in constructors are preserved.
3722 : //
// Dispatch: constructors must coerce primitive returns to the new object
// (fixPrimitiveReturn); ordinary calls load the return value directly.
3723 : void
3724 236743 : mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
3725 : {
3726 236743 : if (isConstructing)
3727 3413 : fixPrimitiveReturn(masm, fe);
3728 : else
3729 233330 : loadReturnValue(masm, fe);
3730 236743 : }
3731 :
// Hand the return value of an inlined frame back to the caller frame.
// Depending on the ActiveFrame's needs, the value is either written to the
// caller's synced slot, propagated as a copy of an outer entry, or left in
// a register (a->returnRegister, flagged by a->returnSet).
3732 : void
3733 2552 : mjit::Compiler::emitInlineReturnValue(FrameEntry *fe)
3734 : {
3735 2552 : JS_ASSERT(!isConstructing && a->needReturnValue);
3736 :
3737 2552 : if (a->syncReturnValue) {
3738 : /* Needed return value with unknown type, the caller's entry is synced. */
3739 209 : Address address = frame.addressForInlineReturn();
3740 209 : if (fe)
3741 209 : frame.storeTo(fe, address);
3742 : else
3743 0 : masm.storeValue(UndefinedValue(), address); // implicit |return undefined|
3744 209 : return;
3745 : }
3746 :
3747 : /*
3748 : * For inlined functions that simply return an entry present in the outer
3749 : * script (e.g. a loop invariant term), mark the copy and propagate it
3750 : * after popping the frame.
3751 : */
3752 2343 : if (!a->exitState && fe && fe->isCopy() && frame.isOuterSlot(fe->backing())) {
3753 18 : a->returnEntry = fe->backing();
3754 18 : return;
3755 : }
3756 :
3757 2325 : if (a->returnValueDouble) {
// Caller expects a double: ensure it and place it in an FP register
// (the caller's pre-chosen register when returnSet is already true).
3758 60 : JS_ASSERT(fe);
3759 60 : frame.ensureDouble(fe);
3760 : Registers mask(a->returnSet
3761 1 : ? Registers::maskReg(a->returnRegister)
3762 61 : : Registers::AvailFPRegs);
3763 : FPRegisterID fpreg;
3764 60 : if (!fe->isConstant()) {
3765 54 : fpreg = frame.tempRegInMaskForData(fe, mask.freeMask).fpreg();
3766 54 : frame.syncAndForgetFe(fe, true);
3767 54 : frame.takeReg(fpreg);
3768 : } else {
3769 6 : fpreg = frame.allocReg(mask.freeMask).fpreg();
3770 6 : masm.slowLoadConstantDouble(fe->getValue().toDouble(), fpreg);
3771 : }
3772 60 : JS_ASSERT_IF(a->returnSet, fpreg == a->returnRegister.fpreg());
3773 60 : a->returnRegister = fpreg;
3774 : } else {
// General-purpose register return: same pattern as the double case.
3775 : Registers mask(a->returnSet
3776 298 : ? Registers::maskReg(a->returnRegister)
3777 2563 : : Registers::AvailRegs);
3778 : RegisterID reg;
3779 2265 : if (fe && !fe->isConstant()) {
3780 2150 : reg = frame.tempRegInMaskForData(fe, mask.freeMask).reg();
3781 2150 : frame.syncAndForgetFe(fe, true);
3782 2150 : frame.takeReg(reg);
3783 : } else {
3784 115 : reg = frame.allocReg(mask.freeMask).reg();
3785 115 : Value val = fe ? fe->getValue() : UndefinedValue();
3786 115 : masm.loadValuePayload(val, reg);
3787 : }
3788 2265 : JS_ASSERT_IF(a->returnSet, reg == a->returnRegister.reg());
3789 2265 : a->returnRegister = reg;
3790 : }
3791 :
3792 2325 : a->returnSet = true;
3793 2325 : if (a->exitState)
// Keep the register allocator from clobbering the return register
// when restoring the caller's state.
3794 524 : a->exitState->setUnassigned(a->returnRegister);
3795 : }
3796 :
// Compile JSOP_RETURN / JSOP_STOP.  |fe| is the explicit return value on
// top of the stack, or NULL for an implicit |undefined| return.  Handles
// debug-mode epilogues, inlined-frame returns, call/args object teardown,
// loading the value into the scripted-ABI registers, and the final jump.
3797 : void
3798 147457 : mjit::Compiler::emitReturn(FrameEntry *fe)
3799 : {
3800 147457 : JS_ASSERT_IF(!script->function(), JSOp(*PC) == JSOP_STOP);
3801 :
3802 : /* Only the top of the stack can be returned. */
3803 147457 : JS_ASSERT_IF(fe, fe == frame.peek(-1));
3804 :
3805 147457 : if (debugMode() || Probes::callTrackingActive(cx)) {
3806 : /* If the return value isn't in the frame's rval slot, move it there. */
3807 79337 : if (fe) {
3808 30773 : frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
3809 :
3810 : /* Set the frame flag indicating it's there. */
3811 30773 : RegisterID reg = frame.allocReg();
3812 30773 : masm.load32(FrameFlagsAddress(), reg);
3813 30773 : masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
3814 30773 : masm.store32(reg, FrameFlagsAddress());
3815 30773 : frame.freeReg(reg);
3816 :
3817 : /* Use the frame's return value when generating further code. */
3818 30773 : fe = NULL;
3819 : }
3820 :
3821 79337 : prepareStubCall(Uses(0));
3822 79337 : INLINE_STUBCALL(stubs::ScriptDebugEpilogue, REJOIN_RESUME);
3823 : }
3824 :
3825 147457 : if (a != outer) {
3826 : /*
3827 : * Returning from an inlined script. The checks we do for inlineability
3828 : * and recompilation triggered by args object construction ensure that
3829 : * there can't be an arguments or call object.
3830 : */
3831 :
3832 3153 : if (a->needReturnValue)
3833 2552 : emitInlineReturnValue(fe);
3834 :
3835 3153 : if (a->exitState) {
3836 : /*
3837 : * Restore the register state to reflect that at the original call,
3838 : * modulo entries which will be popped once the call finishes and any
3839 : * entry which will be clobbered by the return value register.
3840 : */
3841 770 : frame.syncForAllocation(a->exitState, true, Uses(0));
3842 : }
3843 :
3844 : /*
3845 : * Simple tests to see if we are at the end of the script and will
3846 : * fallthrough after the script body finishes, thus won't need to jump.
3847 : */
3848 : bool endOfScript =
3849 : (JSOp(*PC) == JSOP_STOP) ||
3850 : (JSOp(*PC) == JSOP_RETURN &&
3851 2596 : (JSOp(PC[JSOP_RETURN_LENGTH]) == JSOP_STOP &&
3852 5749 : !analysis->maybeCode(PC + JSOP_RETURN_LENGTH)));
3853 3153 : if (!endOfScript)
3854 269 : a->returnJumps->append(masm.jump());
3855 :
3856 3153 : if (a->returnSet)
// The return register was reserved in emitInlineReturnValue;
// release it now that the inline return is fully emitted.
3857 2325 : frame.freeReg(a->returnRegister);
3858 3153 : return;
3859 : }
3860 :
3861 : /*
3862 : * Outside the mjit, activation objects (call objects and arguments objects) are put
3863 : * by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
3864 : * popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
3865 : * responsible for pushing/popping the initial frame. However, an mjit function
3866 : * epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
3867 : * puts activation objects. And furthermore, if the last mjit frame throws, the mjit
3868 : * does *not* put the activation objects. So we can't assume any particular state of
3869 : * puttedness upon exit from the mjit.
3870 : *
3871 : * To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
3872 : * entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
3873 : * been put.
3874 : */
3875 144304 : if (script->function()) {
3876 98383 : types::TypeScriptNesting *nesting = script->nesting();
3877 98383 : if (script->function()->isHeavyweight() || (nesting && nesting->children)) {
// Heavyweight (or parent-of-nested) functions always need the
// full epilogue stub call.
3878 5944 : prepareStubCall(Uses(fe ? 1 : 0));
3879 5944 : INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
3880 : } else {
3881 : /* if (hasCallObj() || hasArgsObj()) */
3882 : Jump putObjs = masm.branchTest32(Assembler::NonZero,
3883 92439 : Address(JSFrameReg, StackFrame::offsetOfFlags()),
3884 184878 : Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ));
3885 92439 : stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
3886 :
3887 92439 : stubcc.leave();
3888 92439 : OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
3889 :
// The OOL path must emit its own return sequence since it does
// not rejoin the inline path below.
3890 92439 : emitReturnValue(&stubcc.masm, fe);
3891 92439 : emitFinalReturn(stubcc.masm);
3892 :
3893 : /*
3894 : * Do frame count balancing inline for inner functions in a nesting
3895 : * with no children of their own.
3896 : */
3897 92439 : if (nesting)
3898 2582 : masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
3899 : }
3900 : }
3901 :
3902 144304 : emitReturnValue(&masm, fe);
3903 144304 : emitFinalReturn(masm);
3904 :
3905 : /*
3906 : * After we've placed the call object, all tracked state can be
3907 : * thrown away. This will happen anyway because the next live opcode (if
3908 : * any) must have an incoming edge. It's an optimization to throw it away
3909 : * early - the tracker won't be spilled on further exits or join points.
3910 : */
3911 144304 : frame.discardFrame();
3912 : }
3913 :
// Sync the FrameState to memory and kill temporary registers so a stub
// call can safely run; |uses| marks stack entries consumed by the stub.
3914 : void
3915 1067910 : mjit::Compiler::prepareStubCall(Uses uses)
3916 : {
3917 1067910 : JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
3918 1067910 : frame.syncAndKill(Registers(Registers::TempAnyRegs), uses);
3919 1067910 : JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
3920 1067910 : }
3921 :
// Emit a fallible VM call to stub function |ptr|, bumping the per-pc stub
// counter first.  |pinline| (may be NULL) receives a patchable pointer used
// for inline-frame tracking.  Returns the Call for later patching.
3922 : JSC::MacroAssembler::Call
3923 1102306 : mjit::Compiler::emitStubCall(void *ptr, DataLabelPtr *pinline)
3924 : {
3925 1102306 : JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
3926 :
3927 1102306 : masm.bumpStubCounter(script, PC, Registers::tempCallReg());
3928 :
3929 1102306 : Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
3930 2204612 : ptr, outerPC(), pinline, frame.totalDepth());
3931 1102306 : JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
3932 : return cl;
3933 : }
3934 :
// Emit an interrupt check: test the runtime's interrupt flag and, when set,
// take an out-of-line path that syncs the frame and calls stubs::Interrupt.
3935 : void
3936 354514 : mjit::Compiler::interruptCheckHelper()
3937 : {
3938 354514 : Jump jump;
3939 354514 : if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
3940 : /* For barrier verification, always take the interrupt so we can verify. */
3941 372 : jump = masm.jump();
3942 : } else {
3943 354142 : void *interrupt = (void*) &cx->runtime->interrupt;
3944 : #if defined(JS_CPU_X86) || defined(JS_CPU_ARM) || defined(JS_CPU_MIPS)
3945 354142 : jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
3946 : #else
3947 : /* Handle processors that can't load from absolute addresses. */
3948 : RegisterID reg = frame.allocReg();
3949 : masm.move(ImmPtr(interrupt), reg);
3950 : jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
3951 : frame.freeReg(reg);
3952 : #endif
3953 : }
3954 :
3955 354514 : stubcc.linkExitDirect(jump, stubcc.masm.label());
3956 :
// OOL path: sync the frame in the stub compiler (the inline frame state
// is untouched), pass the current PC, call the stub, and rejoin.
3957 354514 : frame.sync(stubcc.masm, Uses(0));
3958 354514 : stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
3959 354514 : OOL_STUBCALL(stubs::Interrupt, REJOIN_RESUME);
3960 354514 : stubcc.rejoin(Changes(0));
3961 354514 : }
3962 :
// Emit a use-count increment and a threshold check that triggers
// recompilation (to enable inlining) once the script becomes hot.
// Skipped when already inlining, debugging, or inference is off.
3963 : void
3964 188199 : mjit::Compiler::recompileCheckHelper()
3965 : {
3966 227046 : if (inlining() || debugMode() || !globalObj ||
3967 38847 : !analysis->hasFunctionCalls() || !cx->typeInferenceEnabled()) {
3968 185456 : return;
3969 : }
3970 :
3971 2743 : size_t *addr = script->addressOfUseCount();
3972 2743 : masm.add32(Imm32(1), AbsoluteAddress(addr));
3973 : #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
3974 : Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, AbsoluteAddress(addr),
3975 2743 : Imm32(USES_BEFORE_INLINING));
3976 : #else
3977 : /* Handle processors that can't load from absolute addresses. */
3978 : RegisterID reg = frame.allocReg();
3979 : masm.move(ImmPtr(addr), reg);
3980 : Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, Address(reg, 0),
3981 : Imm32(USES_BEFORE_INLINING));
3982 : frame.freeReg(reg);
3983 : #endif
3984 2743 : stubcc.linkExit(jump, Uses(0));
3985 2743 : stubcc.leave();
3986 :
3987 2743 : OOL_STUBCALL(stubs::RecompileForInline, REJOIN_RESUME);
3988 2743 : stubcc.rejoin(Changes(0));
3989 : }
3990 :
// Record the current masm offset as a scripted-call return site and
// reload JSFrameReg from the callee frame's prev pointer, restoring the
// caller frame after the call returns.
3991 : void
3992 291921 : mjit::Compiler::addReturnSite()
3993 : {
3994 291921 : InternalCallSite site(masm.distanceOf(masm.label()), a->inlineIndex, PC,
3995 583842 : REJOIN_SCRIPTED, false);
3996 291921 : addCallSite(site);
3997 291921 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg);
3998 291921 : }
3999 :
// Emit a call without a call IC: invoke the UncachedNew/UncachedCall stub,
// jump into the compiled callee when the stub returns its entry (non-NULL
// in ReturnReg), and fall back to the OOL path otherwise.  Pushes the call
// result with a type barrier.
4000 : void
4001 144212 : mjit::Compiler::emitUncachedCall(uint32_t argc, bool callingNew)
4002 : {
4003 144212 : CallPatchInfo callPatch;
4004 :
4005 144212 : RegisterID r0 = Registers::ReturnReg;
4006 144212 : VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
4007 :
4008 144212 : frame.syncAndKill(Uses(argc + 2)); // callee + this + args
4009 144212 : prepareStubCall(Uses(argc + 2));
4010 144212 : masm.move(Imm32(argc), Registers::ArgReg1);
4011 144212 : INLINE_STUBCALL(stub, REJOIN_CALL_PROLOGUE);
4012 :
// Stub returns NULL in r0 when the callee has no mjit code.
4013 144212 : Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
4014 :
4015 144212 : masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
4016 144212 : callPatch.hasFastNcode = true;
4017 : callPatch.fastNcodePatch =
// Return address is unknown until finishThisUp; store a patchable NULL.
4018 : masm.storePtrWithPatch(ImmPtr(NULL),
4019 144212 : Address(JSFrameReg, StackFrame::offsetOfNcode()));
4020 :
4021 144212 : masm.jump(r0);
4022 144212 : callPatch.joinPoint = masm.label();
4023 144212 : addReturnSite();
4024 :
4025 144212 : frame.popn(argc + 2);
4026 :
4027 144212 : frame.takeReg(JSReturnReg_Type);
4028 144212 : frame.takeReg(JSReturnReg_Data);
4029 144212 : frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, knownPushedType(0));
4030 :
4031 : BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
4032 : /* testUndefined = */ false,
4033 144212 : /* testReturn = */ true);
4034 :
4035 144212 : stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
4036 144212 : stubcc.rejoin(Changes(1));
4037 144212 : callPatches.append(callPatch);
4038 :
4039 144212 : finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
4040 144212 : }
4041 :
// True when the call op at |pc| is a JSOP_FUNCALL with at least one arg or
// a JSOP_FUNAPPLY with exactly two args — the shapes the call IC can lower
// into a direct call on 'this'.  Always false without JS_MONOIC.
4042 : static bool
4043 295519 : IsLowerableFunCallOrApply(jsbytecode *pc)
4044 : {
4045 : #ifdef JS_MONOIC
4046 9184 : return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
4047 304703 : (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
4048 : #else
4049 : return false;
4050 : #endif
4051 : }
4052 :
// Guard the speculation that a lowered f.call()/f.apply() really targets
// js_fun_call/js_fun_apply.  On guard failure, emit an OOL uncached slow
// call; |*uncachedCallSlowRejoin| is the jump the caller links to the IC's
// join point.  See inlineCallHelper for how the results are consumed.
4053 : void
4054 3102 : mjit::Compiler::checkCallApplySpeculation(uint32_t callImmArgc, uint32_t speculatedArgc,
4055 : FrameEntry *origCallee, FrameEntry *origThis,
4056 : MaybeRegisterID origCalleeType, RegisterID origCalleeData,
4057 : MaybeRegisterID origThisType, RegisterID origThisData,
4058 : Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
4059 : {
4060 3102 : JS_ASSERT(IsLowerableFunCallOrApply(PC));
4061 :
// Pick a scratch register that doesn't alias any of the pinned
// callee/this registers.
4062 : RegisterID temp;
4063 3102 : Registers tempRegs(Registers::AvailRegs);
4064 3102 : if (origCalleeType.isSet())
4065 1359 : tempRegs.takeReg(origCalleeType.reg());
4066 3102 : tempRegs.takeReg(origCalleeData);
4067 3102 : if (origThisType.isSet())
4068 1356 : tempRegs.takeReg(origThisType.reg());
4069 3102 : tempRegs.takeReg(origThisData);
4070 3102 : temp = tempRegs.takeAnyReg().reg();
4071 :
4072 : /*
4073 : * if (origCallee.isObject() &&
4074 : * origCallee.toObject().isFunction &&
4075 : * origCallee.toObject().toFunction() == js_fun_{call,apply})
4076 : */
4077 3102 : MaybeJump isObj;
4078 3102 : if (origCalleeType.isSet())
4079 1359 : isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
4080 3102 : Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData, temp);
4081 3102 : Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
4082 : Jump isNative = masm.branchPtr(Assembler::NotEqual,
4083 3102 : Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
4084 6204 : ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
4085 :
4086 : /*
4087 : * If speculation fails, we can't use the ic, since it is compiled on the
4088 : * assumption that speculation succeeds. Instead, just do an uncached call.
4089 : */
4090 : {
4091 3102 : if (isObj.isSet())
4092 1359 : stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
4093 3102 : stubcc.linkExitDirect(isFun, stubcc.masm.label());
4094 3102 : stubcc.linkExitDirect(isNative, stubcc.masm.label());
4095 :
4096 : int32_t frameDepthAdjust;
4097 3102 : if (applyTricks == LazyArgsObj) {
// The lazy args object was never materialized; create it now and
// account for the extra stack slot it occupies.
4098 252 : OOL_STUBCALL(stubs::Arguments, REJOIN_RESUME);
4099 252 : frameDepthAdjust = +1;
4100 : } else {
4101 2850 : frameDepthAdjust = 0;
4102 : }
4103 :
4104 3102 : stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
4105 3102 : JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
4106 3102 : OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::SlowCall),
4107 3102 : REJOIN_FALLTHROUGH, frame.totalDepth() + frameDepthAdjust);
4108 3102 : JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
4109 :
4110 : /*
4111 : * inlineCallHelper will link uncachedCallSlowRejoin to the join point
4112 : * at the end of the ic. At that join point, the return value of the
4113 : * call is assumed to be in registers, so load them before jumping.
4114 : */
4115 3102 : JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
4116 3102 : Address rval = frame.addressOf(origCallee); /* vp[0] == rval */
4117 3102 : if (knownPushedType(0) == JSVAL_TYPE_DOUBLE)
4118 36 : stubcc.masm.ensureInMemoryDouble(rval);
4119 3102 : stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
4120 3102 : *uncachedCallSlowRejoin = stubcc.masm.jump();
4121 3102 : JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
4122 : }
4123 :
4124 : /*
4125 : * For simplicity, we don't statically specialize calls to
4126 : * ic::SplatApplyArgs based on applyTricks. Rather, this state is
4127 : * communicated dynamically through the VMFrame.
4128 : */
4129 3102 : if (*PC == JSOP_FUNAPPLY) {
4130 : masm.store32(Imm32(applyTricks == LazyArgsObj),
4131 1666 : FrameAddress(VMFrame::offsetOfLazyArgsObj()));
4132 : }
4133 3102 : }
4134 :
4135 : /* This predicate must be called before the current op mutates the FrameState. */
// Decide whether the JSOP_ARGUMENTS at PC can skip materializing the args
// object because the very next op is a lowerable f.apply(…, arguments).
// Requires a straight-line fallthrough (no jump target at the apply), no
// debug mode, the outermost frame, and the apply within this chunk.
4136 : bool
4137 2898 : mjit::Compiler::canUseApplyTricks()
4138 : {
4139 2898 : JS_ASSERT(*PC == JSOP_ARGUMENTS);
4140 2898 : jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
4141 : return *nextpc == JSOP_FUNAPPLY &&
4142 496 : IsLowerableFunCallOrApply(nextpc) &&
4143 496 : !analysis->jumpTarget(nextpc) &&
4144 489 : !debugMode() &&
4145 252 : !a->parent &&
4146 4631 : bytecodeInChunk(nextpc);
4147 : }
4148 :
4149 : /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
4150 : bool
4151 292224 : mjit::Compiler::inlineCallHelper(uint32_t callImmArgc, bool callingNew, FrameSize &callFrameSize)
4152 : {
4153 : int32_t speculatedArgc;
4154 292224 : if (applyTricks == LazyArgsObj) {
4155 252 : frame.pop();
4156 252 : speculatedArgc = 1;
4157 : } else {
4158 : /*
4159 : * Check for interrupts on function call. We don't do this for lazy
4160 : * arguments objects as the interrupt may kick this frame into the
4161 : * interpreter, which doesn't know about the apply tricks. Instead, we
4162 : * do the interrupt check at the start of the JSOP_ARGUMENTS.
4163 : */
4164 291972 : interruptCheckHelper();
4165 :
4166 291972 : speculatedArgc = callImmArgc;
4167 : }
4168 :
4169 292224 : FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
4170 292224 : FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));
4171 :
4172 : /*
4173 : * 'this' does not need to be synced for constructing. :FIXME: is it
4174 : * possible that one of the arguments is directly copying the 'this'
4175 : * entry (something like 'new x.f(x)')?
4176 : */
4177 292224 : if (callingNew) {
4178 21167 : frame.discardFe(origThis);
4179 :
4180 : /*
4181 : * If inference is enabled, the 'this' value of the pushed frame always
4182 : * needs to be coherent. If a GC gets triggered before the callee can
4183 : * fill in the slot (i.e. the GC happens on constructing the 'new'
4184 : * object or the call object for a heavyweight callee), it needs to be
4185 : * able to read the 'this' value to tell whether newScript constraints
4186 : * will need to be regenerated afterwards.
4187 : */
4188 21167 : if (cx->typeInferenceEnabled())
4189 8778 : masm.storeValue(NullValue(), frame.addressOf(origThis));
4190 : }
4191 :
4192 292224 : if (!cx->typeInferenceEnabled()) {
4193 172437 : CompileStatus status = callArrayBuiltin(callImmArgc, callingNew);
4194 172437 : if (status != Compile_InlineAbort)
4195 303 : return (status == Compile_Okay);
4196 : }
4197 :
4198 : /*
4199 : * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
4200 : * going to call js_fun_{call,apply}. Normally, this call would go through
4201 : * js::Invoke to ultimately call 'this'. We can do much better by having
4202 : * the callIC cache and call 'this' directly. However, if it turns out that
4203 : * we are not actually calling js_fun_call, the callIC must act as normal.
4204 : *
4205 : * Note: do *NOT* use type information or inline state in any way when
4206 : * deciding whether to lower a CALL or APPLY. The stub calls here store
4207 : * their return values in a different slot, so when recompiling we need
4208 : * to go down the exact same path.
4209 : */
4210 291921 : bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
4211 :
4212 291921 : bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
4213 :
4214 : #ifdef JS_MONOIC
4215 291921 : if (debugMode() || newType) {
4216 : #endif
4217 144212 : if (applyTricks == LazyArgsObj) {
4218 : /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
4219 0 : jsop_arguments(REJOIN_RESUME);
4220 0 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4221 : }
4222 144212 : emitUncachedCall(callImmArgc, callingNew);
4223 144212 : applyTricks = NoApplyTricks;
4224 144212 : return true;
4225 : #ifdef JS_MONOIC
4226 : }
4227 :
4228 147709 : frame.forgetMismatchedObject(origCallee);
4229 147709 : if (lowerFunCallOrApply)
4230 3102 : frame.forgetMismatchedObject(origThis);
4231 :
4232 : /* Initialized by both branches below. */
4233 147709 : CallGenInfo callIC;
4234 147709 : CallPatchInfo callPatch;
4235 147709 : MaybeRegisterID icCalleeType; /* type to test for function-ness */
4236 : RegisterID icCalleeData; /* data to call */
4237 147709 : Address icRvalAddr; /* return slot on slow-path rejoin */
4238 :
4239 : /*
4240 : * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
4241 : * following labels (as used in finishThisUp):
4242 : * - funGuard -> hotJump
4243 : * - funGuard -> joinPoint
4244 : * - funGuard -> hotPathLabel
4245 : * - slowPathStart -> oolCall
4246 : * - slowPathStart -> oolJump
4247 : * - slowPathStart -> icCall
4248 : * - slowPathStart -> slowJoinPoint
4249 : * Because the call ICs are fairly long (compared to PICs), we don't reserve the space in each
4250 : * path until the first usage of funGuard (for the in-line path) or slowPathStart (for the
4251 : * out-of-line path).
4252 : */
4253 :
4254 : /* Initialized only on lowerFunCallOrApply branch. */
4255 147709 : Jump uncachedCallSlowRejoin;
4256 147709 : CallPatchInfo uncachedCallPatch;
4257 :
4258 : {
4259 147709 : MaybeRegisterID origCalleeType, maybeOrigCalleeData;
4260 : RegisterID origCalleeData;
4261 :
4262 : /* Get the callee in registers. */
4263 147709 : frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
4264 147709 : origCalleeData = maybeOrigCalleeData.reg();
4265 295418 : PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);
4266 :
4267 147709 : if (lowerFunCallOrApply) {
4268 3102 : MaybeRegisterID origThisType, maybeOrigThisData;
4269 : RegisterID origThisData;
4270 : {
4271 : /* Get thisv in registers. */
4272 3102 : frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
4273 3102 : origThisData = maybeOrigThisData.reg();
4274 6204 : PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);
4275 :
4276 : /* Leaves pinned regs untouched. */
4277 3102 : frame.syncAndKill(Uses(speculatedArgc + 2));
4278 : }
4279 :
4280 : checkCallApplySpeculation(callImmArgc, speculatedArgc,
4281 : origCallee, origThis,
4282 : origCalleeType, origCalleeData,
4283 : origThisType, origThisData,
4284 3102 : &uncachedCallSlowRejoin, &uncachedCallPatch);
4285 :
4286 3102 : icCalleeType = origThisType;
4287 3102 : icCalleeData = origThisData;
4288 3102 : icRvalAddr = frame.addressOf(origThis);
4289 :
4290 : /*
4291 : * For f.call(), since we compile the ic under the (checked)
4292 : * assumption that call == js_fun_call, we still have a static
4293 : * frame size. For f.apply(), the frame size depends on the dynamic
4294 : * length of the array passed to apply.
4295 : */
4296 3102 : if (*PC == JSOP_FUNCALL)
4297 1436 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc - 1);
4298 : else
4299 1666 : callIC.frameSize.initDynamic();
4300 : } else {
4301 : /* Leaves pinned regs untouched. */
4302 144607 : frame.syncAndKill(Uses(speculatedArgc + 2));
4303 :
4304 144607 : icCalleeType = origCalleeType;
4305 144607 : icCalleeData = origCalleeData;
4306 144607 : icRvalAddr = frame.addressOf(origCallee);
4307 144607 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc);
4308 : }
4309 : }
4310 :
4311 147709 : callFrameSize = callIC.frameSize;
4312 :
4313 147709 : callIC.typeMonitored = monitored(PC) || hasTypeBarriers(PC);
4314 :
4315 : /* Test the type if necessary. Failing this always takes a really slow path. */
4316 147709 : MaybeJump notObjectJump;
4317 147709 : if (icCalleeType.isSet())
4318 111846 : notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
4319 :
4320 : /*
4321 : * For an optimized apply, keep icCalleeData in a callee-saved register for
4322 : * the subsequent ic::SplatApplyArgs call.
4323 : */
4324 147709 : Registers tempRegs(Registers::AvailRegs);
4325 147709 : if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
4326 1362 : RegisterID x = tempRegs.takeAnyReg(Registers::SavedRegs).reg();
4327 1362 : masm.move(icCalleeData, x);
4328 1362 : icCalleeData = x;
4329 : } else {
4330 146347 : tempRegs.takeReg(icCalleeData);
4331 : }
4332 :
4333 : /* Reserve space just before initialization of funGuard. */
4334 : RESERVE_IC_SPACE(masm);
4335 :
4336 : /*
4337 : * Guard on the callee identity. This misses on the first run. If the
4338 : * callee is scripted, compiled/compilable, and argc == nargs, then this
4339 : * guard is patched, and the compiled code address is baked in.
4340 : */
4341 147709 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
4342 147709 : callIC.funJump = j;
4343 :
4344 : /* Reserve space just before initialization of slowPathStart. */
4345 : RESERVE_OOL_SPACE(stubcc.masm);
4346 :
4347 147709 : Jump rejoin1, rejoin2;
4348 : {
4349 : RESERVE_OOL_SPACE(stubcc.masm);
4350 147709 : stubcc.linkExitDirect(j, stubcc.masm.label());
4351 147709 : callIC.slowPathStart = stubcc.masm.label();
4352 :
4353 147709 : RegisterID tmp = tempRegs.takeAnyReg().reg();
4354 :
4355 : /*
4356 : * Test if the callee is even a function. If this doesn't match, we
4357 : * take a _really_ slow path later.
4358 : */
4359 147709 : Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData, tmp);
4360 :
4361 : /* Test if the function is scripted. */
4362 147709 : stubcc.masm.load16(Address(icCalleeData, offsetof(JSFunction, flags)), tmp);
4363 147709 : stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
4364 147709 : Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
4365 147709 : tempRegs.putReg(tmp);
4366 :
4367 : /*
4368 : * N.B. After this call, the frame will have a dynamic frame size.
4369 : * Check after the function is known not to be a native so that the
4370 : * catch-all/native path has a static depth.
4371 : */
4372 147709 : if (callIC.frameSize.isDynamic())
4373 1666 : OOL_STUBCALL(ic::SplatApplyArgs, REJOIN_CALL_SPLAT);
4374 :
4375 : /*
4376 : * No-op jump that gets patched by ic::New/Call to the stub generated
4377 : * by generateFullCallStub.
4378 : */
4379 147709 : Jump toPatch = stubcc.masm.jump();
4380 147709 : toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
4381 147709 : callIC.oolJump = toPatch;
4382 147709 : callIC.icCall = stubcc.masm.label();
4383 :
4384 147709 : RejoinState rejoinState = callIC.frameSize.rejoinState(PC, false);
4385 :
4386 : /*
4387 : * At this point the function is definitely scripted, so we try to
4388 : * compile it and patch either funGuard/funJump or oolJump. This code
4389 : * is only executed once.
4390 : */
4391 147709 : callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4392 147709 : void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
4393 147709 : if (callIC.frameSize.isStatic()) {
4394 146043 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, frame.totalDepth());
4395 : } else {
4396 1666 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, -1);
4397 : }
4398 :
4399 147709 : callIC.funObjReg = icCalleeData;
4400 :
4401 : /*
4402 : * The IC call either returns NULL, meaning call completed, or a
4403 : * function pointer to jump to.
4404 : */
4405 : rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4406 147709 : Registers::ReturnReg);
4407 147709 : if (callIC.frameSize.isStatic())
4408 146043 : stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
4409 : else
4410 1666 : stubcc.masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
4411 147709 : stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
4412 147709 : callPatch.hasSlowNcode = true;
4413 : callPatch.slowNcodePatch =
4414 : stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
4415 147709 : Address(JSFrameReg, StackFrame::offsetOfNcode()));
4416 147709 : stubcc.masm.jump(Registers::ReturnReg);
4417 :
4418 :
4419 :
4420 : /*
4421 : * This ool path is the catch-all for everything but scripted function
4422 : * callees. For native functions, ic::NativeNew/NativeCall will repatch
4423              :      * funGuard/funJump with a fast call stub. All other cases
4424 : * (non-function callable objects and invalid callees) take the slow
4425 : * path through js::Invoke.
4426 : */
4427 147709 : if (notObjectJump.isSet())
4428 111846 : stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
4429 147709 : notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
4430 147709 : isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
4431 :
4432 147709 : callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4433 147709 : OOL_STUBCALL(callingNew ? ic::NativeNew : ic::NativeCall, rejoinState);
4434 :
4435 147709 : rejoin2 = stubcc.masm.jump();
4436 : }
4437 :
4438 : /*
4439 : * If the call site goes to a closure over the same function, it will
4440 : * generate an out-of-line stub that joins back here.
4441 : */
4442 147709 : callIC.hotPathLabel = masm.label();
4443 :
4444 147709 : uint32_t flags = 0;
4445 147709 : if (callingNew)
4446 10692 : flags |= StackFrame::CONSTRUCTING;
4447 :
4448 147709 : InlineFrameAssembler inlFrame(masm, callIC, flags);
4449 147709 : callPatch.hasFastNcode = true;
4450 147709 : callPatch.fastNcodePatch = inlFrame.assemble(NULL, PC);
4451 :
4452 147709 : callIC.hotJump = masm.jump();
4453 147709 : callIC.joinPoint = callPatch.joinPoint = masm.label();
4454 147709 : callIC.callIndex = callSites.length();
4455 147709 : addReturnSite();
4456 147709 : if (lowerFunCallOrApply)
4457 3102 : uncachedCallPatch.joinPoint = callIC.joinPoint;
4458 :
4459 : /*
4460 : * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
4461 : * in the in-line path so we can check the IC space now.
4462 : */
4463 : CHECK_IC_SPACE();
4464 :
4465 147709 : JSValueType type = knownPushedType(0);
4466 :
4467 147709 : frame.popn(speculatedArgc + 2);
4468 147709 : frame.takeReg(JSReturnReg_Type);
4469 147709 : frame.takeReg(JSReturnReg_Data);
4470 147709 : frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, type);
4471 :
4472 : BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
4473 : /* testUndefined = */ false,
4474 147709 : /* testReturn = */ true);
4475 :
4476 : /*
4477 : * Now that the frame state is set, generate the rejoin path. Note that, if
4478 : * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
4479 : * value has been placed at vp[1] which is not the stack address associated
4480 : * with frame.peek(-1).
4481 : */
4482 147709 : callIC.slowJoinPoint = stubcc.masm.label();
4483 147709 : rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4484 147709 : rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4485 147709 : JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
4486 147709 : frame.reloadEntry(stubcc.masm, icRvalAddr, frame.peek(-1));
4487 147709 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
4488 147709 : JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
4489 :
4490 : CHECK_OOL_SPACE();
4491 :
4492 147709 : if (lowerFunCallOrApply)
4493 3102 : stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
4494 :
4495 147709 : callICs.append(callIC);
4496 147709 : callPatches.append(callPatch);
4497 147709 : if (lowerFunCallOrApply)
4498 3102 : callPatches.append(uncachedCallPatch);
4499 :
4500 147709 : finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
4501 :
4502 147709 : applyTricks = NoApplyTricks;
4503 147709 : return true;
4504 : #endif
4505 : }
4506 :
4507 : CompileStatus
4508 172437 : mjit::Compiler::callArrayBuiltin(uint32_t argc, bool callingNew)
4509 : {
4510 172437 : if (!globalObj)
4511 121310 : return Compile_InlineAbort;
4512 :
4513 51127 : if (applyTricks == LazyArgsObj)
4514 34 : return Compile_InlineAbort;
4515 :
4516 51093 : FrameEntry *origCallee = frame.peek(-((int)argc + 2));
4517 51093 : if (origCallee->isNotType(JSVAL_TYPE_OBJECT))
4518 10 : return Compile_InlineAbort;
4519 :
4520 51083 : if (frame.extra(origCallee).name != cx->runtime->atomState.classAtoms[JSProto_Array])
4521 50740 : return Compile_InlineAbort;
4522 :
4523 : JSObject *arrayObj;
4524 343 : if (!js_GetClassObject(cx, globalObj, JSProto_Array, &arrayObj))
4525 0 : return Compile_Error;
4526 :
4527 343 : JSObject *arrayProto = globalObj->global().getOrCreateArrayPrototype(cx);
4528 343 : if (!arrayProto)
4529 0 : return Compile_Error;
4530 :
4531 343 : if (argc > 1)
4532 38 : return Compile_InlineAbort;
4533 305 : FrameEntry *origArg = (argc == 1) ? frame.peek(-1) : NULL;
4534 305 : if (origArg) {
4535 174 : if (origArg->isNotType(JSVAL_TYPE_INT32))
4536 2 : return Compile_InlineAbort;
4537 172 : if (origArg->isConstant() && origArg->getValue().toInt32() < 0)
4538 0 : return Compile_InlineAbort;
4539 : }
4540 :
4541 303 : if (!origCallee->isTypeKnown()) {
4542 303 : Jump notObject = frame.testObject(Assembler::NotEqual, origCallee);
4543 303 : stubcc.linkExit(notObject, Uses(argc + 2));
4544 : }
4545 :
4546 303 : RegisterID reg = frame.tempRegForData(origCallee);
4547 303 : Jump notArray = masm.branchPtr(Assembler::NotEqual, reg, ImmPtr(arrayObj));
4548 303 : stubcc.linkExit(notArray, Uses(argc + 2));
4549 :
4550 303 : int32_t knownSize = 0;
4551 303 : MaybeRegisterID sizeReg;
4552 303 : if (origArg) {
4553 172 : if (origArg->isConstant()) {
4554 103 : knownSize = origArg->getValue().toInt32();
4555 : } else {
4556 69 : if (!origArg->isTypeKnown()) {
4557 67 : Jump notInt = frame.testInt32(Assembler::NotEqual, origArg);
4558 67 : stubcc.linkExit(notInt, Uses(argc + 2));
4559 : }
4560 69 : sizeReg = frame.tempRegForData(origArg);
4561 69 : Jump belowZero = masm.branch32(Assembler::LessThan, sizeReg.reg(), Imm32(0));
4562 69 : stubcc.linkExit(belowZero, Uses(argc + 2));
4563 : }
4564 : } else {
4565 131 : knownSize = 0;
4566 : }
4567 :
4568 303 : stubcc.leave();
4569 303 : stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
4570 303 : OOL_STUBCALL(callingNew ? stubs::SlowNew : stubs::SlowCall, REJOIN_FALLTHROUGH);
4571 :
4572 : {
4573 606 : PinRegAcrossSyncAndKill p1(frame, sizeReg);
4574 303 : frame.popn(argc + 2);
4575 303 : frame.syncAndKill(Uses(0));
4576 : }
4577 :
4578 303 : prepareStubCall(Uses(0));
4579 303 : masm.storePtr(ImmPtr(arrayProto), FrameAddress(offsetof(VMFrame, scratch)));
4580 303 : if (sizeReg.isSet())
4581 69 : masm.move(sizeReg.reg(), Registers::ArgReg1);
4582 : else
4583 234 : masm.move(Imm32(knownSize), Registers::ArgReg1);
4584 303 : INLINE_STUBCALL(stubs::NewDenseUnallocatedArray, REJOIN_PUSH_OBJECT);
4585 :
4586 303 : frame.takeReg(Registers::ReturnReg);
4587 303 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
4588 303 : frame.forgetType(frame.peek(-1));
4589 :
4590 303 : stubcc.rejoin(Changes(1));
4591 :
4592 303 : return Compile_Okay;
4593 : }
4594 :
4595 : /* Maximum number of calls we will inline at the same site. */
4596 : static const uint32_t INLINE_SITE_LIMIT = 5;
4597 :
/*
 * Compile the call at the current PC by inlining the body of every possible
 * scripted callee (as determined by the SSA analysis) directly into this
 * script's code, instead of emitting a call IC.
 *
 * 'argc' is the static argument count. 'callingNew' is not consulted in this
 * path (no use in the visible body -- inlined sites appear to be plain calls).
 *
 * Returns Compile_InlineAbort when no inline frames exist at this site,
 * Compile_Retry when a callee turned out to be uncompileable (it is marked
 * uninlineable so recompilation takes the normal path), Compile_Error /
 * other failure codes on error, and Compile_Okay on success.
 */
CompileStatus
mjit::Compiler::inlineScriptedFunction(uint32_t argc, bool callingNew)
{
    JS_ASSERT(inlining());

    /* We already know which frames we are inlining at each PC, so scan the list of inline frames. */
    bool calleeMultipleReturns = false;
    Vector<JSScript *> inlineCallees(CompilerAllocPolicy(cx, *this));
    for (unsigned i = 0; i < ssa.numFrames(); i++) {
        if (ssa.iterFrame(i).parent == a->inlineIndex && ssa.iterFrame(i).parentpc == PC) {
            JSScript *script = ssa.iterFrame(i).script;
            inlineCallees.append(script);
            if (script->analysis()->numReturnSites() > 1)
                calleeMultipleReturns = true;
        }
    }

    if (inlineCallees.empty())
        return Compile_InlineAbort;

    JS_ASSERT(!monitored(PC));

    /*
     * Remove all dead entries from the frame's tracker. We will not recognize
     * them as dead after pushing the new frame.
     */
    frame.pruneDeadEntries();

    RegisterAllocation *exitState = NULL;
    if (inlineCallees.length() > 1 || calleeMultipleReturns) {
        /*
         * Multiple paths through the callees, get a register allocation for
         * the various incoming edges.
         */
        exitState = frame.computeAllocation(PC + JSOP_CALL_LENGTH);
    }

    /*
     * If this is a polymorphic callsite, get a register for the callee too.
     * After this, do not touch the register state in the current frame until
     * stubs for all callees have been generated.
     */
    FrameEntry *origCallee = frame.peek(-((int)argc + 2));
    FrameEntry *entrySnapshot = NULL;
    MaybeRegisterID calleeReg;
    if (inlineCallees.length() > 1) {
        frame.forgetMismatchedObject(origCallee);
        calleeReg = frame.tempRegForData(origCallee);

        entrySnapshot = frame.snapshotState();
        if (!entrySnapshot)
            return Compile_Error;
    }
    MaybeJump calleePrevious;

    JSValueType returnType = knownPushedType(0);

    /* The return value is dead if the next op immediately pops it. */
    bool needReturnValue = JSOP_POP != (JSOp)*(PC + JSOP_CALL_LENGTH);
    bool syncReturnValue = needReturnValue && returnType == JSVAL_TYPE_UNKNOWN;

    /* Track register state after the call. */
    bool returnSet = false;
    AnyRegisterID returnRegister;
    const FrameEntry *returnEntry = NULL;

    Vector<Jump, 4, CompilerAllocPolicy> returnJumps(CompilerAllocPolicy(cx, *this));

    for (unsigned i = 0; i < inlineCallees.length(); i++) {
        /* Polymorphic sites: restore the entry state before each callee. */
        if (entrySnapshot)
            frame.restoreFromSnapshot(entrySnapshot);

        JSScript *script = inlineCallees[i];
        CompileStatus status;

        status = pushActiveFrame(script, argc);
        if (status != Compile_Okay)
            return status;

        a->exitState = exitState;

        JaegerSpew(JSpew_Inlining, "inlining call to script (file \"%s\") (line \"%d\")\n",
                   script->filename, script->lineno);

        /* Link the identity guard from the previous callee to start here. */
        if (calleePrevious.isSet()) {
            calleePrevious.get().linkTo(masm.label(), &masm);
            calleePrevious = MaybeJump();
        }

        if (i + 1 != inlineCallees.length()) {
            /* Guard on the callee, except when this object must be the callee. */
            JS_ASSERT(calleeReg.isSet());
            calleePrevious = masm.branchPtr(Assembler::NotEqual, calleeReg.reg(), ImmPtr(script->function()));
        }

        a->returnJumps = &returnJumps;
        a->needReturnValue = needReturnValue;
        a->syncReturnValue = syncReturnValue;
        a->returnValueDouble = returnType == JSVAL_TYPE_DOUBLE;
        if (returnSet) {
            /* Later callees must deliver their return value in the same register. */
            a->returnSet = true;
            a->returnRegister = returnRegister;
        }

        /*
         * Update the argument frame entries in place if the callee has had an
         * argument inferred as double but we are passing an int.
         */
        ensureDoubleArguments();

        markUndefinedLocals();

        status = generateMethod();
        if (status != Compile_Okay) {
            popActiveFrame();
            if (status == Compile_Abort) {
                /* The callee is uncompileable, mark it as uninlineable and retry. */
                script->uninlineable = true;
                types::MarkTypeObjectFlags(cx, script->function(),
                                           types::OBJECT_FLAG_UNINLINEABLE);
                return Compile_Retry;
            }
            return status;
        }

        /* Remember where the first completed callee left its return value. */
        if (needReturnValue && !returnSet) {
            if (a->returnSet) {
                returnSet = true;
                returnRegister = a->returnRegister;
            } else {
                returnEntry = a->returnEntry;
            }
        }

        popActiveFrame();

        if (i + 1 != inlineCallees.length())
            returnJumps.append(masm.jump());
    }

    /* All per-callee exits rejoin here. */
    for (unsigned i = 0; i < returnJumps.length(); i++)
        returnJumps[i].linkTo(masm.label(), &masm);

    frame.popn(argc + 2);

    if (entrySnapshot)
        cx->array_delete(entrySnapshot);

    if (exitState)
        frame.discardForJoin(exitState, analysis->getCode(PC).stackDepth - (argc + 2));

    /* Push the return value: from a register, a copied entry, or memory. */
    if (returnSet) {
        frame.takeReg(returnRegister);
        if (returnRegister.isReg())
            frame.pushTypedPayload(returnType, returnRegister.reg());
        else
            frame.pushDouble(returnRegister.fpreg());
    } else if (returnEntry) {
        frame.pushCopyOf((FrameEntry *) returnEntry);
    } else {
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
    }

    JaegerSpew(JSpew_Inlining, "finished inlining call to script (file \"%s\") (line \"%d\")\n",
               script->filename, script->lineno);

    return Compile_Okay;
}
4765 :
4766 : /*
4767 : * This function must be called immediately after any instruction which could
4768 : * cause a new StackFrame to be pushed and could lead to a new debug trap
4769 : * being set. This includes any API callbacks and any scripted or native call.
4770 : */
void
mjit::Compiler::addCallSite(const InternalCallSite &site)
{
    /* Append the site to the list recorded for this compilation. */
    callSites.append(site);
}
4776 :
/*
 * Emit an in-line call to a VM stub and record an InternalCallSite for it,
 * so the frame can be reconstructed (per 'rejoin') if the stub triggers
 * recompilation. 'uses' describes the stack entries the stub consumes.
 */
void
mjit::Compiler::inlineStubCall(void *stub, RejoinState rejoin, Uses uses)
{
    DataLabelPtr inlinePatch;
    Call cl = emitStubCall(stub, &inlinePatch);
    InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
                          rejoin, false);
    site.inlinePatch = inlinePatch;
    /* While generating loop invariants, track this call for the invariant code. */
    if (loop && loop->generatingInvariants()) {
        Jump j = masm.jump();
        Label l = masm.label();
        loop->addInvariantCall(j, l, false, false, callSites.length(), uses);
    }
    addCallSite(site);
}
4792 :
4793 : bool
4794 249 : mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
4795 : {
4796 249 : JS_ASSERT(lhs.isPrimitive());
4797 249 : JS_ASSERT(rhs.isPrimitive());
4798 :
4799 249 : if (lhs.isString() && rhs.isString()) {
4800 : int32_t cmp;
4801 90 : CompareStrings(cx, lhs.toString(), rhs.toString(), &cmp);
4802 90 : switch (op) {
4803 : case JSOP_LT:
4804 0 : return cmp < 0;
4805 : case JSOP_LE:
4806 0 : return cmp <= 0;
4807 : case JSOP_GT:
4808 0 : return cmp > 0;
4809 : case JSOP_GE:
4810 0 : return cmp >= 0;
4811 : case JSOP_EQ:
4812 59 : return cmp == 0;
4813 : case JSOP_NE:
4814 31 : return cmp != 0;
4815 : default:
4816 0 : JS_NOT_REACHED("NYI");
4817 : }
4818 : } else {
4819 : double ld, rd;
4820 :
4821 : /* These should be infallible w/ primitives. */
4822 159 : JS_ALWAYS_TRUE(ToNumber(cx, lhs, &ld));
4823 159 : JS_ALWAYS_TRUE(ToNumber(cx, rhs, &rd));
4824 159 : switch(op) {
4825 : case JSOP_LT:
4826 36 : return ld < rd;
4827 : case JSOP_LE:
4828 15 : return ld <= rd;
4829 : case JSOP_GT:
4830 44 : return ld > rd;
4831 : case JSOP_GE:
4832 12 : return ld >= rd;
4833 : case JSOP_EQ: /* fall through */
4834 : case JSOP_NE:
4835 : /* Special case null/undefined/void comparisons. */
4836 52 : if (lhs.isNullOrUndefined()) {
4837 4 : if (rhs.isNullOrUndefined())
4838 0 : return op == JSOP_EQ;
4839 4 : return op == JSOP_NE;
4840 : }
4841 48 : if (rhs.isNullOrUndefined())
4842 16 : return op == JSOP_NE;
4843 :
4844 : /* Normal return. */
4845 32 : return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
4846 : default:
4847 0 : JS_NOT_REACHED("NYI");
4848 : }
4849 : }
4850 :
4851 : JS_NOT_REACHED("NYI");
4852 : return false;
4853 : }
4854 :
4855 : bool
4856 266 : mjit::Compiler::constantFoldBranch(jsbytecode *target, bool taken)
4857 : {
4858 266 : if (taken) {
4859 171 : if (!frame.syncForBranch(target, Uses(0)))
4860 0 : return false;
4861 171 : Jump j = masm.jump();
4862 171 : if (!jumpAndRun(j, target))
4863 0 : return false;
4864 : } else {
4865 : /*
4866 : * Branch is never taken, but clean up any loop
4867 : * if this is a backedge.
4868 : */
4869 95 : if (target < PC && !finishLoop(target))
4870 0 : return false;
4871 : }
4872 266 : return true;
4873 : }
4874 :
4875 : bool
4876 3046 : mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
4877 : {
4878 3046 : if (target)
4879 803 : frame.syncAndKillEverything();
4880 : else
4881 2243 : frame.syncAndKill(Uses(2));
4882 :
4883 3046 : prepareStubCall(Uses(2));
4884 3046 : INLINE_STUBCALL(stub, target ? REJOIN_BRANCH : REJOIN_PUSH_BOOLEAN);
4885 3046 : frame.popn(2);
4886 :
4887 3046 : if (!target) {
4888 2243 : frame.takeReg(Registers::ReturnReg);
4889 2243 : frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
4890 2243 : return true;
4891 : }
4892 :
4893 803 : JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
4894 : Jump j = masm.branchTest32(GetStubCompareCondition(fused), Registers::ReturnReg,
4895 803 : Registers::ReturnReg);
4896 803 : return jumpAndRun(j, target);
4897 : }
4898 :
/*
 * Emit an unoptimized property set: call the generic (strict-aware) SetName
 * stub with 'name' passed in ArgReg1.
 */
void
mjit::Compiler::jsop_setprop_slow(PropertyName *name)
{
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(name), Registers::ArgReg1);
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
    JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
    /* shimmy(1): drop the entry beneath the top (the base object), leaving the rhs. */
    frame.shimmy(1);
    if (script->pcCounters)
        bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
}
4910 :
/*
 * Emit an unoptimized property get via the generic GetProp stub. When
 * 'forPrototype' is set (fetching 'prototype' to compute a 'this' object),
 * the no-cache stub variant and a dedicated rejoin state are used.
 */
void
mjit::Compiler::jsop_getprop_slow(PropertyName *name, bool forPrototype)
{
    /* See ::jsop_getprop */
    RejoinState rejoin = forPrototype ? REJOIN_THIS_PROTOTYPE : REJOIN_GETTER;

    prepareStubCall(Uses(1));
    masm.move(ImmPtr(name), Registers::ArgReg1);
    INLINE_STUBCALL(forPrototype ? stubs::GetPropNoCache : stubs::GetProp, rejoin);

    /* Prototype fetches are internal and don't need a pushed-type check. */
    if (!forPrototype)
        testPushedType(rejoin, -1, /* ool = */ false);

    frame.pop();
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);

    if (script->pcCounters)
        bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
}
4930 :
4931 : #ifdef JS_MONOIC
/* Pass the (patched-in later) address of a global-name IC in ArgReg1 for an OOL stub call. */
void
mjit::Compiler::passMICAddress(GlobalNameICInfo &ic)
{
    ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
4937 : #endif
4938 :
4939 : #if defined JS_POLYIC
/* Pass the (patched-in later) address of a polymorphic IC in ArgReg1 for an OOL stub call. */
void
mjit::Compiler::passICAddress(BaseICInfo *ic)
{
    ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
4945 :
4946 : bool
4947 509034 : mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType,
4948 : bool doTypeCheck, bool forPrototype)
4949 : {
4950 509034 : FrameEntry *top = frame.peek(-1);
4951 :
4952 : /*
4953 : * Use a different rejoin for GETPROP computing the 'this' object, as we
4954 : * can't use the current bytecode within InternalInterpret to tell this is
4955 : * fetching the 'this' value.
4956 : */
4957 509034 : RejoinState rejoin = REJOIN_GETTER;
4958 509034 : if (forPrototype) {
4959 1614 : JS_ASSERT(top->isType(JSVAL_TYPE_OBJECT) &&
4960 1614 : name == cx->runtime->atomState.classPrototypeAtom);
4961 1614 : rejoin = REJOIN_THIS_PROTOTYPE;
4962 : }
4963 :
4964 : /* Handle length accesses on known strings without using a PIC. */
4965 538675 : if (name == cx->runtime->atomState.lengthAtom &&
4966 29080 : top->isType(JSVAL_TYPE_STRING) &&
4967 561 : (!cx->typeInferenceEnabled() || knownPushedType(0) == JSVAL_TYPE_INT32)) {
4968 205 : if (top->isConstant()) {
4969 2 : JSString *str = top->getValue().toString();
4970 : Value v;
4971 2 : v.setNumber(uint32_t(str->length()));
4972 2 : frame.pop();
4973 2 : frame.push(v);
4974 : } else {
4975 203 : RegisterID str = frame.ownRegForData(top);
4976 203 : masm.loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), str);
4977 203 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), str);
4978 203 : frame.pop();
4979 203 : frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
4980 : }
4981 205 : return true;
4982 : }
4983 :
4984 562072 : if (top->mightBeType(JSVAL_TYPE_OBJECT) &&
4985 28506 : JSOp(*PC) == JSOP_LENGTH && cx->typeInferenceEnabled() &&
4986 24737 : !hasTypeBarriers(PC) && knownPushedType(0) == JSVAL_TYPE_INT32) {
4987 : /* Check if this is an array we can make a loop invariant entry for. */
4988 7868 : if (loop && loop->generatingInvariants()) {
4989 235 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
4990 235 : FrameEntry *fe = loop->invariantLength(topv);
4991 235 : if (fe) {
4992 170 : frame.learnType(fe, JSVAL_TYPE_INT32, false);
4993 170 : frame.pop();
4994 170 : frame.pushCopyOf(fe);
4995 170 : if (script->pcCounters)
4996 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
4997 170 : return true;
4998 : }
4999 : }
5000 :
5001 7698 : types::TypeSet *types = analysis->poppedTypes(PC, 0);
5002 :
5003 : /*
5004 : * Check if we are accessing the 'length' property of a known dense array.
5005 : * Note that if the types are known to indicate dense arrays, their lengths
5006 : * must fit in an int32.
5007 : */
5008 7698 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
5009 7172 : bool isObject = top->isTypeKnown();
5010 7172 : if (!isObject) {
5011 372 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5012 372 : stubcc.linkExit(notObject, Uses(1));
5013 372 : stubcc.leave();
5014 372 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5015 372 : OOL_STUBCALL(stubs::GetProp, rejoin);
5016 372 : if (rejoin == REJOIN_GETTER)
5017 372 : testPushedType(rejoin, -1);
5018 : }
5019 7172 : RegisterID result = frame.allocReg();
5020 7172 : RegisterID reg = frame.tempRegForData(top);
5021 7172 : frame.pop();
5022 7172 : masm.loadPtr(Address(reg, JSObject::offsetOfElements()), result);
5023 7172 : masm.load32(Address(result, ObjectElements::offsetOfLength()), result);
5024 7172 : frame.pushTypedPayload(JSVAL_TYPE_INT32, result);
5025 7172 : if (script->pcCounters)
5026 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5027 7172 : if (!isObject)
5028 372 : stubcc.rejoin(Changes(1));
5029 7172 : return true;
5030 : }
5031 :
5032 : /*
5033 : * Check if we're accessing the 'length' property of a typed array.
5034 : * The typed array length always fits in an int32.
5035 : */
5036 526 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY)) {
5037 320 : bool isObject = top->isTypeKnown();
5038 320 : if (!isObject) {
5039 176 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5040 176 : stubcc.linkExit(notObject, Uses(1));
5041 176 : stubcc.leave();
5042 176 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5043 176 : OOL_STUBCALL(stubs::GetProp, rejoin);
5044 176 : if (rejoin == REJOIN_GETTER)
5045 176 : testPushedType(rejoin, -1);
5046 : }
5047 320 : RegisterID reg = frame.copyDataIntoReg(top);
5048 320 : frame.pop();
5049 320 : masm.loadPayload(Address(reg, TypedArray::lengthOffset()), reg);
5050 320 : frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
5051 320 : if (script->pcCounters)
5052 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5053 320 : if (!isObject)
5054 176 : stubcc.rejoin(Changes(1));
5055 320 : return true;
5056 : }
5057 :
5058 : /*
5059 : * Check if we are accessing the 'length' of the lazy arguments for the
5060 : * current frame.
5061 : */
5062 206 : if (types->isLazyArguments(cx)) {
5063 0 : frame.pop();
5064 0 : frame.pushWord(Address(JSFrameReg, StackFrame::offsetOfNumActual()), JSVAL_TYPE_INT32);
5065 0 : if (script->pcCounters)
5066 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5067 0 : return true;
5068 : }
5069 : }
5070 :
5071 : /* If the access will definitely be fetching a particular value, nop it. */
5072 : bool testObject;
5073 : JSObject *singleton =
5074 501167 : (*PC == JSOP_GETPROP || *PC == JSOP_CALLPROP) ? pushedSingleton(0) : NULL;
5075 534904 : if (singleton && singleton->isFunction() && !hasTypeBarriers(PC) &&
5076 33737 : testSingletonPropertyTypes(top, ATOM_TO_JSID(name), &testObject)) {
5077 32198 : if (testObject) {
5078 1430 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5079 1430 : stubcc.linkExit(notObject, Uses(1));
5080 1430 : stubcc.leave();
5081 1430 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5082 1430 : OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
5083 1430 : testPushedType(REJOIN_FALLTHROUGH, -1);
5084 : }
5085 :
5086 32198 : frame.pop();
5087 32198 : frame.push(ObjectValue(*singleton));
5088 :
5089 32198 : if (script->pcCounters && cx->typeInferenceEnabled())
5090 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
5091 :
5092 32198 : if (testObject)
5093 1430 : stubcc.rejoin(Changes(1));
5094 :
5095 32198 : return true;
5096 : }
5097 :
5098 : /* Check if this is a property access we can make a loop invariant entry for. */
5099 468969 : if (loop && loop->generatingInvariants() && !hasTypeBarriers(PC)) {
5100 935 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
5101 935 : if (FrameEntry *fe = loop->invariantProperty(topv, ATOM_TO_JSID(name))) {
5102 82 : if (knownType != JSVAL_TYPE_UNKNOWN && knownType != JSVAL_TYPE_DOUBLE)
5103 82 : frame.learnType(fe, knownType, false);
5104 82 : frame.pop();
5105 82 : frame.pushCopyOf(fe);
5106 82 : if (script->pcCounters)
5107 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
5108 82 : return true;
5109 : }
5110 : }
5111 :
5112 : /* If the incoming type will never PIC, take slow path. */
5113 468887 : if (top->isNotType(JSVAL_TYPE_OBJECT)) {
5114 7184 : jsop_getprop_slow(name, forPrototype);
5115 7184 : return true;
5116 : }
5117 :
5118 461703 : frame.forgetMismatchedObject(top);
5119 :
5120 : /*
5121 : * Check if we are accessing a known type which always has the property
5122 : * in a particular inline slot. Get the property directly in this case,
5123 : * without using an IC.
5124 : */
5125 461703 : jsid id = ATOM_TO_JSID(name);
5126 461703 : types::TypeSet *types = frame.extra(top).types;
5127 611506 : if (types && !types->unknownObject() &&
5128 57595 : types->getObjectCount() == 1 &&
5129 32531 : types->getTypeObject(0) != NULL &&
5130 29861 : !types->getTypeObject(0)->unknownProperties() &&
5131 29816 : id == types::MakeTypeId(cx, id)) {
5132 29796 : JS_ASSERT(!forPrototype);
5133 29796 : types::TypeObject *object = types->getTypeObject(0);
5134 29796 : types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
5135 29796 : if (!propertyTypes)
5136 0 : return false;
5137 38695 : if (propertyTypes->isDefiniteProperty() &&
5138 8899 : !propertyTypes->isOwnProperty(cx, object, true)) {
5139 8889 : types->addFreeze(cx);
5140 8889 : uint32_t slot = propertyTypes->definiteSlot();
5141 8889 : bool isObject = top->isTypeKnown();
5142 8889 : if (!isObject) {
5143 2875 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5144 2875 : stubcc.linkExit(notObject, Uses(1));
5145 2875 : stubcc.leave();
5146 2875 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5147 2875 : OOL_STUBCALL(stubs::GetProp, rejoin);
5148 2875 : if (rejoin == REJOIN_GETTER)
5149 2875 : testPushedType(rejoin, -1);
5150 : }
5151 8889 : RegisterID reg = frame.tempRegForData(top);
5152 8889 : frame.pop();
5153 :
5154 8889 : if (script->pcCounters)
5155 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5156 :
5157 8889 : Address address(reg, JSObject::getFixedSlotOffset(slot));
5158 8889 : BarrierState barrier = pushAddressMaybeBarrier(address, knownType, false);
5159 8889 : if (!isObject)
5160 2875 : stubcc.rejoin(Changes(1));
5161 8889 : finishBarrier(barrier, rejoin, 0);
5162 :
5163 8889 : return true;
5164 : }
5165 : }
5166 :
5167 : /* Check for a dynamic dispatch. */
5168 452814 : if (cx->typeInferenceEnabled()) {
5169 55639 : if (*PC == JSOP_CALLPROP && jsop_getprop_dispatch(name))
5170 6942 : return true;
5171 : }
5172 :
5173 445872 : if (script->pcCounters)
5174 0 : bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
5175 :
5176 : /*
5177 : * These two must be loaded first. The objReg because the string path
5178 : * wants to read it, and the shapeReg because it could cause a spill that
5179 : * the string path wouldn't sink back.
5180 : */
5181 445872 : RegisterID objReg = frame.copyDataIntoReg(top);
5182 445872 : RegisterID shapeReg = frame.allocReg();
5183 :
5184 : RESERVE_IC_SPACE(masm);
5185 :
5186 445872 : PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC));
5187 :
5188 : /*
5189 : * If this access has been on a shape with a getter hook, make preparations
5190 : * so that we can generate a stub to call the hook directly (rather than be
5191 : * forced to make a stub call). Sync the stack up front and kill all
5192 : * registers so that PIC stubs can contain calls, and always generate a
5193 : * type barrier if inference is enabled (known property types do not
5194 : * reflect properties with getter hooks).
5195 : */
5196 : pic.canCallHook = pic.forcedTypeBarrier =
5197 445872 : !forPrototype &&
5198 : JSOp(*PC) == JSOP_GETPROP &&
5199 445872 : analysis->getCode(PC).accessGetter;
5200 :
5201 : /* Guard that the type is an object. */
5202 445872 : Label typeCheck;
5203 445872 : if (doTypeCheck && !top->isTypeKnown()) {
5204 392148 : RegisterID reg = frame.tempRegForType(top);
5205 392148 : pic.typeReg = reg;
5206 :
5207 392148 : if (pic.canCallHook) {
5208 17418 : PinRegAcrossSyncAndKill p1(frame, reg);
5209 8709 : frame.syncAndKillEverything();
5210 : }
5211 :
5212 : /* Start the hot path where it's easy to patch it. */
5213 392148 : pic.fastPathStart = masm.label();
5214 392148 : Jump j = masm.testObject(Assembler::NotEqual, reg);
5215 392148 : typeCheck = masm.label();
5216 392148 : RETURN_IF_OOM(false);
5217 :
5218 392148 : pic.typeCheck = stubcc.linkExit(j, Uses(1));
5219 392148 : pic.hasTypeCheck = true;
5220 : } else {
5221 53724 : if (pic.canCallHook)
5222 4580 : frame.syncAndKillEverything();
5223 :
5224 53724 : pic.fastPathStart = masm.label();
5225 53724 : pic.hasTypeCheck = false;
5226 53724 : pic.typeReg = Registers::ReturnReg;
5227 : }
5228 :
5229 445872 : pic.shapeReg = shapeReg;
5230 445872 : pic.name = name;
5231 :
5232 : /* Guard on shape. */
5233 445872 : masm.loadShape(objReg, shapeReg);
5234 445872 : pic.shapeGuard = masm.label();
5235 :
5236 445872 : DataLabelPtr inlineShapeLabel;
5237 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
5238 445872 : inlineShapeLabel, ImmPtr(NULL));
5239 445872 : Label inlineShapeJump = masm.label();
5240 :
5241 : RESERVE_OOL_SPACE(stubcc.masm);
5242 445872 : pic.slowPathStart = stubcc.linkExit(j, Uses(1));
5243 :
5244 445872 : stubcc.leave();
5245 445872 : passICAddress(&pic);
5246 445872 : pic.slowPathCall = OOL_STUBCALL(forPrototype ? ic::GetPropNoCache : ic::GetProp, rejoin);
5247 : CHECK_OOL_SPACE();
5248 445872 : if (rejoin == REJOIN_GETTER)
5249 444258 : testPushedType(rejoin, -1);
5250 :
5251 : /* Load the base slot address. */
5252 445872 : Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
5253 445872 : objReg);
5254 :
5255 : /* Copy the slot value to the expression stack. */
5256 445872 : Address slot(objReg, 1 << 24);
5257 445872 : frame.pop();
5258 :
5259 445872 : Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
5260 445872 : pic.fastPathRejoin = masm.label();
5261 :
5262 445872 : RETURN_IF_OOM(false);
5263 :
5264 : /* Initialize op labels. */
5265 445872 : GetPropLabels &labels = pic.getPropLabels();
5266 445872 : labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
5267 445872 : labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeLabel);
5268 :
5269 445872 : labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
5270 445872 : if (pic.hasTypeCheck)
5271 392148 : labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
5272 445872 : labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
5273 :
5274 : CHECK_IC_SPACE();
5275 :
5276 445872 : pic.objReg = objReg;
5277 445872 : frame.pushRegs(shapeReg, objReg, knownType);
5278 : BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, false, false,
5279 445872 : /* force = */ pic.canCallHook);
5280 :
5281 445872 : stubcc.rejoin(Changes(1));
5282 445872 : pics.append(pic);
5283 :
5284 445872 : finishBarrier(barrier, rejoin, 0);
5285 445872 : return true;
5286 : }
5287 :
5288 : bool
5289 70110 : mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id)
5290 : {
5291 : /*
5292 : * We would like to completely no-op property/global accesses which can
5293 : * produce only a particular JSObject or undefined, provided we can
5294 : * determine the pushed value must not be undefined (or, if it could be
5295 : * undefined, a recompilation will be triggered).
5296 : *
5297 : * If the access definitely goes through obj, either directly or on the
5298 : * prototype chain, then if obj has a defined property now, and the
5299 : * property has a default or method shape, the only way it can produce
5300 : * undefined in the future is if it is deleted. Deletion causes type
5301 : * properties to be explicitly marked with undefined.
5302 : */
5303 :
5304 70110 : JSObject *nobj = obj;
5305 285694 : while (nobj) {
5306 145476 : if (!nobj->isNative())
5307 2 : return false;
5308 145474 : if (nobj->getClass()->ops.lookupGeneric)
5309 0 : return false;
5310 145474 : nobj = nobj->getProto();
5311 : }
5312 :
5313 : JSObject *holder;
5314 70108 : JSProperty *prop = NULL;
5315 70108 : if (!obj->lookupGeneric(cx, id, &holder, &prop))
5316 0 : return false;
5317 70108 : if (!prop)
5318 114 : return false;
5319 :
5320 69994 : Shape *shape = (Shape *) prop;
5321 69994 : if (shape->hasDefaultGetter()) {
5322 66726 : if (!shape->hasSlot())
5323 0 : return false;
5324 66726 : if (holder->getSlot(shape->slot()).isUndefined())
5325 0 : return false;
5326 3268 : } else if (!shape->isMethod()) {
5327 4 : return false;
5328 : }
5329 :
5330 69990 : return true;
5331 : }
5332 :
5333 : bool
5334 33737 : mjit::Compiler::testSingletonPropertyTypes(FrameEntry *top, jsid id, bool *testObject)
5335 : {
5336 33737 : *testObject = false;
5337 :
5338 33737 : types::TypeSet *types = frame.extra(top).types;
5339 33737 : if (!types || types->unknownObject())
5340 16 : return false;
5341 :
5342 33721 : JSObject *singleton = types->getSingleton(cx);
5343 33721 : if (singleton)
5344 6759 : return testSingletonProperty(singleton, id);
5345 :
5346 26962 : if (!globalObj)
5347 0 : return false;
5348 :
5349 : JSProtoKey key;
5350 26962 : JSValueType type = types->getKnownTypeTag(cx);
5351 26962 : switch (type) {
5352 : case JSVAL_TYPE_STRING:
5353 8673 : key = JSProto_String;
5354 8673 : break;
5355 :
5356 : case JSVAL_TYPE_INT32:
5357 : case JSVAL_TYPE_DOUBLE:
5358 6 : key = JSProto_Number;
5359 6 : break;
5360 :
5361 : case JSVAL_TYPE_BOOLEAN:
5362 0 : key = JSProto_Boolean;
5363 0 : break;
5364 :
5365 : case JSVAL_TYPE_OBJECT:
5366 : case JSVAL_TYPE_UNKNOWN:
5367 18283 : if (types->getObjectCount() == 1 && !top->isNotType(JSVAL_TYPE_OBJECT)) {
5368 16786 : JS_ASSERT_IF(top->isTypeKnown(), top->isType(JSVAL_TYPE_OBJECT));
5369 16786 : types::TypeObject *object = types->getTypeObject(0);
5370 16786 : if (object && object->proto) {
5371 16786 : if (!testSingletonProperty(object->proto, id))
5372 22 : return false;
5373 16764 : types->addFreeze(cx);
5374 :
5375 : /* If we don't know this is an object, we will need a test. */
5376 16764 : *testObject = (type != JSVAL_TYPE_OBJECT) && !top->isTypeKnown();
5377 16764 : return true;
5378 : }
5379 : }
5380 1497 : return false;
5381 :
5382 : default:
5383 0 : return false;
5384 : }
5385 :
5386 : JSObject *proto;
5387 8679 : if (!js_GetClassPrototype(cx, globalObj, key, &proto, NULL))
5388 0 : return NULL;
5389 :
5390 8679 : return testSingletonProperty(proto, id);
5391 : }
5392 :
5393 : bool
5394 23655 : mjit::Compiler::jsop_getprop_dispatch(PropertyName *name)
5395 : {
5396 : /*
5397 : * Check for a CALLPROP which is a dynamic dispatch: every value it can
5398 : * push is a singleton, and the pushed value is determined by the type of
5399 : * the object being accessed. Return true if the CALLPROP has been fully
5400 : * processed, false if no code was generated.
5401 : */
5402 23655 : FrameEntry *top = frame.peek(-1);
5403 23655 : if (top->isNotType(JSVAL_TYPE_OBJECT))
5404 0 : return false;
5405 :
5406 23655 : jsid id = ATOM_TO_JSID(name);
5407 23655 : if (id != types::MakeTypeId(cx, id))
5408 0 : return false;
5409 :
5410 23655 : types::TypeSet *pushedTypes = pushedTypeSet(0);
5411 23655 : if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
5412 988 : return false;
5413 :
5414 : /* Check every pushed value is a singleton. */
5415 25992 : for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
5416 4513 : if (pushedTypes->getTypeObject(i) != NULL)
5417 1188 : return false;
5418 : }
5419 :
5420 21479 : types::TypeSet *objTypes = analysis->poppedTypes(PC, 0);
5421 21479 : if (objTypes->unknownObject() || objTypes->getObjectCount() == 0)
5422 12871 : return false;
5423 :
5424 8608 : pushedTypes->addFreeze(cx);
5425 :
5426 : /* Map each type in the object to the resulting pushed value. */
5427 17216 : Vector<JSObject *> results(CompilerAllocPolicy(cx, *this));
5428 :
5429 : /*
5430 : * For each type of the base object, check it has no 'own' property for the
5431 : * accessed id and that its prototype does have such a property.
5432 : */
5433 8608 : uint32_t last = 0;
5434 19734 : for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
5435 12792 : if (objTypes->getSingleObject(i) != NULL)
5436 1017 : return false;
5437 11775 : types::TypeObject *object = objTypes->getTypeObject(i);
5438 11775 : if (!object) {
5439 1951 : results.append((JSObject *) NULL);
5440 1951 : continue;
5441 : }
5442 9824 : if (object->unknownProperties() || !object->proto)
5443 4 : return false;
5444 9820 : types::TypeSet *ownTypes = object->getProperty(cx, id, false);
5445 9820 : if (ownTypes->isOwnProperty(cx, object, false))
5446 31 : return false;
5447 :
5448 9789 : if (!testSingletonProperty(object->proto, id))
5449 90 : return false;
5450 :
5451 9699 : if (object->proto->getType(cx)->unknownProperties())
5452 4 : return false;
5453 9695 : types::TypeSet *protoTypes = object->proto->type()->getProperty(cx, id, false);
5454 9695 : if (!protoTypes)
5455 0 : return false;
5456 9695 : JSObject *singleton = protoTypes->getSingleton(cx);
5457 9695 : if (!singleton)
5458 520 : return false;
5459 :
5460 9175 : results.append(singleton);
5461 9175 : last = i;
5462 : }
5463 :
5464 6942 : if (oomInVector)
5465 0 : return false;
5466 :
5467 6942 : objTypes->addFreeze(cx);
5468 :
5469 : /* Done filtering, now generate code which dispatches on the type. */
5470 :
5471 6942 : frame.forgetMismatchedObject(top);
5472 :
5473 6942 : if (!top->isType(JSVAL_TYPE_OBJECT)) {
5474 1037 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5475 1037 : stubcc.linkExit(notObject, Uses(1));
5476 : }
5477 :
5478 6942 : RegisterID reg = frame.tempRegForData(top);
5479 6942 : frame.pinReg(reg);
5480 6942 : RegisterID pushreg = frame.allocReg();
5481 6942 : frame.unpinReg(reg);
5482 :
5483 6942 : Address typeAddress(reg, JSObject::offsetOfType());
5484 :
5485 13884 : Vector<Jump> rejoins(CompilerAllocPolicy(cx, *this));
5486 6942 : MaybeJump lastMiss;
5487 :
5488 10355 : for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
5489 10355 : types::TypeObject *object = objTypes->getTypeObject(i);
5490 10355 : if (!object) {
5491 1403 : JS_ASSERT(results[i] == NULL);
5492 1403 : continue;
5493 : }
5494 8952 : if (lastMiss.isSet())
5495 2010 : lastMiss.get().linkTo(masm.label(), &masm);
5496 :
5497 : /*
5498 : * Check that the pushed result is actually in the known pushed types
5499 : * for the bytecode; this bytecode may have type barriers. Redirect to
5500 : * the stub to update said pushed types.
5501 : */
5502 8952 : if (!pushedTypes->hasType(types::Type::ObjectType(results[i]))) {
5503 6040 : JS_ASSERT(hasTypeBarriers(PC));
5504 6040 : if (i == last) {
5505 6001 : stubcc.linkExit(masm.jump(), Uses(1));
5506 6001 : break;
5507 : } else {
5508 39 : lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
5509 39 : stubcc.linkExit(masm.jump(), Uses(1));
5510 39 : continue;
5511 : }
5512 : }
5513 :
5514 2912 : if (i == last) {
5515 941 : masm.move(ImmPtr(results[i]), pushreg);
5516 941 : break;
5517 : } else {
5518 1971 : lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
5519 1971 : masm.move(ImmPtr(results[i]), pushreg);
5520 1971 : rejoins.append(masm.jump());
5521 : }
5522 : }
5523 :
5524 8913 : for (unsigned i = 0; i < rejoins.length(); i++)
5525 1971 : rejoins[i].linkTo(masm.label(), &masm);
5526 :
5527 6942 : stubcc.leave();
5528 6942 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5529 6942 : OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
5530 6942 : testPushedType(REJOIN_FALLTHROUGH, -1);
5531 :
5532 6942 : frame.pop();
5533 6942 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pushreg);
5534 :
5535 6942 : if (script->pcCounters)
5536 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5537 :
5538 6942 : stubcc.rejoin(Changes(2));
5539 6942 : return true;
5540 : }
5541 :
5542 : bool
5543 35732 : mjit::Compiler::jsop_setprop(PropertyName *name, bool popGuaranteed)
5544 : {
5545 35732 : FrameEntry *lhs = frame.peek(-2);
5546 35732 : FrameEntry *rhs = frame.peek(-1);
5547 :
5548 : /* If the incoming type will never PIC, take slow path. */
5549 35732 : if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
5550 176 : jsop_setprop_slow(name);
5551 176 : return true;
5552 : }
5553 :
5554 : /*
5555 : * If this is a SETNAME to a variable of a non-reentrant outer function,
5556 : * set the variable's slot directly for the active call object.
5557 : */
5558 35556 : if (cx->typeInferenceEnabled() && js_CodeSpec[*PC].format & JOF_NAME) {
5559 : ScriptAnalysis::NameAccess access =
5560 2283 : analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
5561 2283 : if (access.nesting) {
5562 : /* Use a SavedReg so it isn't clobbered by the stub call. */
5563 510 : RegisterID nameReg = frame.allocReg(Registers::SavedRegs).reg();
5564 510 : Address address = frame.loadNameAddress(access, nameReg);
5565 :
5566 : #ifdef JSGC_INCREMENTAL_MJ
5567 : /* Write barrier. */
5568 510 : if (cx->compartment->needsBarrier()) {
5569 0 : stubcc.linkExit(masm.jump(), Uses(0));
5570 0 : stubcc.leave();
5571 :
5572 : /* sync() may have overwritten nameReg, so we reload its data. */
5573 0 : JS_ASSERT(address.base == nameReg);
5574 0 : stubcc.masm.move(ImmPtr(access.basePointer()), nameReg);
5575 0 : stubcc.masm.loadPtr(Address(nameReg), nameReg);
5576 0 : stubcc.masm.addPtr(Imm32(address.offset), nameReg, Registers::ArgReg1);
5577 :
5578 0 : OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
5579 0 : stubcc.rejoin(Changes(0));
5580 : }
5581 : #endif
5582 :
5583 510 : frame.storeTo(rhs, address, popGuaranteed);
5584 510 : frame.shimmy(1);
5585 510 : frame.freeReg(address.base);
5586 510 : return true;
5587 : }
5588 : }
5589 :
5590 : /*
5591 : * Set the property directly if we are accessing a known object which
5592 : * always has the property in a particular inline slot.
5593 : */
5594 35046 : jsid id = ATOM_TO_JSID(name);
5595 35046 : types::TypeSet *types = frame.extra(lhs).types;
5596 59908 : if (JSOp(*PC) == JSOP_SETPROP && id == types::MakeTypeId(cx, id) &&
5597 7976 : types && !types->unknownObject() &&
5598 7196 : types->getObjectCount() == 1 &&
5599 4981 : types->getTypeObject(0) != NULL &&
5600 4709 : !types->getTypeObject(0)->unknownProperties()) {
5601 4697 : types::TypeObject *object = types->getTypeObject(0);
5602 4697 : types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
5603 4697 : if (!propertyTypes)
5604 0 : return false;
5605 6498 : if (propertyTypes->isDefiniteProperty() &&
5606 1801 : !propertyTypes->isOwnProperty(cx, object, true)) {
5607 1787 : types->addFreeze(cx);
5608 1787 : uint32_t slot = propertyTypes->definiteSlot();
5609 1787 : RegisterID reg = frame.tempRegForData(lhs);
5610 1787 : bool isObject = lhs->isTypeKnown();
5611 1787 : MaybeJump notObject;
5612 1787 : if (!isObject)
5613 384 : notObject = frame.testObject(Assembler::NotEqual, lhs);
5614 : #ifdef JSGC_INCREMENTAL_MJ
5615 1787 : frame.pinReg(reg);
5616 1787 : if (cx->compartment->needsBarrier() && propertyTypes->needsBarrier(cx)) {
5617 : /* Write barrier. */
5618 4 : Jump j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
5619 4 : stubcc.linkExit(j, Uses(0));
5620 4 : stubcc.leave();
5621 4 : stubcc.masm.addPtr(Imm32(JSObject::getFixedSlotOffset(slot)),
5622 4 : reg, Registers::ArgReg1);
5623 4 : OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
5624 4 : stubcc.rejoin(Changes(0));
5625 : }
5626 1787 : frame.unpinReg(reg);
5627 : #endif
5628 1787 : if (!isObject) {
5629 384 : stubcc.linkExit(notObject.get(), Uses(2));
5630 384 : stubcc.leave();
5631 384 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5632 384 : OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
5633 : }
5634 1787 : frame.storeTo(rhs, Address(reg, JSObject::getFixedSlotOffset(slot)), popGuaranteed);
5635 1787 : frame.shimmy(1);
5636 1787 : if (!isObject)
5637 384 : stubcc.rejoin(Changes(1));
5638 1787 : if (script->pcCounters)
5639 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5640 1787 : return true;
5641 : }
5642 : }
5643 :
5644 33259 : if (script->pcCounters)
5645 0 : bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
5646 :
5647 33259 : JSOp op = JSOp(*PC);
5648 :
5649 : #ifdef JSGC_INCREMENTAL_MJ
5650 : /* Write barrier. We don't have type information for JSOP_SETNAME. */
5651 33295 : if (cx->compartment->needsBarrier() &&
5652 36 : (!types || op == JSOP_SETNAME || types->propertyNeedsBarrier(cx, id)))
5653 : {
5654 44 : jsop_setprop_slow(name);
5655 44 : return true;
5656 : }
5657 : #endif
5658 :
5659 : ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
5660 : ? ic::PICInfo::SETMETHOD
5661 33215 : : ic::PICInfo::SET;
5662 33215 : PICGenInfo pic(kind, op);
5663 33215 : pic.name = name;
5664 :
5665 33215 : if (monitored(PC)) {
5666 2487 : pic.typeMonitored = true;
5667 2487 : types::TypeSet *types = frame.extra(rhs).types;
5668 2487 : if (!types) {
5669 : /* Handle FORNAME and other compound opcodes. Yuck. */
5670 0 : types = types::TypeSet::make(cx, "unknownRHS");
5671 0 : if (!types)
5672 0 : return false;
5673 0 : types->addType(cx, types::Type::UnknownType());
5674 : }
5675 2487 : pic.rhsTypes = types;
5676 : } else {
5677 30728 : pic.typeMonitored = false;
5678 30728 : pic.rhsTypes = NULL;
5679 : }
5680 :
5681 : RESERVE_IC_SPACE(masm);
5682 : RESERVE_OOL_SPACE(stubcc.masm);
5683 :
5684 : /* Guard that the type is an object. */
5685 33215 : Jump typeCheck;
5686 33215 : if (!lhs->isTypeKnown()) {
5687 12045 : RegisterID reg = frame.tempRegForType(lhs);
5688 12045 : pic.typeReg = reg;
5689 :
5690 : /* Start the hot path where it's easy to patch it. */
5691 12045 : pic.fastPathStart = masm.label();
5692 12045 : Jump j = masm.testObject(Assembler::NotEqual, reg);
5693 :
5694 12045 : pic.typeCheck = stubcc.linkExit(j, Uses(2));
5695 12045 : stubcc.leave();
5696 :
5697 12045 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5698 12045 : OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
5699 :
5700 12045 : typeCheck = stubcc.masm.jump();
5701 12045 : pic.hasTypeCheck = true;
5702 : } else {
5703 21170 : pic.fastPathStart = masm.label();
5704 21170 : pic.hasTypeCheck = false;
5705 21170 : pic.typeReg = Registers::ReturnReg;
5706 : }
5707 :
5708 33215 : frame.forgetMismatchedObject(lhs);
5709 :
5710 : /* Get the object into a mutable register. */
5711 33215 : RegisterID objReg = frame.copyDataIntoReg(lhs);
5712 33215 : pic.objReg = objReg;
5713 :
5714 : /* Get info about the RHS and pin it. */
5715 : ValueRemat vr;
5716 33215 : frame.pinEntry(rhs, vr);
5717 33215 : pic.vr = vr;
5718 :
5719 33215 : RegisterID shapeReg = frame.allocReg();
5720 33215 : pic.shapeReg = shapeReg;
5721 :
5722 33215 : frame.unpinEntry(vr);
5723 :
5724 : /* Guard on shape. */
5725 33215 : masm.loadShape(objReg, shapeReg);
5726 33215 : pic.shapeGuard = masm.label();
5727 33215 : DataLabelPtr inlineShapeData;
5728 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
5729 33215 : inlineShapeData, ImmPtr(NULL));
5730 33215 : Label afterInlineShapeJump = masm.label();
5731 :
5732 : /* Slow path. */
5733 : {
5734 33215 : pic.slowPathStart = stubcc.linkExit(j, Uses(2));
5735 :
5736 33215 : stubcc.leave();
5737 33215 : passICAddress(&pic);
5738 33215 : pic.slowPathCall = OOL_STUBCALL(ic::SetProp, REJOIN_FALLTHROUGH);
5739 : CHECK_OOL_SPACE();
5740 : }
5741 :
5742 : /* Load dslots. */
5743 33215 : Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
5744 33215 : objReg);
5745 :
5746 : /* Store RHS into object slot. */
5747 33215 : Address slot(objReg, 1 << 24);
5748 33215 : DataLabel32 inlineValueStore = masm.storeValueWithAddressOffsetPatch(vr, slot);
5749 33215 : pic.fastPathRejoin = masm.label();
5750 :
5751 33215 : frame.freeReg(objReg);
5752 33215 : frame.freeReg(shapeReg);
5753 :
5754 : /* "Pop under", taking out object (LHS) and leaving RHS. */
5755 33215 : frame.shimmy(1);
5756 :
5757 : /* Finish slow path. */
5758 : {
5759 33215 : if (pic.hasTypeCheck)
5760 12045 : typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
5761 33215 : stubcc.rejoin(Changes(1));
5762 : }
5763 :
5764 33215 : RETURN_IF_OOM(false);
5765 :
5766 33215 : SetPropLabels &labels = pic.setPropLabels();
5767 33215 : labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData);
5768 33215 : labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
5769 33215 : labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore);
5770 33215 : labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump);
5771 :
5772 33215 : pics.append(pic);
5773 33215 : return true;
5774 : }
5775 :
5776 : void
5777 368466 : mjit::Compiler::jsop_name(PropertyName *name, JSValueType type)
5778 : {
5779 : /*
5780 : * If this is a NAME for a variable of a non-reentrant outer function, get
5781 : * the variable's slot directly for the active call object. We always need
5782 : * to check for undefined, however.
5783 : */
5784 368466 : if (cx->typeInferenceEnabled()) {
5785 : ScriptAnalysis::NameAccess access =
5786 47049 : analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
5787 47049 : if (access.nesting) {
5788 27917 : Address address = frame.loadNameAddress(access);
5789 27917 : JSValueType type = knownPushedType(0);
5790 : BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
5791 27917 : /* testUndefined = */ true);
5792 27917 : finishBarrier(barrier, REJOIN_GETTER, 0);
5793 27917 : return;
5794 : }
5795 : }
5796 :
5797 340549 : PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC));
5798 :
5799 : RESERVE_IC_SPACE(masm);
5800 :
5801 340549 : pic.shapeReg = frame.allocReg();
5802 340549 : pic.objReg = frame.allocReg();
5803 340549 : pic.typeReg = Registers::ReturnReg;
5804 340549 : pic.name = name;
5805 340549 : pic.hasTypeCheck = false;
5806 340549 : pic.fastPathStart = masm.label();
5807 :
5808 : /* There is no inline implementation, so we always jump to the slow path or to a stub. */
5809 340549 : pic.shapeGuard = masm.label();
5810 340549 : Jump inlineJump = masm.jump();
5811 : {
5812 : RESERVE_OOL_SPACE(stubcc.masm);
5813 340549 : pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
5814 340549 : stubcc.leave();
5815 340549 : passICAddress(&pic);
5816 340549 : pic.slowPathCall = OOL_STUBCALL(ic::Name, REJOIN_GETTER);
5817 : CHECK_OOL_SPACE();
5818 340549 : testPushedType(REJOIN_GETTER, 0);
5819 : }
5820 340549 : pic.fastPathRejoin = masm.label();
5821 :
5822 : /* Initialize op labels. */
5823 340549 : ScopeNameLabels &labels = pic.scopeNameLabels();
5824 340549 : labels.setInlineJump(masm, pic.fastPathStart, inlineJump);
5825 :
5826 : CHECK_IC_SPACE();
5827 :
5828 : /*
5829 : * We can't optimize away the PIC for the NAME access itself, but if we've
5830 : * only seen a single value pushed by this access, mark it as such and
5831 : * recompile if a different value becomes possible.
5832 : */
5833 340549 : JSObject *singleton = pushedSingleton(0);
5834 340549 : if (singleton) {
5835 5369 : frame.push(ObjectValue(*singleton));
5836 5369 : frame.freeReg(pic.shapeReg);
5837 5369 : frame.freeReg(pic.objReg);
5838 : } else {
5839 335180 : frame.pushRegs(pic.shapeReg, pic.objReg, type);
5840 : }
5841 340549 : BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);
5842 :
5843 340549 : stubcc.rejoin(Changes(1));
5844 :
5845 340549 : pics.append(pic);
5846 :
5847 340549 : finishBarrier(barrier, REJOIN_GETTER, 0);
5848 : }
5849 :
5850 : bool
5851 506 : mjit::Compiler::jsop_xname(PropertyName *name)
5852 : {
5853 : /*
5854 : * If this is a GETXPROP for a variable of a non-reentrant outer function,
5855 : * treat in the same way as a NAME.
5856 : */
5857 506 : if (cx->typeInferenceEnabled()) {
5858 : ScriptAnalysis::NameAccess access =
5859 258 : analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
5860 258 : if (access.nesting) {
5861 116 : frame.pop();
5862 116 : Address address = frame.loadNameAddress(access);
5863 116 : JSValueType type = knownPushedType(0);
5864 : BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
5865 116 : /* testUndefined = */ true);
5866 116 : finishBarrier(barrier, REJOIN_GETTER, 0);
5867 116 : return true;
5868 : }
5869 : }
5870 :
5871 390 : PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC));
5872 :
5873 390 : FrameEntry *fe = frame.peek(-1);
5874 390 : if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
5875 0 : return jsop_getprop(name, knownPushedType(0));
5876 : }
5877 :
5878 390 : if (!fe->isTypeKnown()) {
5879 0 : Jump notObject = frame.testObject(Assembler::NotEqual, fe);
5880 0 : stubcc.linkExit(notObject, Uses(1));
5881 : }
5882 :
5883 390 : frame.forgetMismatchedObject(fe);
5884 :
5885 : RESERVE_IC_SPACE(masm);
5886 :
5887 390 : pic.shapeReg = frame.allocReg();
5888 390 : pic.objReg = frame.copyDataIntoReg(fe);
5889 390 : pic.typeReg = Registers::ReturnReg;
5890 390 : pic.name = name;
5891 390 : pic.hasTypeCheck = false;
5892 390 : pic.fastPathStart = masm.label();
5893 :
5894 : /* There is no inline implementation, so we always jump to the slow path or to a stub. */
5895 390 : pic.shapeGuard = masm.label();
5896 390 : Jump inlineJump = masm.jump();
5897 : {
5898 : RESERVE_OOL_SPACE(stubcc.masm);
5899 390 : pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(1));
5900 390 : stubcc.leave();
5901 390 : passICAddress(&pic);
5902 390 : pic.slowPathCall = OOL_STUBCALL(ic::XName, REJOIN_GETTER);
5903 : CHECK_OOL_SPACE();
5904 390 : testPushedType(REJOIN_GETTER, -1);
5905 : }
5906 :
5907 390 : pic.fastPathRejoin = masm.label();
5908 :
5909 390 : RETURN_IF_OOM(false);
5910 :
5911 : /* Initialize op labels. */
5912 390 : ScopeNameLabels &labels = pic.scopeNameLabels();
5913 390 : labels.setInlineJumpOffset(masm.differenceBetween(pic.fastPathStart, inlineJump));
5914 :
5915 : CHECK_IC_SPACE();
5916 :
5917 390 : frame.pop();
5918 390 : frame.pushRegs(pic.shapeReg, pic.objReg, knownPushedType(0));
5919 :
5920 390 : BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);
5921 :
5922 390 : stubcc.rejoin(Changes(1));
5923 :
5924 390 : pics.append(pic);
5925 :
5926 390 : finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
5927 390 : return true;
5928 : }
5929 :
5930 : void
5931 7676 : mjit::Compiler::jsop_bindname(PropertyName *name)
5932 : {
5933 : /*
5934 : * If this is a BINDNAME for a variable of a non-reentrant outer function,
5935 : * the object is definitely the outer function's active call object.
5936 : */
5937 7676 : if (cx->typeInferenceEnabled()) {
5938 : ScriptAnalysis::NameAccess access =
5939 2283 : analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
5940 2283 : if (access.nesting) {
5941 510 : RegisterID reg = frame.allocReg();
5942 510 : JSObject **pobj = &access.nesting->activeCall;
5943 510 : masm.move(ImmPtr(pobj), reg);
5944 510 : masm.loadPtr(Address(reg), reg);
5945 510 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
5946 510 : return;
5947 : }
5948 : }
5949 :
5950 7166 : PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC));
5951 :
5952 : // This code does not check the frame flags to see if scopeChain has been
5953 : // set. Rather, it relies on the up-front analysis statically determining
5954 : // whether BINDNAME can be used, which reifies the scope chain at the
5955 : // prologue.
5956 7166 : JS_ASSERT(analysis->usesScopeChain());
5957 :
5958 7166 : pic.shapeReg = frame.allocReg();
5959 7166 : pic.objReg = frame.allocReg();
5960 7166 : pic.typeReg = Registers::ReturnReg;
5961 7166 : pic.name = name;
5962 7166 : pic.hasTypeCheck = false;
5963 :
5964 : RESERVE_IC_SPACE(masm);
5965 7166 : pic.fastPathStart = masm.label();
5966 :
5967 7166 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
5968 7166 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfShape()), pic.shapeReg);
5969 7166 : masm.loadPtr(Address(pic.shapeReg, Shape::offsetOfBase()), pic.shapeReg);
5970 7166 : Address parent(pic.shapeReg, BaseShape::offsetOfParent());
5971 :
5972 7166 : pic.shapeGuard = masm.label();
5973 7166 : Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(NULL));
5974 : {
5975 : RESERVE_OOL_SPACE(stubcc.masm);
5976 7166 : pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
5977 7166 : stubcc.leave();
5978 7166 : passICAddress(&pic);
5979 7166 : pic.slowPathCall = OOL_STUBCALL(ic::BindName, REJOIN_FALLTHROUGH);
5980 : CHECK_OOL_SPACE();
5981 : }
5982 :
5983 7166 : pic.fastPathRejoin = masm.label();
5984 :
5985 : /* Initialize op labels. */
5986 7166 : BindNameLabels &labels = pic.bindNameLabels();
5987 7166 : labels.setInlineJump(masm, pic.shapeGuard, inlineJump);
5988 :
5989 7166 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
5990 7166 : frame.freeReg(pic.shapeReg);
5991 :
5992 7166 : stubcc.rejoin(Changes(1));
5993 :
5994 7166 : pics.append(pic);
5995 : }
5996 :
5997 : #else /* !JS_POLYIC */
5998 :
/*
 * Non-PIC fallback for JSOP_NAME / JSOP_CALLNAME: always call the Name or
 * CallName stub. For calls, an extra unknown value (the implicit callee
 * |this|) is pushed after the result.
 */
void
mjit::Compiler::jsop_name(PropertyName *name, JSValueType type, bool isCall)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(isCall ? stubs::CallName : stubs::Name, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, 0, /* ool = */ false);
    frame.pushSynced(type);
    if (isCall)
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6009 :
/* Non-PIC fallback for JSOP_XNAME: handled as a plain property get. */
bool
mjit::Compiler::jsop_xname(PropertyName *name)
{
    return jsop_getprop(name, knownPushedType(0), pushedTypeSet(0));
}
6015 :
/*
 * Non-PIC fallback for property gets: the type information (knownType,
 * typeSet, typecheck) is ignored and the slow stub path is emitted
 * unconditionally.
 */
bool
mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType, types::TypeSet *typeSet,
                             bool typecheck, bool forPrototype)
{
    jsop_getprop_slow(name, forPrototype);
    return true;
}
6023 :
/* Non-PIC fallback for property sets: always use the slow stub path. */
bool
mjit::Compiler::jsop_setprop(PropertyName *name)
{
    jsop_setprop_slow(name);
    return true;
}
6030 :
6031 : void
6032 : mjit::Compiler::jsop_bindname(PropertyName *name)
6033 : {
6034 : RegisterID reg = frame.allocReg();
6035 : Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
6036 : masm.loadPtr(scopeChain, reg);
6037 :
6038 : Address address(reg, offsetof(JSObject, parent));
6039 :
6040 : Jump j = masm.branchPtr(Assembler::NotEqual, address, ImmPtr(0));
6041 :
6042 : stubcc.linkExit(j, Uses(0));
6043 : stubcc.leave();
6044 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
6045 : OOL_STUBCALL(stubs::BindName, REJOIN_FALLTHROUGH);
6046 :
6047 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
6048 :
6049 : stubcc.rejoin(Changes(1));
6050 : }
6051 : #endif
6052 :
/*
 * Emit JSOP_THIS: push the frame's |this| value and, for non-strict
 * function code, ensure it has been boxed to an object (calling the This
 * stub when the compiler cannot prove it is already an object).
 */
void
mjit::Compiler::jsop_this()
{
    frame.pushThis();

    /*
     * In strict mode code, we don't wrap 'this'.
     * In direct-call eval code, we wrapped 'this' before entering the eval.
     * In global code, 'this' is always an object.
     */
    if (script->function() && !script->strictModeCode) {
        FrameEntry *thisFe = frame.peek(-1);

        if (!thisFe->isType(JSVAL_TYPE_OBJECT)) {
            /*
             * Watch out for an obscure case where we don't know we are pushing
             * an object: the script has not yet had a 'this' value assigned,
             * so no pushed 'this' type has been inferred. Don't mark the type
             * as known in this case, preserving the invariant that compiler
             * types reflect inferred types.
             */
            if (cx->typeInferenceEnabled() && knownPushedType(0) != JSVAL_TYPE_OBJECT) {
                prepareStubCall(Uses(1));
                INLINE_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                return;
            }

            JSValueType type = cx->typeInferenceEnabled()
                               ? types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx)
                               : JSVAL_TYPE_UNKNOWN;
            if (type != JSVAL_TYPE_OBJECT) {
                /* Inferred type doesn't prove an object: guard and call the stub OOL. */
                Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
                stubcc.linkExit(notObj, Uses(1));
                stubcc.leave();
                OOL_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                stubcc.rejoin(Changes(1));
            }

            // Now we know that |this| is an object.
            frame.pop();
            frame.learnThisIsObject(type != JSVAL_TYPE_OBJECT);
            frame.pushThis();
        }

        JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
    }
}
6100 :
/*
 * Emit JSOP_ITER: create/reuse an iterator for the object on top of the
 * stack. The fast path tries to reuse the compartment's most recent cached
 * native iterator when the shapes of the object and its (single-link)
 * prototype chain still match; anything else falls back to stubs::Iter.
 */
bool
mjit::Compiler::iter(unsigned flags)
{
    FrameEntry *fe = frame.peek(-1);

    /*
     * Stub the call if this is not a simple 'for in' loop or if the iterated
     * value is known to not be an object.
     */
    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(1));
        masm.move(Imm32(flags), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);
        frame.pop();
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
        return true;
    }

    /* Guard the operand really is an object when its type is not known. */
    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    frame.forgetMismatchedObject(fe);

    RegisterID reg = frame.tempRegForData(fe);

    /* Pin the object register while allocating the four scratch registers. */
    frame.pinReg(reg);
    RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
    RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
    RegisterID T1 = frame.allocReg();
    RegisterID T2 = frame.allocReg();
    frame.unpinReg(reg);

    /* Fetch the most recent iterator. */
    masm.loadPtr(&script->compartment()->nativeIterCache.last, ioreg);

    /* Test for NULL. */
    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
    stubcc.linkExit(nullIterator, Uses(1));

    /* Get NativeIterator from iter obj. */
    masm.loadObjPrivate(ioreg, nireg, JSObject::ITER_CLASS_NFIXED_SLOTS);

    /* Test for active iterator. */
    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
    masm.load32(flagsAddr, T1);
    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1,
                                            Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE));
    stubcc.linkExit(activeIterator, Uses(1));

    /* Compare shape of object with iterator. */
    masm.loadShape(reg, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, 0), T2);
    Jump mismatchedObject = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedObject, Uses(1));

    /* Compare shape of object's prototype with iterator. */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadShape(T1, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, sizeof(Shape *)), T2);
    Jump mismatchedProto = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedProto, Uses(1));

    /*
     * Compare object's prototype's prototype with NULL. The last native
     * iterator will always have a prototype chain length of one
     * (i.e. it must be a plain object), so we do not need to generate
     * a loop here.
     */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadPtr(Address(T1, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
    stubcc.linkExit(overlongChain, Uses(1));

#ifdef JSGC_INCREMENTAL_MJ
    /*
     * Write barrier for stores to the iterator. We only need to take a write
     * barrier if NativeIterator::obj is actually going to change.
     */
    if (cx->compartment->needsBarrier()) {
        Jump j = masm.branchPtr(Assembler::NotEqual,
                                Address(nireg, offsetof(NativeIterator, obj)), reg);
        stubcc.linkExit(j, Uses(1));
    }
#endif

    /* Found a match with the most recent iterator. Hooray! */

    /* Mark iterator as active. */
    masm.storePtr(reg, Address(nireg, offsetof(NativeIterator, obj)));
    masm.load32(flagsAddr, T1);
    masm.or32(Imm32(JSITER_ACTIVE), T1);
    masm.store32(T1, flagsAddr);

    /* Chain onto the active iterator stack. */
    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));

    frame.freeReg(nireg);
    frame.freeReg(T1);
    frame.freeReg(T2);

    /* Out-of-line fallback: all of the guards above exit to stubs::Iter. */
    stubcc.leave();
    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);

    /* Push the iterator object. */
    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);

    stubcc.rejoin(Changes(1));

    return true;
}
6223 :
6224 : /*
6225 : * This big nasty function implements JSOP_ITERNEXT, which is used in the head
6226 : * of a for-in loop to put the next value on the stack.
6227 : */
/*
 * Emit JSOP_ITERNEXT: fast path reads the next property name (a string)
 * from the NativeIterator's cursor and bumps the cursor; non-native or
 * value (JSITER_FOREACH) iterators exit to stubs::IterNext.
 */
void
mjit::Compiler::iterNext(ptrdiff_t offset)
{
    FrameEntry *fe = frame.peek(-offset);
    RegisterID reg = frame.tempRegForData(fe);

    /* Is it worth trying to pin this longer? Prolly not. */
    frame.pinReg(reg);
    RegisterID T1 = frame.allocReg();
    frame.unpinReg(reg);

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
    stubcc.linkExit(notFast, Uses(1));

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);

    RegisterID T3 = frame.allocReg();
    RegisterID T4 = frame.allocReg();

    /* Test for a value iterator, which could come through an Iterator object. */
    masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
    notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
    stubcc.linkExit(notFast, Uses(1));

    RegisterID T2 = frame.allocReg();

    /* Get cursor. */
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);

    /* Get the next string in the iterator. */
    masm.loadPtr(T2, T3);

    /* It's safe to increase the cursor now. */
    masm.addPtr(Imm32(sizeof(JSString*)), T2, T4);
    masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));

    frame.freeReg(T4);
    frame.freeReg(T1);
    frame.freeReg(T2);

    /* Out-of-line fallback takes the stack offset of the iterator object. */
    stubcc.leave();
    stubcc.masm.move(Imm32(offset), Registers::ArgReg1);
    OOL_STUBCALL(stubs::IterNext, REJOIN_FALLTHROUGH);

    frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);

    /* Join with the stub call. */
    stubcc.rejoin(Changes(1));
}
6279 :
/*
 * Emit JSOP_MOREITER fused with the following branch to |target|: the fast
 * path compares props_cursor against props_end; non-native/value iterators
 * exit to stubs::IterMore and branch on its boolean return.
 */
bool
mjit::Compiler::iterMore(jsbytecode *target)
{
    /* This emits a branch, so the frame must be synced for the target first. */
    if (!frame.syncForBranch(target, Uses(1)))
        return false;

    FrameEntry *fe = frame.peek(-1);
    RegisterID reg = frame.tempRegForData(fe);
    RegisterID tempreg = frame.allocReg();

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, tempreg, &IteratorClass);
    stubcc.linkExitForBranch(notFast);

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, reg, JSObject::ITER_CLASS_NFIXED_SLOTS);

    /* Test that the iterator supports fast iteration. */
    notFast = masm.branchTest32(Assembler::NonZero, Address(reg, offsetof(NativeIterator, flags)),
                                Imm32(JSITER_FOREACH));
    stubcc.linkExitForBranch(notFast);

    /* Get props_cursor, test */
    masm.loadPtr(Address(reg, offsetof(NativeIterator, props_cursor)), tempreg);
    masm.loadPtr(Address(reg, offsetof(NativeIterator, props_end)), reg);

    /* More properties remain while cursor < end. */
    Jump jFast = masm.branchPtr(Assembler::LessThan, tempreg, reg);

    stubcc.leave();
    OOL_STUBCALL(stubs::IterMore, REJOIN_BRANCH);
    Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
                                      Registers::ReturnReg);

    stubcc.rejoin(Changes(1));
    frame.freeReg(tempreg);

    /* Take the branch when either the fast or the slow test succeeded. */
    return jumpAndRun(jFast, target, &j);
}
6318 :
6319 : void
6320 4453 : mjit::Compiler::iterEnd()
6321 : {
6322 4453 : FrameEntry *fe= frame.peek(-1);
6323 4453 : RegisterID reg = frame.tempRegForData(fe);
6324 :
6325 4453 : frame.pinReg(reg);
6326 4453 : RegisterID T1 = frame.allocReg();
6327 4453 : frame.unpinReg(reg);
6328 :
6329 : /* Test clasp */
6330 4453 : Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
6331 4453 : stubcc.linkExit(notIterator, Uses(1));
6332 :
6333 : /* Get private from iter obj. */
6334 4453 : masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);
6335 :
6336 4453 : RegisterID T2 = frame.allocReg();
6337 :
6338 : /* Load flags. */
6339 4453 : Address flagAddr(T1, offsetof(NativeIterator, flags));
6340 4453 : masm.loadPtr(flagAddr, T2);
6341 :
6342 : /* Test for a normal enumerate iterator. */
6343 4453 : Jump notEnumerate = masm.branchTest32(Assembler::Zero, T2, Imm32(JSITER_ENUMERATE));
6344 4453 : stubcc.linkExit(notEnumerate, Uses(1));
6345 :
6346 : /* Clear active bit. */
6347 4453 : masm.and32(Imm32(~JSITER_ACTIVE), T2);
6348 4453 : masm.storePtr(T2, flagAddr);
6349 :
6350 : /* Reset property cursor. */
6351 4453 : masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
6352 4453 : masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
6353 :
6354 : /* Advance enumerators list. */
6355 4453 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
6356 4453 : masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
6357 4453 : masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
6358 :
6359 4453 : frame.freeReg(T1);
6360 4453 : frame.freeReg(T2);
6361 :
6362 4453 : stubcc.leave();
6363 4453 : OOL_STUBCALL(stubs::EndIter, REJOIN_FALLTHROUGH);
6364 :
6365 4453 : frame.pop();
6366 :
6367 4453 : stubcc.rejoin(Changes(1));
6368 4453 : }
6369 :
/* Slow path for global-name gets: call the generic Name stub. */
void
mjit::Compiler::jsop_getgname_slow(uint32_t index)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Name, REJOIN_GETTER);
    testPushedType(REJOIN_GETTER, 0, /* ool = */ false);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6378 :
/*
 * Emit JSOP_BINDGNAME: push the global object. When the compiler knows the
 * global (compileAndGo) it is pushed as a constant; otherwise a stub call
 * fetches it at runtime.
 */
void
mjit::Compiler::jsop_bindgname()
{
    if (globalObj) {
        frame.push(ObjectValue(*globalObj));
        return;
    }

    /* :TODO: this is slower than it needs to be. */
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::BindGlobalName, REJOIN_NONE);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
}
6393 :
/*
 * Emit JSOP_GETGNAME: read a global name. Fast cases, in order:
 * well-known constants (undefined/NaN/Infinity), singleton objects,
 * a direct baked-in slot address when type inference proves the property
 * is a stable data property, and finally a monomorphic GetGlobalName IC.
 */
void
mjit::Compiler::jsop_getgname(uint32_t index)
{
    /* Optimize undefined, NaN and Infinity. */
    PropertyName *name = script->getName(index);
    if (name == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
        frame.push(UndefinedValue());
        return;
    }
    if (name == cx->runtime->atomState.NaNAtom) {
        frame.push(cx->runtime->NaNValue);
        return;
    }
    if (name == cx->runtime->atomState.InfinityAtom) {
        frame.push(cx->runtime->positiveInfinityValue);
        return;
    }

    /* Optimize singletons like Math for JSOP_CALLPROP. */
    JSObject *obj = pushedSingleton(0);
    if (obj && !hasTypeBarriers(PC) && testSingletonProperty(globalObj, ATOM_TO_JSID(name))) {
        frame.push(ObjectValue(*obj));
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    JSValueType type = knownPushedType(0);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        types::TypeSet *propertyTypes = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!propertyTypes)
            return;

        /*
         * If we are accessing a defined global which is a normal data property
         * then bake its address into the jitcode and guard against future
         * reallocation of the global object's slots.
         */
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) {
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            if (!value->isUndefined() &&
                !propertyTypes->isOwnProperty(cx, globalObj->getType(cx), true)) {
                /* Recompile if the global's slots are ever reallocated. */
                watchGlobalReallocation();
                RegisterID reg = frame.allocReg();
                masm.move(ImmPtr(value), reg);

                BarrierState barrier = pushAddressMaybeBarrier(Address(reg), type, true);
                finishBarrier(barrier, REJOIN_GETTER, 0);
                return;
            }
        }
    }

#if defined JS_MONOIC
    jsop_bindgname();

    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);

    GetGlobalNameICInfo ic;
    RESERVE_IC_SPACE(masm);
    RegisterID objReg;
    Jump shapeGuard;

    ic.fastPathStart = masm.label();
    if (fe->isConstant()) {
        /* Known global object: guard on its shape via a patchable immediate. */
        JSObject *obj = &fe->getValue().toObject();
        frame.pop();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        masm.loadPtrFromImm(obj->addressOfShape(), objReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, objReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), objReg);
    } else {
        /* Global object in a register: load and guard its shape. */
        objReg = frame.ownRegForData(fe);
        frame.pop();
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, reg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(0));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::GetGlobalName, REJOIN_GETTER);

    CHECK_IC_SPACE();

    testPushedType(REJOIN_GETTER, 0);

    /* Garbage value. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg);
    Address address(objReg, slot);

    /* Allocate any register other than objReg. */
    RegisterID treg = frame.allocReg();
    /* After dreg is loaded, it's safe to clobber objReg. */
    RegisterID dreg = objReg;

    /* The IC patches this load with the real slot offset. */
    ic.load = masm.loadValueWithAddressOffsetPatch(address, treg, dreg);

    frame.pushRegs(treg, dreg, type);

    /*
     * Note: no undefined check is needed for GNAME opcodes. These were not
     * declared with 'var', so cannot be undefined without triggering an error
     * or having been a pre-existing global whose value is undefined (which
     * type inference will know about).
     */
    BarrierState barrier = testBarrier(treg, dreg);

    stubcc.rejoin(Changes(1));

    getGlobalNames.append(ic);
    finishBarrier(barrier, REJOIN_GETTER, 0);
#else
    jsop_getgname_slow(index);
#endif

}
6523 :
/* Slow path for global-name sets: call the (strict-aware) SetGlobalName stub. */
void
mjit::Compiler::jsop_setgname_slow(PropertyName *name)
{
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(name), Registers::ArgReg1);
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalName), REJOIN_FALLTHROUGH);
    frame.popn(2);
    pushSyncedEntry(0);
}
6533 :
/*
 * Emit JSOP_SETGNAME: store to a global name. Fast cases, in order: when
 * type inference proves a stable writable data property, store straight
 * into its baked-in slot address; otherwise use a monomorphic
 * SetGlobalName IC. Monitored names and incremental-GC barriers force the
 * slow stub path.
 */
void
mjit::Compiler::jsop_setgname(PropertyName *name, bool popGuaranteed)
{
    if (monitored(PC)) {
        /* Global accesses are monitored only for a few names like __proto__. */
        jsop_setgname_slow(name);
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        /*
         * Note: object branding is disabled when inference is enabled. With
         * branding there is no way to ensure that a non-function property
         * can't get a function later and cause the global object to become
         * branded, requiring a shape change if it changes again.
         */
        types::TypeSet *types = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!types)
            return;
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && !shape->isMethod() && shape->hasDefaultSetter() &&
            shape->writable() && shape->hasSlot() &&
            !types->isOwnProperty(cx, globalObj->getType(cx), true)) {
            /* Recompile if the global's slots are ever reallocated. */
            watchGlobalReallocation();
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            RegisterID reg = frame.allocReg();
#ifdef JSGC_INCREMENTAL_MJ
            /* Write barrier. */
            if (cx->compartment->needsBarrier() && types->needsBarrier(cx)) {
                stubcc.linkExit(masm.jump(), Uses(0));
                stubcc.leave();
                stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
                OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif
            masm.move(ImmPtr(value), reg);
            frame.storeTo(frame.peek(-1), Address(reg), popGuaranteed);
            frame.shimmy(1);
            frame.freeReg(reg);
            return;
        }
    }

#ifdef JSGC_INCREMENTAL_MJ
    /* Write barrier. */
    if (cx->compartment->needsBarrier()) {
        jsop_setgname_slow(name);
        return;
    }
#endif

#if defined JS_MONOIC
    FrameEntry *objFe = frame.peek(-2);
    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);

    if (!fe->isConstant() && fe->isType(JSVAL_TYPE_DOUBLE))
        frame.forgetKnownDouble(fe);

    SetGlobalNameICInfo ic;

    frame.pinEntry(fe, ic.vr);
    Jump shapeGuard;

    RESERVE_IC_SPACE(masm);

    ic.fastPathStart = masm.label();
    if (objFe->isConstant()) {
        /* Known global object: guard on its shape via a patchable immediate. */
        JSObject *obj = &objFe->getValue().toObject();
        JS_ASSERT(obj->isNative());

        ic.objReg = frame.allocReg();
        ic.shapeReg = ic.objReg;
        ic.objConst = true;

        masm.loadPtrFromImm(obj->addressOfShape(), ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), ic.objReg);
    } else {
        /* Global object in a register: load and guard its shape. */
        ic.objReg = frame.copyDataIntoReg(objFe);
        ic.shapeReg = frame.allocReg();
        ic.objConst = false;

        masm.loadShape(ic.objReg, ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(ic.shapeReg);
    }
    ic.shapeGuardJump = shapeGuard;
    ic.slowPathStart = stubcc.linkExit(shapeGuard, Uses(2));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::SetGlobalName, REJOIN_FALLTHROUGH);

    /* Garbage value. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(ic.objReg, JSObject::offsetOfSlots()), ic.objReg);
    Address address(ic.objReg, slot);

    /* The IC patches this store with the real slot offset. */
    if (ic.vr.isConstant()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.value(), address);
    } else if (ic.vr.isTypeKnown()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ImmType(ic.vr.knownType()),
                                                         ic.vr.dataReg(), address);
    } else {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.typeReg(), ic.vr.dataReg(), address);
    }

    frame.freeReg(ic.objReg);
    frame.unpinEntry(ic.vr);
    frame.shimmy(1);

    stubcc.rejoin(Changes(1));

    ic.fastPathRejoin = masm.label();
    setGlobalNames.append(ic);
#else
    jsop_setgname_slow(name);
#endif
}
6660 :
/* Slow path for JSOP_SETELEM: call the (strict-aware) SetElem stub. */
void
mjit::Compiler::jsop_setelem_slow()
{
    prepareStubCall(Uses(3));
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem), REJOIN_FALLTHROUGH);
    frame.popn(3);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6669 :
/* Slow path for JSOP_GETELEM: call the GetElem stub. */
void
mjit::Compiler::jsop_getelem_slow()
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::GetElem, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, -2, /* ool = */ false);
    frame.popn(2);
    pushSyncedEntry(0);
}
6679 :
/*
 * Emit JSOP_INSTANCEOF. The fast path requires an object LHS and a plain
 * (non-bound) function RHS: it fetches rhs.prototype via jsop_getprop and
 * walks the LHS prototype chain inline; everything else exits to the
 * InstanceOf / FastInstanceOf stubs.
 */
bool
mjit::Compiler::jsop_instanceof()
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    // The fast path applies only when both operands are objects.
    if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
        stubcc.linkExit(masm.jump(), Uses(2));
        frame.discardFe(lhs);
        frame.discardFe(rhs);
    }

    MaybeJump firstSlow;
    if (!rhs->isTypeKnown()) {
        Jump j = frame.testObject(Assembler::NotEqual, rhs);
        stubcc.linkExit(j, Uses(2));
    }

    frame.forgetMismatchedObject(lhs);
    frame.forgetMismatchedObject(rhs);

    RegisterID tmp = frame.allocReg();
    RegisterID obj = frame.tempRegForData(rhs);

    /* The RHS must be a function object. */
    masm.loadBaseShape(obj, tmp);
    Jump notFunction = masm.branchPtr(Assembler::NotEqual,
                                      Address(tmp, BaseShape::offsetOfClass()),
                                      ImmPtr(&FunctionClass));

    stubcc.linkExit(notFunction, Uses(2));

    /* Test for bound functions. */
    Jump isBound = masm.branchTest32(Assembler::NonZero,
                                     Address(tmp, BaseShape::offsetOfFlags()),
                                     Imm32(BaseShape::BOUND_FUNCTION));
    {
        stubcc.linkExit(isBound, Uses(2));
        stubcc.leave();
        OOL_STUBCALL(stubs::InstanceOf, REJOIN_FALLTHROUGH);
        firstSlow = stubcc.masm.jump();
    }

    frame.freeReg(tmp);

    /* This is sadly necessary because the error case needs the object. */
    frame.dup();

    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN))
        return false;

    /* Primitive prototypes are invalid. */
    rhs = frame.peek(-1);
    Jump j = frame.testPrimitive(Assembler::Equal, rhs);
    stubcc.linkExit(j, Uses(3));

    /* Allocate registers up front, because of branchiness. */
    obj = frame.copyDataIntoReg(lhs);
    RegisterID proto = frame.copyDataIntoReg(rhs);
    RegisterID temp = frame.allocReg();

    MaybeJump isFalse;
    if (!lhs->isTypeKnown())
        isFalse = frame.testPrimitive(Assembler::Equal, lhs);

    Label loop = masm.label();

    /* Walk prototype chain, break out on NULL or hit. */
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
    masm.loadPtr(Address(obj, offsetof(types::TypeObject, proto)), obj);
    Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
    Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
    isTrue.linkTo(loop, &masm);
    masm.move(Imm32(1), temp);
    isTrue = masm.jump();

    /* Materialize the boolean result in |temp|: 0 on the false paths, 1 on true. */
    if (isFalse.isSet())
        isFalse.getJump().linkTo(masm.label(), &masm);
    isFalse2.linkTo(masm.label(), &masm);
    masm.move(Imm32(0), temp);
    isTrue.linkTo(masm.label(), &masm);

    frame.freeReg(proto);
    frame.freeReg(obj);

    stubcc.leave();
    OOL_STUBCALL(stubs::FastInstanceOf, REJOIN_FALLTHROUGH);

    frame.popn(3);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);

    if (firstSlow.isSet())
        firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
    stubcc.rejoin(Changes(1));
    return true;
}
6776 :
/*
 * Emit JSOP_EVAL: always a stub call. Uses(argc + 2) covers the callee,
 * |this| and the arguments.
 */
void
mjit::Compiler::emitEval(uint32_t argc)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    frame.syncAndKill(Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::Eval, REJOIN_FALLTHROUGH);
    frame.popn(argc + 2);
    pushSyncedEntry(0);
}
6790 :
/* Emit JSOP_ARGUMENTS via the Arguments stub; the caller supplies the rejoin state. */
void
mjit::Compiler::jsop_arguments(RejoinState rejoin)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Arguments, rejoin);
}
6797 :
bool
mjit::Compiler::jsop_newinit()
{
    /*
     * Compile JSOP_NEWINIT / JSOP_NEWARRAY / JSOP_NEWOBJECT. When a template
     * object can be built at compile time, emit an inline allocation from it
     * with a stub fallback; otherwise call the NewInit stub unconditionally.
     */
    bool isArray;
    unsigned count = 0;
    JSObject *baseobj = NULL;

    /* Decode the initializer kind and any baked-in size / base object. */
    switch (*PC) {
      case JSOP_NEWINIT:
        isArray = (GET_UINT8(PC) == JSProto_Array);
        break;
      case JSOP_NEWARRAY:
        isArray = true;
        count = GET_UINT24(PC);
        break;
      case JSOP_NEWOBJECT:
        /*
         * Scripts with NEWOBJECT must be compileAndGo, but treat these like
         * NEWINIT if the script's associated global is not known (or is not
         * actually a global object). This should only happen in chrome code.
         */
        isArray = false;
        baseobj = globalObj ? script->getObject(GET_UINT32_INDEX(PC)) : NULL;
        break;
      default:
        JS_NOT_REACHED("Bad op");
        return false;
    }

    /* Pick the stub and its argument, used by both slow and OOL paths. */
    void *stub, *stubArg;
    if (isArray) {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitArray);
        stubArg = (void *) uintptr_t(count);
    } else {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitObject);
        stubArg = (void *) baseobj;
    }

    /* Don't bake in types for non-compileAndGo scripts. */
    types::TypeObject *type = NULL;
    if (globalObj) {
        type = types::TypeScript::InitObject(cx, script, PC,
                                             isArray ? JSProto_Array : JSProto_Object);
        if (!type)
            return false;
    }

    /* Largest array whose elements fit in the largest-kind object's slots. */
    size_t maxArraySlots =
        gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST) - ObjectElements::VALUES_PER_HEADER;

    /*
     * Stub-only path: no inference, no known global, array too large for the
     * inline allocation, or object base unknown / requiring dynamic slots.
     */
    if (!cx->typeInferenceEnabled() ||
        !globalObj ||
        (isArray && count > maxArraySlots) ||
        (!isArray && !baseobj) ||
        (!isArray && baseobj->hasDynamicSlots())) {
        prepareStubCall(Uses(0));
        /* Stash the new object's type in VMFrame::scratch for the stub. */
        masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
        masm.move(ImmPtr(stubArg), Registers::ArgReg1);
        INLINE_STUBCALL(stub, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);

        frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
        frame.extra(frame.peek(-1)).initObject = baseobj;

        return true;
    }

    /* Build the template object used by the inline allocation path. */
    JSObject *templateObject;
    if (isArray) {
        templateObject = NewDenseUnallocatedArray(cx, count);
        if (!templateObject)
            return false;
        templateObject->setType(type);
    } else {
        templateObject = CopyInitializerObject(cx, baseobj, type);
        if (!templateObject)
            return false;
    }

    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

    /* Fall back to the stub when the inline allocation fails. */
    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
    stubcc.masm.move(ImmPtr(stubArg), Registers::ArgReg1);
    OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));

    frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
    frame.extra(frame.peek(-1)).initObject = baseobj;

    return true;
}
6895 :
bool
mjit::Compiler::jsop_regexp()
{
    JSObject *obj = script->getRegExp(GET_UINT32_INDEX(PC));
    RegExpStatics *res = globalObj ? globalObj->getRegExpStatics() : NULL;

    /*
     * Call the RegExp stub for the general case: no known global, a regexp
     * from a different global, inference disabled, aliased locals, regexp
     * statics whose flags have been set, or an incremental GC mark phase in
     * progress (see the RegExpShared comment below).
     */
    if (!globalObj ||
        &obj->global() != globalObj ||
        !cx->typeInferenceEnabled() ||
        analysis->localsAliasStack() ||
        types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
        cx->runtime->gcIncrementalState == gc::MARK)
    {
        prepareStubCall(Uses(0));
        masm.move(ImmPtr(obj), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);
        return true;
    }

    RegExpObject *reobj = &obj->asRegExp();

    DebugOnly<uint32_t> origFlags = reobj->getFlags();
    DebugOnly<uint32_t> staticsFlags = res->getFlags();
    JS_ASSERT((origFlags & staticsFlags) == staticsFlags);

    /*
     * JS semantics require regular expression literals to create different
     * objects every time they execute. We only need to do this cloning if the
     * script could actually observe the effect of such cloning, by getting
     * or setting properties on it. Particular RegExp and String natives take
     * regular expressions as 'this' or an argument, and do not let that
     * expression escape and be accessed by the script, so avoid cloning in
     * these cases.
     */
    analyze::SSAUseChain *uses =
        analysis->useChain(analyze::SSAValue::PushedValue(PC - script->code, 0));
    if (uses && uses->popped && !uses->next && !reobj->global() && !reobj->sticky()) {
        jsbytecode *use = script->code + uses->offset;
        uint32_t which = uses->u.which;
        if (JSOp(*use) == JSOP_CALLPROP) {
            JSObject *callee = analysis->pushedTypes(use, 0)->getSingleton(cx);
            if (callee && callee->isFunction()) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::regexp_exec || native == js::regexp_test) {
                    /* Sole use is exec()/test() 'this': reuse the literal. */
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        } else if (JSOp(*use) == JSOP_CALL && which == 0) {
            uint32_t argc = GET_ARGC(use);
            JSObject *callee = analysis->poppedTypes(use, argc + 1)->getSingleton(cx);
            if (callee && callee->isFunction() && argc >= 1 && which == argc - 1) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::str_match ||
                    native == js::str_search ||
                    native == js::str_replace ||
                    native == js::str_split) {
                    /* Sole use is the last argument to a String native: reuse it. */
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        }
    }

    /*
     * Force creation of the RegExpShared in the script's RegExpObject so that
     * we grab it in the getNewObject template copy. Note that JIT code is
     * discarded on every GC, which permits us to burn in the pointer to the
     * RegExpShared. We don't do this during an incremental
     * GC, since we don't discard JIT code after every marking slice.
     */
    RegExpGuard g;
    if (!reobj->getShared(cx, &g))
        return false;

    /* Inline-allocate the clone, with a RegExp-stub fallback. */
    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, obj);

    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.move(ImmPtr(obj), Registers::ArgReg1);
    OOL_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));
    return true;
}
6987 :
bool
mjit::Compiler::startLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget)
{
    /*
     * Begin compiling a loop: push a new LoopState onto the loop nesting
     * chain and point the frame at it. Only used with type inference, and
     * only in the outermost script being compiled.
     */
    JS_ASSERT(cx->typeInferenceEnabled() && script == outerScript);
    JS_ASSERT(shouldStartLoop(head));

    if (loop) {
        /*
         * Convert all loop registers in the outer loop into unassigned registers.
         * We don't keep track of which registers the inner loop uses, so the only
         * registers that can be carried in the outer loop must be mentioned before
         * the inner loop starts.
         */
        loop->clearLoopRegisters();
    }

    LoopState *nloop = OffTheBooks::new_<LoopState>(cx, &ssa, this, &frame);
    if (!nloop || !nloop->init(head, entry, entryTarget)) {
        js_ReportOutOfMemory(cx);
        return false;
    }

    /* Link the new loop in as the innermost one. */
    nloop->outer = loop;
    loop = nloop;
    frame.setLoop(loop);

    return true;
}
7016 :
bool
mjit::Compiler::finishLoop(jsbytecode *head)
{
    /*
     * Close out the innermost loop after its backedge has been compiled:
     * patch the loop entry jump, emit out-of-line entry points for joining
     * from the interpreter, flush loop invariants, and pop the LoopState.
     */
    if (!cx->typeInferenceEnabled() || !bytecodeInChunk(head))
        return true;

    /*
     * We're done processing the current loop. Every loop has exactly one backedge
     * at the end ('continue' statements are forward jumps to the loop test),
     * and after jumpAndRun'ing on that edge we can pop it from the frame.
     */
    JS_ASSERT(loop && loop->headOffset() == uint32_t(head - script->code));

    jsbytecode *entryTarget = script->code + loop->entryOffset();

    /*
     * Fix up the jump entering the loop. We are doing this after all code has
     * been emitted for the backedge, so that we are now in the loop's fallthrough
     * (where we will emit the entry code).
     */
    Jump fallthrough = masm.jump();

#ifdef DEBUG
    if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
        RegisterAllocation *alloc = analysis->getAllocation(head);
        JaegerSpew(JSpew_Regalloc, "loop allocation at %u:", unsigned(head - script->code));
        frame.dumpAllocation(alloc);
    }
#endif

    loop->entryJump().linkTo(masm.label(), &masm);

    jsbytecode *oldPC = PC;

    /* Emit the entry-point stub call with PC temporarily at the entry target. */
    PC = entryTarget;
    {
        OOL_STUBCALL(stubs::MissedBoundsCheckEntry, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            /*
             * To do the initial load of the invariants, jump to the invariant
             * restore point after the call just emitted. :XXX: fix hackiness.
             */
            if (oomInVector)
                return false;
            Label label = callSites[callSites.length() - 1].loopJumpLabel;
            stubcc.linkExitDirect(masm.jump(), label);
        }
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }
    PC = oldPC;

    frame.prepareForJump(entryTarget, masm, true);

    if (!jumpInScript(masm.jump(), entryTarget))
        return false;

    PC = head;
    if (!analysis->getCode(head).safePoint) {
        /*
         * Emit a stub into the OOL path which loads registers from a synced state
         * and jumps to the loop head, for rejoining from the interpreter.
         */
        LoopEntry entry;
        entry.pcOffset = head - script->code;

        OOL_STUBCALL(stubs::MissedBoundsCheckHead, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            if (oomInVector)
                return false;
            entry.label = callSites[callSites.length() - 1].loopJumpLabel;
        } else {
            entry.label = stubcc.masm.label();
        }

        /*
         * The interpreter may store integers in slots we assume are doubles,
         * make sure state is consistent before joining. Note that we don't
         * need any handling for other safe points the interpreter can enter
         * from, i.e. from switch and try blocks, as we don't assume double
         * variables are coherent in such cases.
         */
        for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
            if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                FrameEntry *fe = frame.getSlotEntry(slot);
                stubcc.masm.ensureInMemoryDouble(frame.addressOf(fe));
            }
        }

        frame.prepareForJump(head, stubcc.masm, true);
        if (!stubcc.jumpInScript(stubcc.masm.jump(), head))
            return false;

        loopEntries.append(entry);
    }
    PC = oldPC;

    /* Write out loads and tests of loop invariants at all calls in the loop body. */
    loop->flushLoop(stubcc);

    /* Pop this loop from the nesting chain and free it. */
    LoopState *nloop = loop->outer;
    cx->delete_(loop);
    loop = nloop;
    frame.setLoop(loop);

    fallthrough.linkTo(masm.label(), &masm);

    /*
     * Clear all registers used for loop temporaries. In the case of loop
     * nesting, we do not allocate temporaries for the outer loop.
     */
    frame.clearTemporaries();

    return true;
}
7133 :
7134 : /*
7135 : * The state at the fast jump must reflect the frame's current state. If specified
7136 : * the state at the slow jump must be fully synced.
7137 : *
7138 : * The 'trampoline' argument indicates whether a trampoline was emitted into
7139 : * the OOL path loading some registers for the target. If this is the case,
7140 : * the fast path jump was redirected to the stub code's initial label, and the
7141 : * same must happen for any other fast paths for the target (i.e. paths from
7142 : * inline caches).
7143 : *
7144 : * The 'fallthrough' argument indicates this is a jump emitted for a fallthrough
7145 : * at the end of the compiled chunk. In this case the opcode may not be a
7146 : * JOF_JUMP opcode, and the compiler should not watch for fusions.
7147 : */
bool
mjit::Compiler::jumpAndRun(Jump j, jsbytecode *target, Jump *slow, bool *trampoline,
                           bool fallthrough)
{
    if (trampoline)
        *trampoline = false;

    /*
     * Jumps whose target lies outside the current chunk (only possible when
     * not inlining) are recorded as outgoing chunk edges for later patching.
     */
    if (!a->parent && !bytecodeInChunk(target)) {
        /*
         * syncForBranch() must have ensured the stack is synced. Figure out
         * the source of the jump, which may be the opcode after PC if two ops
         * were fused for a branch.
         */
        OutgoingChunkEdge edge;
        edge.source = PC - outerScript->code;
        JSOp op = JSOp(*PC);
        if (!fallthrough && !(js_CodeSpec[op].format & JOF_JUMP) && op != JSOP_TABLESWITCH)
            edge.source += GetBytecodeLength(PC);
        edge.target = target - outerScript->code;
        edge.fastJump = j;
        if (slow)
            edge.slowJump = *slow;
        chunkEdges.append(edge);
        return true;
    }

    /*
     * Unless we are coming from a branch which synced everything, syncForBranch
     * must have been called and ensured an allocation at the target.
     */
    RegisterAllocation *lvtarget = NULL;
    bool consistent = true;
    if (cx->typeInferenceEnabled()) {
        RegisterAllocation *&alloc = analysis->getAllocation(target);
        if (!alloc) {
            alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
            if (!alloc) {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        lvtarget = alloc;
        consistent = frame.consistentRegisters(target);
    }

    if (!lvtarget || lvtarget->synced()) {
        /* Target expects a fully synced state: both jumps go straight there. */
        JS_ASSERT(consistent);
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
    } else {
        if (consistent) {
            if (!jumpInScript(j, target))
                return false;
        } else {
            /*
             * Make a trampoline to issue remaining loads for the register
             * state at target.
             */
            Label start = stubcc.masm.label();
            stubcc.linkExitDirect(j, start);
            frame.prepareForJump(target, stubcc.masm, false);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
            if (trampoline)
                *trampoline = true;
            if (pcLengths) {
                /*
                 * This is OOL code but will usually be executed, so track
                 * it in the CODE_LENGTH for the opcode.
                 */
                uint32_t offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
                size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
                pcLengths[offset].codeLength += length;
            }
        }

        if (slow) {
            /* Slow path is fully synced: load the target's registers first. */
            slow->linkTo(stubcc.masm.label(), &stubcc.masm);
            frame.prepareForJump(target, stubcc.masm, true);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
        }
    }

    /* A backwards jump is the loop backedge; close out the loop. */
    if (target < PC)
        return finishLoop(target);
    return true;
}
7238 :
void
mjit::Compiler::enterBlock(StaticBlockObject *block)
{
    /* For now, don't bother doing anything for this opcode. */
    frame.syncAndForgetEverything();
    masm.move(ImmPtr(block), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::EnterBlock, REJOIN_NONE);
    /* Grow the tracked stack by the number of values the op defines. */
    if (*PC == JSOP_ENTERBLOCK)
        frame.enterBlock(StackDefs(script, PC));
}
7249 :
void
mjit::Compiler::leaveBlock()
{
    /*
     * Note: After bug 535912, we can pass the block obj directly, inline
     * PutBlockObject, and do away with the muckiness in PutBlockObject.
     */
    /* Call the LeaveBlock stub, then drop the block's stack values. */
    uint32_t n = StackUses(script, PC);
    prepareStubCall(Uses(n));
    INLINE_STUBCALL(stubs::LeaveBlock, REJOIN_NONE);
    frame.leaveBlock(n);
}
7262 :
7263 : // Creates the new object expected for constructors, and places it in |thisv|.
7264 : // It is broken down into the following operations:
7265 : // CALLEE
7266 : // GETPROP "prototype"
7267 : // IFPRIMTOP:
7268 : // NULL
7269 : // call js_CreateThisFromFunctionWithProto(...)
7270 : //
bool
mjit::Compiler::constructThis()
{
    JS_ASSERT(isConstructing);

    JSFunction *fun = script->function();

    do {
        /*
         * Fast path requires inference, a singleton callee, and known
         * properties on the callee's type.
         */
        if (!cx->typeInferenceEnabled() ||
            !fun->hasSingletonType() ||
            fun->getType(cx)->unknownProperties())
        {
            break;
        }

        /* Look up a singleton value for the callee's .prototype property. */
        jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
        types::TypeSet *protoTypes = fun->getType(cx)->getProperty(cx, id, false);

        JSObject *proto = protoTypes->getSingleton(cx, true);
        if (!proto)
            break;

        /*
         * Generate an inline path to create a 'this' object with the given
         * prototype. Only do this if the type is actually known as a possible
         * 'this' type of the script.
         */
        types::TypeObject *type = proto->getNewType(cx, fun);
        if (!type)
            return false;
        if (!types::TypeScript::ThisTypes(script)->hasType(types::Type::ObjectType(type)))
            break;

        JSObject *templateObject = js_CreateThisForFunctionWithProto(cx, fun, proto);
        if (!templateObject)
            return false;

        /*
         * The template incorporates a shape and/or fixed slots from any
         * newScript on its type, so make sure recompilation is triggered
         * should this information change later.
         */
        if (templateObject->type()->newScript)
            types::TypeSet::WatchObjectStateChange(cx, templateObject->type());

        RegisterID result = frame.allocReg();
        Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

        /* Allocation failure: create 'this' through the CreateThis stub. */
        stubcc.linkExit(emptyFreeList, Uses(0));
        stubcc.leave();

        stubcc.masm.move(ImmPtr(proto), Registers::ArgReg1);
        OOL_STUBCALL(stubs::CreateThis, REJOIN_RESUME);

        frame.setThis(result);

        stubcc.rejoin(Changes(1));
        return true;
    } while (false);

    // Load the callee.
    frame.pushCallee();

    // Get callee.prototype.
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN, false, /* forPrototype = */ true))
        return false;

    // Reach into the proto Value and grab a register for its data.
    FrameEntry *protoFe = frame.peek(-1);
    RegisterID protoReg = frame.ownRegForData(protoFe);

    // Now, get the type. If it's not an object, set protoReg to NULL.
    JS_ASSERT_IF(protoFe->isTypeKnown(), protoFe->isType(JSVAL_TYPE_OBJECT));
    if (!protoFe->isType(JSVAL_TYPE_OBJECT)) {
        Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
        stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
        stubcc.masm.move(ImmPtr(NULL), protoReg);
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }

    // Done with the protoFe.
    frame.pop();

    /* Call CreateThis with the (possibly NULL) proto in ArgReg1. */
    prepareStubCall(Uses(0));
    if (protoReg != Registers::ArgReg1)
        masm.move(protoReg, Registers::ArgReg1);
    INLINE_STUBCALL(stubs::CreateThis, REJOIN_RESUME);
    frame.freeReg(protoReg);
    return true;
}
7361 :
bool
mjit::Compiler::jsop_tableswitch(jsbytecode *pc)
{
#if defined JS_CPU_ARM
    JS_NOT_REACHED("Implement jump(BaseIndex) for ARM");
    return true;
#else
    jsbytecode *originalPC = pc;
    DebugOnly<JSOp> op = JSOp(*originalPC);
    JS_ASSERT(op == JSOP_TABLESWITCH);

    /* Decode the switch: default offset, then the [low, high] case range. */
    uint32_t defaultTarget = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;

    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int numJumps = high + 1 - low;
    JS_ASSERT(numJumps >= 0);

    /*
     * Stub the whole switch when the operand is known not to be an int32 or
     * the jump table would be too large.
     */
    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
        frame.syncAndForgetEverything();
        masm.move(ImmPtr(originalPC), Registers::ArgReg1);

        /* prepareStubCall() is not needed due to forgetEverything() */
        INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        frame.pop();
        masm.jump(Registers::ReturnReg);
        return true;
    }

    /* Load the operand's int32 payload into dataReg. */
    RegisterID dataReg;
    if (fe->isConstant()) {
        JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
        dataReg = frame.allocReg();
        masm.move(Imm32(fe->getValue().toInt32()), dataReg);
    } else {
        dataReg = frame.copyDataIntoReg(fe);
    }

    RegisterID reg = frame.allocReg();
    frame.syncAndForgetEverything();

    /* If the operand's type is not known, test for int32 at runtime. */
    MaybeJump notInt;
    if (!fe->isType(JSVAL_TYPE_INT32))
        notInt = masm.testInt32(Assembler::NotEqual, frame.addressOf(fe));

    /* Record the jump table; its address is patched into 'reg' later. */
    JumpTable jt;
    jt.offsetIndex = jumpTableEdges.length();
    jt.label = masm.moveWithPatch(ImmPtr(NULL), reg);
    jumpTables.append(jt);

    /* One edge per table entry; an offset of 0 means the default target. */
    for (int i = 0; i < numJumps; i++) {
        uint32_t target = GET_JUMP_OFFSET(pc);
        if (!target)
            target = defaultTarget;
        JumpTableEdge edge;
        edge.source = originalPC - script->code;
        edge.target = (originalPC + target) - script->code;
        jumpTableEdges.append(edge);
        pc += JUMP_OFFSET_LEN;
    }

    /* Rebase the operand to the table's low bound and bounds-check it. */
    if (low != 0)
        masm.sub32(Imm32(low), dataReg);
    Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
    BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
    masm.jump(jumpTarget);

    /* Non-int32 operands take the TableSwitch stub out of line. */
    if (notInt.isSet()) {
        stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
        stubcc.leave();
        stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
        OOL_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        stubcc.masm.jump(Registers::ReturnReg);
    }
    frame.pop();
    return jumpAndRun(defaultCase, originalPC + defaultTarget);
#endif
}
7443 :
void
mjit::Compiler::jsop_toid()
{
    /* Leave integers alone, stub everything else. */
    FrameEntry *top = frame.peek(-1);

    if (top->isType(JSVAL_TYPE_INT32))
        return;

    /* Known non-int32: unconditionally call the ToId stub. */
    if (top->isNotType(JSVAL_TYPE_INT32)) {
        prepareStubCall(Uses(2));
        INLINE_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);
        frame.pop();
        pushSyncedEntry(0);
        return;
    }

    /* Unknown type: test for int32 inline, stub only the non-int32 case. */
    frame.syncAt(-1);

    Jump j = frame.testInt32(Assembler::NotEqual, top);
    stubcc.linkExit(j, Uses(2));

    stubcc.leave();
    OOL_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);

    frame.pop();
    pushSyncedEntry(0);

    stubcc.rejoin(Changes(1));
}
7474 :
void
mjit::Compiler::jsop_in()
{
    FrameEntry *obj = frame.peek(-1);
    FrameEntry *id = frame.peek(-2);

    /*
     * Fast path for 'int32 in denseArray': compare the index against the
     * array's initialized length (and check for holes when the array is not
     * known to be packed) instead of calling the In stub.
     */
    if (cx->typeInferenceEnabled() && id->isType(JSVAL_TYPE_INT32)) {
        types::TypeSet *types = analysis->poppedTypes(PC, 0);

        if (obj->mightBeType(JSVAL_TYPE_OBJECT) &&
            !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY) &&
            !types::ArrayPrototypeHasIndexedProperty(cx, outerScript))
        {
            bool isPacked = !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY);

            /* Guard the operand actually is an object if its type is unknown. */
            if (!obj->isTypeKnown()) {
                Jump guard = frame.testObject(Assembler::NotEqual, obj);
                stubcc.linkExit(guard, Uses(2));
            }

            RegisterID dataReg = frame.copyDataIntoReg(obj);

            Int32Key key = id->isConstant()
                         ? Int32Key::FromConstant(id->getValue().toInt32())
                         : Int32Key::FromRegister(frame.tempRegForData(id));

            masm.loadPtr(Address(dataReg, JSObject::offsetOfElements()), dataReg);

            // Guard on the array's initialized length.
            Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
                                                      dataReg, key, Assembler::BelowOrEqual);

            // Guard to make sure we don't have a hole. Skip it if the array is packed.
            MaybeJump holeCheck;
            if (!isPacked)
                holeCheck = masm.guardElementNotHole(dataReg, key);

            /* In bounds and not a hole: the result is true. */
            masm.move(Imm32(1), dataReg);
            Jump done = masm.jump();

            /* Out of bounds or a hole: the result is false. */
            Label falseBranch = masm.label();
            initlenGuard.linkTo(falseBranch, &masm);
            if (!isPacked)
                holeCheck.getJump().linkTo(falseBranch, &masm);
            masm.move(Imm32(0), dataReg);

            done.linkTo(masm.label(), &masm);

            stubcc.leave();
            OOL_STUBCALL_USES(stubs::In, REJOIN_PUSH_BOOLEAN, Uses(2));

            frame.popn(2);
            /* On the OOL path the stub's result is in ReturnReg; move it over. */
            if (dataReg != Registers::ReturnReg)
                stubcc.masm.move(Registers::ReturnReg, dataReg);

            frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, dataReg);

            stubcc.rejoin(Changes(2));

            return;
        }
    }

    /* General case: the In stub leaves its boolean result in ReturnReg. */
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::In, REJOIN_PUSH_BOOLEAN);
    frame.popn(2);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
}
7544 :
7545 : /*
7546 : * For any locals or args which we know to be integers but are treated as
7547 : * doubles by the type inference, convert to double. These will be assumed to be
7548 : * doubles at control flow join points. This function must be called before
7549 : * branching to another opcode.
7550 : *
7551 : * We can only carry entries as doubles when we can track all incoming edges to
7552 : * a join point (no try blocks etc.) and when we can track all writes to the
7553 : * local/arg (the slot does not escape) and ensure the Compiler representation
7554 : * matches the inferred type for the variable's SSA value. These properties are
7555 : * both ensured by analysis->trackSlot.
7556 : */
void
mjit::Compiler::fixDoubleTypes(jsbytecode *target)
{
    if (!cx->typeInferenceEnabled())
        return;

    /*
     * Fill fixedIntToDoubleEntries with all variables that are known to be an
     * int here and a double at the branch target, and fixedDoubleToAnyEntries
     * with all variables that are known to be a double here but not at the
     * branch target.
     *
     * Per prepareInferenceTypes, the target state consists of the current
     * state plus any phi nodes or other new values introduced at the target.
     */
    JS_ASSERT(fixedIntToDoubleEntries.empty());
    JS_ASSERT(fixedDoubleToAnyEntries.empty());
    const SlotValue *newv = analysis->newValues(target);
    if (newv) {
        while (newv->slot) {
            /* Only tracked slots with a phi node at the target itself matter. */
            if (newv->value.kind() != SSAValue::PHI ||
                newv->value.phiOffset() != uint32_t(target - script->code) ||
                !analysis->trackSlot(newv->slot)) {
                newv++;
                continue;
            }
            JS_ASSERT(newv->slot < TotalSlots(script));
            types::TypeSet *targetTypes = analysis->getValueTypes(newv->value);
            FrameEntry *fe = frame.getSlotEntry(newv->slot);
            VarType &vt = a->varTypes[newv->slot];
            JSValueType type = vt.getTypeTag(cx);
            if (targetTypes->getKnownTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                if (type == JSVAL_TYPE_INT32) {
                    /* Int here, double at target: convert before branching. */
                    fixedIntToDoubleEntries.append(newv->slot);
                    frame.ensureDouble(fe);
                    frame.forgetLoopReg(fe);
                } else if (type == JSVAL_TYPE_UNKNOWN) {
                    /*
                     * Unknown here but a double at the target. The type
                     * set for the existing value must be empty, so this
                     * code is doomed and we can just mark the value as
                     * a double.
                     */
                    frame.ensureDouble(fe);
                } else {
                    JS_ASSERT(type == JSVAL_TYPE_DOUBLE);
                }
            } else if (type == JSVAL_TYPE_DOUBLE) {
                /* Double here but not at target: sync and forget the entry. */
                fixedDoubleToAnyEntries.append(newv->slot);
                frame.syncAndForgetFe(fe);
                frame.forgetLoopReg(fe);
            }
            newv++;
        }
    }
}
7613 :
7614 : void
7615 122683 : mjit::Compiler::watchGlobalReallocation()
7616 : {
7617 122683 : JS_ASSERT(cx->typeInferenceEnabled());
7618 122683 : if (hasGlobalReallocation)
7619 99654 : return;
7620 23029 : types::TypeSet::WatchObjectStateChange(cx, globalObj->getType(cx));
7621 23029 : hasGlobalReallocation = true;
7622 : }
7623 :
void
mjit::Compiler::updateVarType()
{
    if (!cx->typeInferenceEnabled())
        return;

    /*
     * For any non-escaping variable written at the current opcode, update the
     * associated type sets according to the written type, keeping the type set
     * for each variable in sync with what the SSA analysis has determined
     * (see prepareInferenceTypes).
     */

    /* The written value is the one pushed at PC; find the slot it targets. */
    types::TypeSet *types = pushedTypeSet(0);
    uint32_t slot = GetBytecodeSlot(script, PC);

    if (analysis->trackSlot(slot)) {
        VarType &vt = a->varTypes[slot];
        vt.setTypes(types);

        /*
         * Variables whose type has been inferred as a double need to be
         * maintained by the frame as a double. We might forget the exact
         * representation used by the next call to fixDoubleTypes, fix it now.
         */
        if (vt.getTypeTag(cx) == JSVAL_TYPE_DOUBLE)
            frame.ensureDouble(frame.getSlotEntry(slot));
    }
}
7653 :
7654 : void
7655 455910 : mjit::Compiler::updateJoinVarTypes()
7656 : {
7657 455910 : if (!cx->typeInferenceEnabled())
7658 282314 : return;
7659 :
7660 : /* Update variable types for all new values at this bytecode. */
7661 173596 : const SlotValue *newv = analysis->newValues(PC);
7662 173596 : if (newv) {
7663 244106 : while (newv->slot) {
7664 135930 : if (newv->slot < TotalSlots(script)) {
7665 45062 : VarType &vt = a->varTypes[newv->slot];
7666 45062 : JSValueType type = vt.getTypeTag(cx);
7667 45062 : vt.setTypes(analysis->getValueTypes(newv->value));
7668 45062 : if (vt.getTypeTag(cx) != type) {
7669 : /*
7670 : * If the known type of a variable changes (even if the
7671 : * variable itself has not been reassigned) then we can't
7672 : * carry a loop register for the var.
7673 : */
7674 10207 : FrameEntry *fe = frame.getSlotEntry(newv->slot);
7675 10207 : frame.forgetLoopReg(fe);
7676 : }
7677 : }
7678 135930 : newv++;
7679 : }
7680 : }
7681 : }
7682 :
7683 : void
7684 578210 : mjit::Compiler::restoreVarType()
7685 : {
7686 578210 : if (!cx->typeInferenceEnabled())
7687 332694 : return;
7688 :
7689 245516 : uint32_t slot = GetBytecodeSlot(script, PC);
7690 :
7691 245516 : if (slot >= analyze::TotalSlots(script))
7692 12937 : return;
7693 :
7694 : /*
7695 : * Restore the known type of a live local or argument. We ensure that types
7696 : * of tracked variables match their inferred type (as tracked in varTypes),
7697 : * but may have forgotten it due to a branch or syncAndForgetEverything.
7698 : */
7699 232579 : JSValueType type = a->varTypes[slot].getTypeTag(cx);
7700 235652 : if (type != JSVAL_TYPE_UNKNOWN &&
7701 3073 : (type != JSVAL_TYPE_DOUBLE || analysis->trackSlot(slot))) {
7702 143870 : FrameEntry *fe = frame.getSlotEntry(slot);
7703 143870 : JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
7704 143870 : if (!fe->isTypeKnown())
7705 93055 : frame.learnType(fe, type, false);
7706 : }
7707 : }
7708 :
7709 : JSValueType
7710 2629794 : mjit::Compiler::knownPushedType(uint32_t pushed)
7711 : {
7712 2629794 : if (!cx->typeInferenceEnabled())
7713 2058356 : return JSVAL_TYPE_UNKNOWN;
7714 571438 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7715 571438 : return types->getKnownTypeTag(cx);
7716 : }
7717 :
7718 : bool
7719 24592 : mjit::Compiler::mayPushUndefined(uint32_t pushed)
7720 : {
7721 24592 : JS_ASSERT(cx->typeInferenceEnabled());
7722 :
7723 : /*
7724 : * This should only be used when the compiler is checking if it is OK to push
7725 : * undefined without going to a stub that can trigger recompilation.
7726 : * If this returns false and undefined subsequently becomes a feasible
7727 : * value pushed by the bytecode, recompilation will *NOT* be triggered.
7728 : */
7729 24592 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7730 24592 : return types->hasType(types::Type::UndefinedType());
7731 : }
7732 :
7733 : types::TypeSet *
7734 815552 : mjit::Compiler::pushedTypeSet(uint32_t pushed)
7735 : {
7736 815552 : if (!cx->typeInferenceEnabled())
7737 604238 : return NULL;
7738 211314 : return analysis->pushedTypes(PC, pushed);
7739 : }
7740 :
7741 : bool
7742 760245 : mjit::Compiler::monitored(jsbytecode *pc)
7743 : {
7744 760245 : if (!cx->typeInferenceEnabled())
7745 505886 : return false;
7746 254359 : return analysis->getCode(pc).monitoredTypes;
7747 : }
7748 :
7749 : bool
7750 568361 : mjit::Compiler::hasTypeBarriers(jsbytecode *pc)
7751 : {
7752 568361 : if (!cx->typeInferenceEnabled())
7753 85649 : return false;
7754 :
7755 482712 : return analysis->typeBarriers(cx, pc) != NULL;
7756 : }
7757 :
7758 : void
7759 212262 : mjit::Compiler::pushSyncedEntry(uint32_t pushed)
7760 : {
7761 212262 : frame.pushSynced(knownPushedType(pushed));
7762 212262 : }
7763 :
7764 : JSObject *
7765 1891918 : mjit::Compiler::pushedSingleton(unsigned pushed)
7766 : {
7767 1891918 : if (!cx->typeInferenceEnabled())
7768 1416138 : return NULL;
7769 :
7770 475780 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7771 475780 : return types->getSingleton(cx);
7772 : }
7773 :
7774 : /*
7775 : * Barriers overview.
7776 : *
7777 : * After a property fetch finishes, we may need to do type checks on it to make
7778 : * sure it matches the pushed type set for this bytecode. This can be either
7779 : * because there is a type barrier at the bytecode, or because we cannot rule
7780 : * out an undefined result. For such accesses, we push a register pair, and
7781 : * then use those registers to check the fetched type matches the inferred
7782 : * types for the pushed set. The flow here is tricky:
7783 : *
7784 : * frame.pushRegs(type, data, knownType);
7785 : * --- Depending on knownType, the frame's representation for the pushed entry
7786 : * may not be a register pair anymore. knownType is based on the observed
7787 : * types that have been pushed here and may not actually match type/data.
7788 : * pushRegs must not clobber either register, for the test below.
7789 : *
7790 : * testBarrier(type, data)
7791 : * --- Use the type/data regs and generate a single jump taken if the barrier
7792 : * has been violated.
7793 : *
7794 : * --- Rearrange stack, rejoin from stub paths. No code must be emitted into
7795 : * the inline path between testBarrier and finishBarrier. Since a stub path
7796 : * may be in progress we can't call finishBarrier before stubcc.rejoin,
7797 : * and since typeReg/dataReg may not be intact after the stub call rejoin
7798 : * (if knownType != JSVAL_TYPE_UNKNOWN) we can't testBarrier after calling
7799 : * stubcc.rejoin.
7800 : *
7801 : * finishBarrier()
7802 : * --- Link the barrier jump to a new stub code path which updates the pushed
7803 : * types (possibly triggering recompilation). The frame has changed since
7804 : * pushRegs to reflect the final state of the op, which is OK as no inline
7805 : * code has been emitted since the barrier jump.
7806 : */
7807 :
7808 : mjit::Compiler::BarrierState
7809 119619 : mjit::Compiler::pushAddressMaybeBarrier(Address address, JSValueType type, bool reuseBase,
7810 : bool testUndefined)
7811 : {
7812 119619 : if (!hasTypeBarriers(PC) && !testUndefined) {
7813 28130 : frame.push(address, type, reuseBase);
7814 28130 : return BarrierState();
7815 : }
7816 :
7817 : RegisterID typeReg, dataReg;
7818 91489 : frame.loadIntoRegisters(address, reuseBase, &typeReg, &dataReg);
7819 :
7820 91489 : frame.pushRegs(typeReg, dataReg, type);
7821 91489 : return testBarrier(typeReg, dataReg, testUndefined);
7822 : }
7823 :
7824 : MaybeJump
7825 203488 : mjit::Compiler::trySingleTypeTest(types::TypeSet *types, RegisterID typeReg)
7826 : {
7827 : /*
7828 : * If a type set we have a barrier on is monomorphic, generate a single
7829 : * jump taken if a type register has a match. This doesn't handle type sets
7830 : * containing objects, as these require two jumps regardless (test for
7831 : * object, then test the type of the object).
7832 : */
7833 203488 : MaybeJump res;
7834 :
7835 203488 : switch (types->getKnownTypeTag(cx)) {
7836 : case JSVAL_TYPE_INT32:
7837 10758 : res.setJump(masm.testInt32(Assembler::NotEqual, typeReg));
7838 10758 : return res;
7839 :
7840 : case JSVAL_TYPE_DOUBLE:
7841 5029 : res.setJump(masm.testNumber(Assembler::NotEqual, typeReg));
7842 5029 : return res;
7843 :
7844 : case JSVAL_TYPE_BOOLEAN:
7845 1501 : res.setJump(masm.testBoolean(Assembler::NotEqual, typeReg));
7846 1501 : return res;
7847 :
7848 : case JSVAL_TYPE_STRING:
7849 16408 : res.setJump(masm.testString(Assembler::NotEqual, typeReg));
7850 16408 : return res;
7851 :
7852 : default:
7853 169792 : return res;
7854 : }
7855 : }
7856 :
JSC::MacroAssembler::Jump
mjit::Compiler::addTypeTest(types::TypeSet *types, RegisterID typeReg, RegisterID dataReg)
{
    /*
     * Emit inline tests covering every type in |types|, returning a single
     * jump taken when the value in typeReg/dataReg matches none of them.
     *
     * :TODO: It would be good to merge this with GenerateTypeCheck, but the
     * two methods have a different format for the tested value (in registers
     * vs. in memory).
     */

    Vector<Jump> matches(CompilerAllocPolicy(cx, *this));

    /* One tag-equality test per primitive type present in the set. */
    if (types->hasType(types::Type::Int32Type()))
        matches.append(masm.testInt32(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::DoubleType()))
        matches.append(masm.testDouble(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::UndefinedType()))
        matches.append(masm.testUndefined(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::BooleanType()))
        matches.append(masm.testBoolean(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::StringType()))
        matches.append(masm.testString(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::NullType()))
        matches.append(masm.testNull(Assembler::Equal, typeReg));

    /*
     * If the set admits any object, a single object-tag test suffices and
     * count stays 0; otherwise the specific objects must be enumerated below.
     */
    unsigned count = 0;
    if (types->hasType(types::Type::AnyObjectType()))
        matches.append(masm.testObject(Assembler::Equal, typeReg));
    else
        count = types->getObjectCount();

    if (count != 0) {
        /* Non-objects skip the per-object comparisons and fall to the mismatch jump. */
        Jump notObject = masm.testObject(Assembler::NotEqual, typeReg);
        Address typeAddress(dataReg, JSObject::offsetOfType());

        /* Singleton objects are matched by identity on the payload pointer. */
        for (unsigned i = 0; i < count; i++) {
            if (JSObject *object = types->getSingleObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, dataReg, ImmPtr(object)));
        }

        /* Other entries are matched by the object's type, loaded from the payload. */
        for (unsigned i = 0; i < count; i++) {
            if (types::TypeObject *object = types->getTypeObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, typeAddress, ImmPtr(object)));
        }

        notObject.linkTo(masm.label(), &masm);
    }

    /* Anything that did not take a match jump above violates the barrier. */
    Jump mismatch = masm.jump();

    /* All matching paths land here, after the mismatch jump. */
    for (unsigned i = 0; i < matches.length(); i++)
        matches[i].linkTo(masm.label(), &masm);

    return mismatch;
}
7916 :
mjit::Compiler::BarrierState
mjit::Compiler::testBarrier(RegisterID typeReg, RegisterID dataReg,
                            bool testUndefined, bool testReturn, bool force)
{
    /*
     * Emit the inline type barrier test (if any is needed at this bytecode)
     * for the value held in typeReg/dataReg. The returned state carries the
     * possibly-unset mismatch jump for finishBarrier, plus the two registers.
     *
     * testUndefined: also fail when the value may be undefined, even without
     *                a barrier (mutually exclusive with testReturn).
     * testReturn:    test against monitored return types instead of barriers.
     * force:         emit the full test even when no barrier is present.
     */
    BarrierState state;
    state.typeReg = typeReg;
    state.dataReg = dataReg;

    /* Only opcodes with observed type sets (JOF_TYPESET) can have barriers. */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return state;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    if (types->unknown()) {
        /*
         * If the result of this opcode is already unknown, there is no way for
         * a type barrier to fail.
         */
        return state;
    }

    if (testReturn) {
        JS_ASSERT(!testUndefined);
        if (!analysis->getCode(PC).monitoredTypesReturn)
            return state;
    } else if (!hasTypeBarriers(PC) && !force) {
        /* No barrier: at most emit a test for a possibly-undefined value. */
        if (testUndefined && !types->hasType(types::Type::UndefinedType()))
            state.jump.setJump(masm.testUndefined(Assembler::Equal, typeReg));
        return state;
    }

    /* Freeze the observed set before the emitted code depends on its contents. */
    types->addFreeze(cx);

    /* Cannot have type barriers when the result of the operation is already unknown. */
    JS_ASSERT(!types->unknown());

    /* Prefer the cheap monomorphic test; fall back to the full type test. */
    state.jump = trySingleTypeTest(types, typeReg);
    if (!state.jump.isSet())
        state.jump.setJump(addTypeTest(types, typeReg, dataReg));

    return state;
}
7958 :
void
mjit::Compiler::finishBarrier(const BarrierState &barrier, RejoinState rejoin, uint32_t which)
{
    /*
     * Link the pending barrier jump (if any) from testBarrier to a new stub
     * code path which updates the pushed types via TypeBarrierHelper, possibly
     * triggering recompilation. |which| is passed to the stub in ArgReg1.
     */
    if (!barrier.jump.isSet())
        return;

    stubcc.linkExitDirect(barrier.jump.get(), stubcc.masm.label());

    /*
     * Before syncing, store the entry to sp[0]. (scanInlineCalls accounted for
     * this when making sure there is enough room for all frames). The known
     * type in the frame may be wrong leading to an incorrect sync, and this
     * sync may also clobber typeReg and/or dataReg.
     */
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
    stubcc.masm.storeValueFromComponents(barrier.typeReg, barrier.dataReg,
                                         frame.addressOf(frame.peek(-1)));
    frame.pop();

    stubcc.syncExit(Uses(0));
    stubcc.leave();

    stubcc.masm.move(ImmIntPtr(intptr_t(which)), Registers::ArgReg1);
    OOL_STUBCALL(stubs::TypeBarrierHelper, rejoin);
    stubcc.rejoin(Changes(0));
}
7985 :
void
mjit::Compiler::testPushedType(RejoinState rejoin, int which, bool ool)
{
    /*
     * Emit a check that the stack value at depth |which| (<= 0, relative to
     * the top of stack) matches its inferred type set, calling StubTypeHelper
     * on mismatch. The check goes into the out-of-line path when |ool| is
     * set, into the inline path otherwise.
     */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    if (types->unknown())
        return;

    /* Select which assembler (inline vs. stub path) receives the check. */
    Assembler &masm = ool ? stubcc.masm : this->masm;

    JS_ASSERT(which <= 0);
    Address address = (which == 0) ? frame.addressOfTop() : frame.addressOf(frame.peek(which));

    Vector<Jump> mismatches(cx);
    if (!masm.generateTypeCheck(cx, address, types, &mismatches)) {
        /* OOM while building the check; record it for the compiler to handle. */
        oomInVector = true;
        return;
    }

    /* Matching values jump over the stub call below. */
    Jump j = masm.jump();

    for (unsigned i = 0; i < mismatches.length(); i++)
        mismatches[i].linkTo(masm.label(), &masm);

    masm.move(Imm32(which), Registers::ArgReg1);
    if (ool)
        OOL_STUBCALL(stubs::StubTypeHelper, rejoin);
    else
        INLINE_STUBCALL(stubs::StubTypeHelper, rejoin);

    j.linkTo(masm.label(), &masm);
}
|