Version 2.4.3.

Made Date.parse properly handle TZ offsets (issue 857).

Performance improvements on all platforms.



git-svn-id: http://v8.googlecode.com/svn/trunk@5447 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/AUTHORS b/AUTHORS
index 65b8965..cd6eeac 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -9,6 +9,7 @@
 
 Alexander Botero-Lowry <alexbl@FreeBSD.org>
 Alexandre Vassalotti <avassalotti@gmail.com>
+Burcu Dogan <burcujdogan@gmail.com>
 Craig Schlenter <craig.schlenter@gmail.com>
 Daniel Andersson <kodandersson@gmail.com>
 Daniel James <dnljms@gmail.com>
@@ -21,6 +22,7 @@
 Kun Zhang <zhangk@codeaurora.org>
 Matt Hanselman <mjhanselman@gmail.com>
 Martyn Capewell <martyn.capewell@arm.com>
+Michael Smith <mike@w3.org>
 Paolo Giarrusso <p.giarrusso@gmail.com>
 Patrick Gansterer <paroga@paroga.com>
 Rafal Krypa <rafal@krypa.net>
@@ -28,6 +30,4 @@
 Rodolph Perfetta <rodolph.perfetta@arm.com>
 Ryan Dahl <coldredlemur@gmail.com>
 Subrato K De <subratokde@codeaurora.org>
-Burcu Dogan <burcujdogan@gmail.com>
 Vlad Burlik <vladbph@gmail.com>
-
diff --git a/ChangeLog b/ChangeLog
index 95a3640..d867c6a 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,10 @@
+2010-09-13: Version 2.4.3
+
+        Made Date.parse properly handle TZ offsets (issue 857).
+
+        Performance improvements on all platforms.
+
+
 2010-09-08: Version 2.4.2
 
         Fixed GC crash bug.
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index f32da6d..0f8f6d4 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -493,7 +493,7 @@
       int context_chain_length =
           scope()->ContextChainLength(slot->var()->scope());
       __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -557,19 +557,17 @@
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ ldr(r1,
-                 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX));
+          __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
           __ cmp(r1, cp);
           __ Check(eq, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
           __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-          __ str(ip, CodeGenerator::ContextOperand(cp, slot->index()));
+          __ str(ip, ContextOperand(cp, slot->index()));
           // No write barrier since the_hole_value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ str(result_register(),
-                 CodeGenerator::ContextOperand(cp, slot->index()));
+          __ str(result_register(), ContextOperand(cp, slot->index()));
           int offset = Context::SlotOffset(slot->index());
           // We know that we have written a function, which is not a smi.
           __ mov(r1, Operand(cp));
@@ -746,11 +744,10 @@
   __ bind(&done_convert);
   __ push(r0);
 
-  // TODO(kasperl): Check cache validity in generated code. This is a
-  // fast case for the JSObject::IsSimpleEnum cache validity
-  // checks. If we cannot guarantee cache validity, call the runtime
-  // system to check cache validity or get the property names in a
-  // fixed array.
+  // BUG(867): Check cache validity in generated code. This is a fast
+  // case for the JSObject::IsSimpleEnum cache validity checks. If we
+  // cannot guarantee cache validity, call the runtime system to check
+  // cache validity or get the property names in a fixed array.
 
   // Get the set of properties to enumerate.
   __ push(r0);  // Duplicate the enumerable object on the stack.
@@ -881,6 +878,150 @@
 }
 
 
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
+    Slot* slot,
+    Label* slow) {
+  ASSERT(slot->type() == Slot::CONTEXT);
+  Register current = cp;
+  Register next = r3;
+  Register temp = r4;
+
+  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ tst(temp, temp);
+        __ b(ne, slow);
+      }
+      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering cp.
+      current = next;
+    }
+  }
+  // Check that last extension is NULL.
+  __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+  __ tst(temp, temp);
+  __ b(ne, slow);
+  __ ldr(temp, ContextOperand(current, Context::FCONTEXT_INDEX));
+  return ContextOperand(temp, slot->index());
+}
+
+
+void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow,
+    Label* done) {
+  // Generate fast-case code for variables that might be shadowed by
+  // eval-introduced variables.  Eval is used a lot without
+  // introducing variables.  In those cases, we do not want to
+  // perform a runtime call for all variables in the scope
+  // containing the eval.
+  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+    __ jmp(done);
+  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
+    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
+    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
+    if (potential_slot != NULL) {
+      // Generate fast case for locals that rewrite to slots.
+      __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
+      if (potential_slot->var()->mode() == Variable::CONST) {
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ cmp(r0, ip);
+        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+      }
+      __ jmp(done);
+    } else if (rewrite != NULL) {
+      // Generate fast case for calls of an argument function.
+      Property* property = rewrite->AsProperty();
+      if (property != NULL) {
+        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+        Literal* key_literal = property->key()->AsLiteral();
+        if (obj_proxy != NULL &&
+            key_literal != NULL &&
+            obj_proxy->IsArguments() &&
+            key_literal->handle()->IsSmi()) {
+          // Load arguments object if there are no eval-introduced
+          // variables. Then load the argument from the arguments
+          // object using keyed load.
+          __ ldr(r1,
+                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
+                                                   slow));
+          __ mov(r0, Operand(key_literal->handle()));
+          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          __ Call(ic, RelocInfo::CODE_TARGET);
+          __ jmp(done);
+        }
+      }
+    }
+  }
+}
+
+
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register current = cp;
+  Register next = r1;
+  Register temp = r2;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ tst(temp, temp);
+        __ b(ne, slow);
+      }
+      // Load next context in chain.
+      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering cp.
+      current = next;
+    }
+    // If no outer scope calls eval, we do not need to check more context
+    // extensions.  From an eval scope on, we check all extensions.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s != NULL && s->is_eval_scope()) {
+    Label loop, fast;
+    if (!current.is(next)) {
+      __ Move(next, current);
+    }
+    __ bind(&loop);
+    // Terminate at global context.
+    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+    __ cmp(temp, ip);
+    __ b(eq, &fast);
+    // Check that extension is NULL.
+    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+    __ tst(temp, temp);
+    __ b(ne, slow);
+    // Load next context in chain.
+    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
+    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+    __ b(&loop);
+    __ bind(&fast);
+  }
+
+  __ ldr(r0, CodeGenerator::GlobalObject());
+  __ mov(r2, Operand(slot->var()->name()));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  __ Call(ic, mode);
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -900,10 +1041,19 @@
     Apply(context, r0);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ mov(r1, Operand(var->name()));
     __ Push(cp, r1);  // Context and name.
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
+    __ bind(&done);
+
     Apply(context, r0);
 
   } else if (slot != NULL) {
@@ -913,14 +1063,11 @@
     if (var->mode() == Variable::CONST) {
        // Constants may be the hole value if they have not been initialized.
        // Unhole them.
-       Label done;
        MemOperand slot_operand = EmitSlotSearch(slot, r0);
        __ ldr(r0, slot_operand);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r0, ip);
-       __ b(ne, &done);
-       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
-       __ bind(&done);
+       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
        Apply(context, r0);
      } else {
        Apply(context, slot);
@@ -1647,15 +1794,41 @@
     EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
   } else if (var != NULL && var->slot() != NULL &&
              var->slot()->type() == Slot::LOOKUP) {
-    // Call to a lookup slot (dynamically introduced variable).  Call the
-    // runtime to find the function to call (returned in eax) and the object
-    // holding it (returned in edx).
+    // Call to a lookup slot (dynamically introduced variable).
+    Label slow, done;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(var->slot(),
+                                    NOT_INSIDE_TYPEOF,
+                                    &slow,
+                                    &done);
+
+    __ bind(&slow);
+    // Call the runtime to find the function to call (returned in r0)
+    // and the object holding it (returned in r1).
     __ push(context_register());
     __ mov(r2, Operand(var->name()));
     __ push(r2);
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
-    __ push(r0);  // Function.
-    __ push(r1);  // Receiver.
+    __ Push(r0, r1);  // Function, receiver.
+
+    // If fast case code has been generated, emit code to push the
+    // function and receiver and have the slow path jump around this
+    // code.
+    if (done.is_linked()) {
+      Label call;
+      __ b(&call);
+      __ bind(&done);
+      // Push function.
+      __ push(r0);
+      // Push global receiver.
+      __ ldr(r1, CodeGenerator::GlobalObject());
+      __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+      __ push(r1);
+      __ bind(&call);
+    }
+
     EmitCallWithStub(expr);
   } else if (fun->AsProperty() != NULL) {
     // Call to an object property.
@@ -1678,12 +1851,9 @@
 
         Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
         __ Call(ic, RelocInfo::CODE_TARGET);
-        // Push result (function).
-        __ push(r0);
-        // Push Global receiver.
         __ ldr(r1, CodeGenerator::GlobalObject());
         __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
-        __ push(r1);
+        __ Push(r0, r1);  // Function, receiver.
         EmitCallWithStub(expr);
       } else {
         EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
@@ -2464,11 +2634,9 @@
 
   Register key = r0;
   Register cache = r1;
-  __ ldr(cache, CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
   __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
-  __ ldr(cache,
-         CodeGenerator::ContextOperand(
-             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ ldr(cache,
          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
 
@@ -2929,9 +3097,19 @@
   } else if (proxy != NULL &&
              proxy->var()->slot() != NULL &&
              proxy->var()->slot()->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    Slot* slot = proxy->var()->slot();
+    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     __ mov(r0, Operand(proxy->name()));
     __ Push(cp, r0);
     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+    __ bind(&done);
+
     if (where == kStack) __ push(r0);
   } else {
     // This expression cannot throw a reference error at the top level.
@@ -3187,7 +3365,7 @@
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ ldr(dst, CodeGenerator::ContextOperand(cp, context_index));
+  __ ldr(dst, ContextOperand(cp, context_index));
 }
 
 
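A minimal JavaScript sketch (illustrative, not part of this patch) of the
pattern EmitDynamicLoadFromSlotFastCase targets; print is d8-style:

    var x = 1;
    function f(code) {
      eval(code);  // Might introduce a shadowing 'x', so 'x' below is a
      return x;    // LOOKUP slot.  The fast case walks the context chain,
                   // checks for extension objects, and on success loads
                   // 'x' directly instead of calling kLoadContextSlot.
    }
    print(f("var unrelated = 2;"));  // 1
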
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 344cb6f..0da5f64 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1220,6 +1220,62 @@
 }
 
 
+void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
+                                                   JSObject* holder,
+                                                   String* name,
+                                                   Label* miss) {
+  ASSERT(holder->IsGlobalObject());
+
+  // Get the number of arguments.
+  const int argc = arguments().immediate();
+
+  // Get the receiver from the stack.
+  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
+
+  // If the object is the holder, then we know it's a global object,
+  // which can only happen for contextual calls.  In this case, the
+  // receiver cannot be a smi.
+  if (object != holder) {
+    __ tst(r0, Operand(kSmiTagMask));
+    __ b(eq, miss);
+  }
+
+  // Check that the maps haven't changed.
+  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
+}
+
+
+void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
+                                                    JSFunction* function,
+                                                    Label* miss) {
+  // Get the value from the cell.
+  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
+  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
+
+  // Check that the cell contains the same function.
+  if (Heap::InNewSpace(function)) {
+    // We can't embed a pointer to a function in new space so we have
+    // to verify that the shared function info is unchanged. This has
+    // the nice side effect that multiple closures based on the same
+    // function can all use this call IC. Before we load through the
+    // function, we have to verify that it still is a function.
+    __ tst(r1, Operand(kSmiTagMask));
+    __ b(eq, miss);
+    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
+    __ b(ne, miss);
+
+    // Check the shared function info. Make sure it hasn't changed.
+    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
+    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+    __ cmp(r4, r3);
+    __ b(ne, miss);
+  } else {
+    __ cmp(r1, Operand(Handle<JSFunction>(function)));
+    __ b(ne, miss);
+  }
+}
+
+
 Object* CallStubCompiler::GenerateMissBranch() {
   Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
   if (obj->IsFailure()) return obj;
@@ -1266,21 +1322,18 @@
 
 Object* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                JSObject* holder,
+                                               JSGlobalPropertyCell* cell,
                                                JSFunction* function,
-                                               String* name,
-                                               CheckType check) {
+                                               String* name) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
-
   // TODO(639): faster implementation.
-  ASSERT(check == RECEIVER_MAP_CHECK);
+
+  // If object is not an array, bail out to regular call.
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss;
 
@@ -1313,21 +1366,18 @@
 
 Object* CallStubCompiler::CompileArrayPopCall(Object* object,
                                               JSObject* holder,
+                                              JSGlobalPropertyCell* cell,
                                               JSFunction* function,
-                                              String* name,
-                                              CheckType check) {
+                                              String* name) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
-
   // TODO(642): faster implementation.
-  ASSERT(check == RECEIVER_MAP_CHECK);
+
+  // If object is not an array, bail out to regular call.
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss;
 
@@ -1358,11 +1408,12 @@
 }
 
 
-Object* CallStubCompiler::CompileStringCharCodeAtCall(Object* object,
-                                                      JSObject* holder,
-                                                      JSFunction* function,
-                                                      String* name,
-                                                      CheckType check) {
+Object* CallStubCompiler::CompileStringCharCodeAtCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
   // ----------- S t a t e -------------
   //  -- r2                     : function name
   //  -- lr                     : return address
@@ -1372,7 +1423,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1430,9 +1481,9 @@
 
 Object* CallStubCompiler::CompileStringCharAtCall(Object* object,
                                                   JSObject* holder,
+                                                  JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
-                                                  String* name,
-                                                  CheckType check) {
+                                                  String* name) {
   // ----------- S t a t e -------------
   //  -- r2                     : function name
   //  -- lr                     : return address
@@ -1442,7 +1493,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1501,6 +1552,80 @@
 }
 
 
+Object* CallStubCompiler::CompileStringFromCharCodeCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
+  // ----------- S t a t e -------------
+  //  -- r2                     : function name
+  //  -- lr                     : return address
+  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
+  //  -- ...
+  //  -- sp[argc * 4]           : receiver
+  // -----------------------------------
+
+  const int argc = arguments().immediate();
+
+  // If the object is not a JSObject or we got an unexpected number of
+  // arguments, bail out to the regular call.
+  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+
+  Label miss;
+  GenerateNameCheck(name, &miss);
+
+  if (cell == NULL) {
+    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
+
+    STATIC_ASSERT(kSmiTag == 0);
+    __ tst(r1, Operand(kSmiTagMask));
+    __ b(eq, &miss);
+
+    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+                    &miss);
+  } else {
+    ASSERT(cell->value() == function);
+    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+    GenerateLoadFunctionFromCell(cell, function, &miss);
+  }
+
+  // Load the char code argument.
+  Register code = r1;
+  __ ldr(code, MemOperand(sp, 0 * kPointerSize));
+
+  // Check that the code is a smi.
+  Label slow;
+  STATIC_ASSERT(kSmiTag == 0);
+  __ tst(code, Operand(kSmiTagMask));
+  __ b(ne, &slow);
+
+  // Convert the smi code to uint16.
+  __ and_(code, code, Operand(Smi::FromInt(0xffff)));
+
+  StringCharFromCodeGenerator char_from_code_generator(code, r0);
+  char_from_code_generator.GenerateFast(masm());
+  __ Drop(argc + 1);
+  __ Ret();
+
+  ICRuntimeCallHelper call_helper;
+  char_from_code_generator.GenerateSlow(masm(), call_helper);
+
+  // Tail call the full function. We do not have to patch the receiver
+  // because the function makes no use of it.
+  __ bind(&slow);
+  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+  __ bind(&miss);
+  // r2: function name.
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
+
+  // Return the generated code.
+  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+}
+
+
 Object* CallStubCompiler::CompileCallConstant(Object* object,
                                               JSObject* holder,
                                               JSFunction* function,
@@ -1513,8 +1638,8 @@
   SharedFunctionInfo* function_info = function->shared();
   if (function_info->HasCustomCallGenerator()) {
     const int id = function_info->custom_call_generator_id();
-    Object* result =
-        CompileCustomCall(id, object, holder, function, name, check);
+    Object* result = CompileCustomCall(
+        id, object, holder, NULL, function, name);
     // undefined means bail out to regular compiler.
     if (!result->IsUndefined()) {
       return result;
@@ -1714,6 +1839,16 @@
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
+
+  SharedFunctionInfo* function_info = function->shared();
+  if (function_info->HasCustomCallGenerator()) {
+    const int id = function_info->custom_call_generator_id();
+    Object* result = CompileCustomCall(
+        id, object, holder, cell, function, name);
+    // undefined means bail out to regular compiler.
+    if (!result->IsUndefined()) return result;
+  }
+
   Label miss;
 
   GenerateNameCheck(name, &miss);
@@ -1721,45 +1856,9 @@
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  // Get the receiver from the stack.
-  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
+  GenerateGlobalReceiverCheck(object, holder, name, &miss);
 
-  // If the object is the holder then we know that it's a global
-  // object which can only happen for contextual calls. In this case,
-  // the receiver cannot be a smi.
-  if (object != holder) {
-    __ tst(r0, Operand(kSmiTagMask));
-    __ b(eq, &miss);
-  }
-
-  // Check that the maps haven't changed.
-  CheckPrototypes(object, r0, holder, r3, r1, r4, name, &miss);
-
-  // Get the value from the cell.
-  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
-  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
-
-  // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
-    // We can't embed a pointer to a function in new space so we have
-    // to verify that the shared function info is unchanged. This has
-    // the nice side effect that multiple closures based on the same
-    // function can all use this call IC. Before we load through the
-    // function, we have to verify that it still is a function.
-    __ tst(r1, Operand(kSmiTagMask));
-    __ b(eq, &miss);
-    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
-    __ b(ne, &miss);
-
-    // Check the shared function info. Make sure it hasn't changed.
-    __ mov(r3, Operand(Handle<SharedFunctionInfo>(function->shared())));
-    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-    __ cmp(r4, r3);
-    __ b(ne, &miss);
-  } else {
-    __ cmp(r1, Operand(Handle<JSFunction>(function)));
-    __ b(ne, &miss);
-  }
+  GenerateLoadFunctionFromCell(cell, function, &miss);
 
   // Patch the receiver on the stack with the global proxy if
   // necessary.
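
A hedged sketch of the calls the new CompileStringFromCharCodeCall stub
above fast-cases, assuming a single smi argument; non-smi codes take the
slow path, which invokes the full builtin:

    print(String.fromCharCode(65));            // "A" -- smi fast path.
    print(String.fromCharCode(65 + 0x10000));  // Also "A": the stub masks
                                               // the char code to uint16.
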
diff --git a/src/array.js b/src/array.js
index e12df64..b2ebece 100644
--- a/src/array.js
+++ b/src/array.js
@@ -957,14 +957,41 @@
     // If index is still negative, search the entire array.
     if (index < 0) index = 0;
   }
+  var min = index;
+  var max = length;
+  if (UseSparseVariant(this, length, true)) {
+    var intervals = %GetArrayKeys(this, length);
+    if (intervals.length == 2 && intervals[0] < 0) {
+      // A single interval.
+      var intervalMin = -(intervals[0] + 1);
+      var intervalMax = intervalMin + intervals[1];
+      min = MAX(min, intervalMin);
+      max = intervalMax;  // Capped by length already.
+      // Fall through to loop below.
+    } else {
+      if (intervals.length == 0) return -1;
+      // Get all the keys in sorted order.
+      var sortedKeys = GetSortedArrayKeys(this, intervals);
+      var n = sortedKeys.length;
+      var i = 0;
+      while (i < n && sortedKeys[i] < index) i++;
+      while (i < n) {
+        var key = sortedKeys[i];
+        if (!IS_UNDEFINED(key) && this[key] === element) return key;
+        i++;
+      }
+      return -1;
+    }
+  }
   // Lookup through the array.
   if (!IS_UNDEFINED(element)) {
-    for (var i = index; i < length; i++) {
+    for (var i = min; i < max; i++) {
       if (this[i] === element) return i;
     }
     return -1;
   }
-  for (var i = index; i < length; i++) {
+  // Lookup through the array for an explicitly present undefined element.
+  for (var i = min; i < max; i++) {
     if (IS_UNDEFINED(this[i]) && i in this) {
       return i;
     }
@@ -981,19 +1008,43 @@
   } else {
     index = TO_INTEGER(index);
     // If index is negative, index from end of the array.
-    if (index < 0) index = length + index;
+    if (index < 0) index += length;
     // If index is still negative, do not search the array.
-    if (index < 0) index = -1;
+    if (index < 0) return -1;
     else if (index >= length) index = length - 1;
   }
+  var min = 0;
+  var max = index;
+  if (UseSparseVariant(this, length, true)) {
+    var intervals = %GetArrayKeys(this, index + 1);
+    if (intervals.length == 2 && intervals[0] < 0) {
+      // A single interval.
+      var intervalMin = -(intervals[0] + 1);
+      var intervalMax = intervalMin + intervals[1];
+      min = MAX(min, intervalMin);
+      max = intervalMax;  // Capped by index already.
+      // Fall through to loop below.
+    } else {
+      if (intervals.length == 0) return -1;
+      // Get all the keys in sorted order.
+      var sortedKeys = GetSortedArrayKeys(this, intervals);
+      var i = sortedKeys.length - 1;
+      while (i >= 0) {
+        var key = sortedKeys[i];
+        if (!IS_UNDEFINED(key) && this[key] === element) return key;
+        i--;
+      }
+      return -1;
+    }
+  }
   // Lookup through the array.
   if (!IS_UNDEFINED(element)) {
-    for (var i = index; i >= 0; i--) {
+    for (var i = max; i >= min; i--) {
       if (this[i] === element) return i;
     }
     return -1;
   }
-  for (var i = index; i >= 0; i--) {
+  for (var i = max; i >= min; i--) {
     if (IS_UNDEFINED(this[i]) && i in this) {
       return i;
     }
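
A rough sketch of what the sparse fast paths above buy (hypothetical
array): indexOf and lastIndexOf now visit only the keys reported by
%GetArrayKeys instead of every index up to length:

    var a = [];
    a[10] = "x";
    a[5000000] = "y";
    // Previously walked millions of absent indices; now only the present
    // keys are inspected (single-interval or sorted-keys case).
    print(a.indexOf("y"));      // 5000000
    print(a.lastIndexOf("x"));  // 10
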
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index a82d1d6..6e6c2c6 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -1344,23 +1344,33 @@
 }
 
 
-static void InstallCustomCallGenerator(Handle<JSFunction> holder_function,
-                                       const char* function_name,
-                                       int id) {
-  Handle<JSObject> proto(JSObject::cast(holder_function->instance_prototype()));
+static void InstallCustomCallGenerator(
+    Handle<JSFunction> holder_function,
+    CallStubCompiler::CustomGeneratorOwner owner_flag,
+    const char* function_name,
+    int id) {
+  Handle<JSObject> owner;
+  if (owner_flag == CallStubCompiler::FUNCTION) {
+    owner = Handle<JSObject>::cast(holder_function);
+  } else {
+    ASSERT(owner_flag == CallStubCompiler::INSTANCE_PROTOTYPE);
+    owner = Handle<JSObject>(
+        JSObject::cast(holder_function->instance_prototype()));
+  }
   Handle<String> name = Factory::LookupAsciiSymbol(function_name);
-  Handle<JSFunction> function(JSFunction::cast(proto->GetProperty(*name)));
+  Handle<JSFunction> function(JSFunction::cast(owner->GetProperty(*name)));
   function->shared()->set_function_data(Smi::FromInt(id));
 }
 
 
 void Genesis::InstallCustomCallGenerators() {
   HandleScope scope;
-#define INSTALL_CALL_GENERATOR(holder_fun, fun_name, name)                \
+#define INSTALL_CALL_GENERATOR(holder_fun, owner_flag, fun_name, name)    \
   {                                                                       \
     Handle<JSFunction> holder(global_context()->holder_fun##_function()); \
     const int id = CallStubCompiler::k##name##CallGenerator;              \
-    InstallCustomCallGenerator(holder, #fun_name, id);                    \
+    InstallCustomCallGenerator(holder, CallStubCompiler::owner_flag,      \
+                               #fun_name, id);                            \
   }
   CUSTOM_CALL_IC_GENERATORS(INSTALL_CALL_GENERATOR)
 #undef INSTALL_CALL_GENERATOR
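
The new owner_flag exists because some custom call generators target a
method on the function object itself rather than on its instance
prototype; an illustrative contrast:

    "abc".charAt(1);          // INSTANCE_PROTOTYPE: on String.prototype.
    String.fromCharCode(97);  // FUNCTION: on the String function object
                              // itself, which the flag makes installable.
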
diff --git a/src/compiler.cc b/src/compiler.cc
index bf6d41d..f65f941 100755
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -269,10 +269,19 @@
   }
 
   if (result.is_null()) {
-    // No cache entry found. Do pre-parsing and compile the script.
+    // No cache entry found. Do pre-parsing, if it makes sense, and compile
+    // the script.
+    // Building preparse data that is only used immediately after is only a
+    // saving if we might skip building the AST for lazily compiled functions.
+    // That is, preparse data isn't relevant when the lazy flag is off,
+    // and for small sources there are unlikely to be many lazily
+    // compiled functions anyway, so the preparse step is skipped in
+    // that case too.
     ScriptDataImpl* pre_data = input_pre_data;
-    if (pre_data == NULL && source_length >= FLAG_min_preparse_length) {
-      pre_data = PreParse(source, NULL, extension);
+    if (pre_data == NULL
+        && FLAG_lazy
+        && source_length >= FLAG_min_preparse_length) {
+      pre_data = PartialPreParse(source, NULL, extension);
     }
 
     // Create a script object describing the script to be compiled.
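
A rough illustration of the heuristic above, assuming d8-style flags
corresponding to FLAG_lazy and FLAG_min_preparse_length:

    // In a source longer than the preparse threshold, with lazy
    // compilation on, partial preparse data records function extents so
    // bodies like this can be skipped at script load:
    function big() { /* ...thousands of lines... */ }
    big();  // Compiled lazily here, on first call.
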
diff --git a/src/dateparser-inl.h b/src/dateparser-inl.h
index be353a3..e52cc94 100644
--- a/src/dateparser-inl.h
+++ b/src/dateparser-inl.h
@@ -65,8 +65,10 @@
         tz.SetAbsoluteMinute(n);
       } else if (time.IsExpecting(n)) {
         time.AddFinal(n);
-        // Require end, white space or Z immediately after finalizing time.
-        if (!in.IsEnd() && !in.SkipWhiteSpace() && !in.Is('Z')) return false;
+        // Require end, white space, "Z", "+" or "-" immediately after
+        // finalizing time.
+        if (!in.IsEnd() && !in.SkipWhiteSpace() && !in.Is('Z') &&
+            !in.IsAsciiSign()) return false;
       } else {
         if (!day.Add(n)) return false;
         in.Skip('-');  // Ignore suffix '-' for year, month, or day.
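
A sketch of the behavior this dateparser change fixes (issue 857); the
comparison avoids depending on the host timezone:

    // Before: a '+' immediately after the seconds was rejected, so this
    // returned NaN.  After: the sign starts a timezone offset.
    var withOffset = Date.parse("Jan 1 2010 12:00:00+0200");
    var utc = Date.parse("Jan 1 2010 10:00:00Z");
    print(withOffset === utc);  // true
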
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 5ffebfb..7de4a00 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -298,6 +298,11 @@
 }
 
 
+MemOperand FullCodeGenerator::ContextOperand(Register context, int index) {
+  return CodeGenerator::ContextOperand(context, index);
+}
+
+
 int FullCodeGenerator::SlotOffset(Slot* slot) {
   ASSERT(slot != NULL);
   // Offset is negative because higher indexes are at lower addresses.
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 840c825..ab0fd36 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -381,6 +381,14 @@
 #undef EMIT_INLINE_RUNTIME_CALL
 
   // Platform-specific code for loading variables.
+  void EmitLoadGlobalSlotCheckExtensions(Slot* slot,
+                                         TypeofState typeof_state,
+                                         Label* slow);
+  MemOperand ContextSlotOperandCheckExtensions(Slot* slot, Label* slow);
+  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
+                                       TypeofState typeof_state,
+                                       Label* slow,
+                                       Label* done);
   void EmitVariableLoad(Variable* expr, Expression::Context context);
 
   // Platform-specific support for allocating a new closure based on
@@ -500,6 +508,9 @@
   // in v8::internal::Context.
   void LoadContextField(Register dst, int context_index);
 
+  // Create an operand for a context field.
+  MemOperand ContextOperand(Register context, int context_index);
+
   // AST node visit functions.
 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
   AST_NODE_LIST(DECLARE_VISIT)
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 2565acb..eef307d 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -860,9 +860,14 @@
 
 
 void Assembler::and_(Register dst, int32_t imm32) {
+  and_(dst, Immediate(imm32));
+}
+
+
+void Assembler::and_(Register dst, const Immediate& x) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_arith(4, Operand(dst), Immediate(imm32));
+  emit_arith(4, Operand(dst), x);
 }
 
 
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 8a5a4c5..928f172 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -577,6 +577,7 @@
   void add(const Operand& dst, const Immediate& x);
 
   void and_(Register dst, int32_t imm32);
+  void and_(Register dst, const Immediate& x);
   void and_(Register dst, const Operand& src);
   void and_(const Operand& src, Register dst);
   void and_(const Operand& dst, const Immediate& x);
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 1631b04..3d1653c 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -514,7 +514,7 @@
       int context_chain_length =
           scope()->ContextChainLength(slot->var()->scope());
       __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -574,19 +574,17 @@
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ mov(ebx,
-                 CodeGenerator::ContextOperand(esi, Context::FCONTEXT_INDEX));
+          __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
           __ cmp(ebx, Operand(esi));
           __ Check(equal, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
-          __ mov(CodeGenerator::ContextOperand(esi, slot->index()),
+          __ mov(ContextOperand(esi, slot->index()),
                  Immediate(Factory::the_hole_value()));
           // No write barrier since the hole value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ mov(CodeGenerator::ContextOperand(esi, slot->index()),
-                 result_register());
+          __ mov(ContextOperand(esi, slot->index()), result_register());
           int offset = Context::SlotOffset(slot->index());
           __ mov(ebx, esi);
           __ RecordWrite(ebx, offset, result_register(), ecx);
@@ -758,13 +756,57 @@
   __ bind(&done_convert);
   __ push(eax);
 
-  // TODO(kasperl): Check cache validity in generated code. This is a
-  // fast case for the JSObject::IsSimpleEnum cache validity
-  // checks. If we cannot guarantee cache validity, call the runtime
-  // system to check cache validity or get the property names in a
-  // fixed array.
+  // Check cache validity in generated code. This is a fast case for
+  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
+  // guarantee cache validity, call the runtime system to check cache
+  // validity or get the property names in a fixed array.
+  Label next, call_runtime;
+  __ mov(ecx, eax);
+  __ bind(&next);
+
+  // Check that there are no elements.  Register ecx contains the
+  // current JS object we've reached through the prototype chain.
+  __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
+         Factory::empty_fixed_array());
+  __ j(not_equal, &call_runtime);
+
+  // Check that instance descriptors are not empty so that we can
+  // check for an enum cache.  Leave the map in ebx for the subsequent
+  // prototype load.
+  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
+  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
+  __ cmp(edx, Factory::empty_descriptor_array());
+  __ j(equal, &call_runtime);
+
+  // Check that there is an enum cache in the non-empty instance
+  // descriptors (edx).  This is the case if the next enumeration
+  // index field does not contain a smi.
+  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
+  __ test(edx, Immediate(kSmiTagMask));
+  __ j(zero, &call_runtime);
+
+  // For all objects but the receiver, check that the cache is empty.
+  Label check_prototype;
+  __ cmp(ecx, Operand(eax));
+  __ j(equal, &check_prototype);
+  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+  __ cmp(edx, Factory::empty_fixed_array());
+  __ j(not_equal, &call_runtime);
+
+  // Load the prototype from the map and loop if non-null.
+  __ bind(&check_prototype);
+  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
+  __ cmp(ecx, Factory::null_value());
+  __ j(not_equal, &next);
+
+  // The enum cache is valid.  Load the map of the object being
+  // iterated over and use the cache for the iteration.
+  Label use_cache;
+  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
+  __ jmp(&use_cache);
 
   // Get the set of properties to enumerate.
+  __ bind(&call_runtime);
   __ push(eax);  // Duplicate the enumerable object on the stack.
   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
 
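A hedged sketch of the JavaScript the inline check above serves: a for-in
over an object whose map has a valid enum cache and whose prototype chain
is element-free stays on the fast path; anything else takes call_runtime:

    var o = {a: 1, b: 2};
    for (var k in o) print(k);  // "a", "b" straight from the enum cache.
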
@@ -776,6 +818,7 @@
   __ j(not_equal, &fixed_array);
 
   // We got a map in register eax. Get the enumeration cache from it.
+  __ bind(&use_cache);
   __ mov(ecx, FieldOperand(eax, Map::kInstanceDescriptorsOffset));
   __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
   __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
@@ -885,6 +928,151 @@
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register context = esi;
+  Register temp = edx;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
+               Immediate(0));
+        __ j(not_equal, slow);
+      }
+      // Load next context in chain.
+      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering esi.
+      context = temp;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.  If we have reached an eval scope, we check
+    // all extensions from this point.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s != NULL && s->is_eval_scope()) {
+    // Loop up the context chain.  There is no frame effect so it is
+    // safe to use raw labels here.
+    Label next, fast;
+    if (!context.is(temp)) {
+      __ mov(temp, context);
+    }
+    __ bind(&next);
+    // Terminate at global context.
+    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
+           Immediate(Factory::global_context_map()));
+    __ j(equal, &fast);
+    // Check that extension is NULL.
+    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
+    __ j(not_equal, slow);
+    // Load next context in chain.
+    __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
+    __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
+    __ jmp(&next);
+    __ bind(&fast);
+  }
+
+  // All extension objects were empty and it is safe to use a global
+  // load IC call.
+  __ mov(eax, CodeGenerator::GlobalObject());
+  __ mov(ecx, slot->var()->name());
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  __ call(ic, mode);
+}
+
+
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
+    Slot* slot,
+    Label* slow) {
+  ASSERT(slot->type() == Slot::CONTEXT);
+  Register context = esi;
+  Register temp = ebx;
+
+  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
+               Immediate(0));
+        __ j(not_equal, slow);
+      }
+      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering esi.
+      context = temp;
+    }
+  }
+  // Check that last extension is NULL.
+  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
+  __ j(not_equal, slow);
+  __ mov(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
+  return ContextOperand(temp, slot->index());
+}
+
+
+void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow,
+    Label* done) {
+  // Generate fast-case code for variables that might be shadowed by
+  // eval-introduced variables.  Eval is used a lot without
+  // introducing variables.  In those cases, we do not want to
+  // perform a runtime call for all variables in the scope
+  // containing the eval.
+  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+    __ jmp(done);
+  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
+    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
+    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
+    if (potential_slot != NULL) {
+      // Generate fast case for locals that rewrite to slots.
+      __ mov(eax,
+             ContextSlotOperandCheckExtensions(potential_slot, slow));
+      if (potential_slot->var()->mode() == Variable::CONST) {
+        __ cmp(eax, Factory::the_hole_value());
+        __ j(not_equal, done);
+        __ mov(eax, Factory::undefined_value());
+      }
+      __ jmp(done);
+    } else if (rewrite != NULL) {
+      // Generate fast case for calls of an argument function.
+      Property* property = rewrite->AsProperty();
+      if (property != NULL) {
+        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+        Literal* key_literal = property->key()->AsLiteral();
+        if (obj_proxy != NULL &&
+            key_literal != NULL &&
+            obj_proxy->IsArguments() &&
+            key_literal->handle()->IsSmi()) {
+          // Load arguments object if there are no eval-introduced
+          // variables. Then load the argument from the arguments
+          // object using keyed load.
+          __ mov(edx,
+                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
+                                                   slow));
+          __ mov(eax, Immediate(key_literal->handle()));
+          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          __ call(ic, RelocInfo::CODE_TARGET);
+          __ jmp(done);
+        }
+      }
+    }
+  }
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -909,10 +1097,19 @@
     Apply(context, eax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ push(esi);  // Context.
     __ push(Immediate(var->name()));
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
+    __ bind(&done);
+
     Apply(context, eax);
 
   } else if (slot != NULL) {
@@ -1953,14 +2150,40 @@
     EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
   } else if (var != NULL && var->slot() != NULL &&
              var->slot()->type() == Slot::LOOKUP) {
-    // Call to a lookup slot (dynamically introduced variable).  Call the
-    // runtime to find the function to call (returned in eax) and the object
-    // holding it (returned in edx).
+    // Call to a lookup slot (dynamically introduced variable).
+    Label slow, done;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(var->slot(),
+                                    NOT_INSIDE_TYPEOF,
+                                    &slow,
+                                    &done);
+
+    __ bind(&slow);
+    // Call the runtime to find the function to call (returned in eax)
+    // and the object holding it (returned in edx).
     __ push(context_register());
     __ push(Immediate(var->name()));
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     __ push(eax);  // Function.
     __ push(edx);  // Receiver.
+
+    // If fast case code has been generated, emit code to push the
+    // function and receiver and have the slow path jump around this
+    // code.
+    if (done.is_linked()) {
+      Label call;
+      __ jmp(&call);
+      __ bind(&done);
+      // Push function.
+      __ push(eax);
+      // Push global receiver.
+      __ mov(ebx, CodeGenerator::GlobalObject());
+      __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
+      __ bind(&call);
+    }
+
     EmitCallWithStub(expr);
   } else if (fun->AsProperty() != NULL) {
     // Call to an object property.
@@ -2781,12 +3004,10 @@
   Register key = eax;
   Register cache = ebx;
   Register tmp = ecx;
-  __ mov(cache, CodeGenerator::ContextOperand(esi, Context::GLOBAL_INDEX));
+  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
   __ mov(cache,
          FieldOperand(cache, GlobalObject::kGlobalContextOffset));
-  __ mov(cache,
-         CodeGenerator::ContextOperand(
-             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ mov(cache,
          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
 
@@ -3266,9 +3487,19 @@
   } else if (proxy != NULL &&
              proxy->var()->slot() != NULL &&
              proxy->var()->slot()->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    Slot* slot = proxy->var()->slot();
+    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     __ push(esi);
     __ push(Immediate(proxy->name()));
     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+    __ bind(&done);
+
     if (where == kStack) __ push(eax);
   } else {
     // This expression cannot throw a reference error at the top level.
@@ -3512,7 +3743,7 @@
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ mov(dst, CodeGenerator::ContextOperand(esi, context_index));
+  __ mov(dst, ContextOperand(esi, context_index));
 }
 
 
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 7fc3f81..828e71a 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1255,6 +1255,61 @@
 }
 
 
+void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
+                                                   JSObject* holder,
+                                                   String* name,
+                                                   Label* miss) {
+  ASSERT(holder->IsGlobalObject());
+
+  // Get the number of arguments.
+  const int argc = arguments().immediate();
+
+  // Get the receiver from the stack.
+  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
+
+  // If the object is the holder, then we know it's a global object,
+  // which can only happen for contextual calls.  In this case, the
+  // receiver cannot be a smi.
+  if (object != holder) {
+    __ test(edx, Immediate(kSmiTagMask));
+    __ j(zero, miss, not_taken);
+  }
+
+  // Check that the maps haven't changed.
+  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
+}
+
+
+void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
+                                                    JSFunction* function,
+                                                    Label* miss) {
+  // Get the value from the cell.
+  __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+  __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
+
+  // Check that the cell contains the same function.
+  if (Heap::InNewSpace(function)) {
+    // We can't embed a pointer to a function in new space so we have
+    // to verify that the shared function info is unchanged. This has
+    // the nice side effect that multiple closures based on the same
+    // function can all use this call IC. Before we load through the
+    // function, we have to verify that it still is a function.
+    __ test(edi, Immediate(kSmiTagMask));
+    __ j(zero, miss, not_taken);
+    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
+    __ j(not_equal, miss, not_taken);
+
+    // Check the shared function info. Make sure it hasn't changed.
+    __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
+           Immediate(Handle<SharedFunctionInfo>(function->shared())));
+    __ j(not_equal, miss, not_taken);
+  } else {
+    __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
+    __ j(not_equal, miss, not_taken);
+  }
+}
+
+
 Object* CallStubCompiler::GenerateMissBranch() {
   Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
   if (obj->IsFailure()) return obj;
@@ -1320,9 +1375,9 @@
 
 Object* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                JSObject* holder,
+                                               JSGlobalPropertyCell* cell,
                                                JSFunction* function,
-                                               String* name,
-                                               CheckType check) {
+                                               String* name) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -1330,12 +1385,9 @@
   //  -- ...
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
-  ASSERT(check == RECEIVER_MAP_CHECK);
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss;
 
@@ -1469,9 +1521,9 @@
 
 Object* CallStubCompiler::CompileArrayPopCall(Object* object,
                                               JSObject* holder,
+                                              JSGlobalPropertyCell* cell,
                                               JSFunction* function,
-                                              String* name,
-                                              CheckType check) {
+                                              String* name) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -1479,12 +1531,9 @@
   //  -- ...
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
-  ASSERT(check == RECEIVER_MAP_CHECK);
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss, return_undefined, call_builtin;
 
@@ -1551,11 +1600,12 @@
 }
 
 
-Object* CallStubCompiler::CompileStringCharCodeAtCall(Object* object,
-                                                      JSObject* holder,
-                                                      JSFunction* function,
-                                                      String* name,
-                                                      CheckType check) {
+Object* CallStubCompiler::CompileStringCharCodeAtCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
   // ----------- S t a t e -------------
   //  -- ecx                 : function name
   //  -- esp[0]              : return address
@@ -1565,7 +1615,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1621,9 +1671,9 @@
 
 Object* CallStubCompiler::CompileStringCharAtCall(Object* object,
                                                   JSObject* holder,
+                                                  JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
-                                                  String* name,
-                                                  CheckType check) {
+                                                  String* name) {
   // ----------- S t a t e -------------
   //  -- ecx                 : function name
   //  -- esp[0]              : return address
@@ -1633,7 +1683,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1690,6 +1740,79 @@
 }
 
 
+Object* CallStubCompiler::CompileStringFromCharCodeCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
+  // ----------- S t a t e -------------
+  //  -- ecx                 : function name
+  //  -- esp[0]              : return address
+  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
+  //  -- ...
+  //  -- esp[(argc + 1) * 4] : receiver
+  // -----------------------------------
+
+  const int argc = arguments().immediate();
+
+  // If the object is not a JSObject or we got an unexpected number of
+  // arguments, bail out to the regular call.
+  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+
+  Label miss;
+  GenerateNameCheck(name, &miss);
+
+  if (cell == NULL) {
+    __ mov(edx, Operand(esp, 2 * kPointerSize));
+
+    STATIC_ASSERT(kSmiTag == 0);
+    __ test(edx, Immediate(kSmiTagMask));
+    __ j(zero, &miss);
+
+    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
+                    &miss);
+  } else {
+    ASSERT(cell->value() == function);
+    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+    GenerateLoadFunctionFromCell(cell, function, &miss);
+  }
+
+  // Load the char code argument.
+  Register code = ebx;
+  __ mov(code, Operand(esp, 1 * kPointerSize));
+
+  // Check that the code is a smi.
+  Label slow;
+  STATIC_ASSERT(kSmiTag == 0);
+  __ test(code, Immediate(kSmiTagMask));
+  __ j(not_zero, &slow);
+
+  // Convert the smi code to uint16.
+  __ and_(code, Immediate(Smi::FromInt(0xffff)));
+
+  StringCharFromCodeGenerator char_from_code_generator(code, eax);
+  char_from_code_generator.GenerateFast(masm());
+  __ ret(2 * kPointerSize);
+
+  ICRuntimeCallHelper call_helper;
+  char_from_code_generator.GenerateSlow(masm(), call_helper);
+
+  // Tail call the full function. We do not have to patch the receiver
+  // because the function makes no use of it.
+  __ bind(&slow);
+  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+  __ bind(&miss);
+  // ecx: function name.
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
+
+  // Return the generated code.
+  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+}
+
+
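+
+// Note on the fast case above: the stub handles exactly one argument and
+// applies ToUint16 to it via the Smi mask. A minimal sketch of that value
+// computation in plain C++ (the helper name is illustrative, not part of
+// the patch):
+//
+//   // Mirrors the and_(code, Immediate(Smi::FromInt(0xffff))) step: the
+//   // char code argument is truncated to uint16, as ECMAScript's
+//   // String.fromCharCode requires.
+//   static inline uint16_t FromCharCodeArgument(int untagged_code) {
+//     return static_cast<uint16_t>(untagged_code & 0xffff);
+//   }
+//   // e.g. FromCharCodeArgument(0x10041) == 0x0041, i.e. "A".
+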
 Object* CallStubCompiler::CompileCallConstant(Object* object,
                                               JSObject* holder,
                                               JSFunction* function,
@@ -1706,12 +1829,10 @@
   SharedFunctionInfo* function_info = function->shared();
   if (function_info->HasCustomCallGenerator()) {
     const int id = function_info->custom_call_generator_id();
-    Object* result =
-        CompileCustomCall(id, object, holder, function, name, check);
+    Object* result = CompileCustomCall(
+        id, object, holder, NULL, function, name);
     // A result of undefined means bail out to the regular compiler.
-    if (!result->IsUndefined()) {
-      return result;
-    }
+    if (!result->IsUndefined()) return result;
   }
 
   Label miss_in_smi_check;
@@ -1922,6 +2043,16 @@
   //  -- ...
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
+
+  SharedFunctionInfo* function_info = function->shared();
+  if (function_info->HasCustomCallGenerator()) {
+    const int id = function_info->custom_call_generator_id();
+    Object* result = CompileCustomCall(
+        id, object, holder, cell, function, name);
+    // A result of undefined means bail out to the regular compiler.
+    if (!result->IsUndefined()) return result;
+  }
+
   Label miss;
 
   GenerateNameCheck(name, &miss);
@@ -1929,44 +2060,9 @@
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  // Get the receiver from the stack.
-  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
+  GenerateGlobalReceiverCheck(object, holder, name, &miss);
 
-  // If the object is the holder then we know that it's a global
-  // object which can only happen for contextual calls. In this case,
-  // the receiver cannot be a smi.
-  if (object != holder) {
-    __ test(edx, Immediate(kSmiTagMask));
-    __ j(zero, &miss, not_taken);
-  }
-
-  // Check that the maps haven't changed.
-  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, &miss);
-
-  // Get the value from the cell.
-  __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
-  __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
-
-  // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
-    // We can't embed a pointer to a function in new space so we have
-    // to verify that the shared function info is unchanged. This has
-    // the nice side effect that multiple closures based on the same
-    // function can all use this call IC. Before we load through the
-    // function, we have to verify that it still is a function.
-    __ test(edi, Immediate(kSmiTagMask));
-    __ j(zero, &miss, not_taken);
-    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
-    __ j(not_equal, &miss, not_taken);
-
-    // Check the shared function info. Make sure it hasn't changed.
-    __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
-           Immediate(Handle<SharedFunctionInfo>(function->shared())));
-    __ j(not_equal, &miss, not_taken);
-  } else {
-    __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
-    __ j(not_equal, &miss, not_taken);
-  }
+  GenerateLoadFunctionFromCell(cell, function, &miss);
 
   // Patch the receiver on the stack with the global proxy.
   if (object->IsGlobalObject()) {
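The rewritten CompileCallGlobal above funnels its checks through the two
helpers declared in stub-cache.h further down. A hedged restatement of the
convention shared by the custom-call generators in this patch:

    // cell == NULL : constant-function call site; the generators check
    //                the receiver's prototype chain (CheckPrototypes).
    // cell != NULL : call through a global property; the generators check
    //                the global receiver and reload the function from the
    //                cell (GenerateLoadFunctionFromCell), asserting
    //                cell->value() == function at compile time.
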
diff --git a/src/liveedit.cc b/src/liveedit.cc
index 5a8749e..41523a8 100644
--- a/src/liveedit.cc
+++ b/src/liveedit.cc
@@ -617,9 +617,33 @@
     current_parent_index_ = info.GetParentIndex();
   }
 
-// TODO(LiveEdit): Move private method below.
-//     This private section was created here to avoid moving the function
-//      to keep already complex diff simpler.
+ public:
+  // Saves only function code, because for a script function we
+  // may never create a SharedFunctionInfo object.
+  void FunctionCode(Handle<Code> function_code) {
+    FunctionInfoWrapper info =
+        FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
+    info.SetFunctionCode(function_code, Handle<Object>(Heap::null_value()));
+  }
+
+  // Saves full information about a function: its code, its scope info
+  // and a SharedFunctionInfo object.
+  void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope) {
+    if (!shared->IsSharedFunctionInfo()) {
+      return;
+    }
+    FunctionInfoWrapper info =
+        FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
+    info.SetFunctionCode(Handle<Code>(shared->code()),
+        Handle<Object>(shared->scope_info()));
+    info.SetSharedFunctionInfo(shared);
+
+    Handle<Object> scope_info_list(SerializeFunctionScope(scope));
+    info.SetOuterScopeInfo(scope_info_list);
+  }
+
+  Handle<JSArray> GetResult() { return result_; }
+
  private:
   Object* SerializeFunctionScope(Scope* scope) {
     HandleScope handle_scope;
@@ -676,36 +700,6 @@
     return *scope_info_list;
   }
 
- public:
-  // Saves only function code, because for a script function we
-  // may never create a SharedFunctionInfo object.
-  void FunctionCode(Handle<Code> function_code) {
-    FunctionInfoWrapper info =
-        FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
-    info.SetFunctionCode(function_code, Handle<Object>(Heap::null_value()));
-  }
-
-  // Saves full information about a function: its code, its scope info
-  // and a SharedFunctionInfo object.
-  void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope) {
-    if (!shared->IsSharedFunctionInfo()) {
-      return;
-    }
-    FunctionInfoWrapper info =
-        FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
-    info.SetFunctionCode(Handle<Code>(shared->code()),
-        Handle<Object>(shared->scope_info()));
-    info.SetSharedFunctionInfo(shared);
-
-    Handle<Object> scope_info_list(SerializeFunctionScope(scope));
-    info.SetOuterScopeInfo(scope_info_list);
-  }
-
-  Handle<JSArray> GetResult() {
-    return result_;
-  }
-
- private:
   Handle<JSArray> result_;
   int len_;
   int current_parent_index_;
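The liveedit.cc hunks only move FunctionCode, FunctionInfo and GetResult
ahead of the private section; their behavior is unchanged. A hedged usage
sketch (the builder variable and the call order are assumptions, not shown
in this patch):

    // For each visited function, save either code-only or full info,
    // then collect the serialized result array.
    builder.FunctionCode(code);           // script function: code only
    builder.FunctionInfo(shared, scope);  // compiled function: full info
    Handle<JSArray> packed = builder.GetResult();
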
diff --git a/src/parser.cc b/src/parser.cc
index 7667e89..11e2eb5 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -872,11 +872,14 @@
   // Records the occurrence of a function.
   virtual FunctionEntry LogFunction(int start) { return FunctionEntry(); }
   virtual void LogSymbol(int start, Vector<const char> symbol) {}
+  virtual void LogError() { }
   // Return the current position in the function entry log.
   virtual int function_position() { return 0; }
   virtual int symbol_position() { return 0; }
   virtual int symbol_ids() { return 0; }
-  virtual void LogError() { }
+  virtual Vector<unsigned> ExtractData() {
+    return Vector<unsigned>();
+  }
 };
 
 
@@ -889,9 +892,14 @@
 
   virtual Handle<String> LookupSymbol(int symbol_id,
                                       Vector<const char> string) {
-    // If there is no preparse data, we have no simpler way to identify similar
-    // symbols.
-    if (symbol_id < 0) return Factory::LookupSymbol(string);
+    // The length of the symbol cache is the number of identified symbols.
+    // If the symbol id is larger than that, or negative, it is not a
+    // cached symbol. This can also happen if there is no preparser symbol
+    // data, even if there is other preparser data.
+    if (static_cast<unsigned>(symbol_id)
+        >= static_cast<unsigned>(symbol_cache_.length())) {
+      return Factory::LookupSymbol(string);
+    }
     return LookupCachedSymbol(symbol_id, string);
   }
 
@@ -933,10 +941,55 @@
 };
 
 
-class ParserRecorder: public ParserLog {
+// Record only functions.
+class PartialParserRecorder: public ParserLog {
  public:
-  ParserRecorder();
+  PartialParserRecorder();
   virtual FunctionEntry LogFunction(int start);
+
+  virtual int function_position() { return function_store_.size(); }
+
+  virtual void LogError() { }
+
+  virtual void LogMessage(Scanner::Location loc,
+                          const char* message,
+                          Vector<const char*> args);
+
+  virtual Vector<unsigned> ExtractData() {
+    int function_size = function_store_.size();
+    int total_size = ScriptDataImpl::kHeaderSize + function_size;
+    Vector<unsigned> data = Vector<unsigned>::New(total_size);
+    preamble_[ScriptDataImpl::kFunctionsSizeOffset] = function_size;
+    preamble_[ScriptDataImpl::kSymbolCountOffset] = 0;
+    memcpy(data.start(), preamble_, sizeof(preamble_));
+    int symbol_start = ScriptDataImpl::kHeaderSize + function_size;
+    if (function_size > 0) {
+      function_store_.WriteTo(data.SubVector(ScriptDataImpl::kHeaderSize,
+                                             symbol_start));
+    }
+    return data;
+  }
+
+ protected:
+  bool has_error() {
+    return static_cast<bool>(preamble_[ScriptDataImpl::kHasErrorOffset]);
+  }
+
+  void WriteString(Vector<const char> str);
+
+  Collector<unsigned> function_store_;
+  unsigned preamble_[ScriptDataImpl::kHeaderSize];
+#ifdef DEBUG
+  int prev_start;
+#endif
+};
+
+
+// Record both functions and symbols.
+class CompleteParserRecorder: public PartialParserRecorder {
+ public:
+  CompleteParserRecorder();
+
   virtual void LogSymbol(int start, Vector<const char> literal) {
     int hash = vector_hash(literal);
     HashMap::Entry* entry = symbol_table_.Lookup(&literal, hash, true);
@@ -953,11 +1006,8 @@
       symbol_store_.Add(id - 1);
     }
   }
-  virtual void LogError() { }
-  virtual void LogMessage(Scanner::Location loc,
-                          const char* message,
-                          Vector<const char*> args);
-  Vector<unsigned> ExtractData() {
+
+  virtual Vector<unsigned> ExtractData() {
     int function_size = function_store_.size();
     int symbol_size = symbol_store_.size();
     int total_size = ScriptDataImpl::kHeaderSize + function_size + symbol_size;
@@ -976,11 +1026,9 @@
     return data;
   }
 
-  virtual int function_position() { return function_store_.size(); }
   virtual int symbol_position() { return symbol_store_.size(); }
   virtual int symbol_ids() { return symbol_id_; }
  private:
-  Collector<unsigned> function_store_;
   Collector<unsigned> symbol_store_;
   Collector<Vector<const char> > symbol_entries_;
   HashMap symbol_table_;
@@ -1004,16 +1052,6 @@
     if (string2->length() != length) return false;
     return memcmp(string1->start(), string2->start(), length) == 0;
   }
-
-  unsigned preamble_[ScriptDataImpl::kHeaderSize];
-#ifdef DEBUG
-  int prev_start;
-#endif
-
-  bool has_error() {
-    return static_cast<bool>(preamble_[ScriptDataImpl::kHasErrorOffset]);
-  }
-  void WriteString(Vector<const char> str);
 };
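
For reference, the buffer layout produced by the two ExtractData variants
above, reconstructed from their code (a sketch, not an authoritative format
specification):

    // [0, kHeaderSize)           header: magic, version, has-error flag,
    //                            functions size, symbol count, size
    // [kHeaderSize, +fs)         function entries (both recorders)
    // [kHeaderSize + fs, total)  symbol data; CompleteParserRecorder only,
    //                            the partial recorder writes none and
    //                            stores 0 at kSymbolCountOffset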
 
 
@@ -1088,15 +1126,8 @@
 }
 
 
-ParserRecorder::ParserRecorder()
-  : function_store_(0),
-    symbol_store_(0),
-    symbol_entries_(0),
-    symbol_table_(vector_compare),
-    symbol_id_(0) {
-#ifdef DEBUG
-  prev_start = -1;
-#endif
+
+PartialParserRecorder::PartialParserRecorder() : function_store_(0) {
   preamble_[ScriptDataImpl::kMagicOffset] = ScriptDataImpl::kMagicNumber;
   preamble_[ScriptDataImpl::kVersionOffset] = ScriptDataImpl::kCurrentVersion;
   preamble_[ScriptDataImpl::kHasErrorOffset] = false;
@@ -1104,10 +1135,22 @@
   preamble_[ScriptDataImpl::kSymbolCountOffset] = 0;
   preamble_[ScriptDataImpl::kSizeOffset] = 0;
   ASSERT_EQ(6, ScriptDataImpl::kHeaderSize);
+#ifdef DEBUG
+  prev_start = -1;
+#endif
 }
 
 
-void ParserRecorder::WriteString(Vector<const char> str) {
+CompleteParserRecorder::CompleteParserRecorder()
+    : PartialParserRecorder(),
+      symbol_store_(0),
+      symbol_entries_(0),
+      symbol_table_(vector_compare),
+      symbol_id_(0) {
+}
+
+
+void PartialParserRecorder::WriteString(Vector<const char> str) {
   function_store_.Add(str.length());
   for (int i = 0; i < str.length(); i++) {
     function_store_.Add(str[i]);
@@ -1127,8 +1170,9 @@
 }
 
 
-void ParserRecorder::LogMessage(Scanner::Location loc, const char* message,
-                                Vector<const char*> args) {
+void PartialParserRecorder::LogMessage(Scanner::Location loc,
+                                       const char* message,
+                                       Vector<const char*> args) {
   if (has_error()) return;
   preamble_[ScriptDataImpl::kHasErrorOffset] = true;
   function_store_.Reset();
@@ -1183,7 +1227,7 @@
 }
 
 
-FunctionEntry ParserRecorder::LogFunction(int start) {
+FunctionEntry PartialParserRecorder::LogFunction(int start) {
 #ifdef DEBUG
   ASSERT(start > prev_start);
   prev_start = start;
@@ -1206,7 +1250,7 @@
                factory(),
                log(),
                pre_data),
-        factory_(pre_data ? pre_data->symbol_count() : 16) { }
+        factory_(pre_data ? pre_data->symbol_count() : 0) { }
   virtual void ReportMessageAt(Scanner::Location loc, const char* message,
                                Vector<const char*> args);
   virtual VariableProxy* Declare(Handle<String> name, Variable::Mode mode,
@@ -1223,23 +1267,44 @@
 class PreParser : public Parser {
  public:
   PreParser(Handle<Script> script, bool allow_natives_syntax,
-            v8::Extension* extension)
+            v8::Extension* extension, ParserLog* recorder)
       : Parser(script, allow_natives_syntax, extension, PREPARSE,
-               factory(), recorder(), NULL),
+               factory(), recorder, NULL),
         factory_(true) { }
   virtual void ReportMessageAt(Scanner::Location loc, const char* message,
                                Vector<const char*> args);
   virtual VariableProxy* Declare(Handle<String> name, Variable::Mode mode,
                                  FunctionLiteral* fun, bool resolve, bool* ok);
   ParserFactory* factory() { return &factory_; }
-  ParserRecorder* recorder() { return &recorder_; }
+  virtual PartialParserRecorder* recorder() = 0;
 
  private:
-  ParserRecorder recorder_;
   ParserFactory factory_;
 };
 
 
+class CompletePreParser : public PreParser {
+ public:
+  CompletePreParser(Handle<Script> script, bool allow_natives_syntax,
+                    v8::Extension* extension)
+      : PreParser(script, allow_natives_syntax, extension, &recorder_) { }
+  virtual PartialParserRecorder* recorder() { return &recorder_; }
+ private:
+  CompleteParserRecorder recorder_;
+};
+
+
+class PartialPreParser : public PreParser {
+ public:
+  PartialPreParser(Handle<Script> script, bool allow_natives_syntax,
+                   v8::Extension* extension)
+      : PreParser(script, allow_natives_syntax, extension, &recorder_) { }
+  virtual PartialParserRecorder* recorder() { return &recorder_; }
+ private:
+  PartialParserRecorder recorder_;
+};
+
+
 Scope* AstBuildingParserFactory::NewScope(Scope* parent, Scope::Type type,
                                           bool inside_with) {
   Scope* result = new Scope(parent, type);
@@ -5413,6 +5478,25 @@
 }
 
 
+// Preparse, but only collect the data that pays off even when the
+// preparse data is used just once.
+ScriptDataImpl* PartialPreParse(Handle<String> source,
+                                unibrow::CharacterStream* stream,
+                                v8::Extension* extension) {
+  Handle<Script> no_script;
+  bool allow_natives_syntax =
+      always_allow_natives_syntax ||
+      FLAG_allow_natives_syntax ||
+      Bootstrapper::IsActive();
+  PartialPreParser parser(no_script, allow_natives_syntax, extension);
+  if (!parser.PreParseProgram(source, stream)) return NULL;
+  // Extract the accumulated data from the recorder as a single
+  // contiguous vector that we are responsible for disposing.
+  Vector<unsigned> store = parser.recorder()->ExtractData();
+  return new ScriptDataImpl(store);
+}
+
+
 ScriptDataImpl* PreParse(Handle<String> source,
                          unibrow::CharacterStream* stream,
                          v8::Extension* extension) {
@@ -5421,7 +5505,7 @@
       always_allow_natives_syntax ||
       FLAG_allow_natives_syntax ||
       Bootstrapper::IsActive();
-  PreParser parser(no_script, allow_natives_syntax, extension);
+  CompletePreParser parser(no_script, allow_natives_syntax, extension);
   if (!parser.PreParseProgram(source, stream)) return NULL;
   // Extract the accumulated data from the recorder as a single
   // contiguous vector that we are responsible for disposing.
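Callers thus get two entry points with the same signature. A hedged sketch
of the intended selection (the will_reuse_data flag is illustrative; the
actual call sites are not part of this patch):

    // Full preparse data (functions plus symbols) only pays off when the
    // data is cached and reused; otherwise collect function entries only.
    ScriptDataImpl* pre_data = will_reuse_data
        ? PreParse(source, stream, extension)
        : PartialPreParse(source, stream, extension);
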
diff --git a/src/parser.h b/src/parser.h
index 56412a0..c3e947f 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -212,11 +212,17 @@
                          ScriptDataImpl* pre_data,
                          bool is_json = false);
 
-
+// Generic preparser generating full preparse data.
 ScriptDataImpl* PreParse(Handle<String> source,
                          unibrow::CharacterStream* stream,
                          v8::Extension* extension);
 
+// Preparser that only does the preprocessing that pays off when the
+// data is used immediately after parsing.
+ScriptDataImpl* PartialPreParse(Handle<String> source,
+                                unibrow::CharacterStream* stream,
+                                v8::Extension* extension);
+
 
 bool ParseRegExp(FlatStringReader* input,
                  bool multiline,
diff --git a/src/runtime.cc b/src/runtime.cc
index 43a6734..a1f6810 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -47,6 +47,7 @@
 #include "smart-pointer.h"
 #include "stub-cache.h"
 #include "v8threads.h"
+#include "string-search.h"
 
 namespace v8 {
 namespace internal {
@@ -2571,418 +2572,6 @@
 }
 
 
-// Cap on the maximal shift in the Boyer-Moore implementation. By setting a
-// limit, we can fix the size of tables.
-static const int kBMMaxShift = 0xff;
-// Reduce alphabet to this size.
-static const int kBMAlphabetSize = 0x100;
-// For patterns below this length, the skip length of Boyer-Moore is too short
-// to compensate for the algorithmic overhead compared to simple brute force.
-static const int kBMMinPatternLength = 5;
-
-// Holds the two buffers used by Boyer-Moore string search's Good Suffix
-// shift. Only allows the last kBMMaxShift characters of the needle
-// to be indexed.
-class BMGoodSuffixBuffers {
- public:
-  BMGoodSuffixBuffers() {}
-  inline void init(int needle_length) {
-    ASSERT(needle_length > 1);
-    int start = needle_length < kBMMaxShift ? 0 : needle_length - kBMMaxShift;
-    int len = needle_length - start;
-    biased_suffixes_ = suffixes_ - start;
-    biased_good_suffix_shift_ = good_suffix_shift_ - start;
-    for (int i = 0; i <= len; i++) {
-      good_suffix_shift_[i] = len;
-    }
-  }
-  inline int& suffix(int index) {
-    ASSERT(biased_suffixes_ + index >= suffixes_);
-    return biased_suffixes_[index];
-  }
-  inline int& shift(int index) {
-    ASSERT(biased_good_suffix_shift_ + index >= good_suffix_shift_);
-    return biased_good_suffix_shift_[index];
-  }
- private:
-  int suffixes_[kBMMaxShift + 1];
-  int good_suffix_shift_[kBMMaxShift + 1];
-  int* biased_suffixes_;
-  int* biased_good_suffix_shift_;
-  DISALLOW_COPY_AND_ASSIGN(BMGoodSuffixBuffers);
-};
-
-// buffers reused by BoyerMoore
-static int bad_char_occurrence[kBMAlphabetSize];
-static BMGoodSuffixBuffers bmgs_buffers;
-
-// State of the string match tables.
-// SIMPLE: No usable content in the buffers.
-// BOYER_MOORE_HORSPOOL: The bad_char_occurences table has been populated.
-// BOYER_MOORE: The bmgs_buffers tables have also been populated.
-// Whenever starting with a new needle, one should call InitializeStringSearch
-// to determine which search strategy to use, and in the case of a long-needle
-// strategy, the call also initializes the algorithm to SIMPLE.
-enum StringSearchAlgorithm { SIMPLE_SEARCH, BOYER_MOORE_HORSPOOL, BOYER_MOORE };
-static StringSearchAlgorithm algorithm;
-
-
-// Compute the bad-char table for Boyer-Moore in the static buffer.
-template <typename pchar>
-static void BoyerMoorePopulateBadCharTable(Vector<const pchar> pattern) {
-  // Only preprocess at most kBMMaxShift last characters of pattern.
-  int start = pattern.length() < kBMMaxShift ? 0
-                                             : pattern.length() - kBMMaxShift;
-  // Run forwards to populate bad_char_table, so that *last* instance
-  // of character equivalence class is the one registered.
-  // Notice: Doesn't include the last character.
-  int table_size = (sizeof(pchar) == 1) ? String::kMaxAsciiCharCode + 1
-                                        : kBMAlphabetSize;
-  if (start == 0) {  // All patterns less than kBMMaxShift in length.
-    memset(bad_char_occurrence, -1, table_size * sizeof(*bad_char_occurrence));
-  } else {
-    for (int i = 0; i < table_size; i++) {
-      bad_char_occurrence[i] = start - 1;
-    }
-  }
-  for (int i = start; i < pattern.length() - 1; i++) {
-    pchar c = pattern[i];
-    int bucket = (sizeof(pchar) ==1) ? c : c % kBMAlphabetSize;
-    bad_char_occurrence[bucket] = i;
-  }
-}
-
-
-template <typename pchar>
-static void BoyerMoorePopulateGoodSuffixTable(Vector<const pchar> pattern) {
-  int m = pattern.length();
-  int start = m < kBMMaxShift ? 0 : m - kBMMaxShift;
-  int len = m - start;
-  // Compute Good Suffix tables.
-  bmgs_buffers.init(m);
-
-  bmgs_buffers.shift(m-1) = 1;
-  bmgs_buffers.suffix(m) = m + 1;
-  pchar last_char = pattern[m - 1];
-  int suffix = m + 1;
-  for (int i = m; i > start;) {
-    for (pchar c = pattern[i - 1]; suffix <= m && c != pattern[suffix - 1];) {
-      if (bmgs_buffers.shift(suffix) == len) {
-        bmgs_buffers.shift(suffix) = suffix - i;
-      }
-      suffix = bmgs_buffers.suffix(suffix);
-    }
-    i--;
-    suffix--;
-    bmgs_buffers.suffix(i) = suffix;
-    if (suffix == m) {
-      // No suffix to extend, so we check against last_char only.
-      while (i > start && pattern[i - 1] != last_char) {
-        if (bmgs_buffers.shift(m) == len) {
-          bmgs_buffers.shift(m) = m - i;
-        }
-        i--;
-        bmgs_buffers.suffix(i) = m;
-      }
-      if (i > start) {
-        i--;
-        suffix--;
-        bmgs_buffers.suffix(i) = suffix;
-      }
-    }
-  }
-  if (suffix < m) {
-    for (int i = start; i <= m; i++) {
-      if (bmgs_buffers.shift(i) == len) {
-        bmgs_buffers.shift(i) = suffix - start;
-      }
-      if (i == suffix) {
-        suffix = bmgs_buffers.suffix(suffix);
-      }
-    }
-  }
-}
-
-
-template <typename schar, typename pchar>
-static inline int CharOccurrence(int char_code) {
-  if (sizeof(schar) == 1) {
-    return bad_char_occurrence[char_code];
-  }
-  if (sizeof(pchar) == 1) {
-    if (char_code > String::kMaxAsciiCharCode) {
-      return -1;
-    }
-    return bad_char_occurrence[char_code];
-  }
-  return bad_char_occurrence[char_code % kBMAlphabetSize];
-}
-
-
-// Restricted simplified Boyer-Moore string matching.
-// Uses only the bad-shift table of Boyer-Moore and only uses it
-// for the character compared to the last character of the needle.
-template <typename schar, typename pchar>
-static int BoyerMooreHorspool(Vector<const schar> subject,
-                              Vector<const pchar> pattern,
-                              int start_index,
-                              bool* complete) {
-  ASSERT(algorithm <= BOYER_MOORE_HORSPOOL);
-  int n = subject.length();
-  int m = pattern.length();
-
-  int badness = -m;
-
-  // How bad we are doing without a good-suffix table.
-  int idx;  // No matches found prior to this index.
-  pchar last_char = pattern[m - 1];
-  int last_char_shift = m - 1 - CharOccurrence<schar, pchar>(last_char);
-  // Perform search
-  for (idx = start_index; idx <= n - m;) {
-    int j = m - 1;
-    int c;
-    while (last_char != (c = subject[idx + j])) {
-      int bc_occ = CharOccurrence<schar, pchar>(c);
-      int shift = j - bc_occ;
-      idx += shift;
-      badness += 1 - shift;  // at most zero, so badness cannot increase.
-      if (idx > n - m) {
-        *complete = true;
-        return -1;
-      }
-    }
-    j--;
-    while (j >= 0 && pattern[j] == (subject[idx + j])) j--;
-    if (j < 0) {
-      *complete = true;
-      return idx;
-    } else {
-      idx += last_char_shift;
-      // Badness increases by the number of characters we have
-      // checked, and decreases by the number of characters we
-      // can skip by shifting. It's a measure of how we are doing
-      // compared to reading each character exactly once.
-      badness += (m - j) - last_char_shift;
-      if (badness > 0) {
-        *complete = false;
-        return idx;
-      }
-    }
-  }
-  *complete = true;
-  return -1;
-}
-
-
-template <typename schar, typename pchar>
-static int BoyerMooreIndexOf(Vector<const schar> subject,
-                             Vector<const pchar> pattern,
-                             int idx) {
-  ASSERT(algorithm <= BOYER_MOORE);
-  int n = subject.length();
-  int m = pattern.length();
-  // Only preprocess at most kBMMaxShift last characters of pattern.
-  int start = m < kBMMaxShift ? 0 : m - kBMMaxShift;
-
-  pchar last_char = pattern[m - 1];
-  // Continue search from i.
-  while (idx <= n - m) {
-    int j = m - 1;
-    schar c;
-    while (last_char != (c = subject[idx + j])) {
-      int shift = j - CharOccurrence<schar, pchar>(c);
-      idx += shift;
-      if (idx > n - m) {
-        return -1;
-      }
-    }
-    while (j >= 0 && pattern[j] == (c = subject[idx + j])) j--;
-    if (j < 0) {
-      return idx;
-    } else if (j < start) {
-      // we have matched more than our tables allow us to be smart about.
-      // Fall back on BMH shift.
-      idx += m - 1 - CharOccurrence<schar, pchar>(last_char);
-    } else {
-      int gs_shift = bmgs_buffers.shift(j + 1);       // Good suffix shift.
-      int bc_occ = CharOccurrence<schar, pchar>(c);
-      int shift = j - bc_occ;                         // Bad-char shift.
-      if (gs_shift > shift) {
-        shift = gs_shift;
-      }
-      idx += shift;
-    }
-  }
-
-  return -1;
-}
-
-
-// Trivial string search for shorter strings.
-// On return, if "complete" is set to true, the return value is the
-// final result of searching for the patter in the subject.
-// If "complete" is set to false, the return value is the index where
-// further checking should start, i.e., it's guaranteed that the pattern
-// does not occur at a position prior to the returned index.
-template <typename pchar, typename schar>
-static int SimpleIndexOf(Vector<const schar> subject,
-                         Vector<const pchar> pattern,
-                         int idx,
-                         bool* complete) {
-  ASSERT(pattern.length() > 1);
-  // Badness is a count of how much work we have done.  When we have
-  // done enough work we decide it's probably worth switching to a better
-  // algorithm.
-  int badness = -10 - (pattern.length() << 2);
-
-  // We know our pattern is at least 2 characters, we cache the first so
-  // the common case of the first character not matching is faster.
-  pchar pattern_first_char = pattern[0];
-  for (int i = idx, n = subject.length() - pattern.length(); i <= n; i++) {
-    badness++;
-    if (badness > 0) {
-      *complete = false;
-      return i;
-    }
-    if (sizeof(schar) == 1 && sizeof(pchar) == 1) {
-      const schar* pos = reinterpret_cast<const schar*>(
-          memchr(subject.start() + i,
-                 pattern_first_char,
-                 n - i + 1));
-      if (pos == NULL) {
-        *complete = true;
-        return -1;
-      }
-      i = static_cast<int>(pos - subject.start());
-    } else {
-      if (subject[i] != pattern_first_char) continue;
-    }
-    int j = 1;
-    do {
-      if (pattern[j] != subject[i+j]) {
-        break;
-      }
-      j++;
-    } while (j < pattern.length());
-    if (j == pattern.length()) {
-      *complete = true;
-      return i;
-    }
-    badness += j;
-  }
-  *complete = true;
-  return -1;
-}
-
-// Simple indexOf that never bails out. For short patterns only.
-template <typename pchar, typename schar>
-static int SimpleIndexOf(Vector<const schar> subject,
-                         Vector<const pchar> pattern,
-                         int idx) {
-  pchar pattern_first_char = pattern[0];
-  for (int i = idx, n = subject.length() - pattern.length(); i <= n; i++) {
-    if (sizeof(schar) == 1 && sizeof(pchar) == 1) {
-      const schar* pos = reinterpret_cast<const schar*>(
-          memchr(subject.start() + i,
-                 pattern_first_char,
-                 n - i + 1));
-      if (pos == NULL) return -1;
-      i = static_cast<int>(pos - subject.start());
-    } else {
-      if (subject[i] != pattern_first_char) continue;
-    }
-    int j = 1;
-    while (j < pattern.length()) {
-      if (pattern[j] != subject[i+j]) {
-        break;
-      }
-      j++;
-    }
-    if (j == pattern.length()) {
-      return i;
-    }
-  }
-  return -1;
-}
-
-
-// Strategy for searching for a string in another string.
-enum StringSearchStrategy { SEARCH_FAIL, SEARCH_SHORT, SEARCH_LONG };
-
-
-template <typename pchar>
-static inline StringSearchStrategy InitializeStringSearch(
-    Vector<const pchar> pat, bool ascii_subject) {
-  // We have an ASCII haystack and a non-ASCII needle. Check if there
-  // really is a non-ASCII character in the needle and bail out if there
-  // is.
-  if (ascii_subject && sizeof(pchar) > 1) {
-    for (int i = 0; i < pat.length(); i++) {
-      uc16 c = pat[i];
-      if (c > String::kMaxAsciiCharCode) {
-        return SEARCH_FAIL;
-      }
-    }
-  }
-  if (pat.length() < kBMMinPatternLength) {
-    return SEARCH_SHORT;
-  }
-  algorithm = SIMPLE_SEARCH;
-  return SEARCH_LONG;
-}
-
-
-// Dispatch long needle searches to different algorithms.
-template <typename schar, typename pchar>
-static int ComplexIndexOf(Vector<const schar> sub,
-                          Vector<const pchar> pat,
-                          int start_index) {
-  ASSERT(pat.length() >= kBMMinPatternLength);
-  // Try algorithms in order of increasing setup cost and expected performance.
-  bool complete;
-  int idx = start_index;
-  switch (algorithm) {
-    case SIMPLE_SEARCH:
-      idx = SimpleIndexOf(sub, pat, idx, &complete);
-      if (complete) return idx;
-      BoyerMoorePopulateBadCharTable(pat);
-      algorithm = BOYER_MOORE_HORSPOOL;
-      // FALLTHROUGH.
-    case BOYER_MOORE_HORSPOOL:
-      idx = BoyerMooreHorspool(sub, pat, idx, &complete);
-      if (complete) return idx;
-      // Build the Good Suffix table and continue searching.
-      BoyerMoorePopulateGoodSuffixTable(pat);
-      algorithm = BOYER_MOORE;
-      // FALLTHROUGH.
-    case BOYER_MOORE:
-      return BoyerMooreIndexOf(sub, pat, idx);
-  }
-  UNREACHABLE();
-  return -1;
-}
-
-
-// Dispatch to different search strategies for a single search.
-// If searching multiple times on the same needle, the search
-// strategy should only be computed once and then dispatch to different
-// loops.
-template <typename schar, typename pchar>
-static int StringSearch(Vector<const schar> sub,
-                        Vector<const pchar> pat,
-                        int start_index) {
-  bool ascii_subject = (sizeof(schar) == 1);
-  StringSearchStrategy strategy = InitializeStringSearch(pat, ascii_subject);
-  switch (strategy) {
-    case SEARCH_FAIL: return -1;
-    case SEARCH_SHORT: return SimpleIndexOf(sub, pat, start_index);
-    case SEARCH_LONG: return ComplexIndexOf(sub, pat, start_index);
-  }
-  UNREACHABLE();
-  return -1;
-}
-
-
 // Perform string match of pattern on subject, starting at start index.
 // Caller must ensure that 0 <= start_index <= sub->length(),
 // and should check that pat->length() + start_index <= sub->length()
@@ -3042,32 +2631,33 @@
 
 
 template <typename schar, typename pchar>
-static int StringMatchBackwards(Vector<const schar> sub,
-                                Vector<const pchar> pat,
+static int StringMatchBackwards(Vector<const schar> subject,
+                                Vector<const pchar> pattern,
                                 int idx) {
-  ASSERT(pat.length() >= 1);
-  ASSERT(idx + pat.length() <= sub.length());
+  int pattern_length = pattern.length();
+  ASSERT(pattern_length >= 1);
+  ASSERT(idx + pattern_length <= subject.length());
 
   if (sizeof(schar) == 1 && sizeof(pchar) > 1) {
-    for (int i = 0; i < pat.length(); i++) {
-      uc16 c = pat[i];
+    for (int i = 0; i < pattern_length; i++) {
+      uc16 c = pattern[i];
       if (c > String::kMaxAsciiCharCode) {
         return -1;
       }
     }
   }
 
-  pchar pattern_first_char = pat[0];
+  pchar pattern_first_char = pattern[0];
   for (int i = idx; i >= 0; i--) {
-    if (sub[i] != pattern_first_char) continue;
+    if (subject[i] != pattern_first_char) continue;
     int j = 1;
-    while (j < pat.length()) {
-      if (pat[j] != sub[i+j]) {
+    while (j < pattern_length) {
+      if (pattern[j] != subject[i+j]) {
         break;
       }
       j++;
     }
-    if (j == pat.length()) {
+    if (j == pattern_length) {
       return i;
     }
   }
@@ -5167,6 +4757,12 @@
 }
 
 
+// Define storage for buffers declared in header file.
+// TODO(lrn): Remove these when rewriting search code.
+int BMBuffers::bad_char_occurrence[kBMAlphabetSize];
+BMGoodSuffixBuffers BMBuffers::bmgs_buffers;
+
+
 template <typename schar, typename pchar>
 void FindStringIndices(Vector<const schar> subject,
                        Vector<const pchar> pattern,
@@ -7979,15 +7575,17 @@
 }
 
 
-// How many elements does this array have?
+// How many elements does this object/array have?
 static Object* Runtime_EstimateNumberOfElements(Arguments args) {
   ASSERT(args.length() == 1);
-  CONVERT_CHECKED(JSArray, array, args[0]);
-  HeapObject* elements = array->elements();
+  CONVERT_CHECKED(JSObject, object, args[0]);
+  HeapObject* elements = object->elements();
   if (elements->IsDictionary()) {
     return Smi::FromInt(NumberDictionary::cast(elements)->NumberOfElements());
+  } else if (object->IsJSArray()) {
+    return JSArray::cast(object)->length();
   } else {
-    return array->length();
+    return Smi::FromInt(FixedArray::cast(elements)->length());
   }
 }
 
@@ -8019,8 +7617,10 @@
 
 
 // Returns an array that tells you where in the [0, length) interval an array
-// might have elements.  Can either return keys or intervals.  Keys can have
-// gaps in (undefined).  Intervals can also span over some undefined keys.
+// might have elements.  Can either return keys (positive integers),
+// intervals (a pair of a negative integer -start-1 followed by a
+// positive integer giving the length), or undefined values.
+// Intervals can span over some keys that are not in the object.
 static Object* Runtime_GetArrayKeys(Arguments args) {
   ASSERT(args.length() == 2);
   HandleScope scope;
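A hedged sketch of consuming the Runtime_GetArrayKeys encoding described
above (DecodeKeys and VisitKey are illustrative names, and undefined
entries, which the comment also allows, are not handled here):

    // keys[i] >= 0 : a single key.
    // keys[i] <  0 : an interval starting at -keys[i]-1, followed by its
    //                positive length at keys[i + 1].
    static void DecodeKeys(Vector<const int> keys) {
      for (int i = 0; i < keys.length(); ) {
        int value = keys[i];
        if (value >= 0) {
          VisitKey(value);  // a single key
          i += 1;
        } else {
          int start = -value - 1;   // interval start
          int length = keys[i + 1];
          for (int k = start; k < start + length; k++) VisitKey(k);
          i += 2;
        }
      }
    }
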
diff --git a/src/string-search.h b/src/string-search.h
new file mode 100644
index 0000000..d7959c0
--- /dev/null
+++ b/src/string-search.h
@@ -0,0 +1,463 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_STRING_SEARCH_H_
+#define V8_STRING_SEARCH_H_
+
+namespace v8 {
+namespace internal {
+
+
+// Cap on the maximal shift in the Boyer-Moore implementation. By setting a
+// limit, we can fix the size of tables. For a needle longer than this limit,
+// search will not be optimal, since we only build tables for a smaller suffix
+// of the string, which is a safe approximation.
+static const int kBMMaxShift = 250;
+// Reduce alphabet to this size.
+// One of the tables used by Boyer-Moore and Boyer-Moore-Horspool has size
+// proportional to the input alphabet. We reduce the alphabet size by
+// equating input characters modulo a smaller alphabet size. This can
+// make searching less efficient, but it is a safe approximation.
+// For needles using only characters in the same Unicode 256-code point page,
+// there is no search speed degradation.
+static const int kBMAlphabetSize = 256;
+// For patterns below this length, the skip length of Boyer-Moore is too short
+// to compensate for the algorithmic overhead compared to simple brute force.
+static const int kBMMinPatternLength = 7;
+
+// Holds the two buffers used by Boyer-Moore string search's Good Suffix
+// shift. Only allows the last kBMMaxShift characters of the needle
+// to be indexed.
+class BMGoodSuffixBuffers {
+ public:
+  BMGoodSuffixBuffers() {}
+  inline void Initialize(int needle_length) {
+    ASSERT(needle_length > 1);
+    int start = needle_length < kBMMaxShift ? 0 : needle_length - kBMMaxShift;
+    int len = needle_length - start;
+    biased_suffixes_ = suffixes_ - start;
+    biased_good_suffix_shift_ = good_suffix_shift_ - start;
+    for (int i = 0; i <= len; i++) {
+      good_suffix_shift_[i] = len;
+    }
+  }
+  inline int& suffix(int index) {
+    ASSERT(biased_suffixes_ + index >= suffixes_);
+    return biased_suffixes_[index];
+  }
+  inline int& shift(int index) {
+    ASSERT(biased_good_suffix_shift_ + index >= good_suffix_shift_);
+    return biased_good_suffix_shift_[index];
+  }
+ private:
+  int suffixes_[kBMMaxShift + 1];
+  int good_suffix_shift_[kBMMaxShift + 1];
+  int* biased_suffixes_;
+  int* biased_good_suffix_shift_;
+  DISALLOW_COPY_AND_ASSIGN(BMGoodSuffixBuffers);
+};
+
+// Buffers reused by the Boyer-Moore algorithms.
+struct BMBuffers {
+ public:
+  static int bad_char_occurrence[kBMAlphabetSize];
+  static BMGoodSuffixBuffers bmgs_buffers;
+};
+
+// State of the string match tables.
+// SIMPLE_SEARCH: No usable content in the buffers.
+// BOYER_MOORE_HORSPOOL: The bad_char_occurrence table has been populated.
+// BOYER_MOORE: The bmgs_buffers tables have also been populated.
+// Whenever starting with a new needle, one should call InitializeStringSearch
+// to determine which search strategy to use, and in the case of a long-needle
+// strategy, the call also initializes the algorithm to SIMPLE.
+enum StringSearchAlgorithm { SIMPLE_SEARCH, BOYER_MOORE_HORSPOOL, BOYER_MOORE };
+static StringSearchAlgorithm algorithm;
+
+
+// Compute the bad-char table for Boyer-Moore in the static buffer.
+template <typename PatternChar>
+static void BoyerMoorePopulateBadCharTable(Vector<const PatternChar> pattern) {
+  // Only preprocess at most kBMMaxShift last characters of pattern.
+  int start = Max(pattern.length() - kBMMaxShift, 0);
+  // Run forwards to populate bad_char_table, so that *last* instance
+  // of character equivalence class is the one registered.
+  // Notice: Doesn't include the last character.
+  int table_size = (sizeof(PatternChar) == 1) ? String::kMaxAsciiCharCode + 1
+                                              : kBMAlphabetSize;
+  if (start == 0) {  // All patterns less than kBMMaxShift in length.
+    memset(BMBuffers::bad_char_occurrence,
+           -1,
+           table_size * sizeof(*BMBuffers::bad_char_occurrence));
+  } else {
+    for (int i = 0; i < table_size; i++) {
+      BMBuffers::bad_char_occurrence[i] = start - 1;
+    }
+  }
+  for (int i = start; i < pattern.length() - 1; i++) {
+    PatternChar c = pattern[i];
+    int bucket = (sizeof(PatternChar) == 1) ? c : c % kBMAlphabetSize;
+    BMBuffers::bad_char_occurrence[bucket] = i;
+  }
+}
+
+
+template <typename PatternChar>
+static void BoyerMoorePopulateGoodSuffixTable(
+    Vector<const PatternChar> pattern) {
+  int m = pattern.length();
+  int start = m < kBMMaxShift ? 0 : m - kBMMaxShift;
+  int len = m - start;
+  // Compute Good Suffix tables.
+  BMBuffers::bmgs_buffers.Initialize(m);
+
+  BMBuffers::bmgs_buffers.shift(m-1) = 1;
+  BMBuffers::bmgs_buffers.suffix(m) = m + 1;
+  PatternChar last_char = pattern[m - 1];
+  int suffix = m + 1;
+  {
+    int i = m;
+    while (i > start) {
+      PatternChar c = pattern[i - 1];
+      while (suffix <= m && c != pattern[suffix - 1]) {
+        if (BMBuffers::bmgs_buffers.shift(suffix) == len) {
+          BMBuffers::bmgs_buffers.shift(suffix) = suffix - i;
+        }
+        suffix = BMBuffers::bmgs_buffers.suffix(suffix);
+      }
+      BMBuffers::bmgs_buffers.suffix(--i) = --suffix;
+      if (suffix == m) {
+        // No suffix to extend, so we check against last_char only.
+        while ((i > start) && (pattern[i - 1] != last_char)) {
+          if (BMBuffers::bmgs_buffers.shift(m) == len) {
+            BMBuffers::bmgs_buffers.shift(m) = m - i;
+          }
+          BMBuffers::bmgs_buffers.suffix(--i) = m;
+        }
+        if (i > start) {
+          BMBuffers::bmgs_buffers.suffix(--i) = --suffix;
+        }
+      }
+    }
+  }
+  if (suffix < m) {
+    for (int i = start; i <= m; i++) {
+      if (BMBuffers::bmgs_buffers.shift(i) == len) {
+        BMBuffers::bmgs_buffers.shift(i) = suffix - start;
+      }
+      if (i == suffix) {
+        suffix = BMBuffers::bmgs_buffers.suffix(suffix);
+      }
+    }
+  }
+}
+
+
+template <typename SubjectChar, typename PatternChar>
+static inline int CharOccurrence(int char_code) {
+  if (sizeof(SubjectChar) == 1) {
+    return BMBuffers::bad_char_occurrence[char_code];
+  }
+  if (sizeof(PatternChar) == 1) {
+    if (char_code > String::kMaxAsciiCharCode) {
+      return -1;
+    }
+    return BMBuffers::bad_char_occurrence[char_code];
+  }
+  return BMBuffers::bad_char_occurrence[char_code % kBMAlphabetSize];
+}
+
+
+// Restricted simplified Boyer-Moore string matching.
+// Uses only the bad-shift table of Boyer-Moore and only uses it
+// for the character compared to the last character of the needle.
+template <typename SubjectChar, typename PatternChar>
+static int BoyerMooreHorspool(Vector<const SubjectChar> subject,
+                              Vector<const PatternChar> pattern,
+                              int start_index,
+                              bool* complete) {
+  ASSERT(algorithm <= BOYER_MOORE_HORSPOOL);
+  int n = subject.length();
+  int m = pattern.length();
+
+  // How badly we are doing without a good-suffix table.
+  int badness = -m;
+
+  int idx;  // No matches found prior to this index.
+  PatternChar last_char = pattern[m - 1];
+  int last_char_shift =
+      m - 1 - CharOccurrence<SubjectChar, PatternChar>(last_char);
+  // Perform the search.
+  for (idx = start_index; idx <= n - m;) {
+    int j = m - 1;
+    int c;
+    while (last_char != (c = subject[idx + j])) {
+      int bc_occ = CharOccurrence<SubjectChar, PatternChar>(c);
+      int shift = j - bc_occ;
+      idx += shift;
+      badness += 1 - shift;  // at most zero, so badness cannot increase.
+      if (idx > n - m) {
+        *complete = true;
+        return -1;
+      }
+    }
+    j--;
+    while (j >= 0 && pattern[j] == (subject[idx + j])) j--;
+    if (j < 0) {
+      *complete = true;
+      return idx;
+    } else {
+      idx += last_char_shift;
+      // Badness increases by the number of characters we have
+      // checked, and decreases by the number of characters we
+      // can skip by shifting. It's a measure of how we are doing
+      // compared to reading each character exactly once.
+      badness += (m - j) - last_char_shift;
+      if (badness > 0) {
+        *complete = false;
+        return idx;
+      }
+    }
+  }
+  *complete = true;
+  return -1;
+}
+
+
+template <typename SubjectChar, typename PatternChar>
+static int BoyerMooreIndexOf(Vector<const SubjectChar> subject,
+                             Vector<const PatternChar> pattern,
+                             int idx) {
+  ASSERT(algorithm <= BOYER_MOORE);
+  int n = subject.length();
+  int m = pattern.length();
+  // Only preprocess at most kBMMaxShift last characters of pattern.
+  int start = m < kBMMaxShift ? 0 : m - kBMMaxShift;
+
+  PatternChar last_char = pattern[m - 1];
+  // Continue search from idx.
+  while (idx <= n - m) {
+    int j = m - 1;
+    SubjectChar c;
+    while (last_char != (c = subject[idx + j])) {
+      int shift = j - CharOccurrence<SubjectChar, PatternChar>(c);
+      idx += shift;
+      if (idx > n - m) {
+        return -1;
+      }
+    }
+    while (j >= 0 && pattern[j] == (c = subject[idx + j])) j--;
+    if (j < 0) {
+      return idx;
+    } else if (j < start) {
+      // We have matched more than our tables allow us to be smart about.
+      // Fall back on the BMH shift.
+      idx += m - 1 - CharOccurrence<SubjectChar, PatternChar>(last_char);
+    } else {
+      int gs_shift = BMBuffers::bmgs_buffers.shift(j + 1);
+      int bc_occ = CharOccurrence<SubjectChar, PatternChar>(c);
+      int shift = j - bc_occ;
+      if (gs_shift > shift) {
+        shift = gs_shift;
+      }
+      idx += shift;
+    }
+  }
+
+  return -1;
+}
+
+
+// Trivial string search for shorter strings.
+// On return, if "complete" is set to true, the return value is the
+// final result of searching for the pattern in the subject.
+// If "complete" is set to false, the return value is the index where
+// further checking should start, i.e., it's guaranteed that the pattern
+// does not occur at a position prior to the returned index.
+template <typename PatternChar, typename SubjectChar>
+static int SimpleIndexOf(Vector<const SubjectChar> subject,
+                         Vector<const PatternChar> pattern,
+                         int idx,
+                         bool* complete) {
+  ASSERT(pattern.length() > 1);
+  int pattern_length = pattern.length();
+  // Badness is a count of how much work we have done.  When we have
+  // done enough work we decide it's probably worth switching to a better
+  // algorithm.
+  int badness = -10 - (pattern_length << 2);
+
+  // We know our pattern is at least 2 characters, we cache the first so
+  // the common case of the first character not matching is faster.
+  PatternChar pattern_first_char = pattern[0];
+  for (int i = idx, n = subject.length() - pattern_length; i <= n; i++) {
+    badness++;
+    if (badness > 0) {
+      *complete = false;
+      return i;
+    }
+    if (sizeof(SubjectChar) == 1 && sizeof(PatternChar) == 1) {
+      const SubjectChar* pos = reinterpret_cast<const SubjectChar*>(
+          memchr(subject.start() + i,
+                 pattern_first_char,
+                 n - i + 1));
+      if (pos == NULL) {
+        *complete = true;
+        return -1;
+      }
+      i = static_cast<int>(pos - subject.start());
+    } else {
+      if (subject[i] != pattern_first_char) continue;
+    }
+    int j = 1;
+    do {
+      if (pattern[j] != subject[i+j]) {
+        break;
+      }
+      j++;
+    } while (j < pattern_length);
+    if (j == pattern_length) {
+      *complete = true;
+      return i;
+    }
+    badness += j;
+  }
+  *complete = true;
+  return -1;
+}
+
+// Simple indexOf that never bails out. For short patterns only.
+template <typename PatternChar, typename SubjectChar>
+static int SimpleIndexOf(Vector<const SubjectChar> subject,
+                         Vector<const PatternChar> pattern,
+                         int idx) {
+  int pattern_length = pattern.length();
+  PatternChar pattern_first_char = pattern[0];
+  for (int i = idx, n = subject.length() - pattern_length; i <= n; i++) {
+    if (sizeof(SubjectChar) == 1 && sizeof(PatternChar) == 1) {
+      const SubjectChar* pos = reinterpret_cast<const SubjectChar*>(
+          memchr(subject.start() + i,
+                 pattern_first_char,
+                 n - i + 1));
+      if (pos == NULL) return -1;
+      i = static_cast<int>(pos - subject.start());
+    } else {
+      if (subject[i] != pattern_first_char) continue;
+    }
+    int j = 1;
+    while (j < pattern_length) {
+      if (pattern[j] != subject[i+j]) {
+        break;
+      }
+      j++;
+    }
+    if (j == pattern_length) {
+      return i;
+    }
+  }
+  return -1;
+}
+
+
+// Strategy for searching for a string in another string.
+enum StringSearchStrategy { SEARCH_FAIL, SEARCH_SHORT, SEARCH_LONG };
+
+
+template <typename PatternChar>
+static inline StringSearchStrategy InitializeStringSearch(
+    Vector<const PatternChar> pat, bool ascii_subject) {
+  // If the haystack is ASCII and the needle type is two-byte, check
+  // whether the needle really contains a non-ASCII character and bail
+  // out if it does.
+  if (ascii_subject && sizeof(PatternChar) > 1) {
+    for (int i = 0; i < pat.length(); i++) {
+      uc16 c = pat[i];
+      if (c > String::kMaxAsciiCharCode) {
+        return SEARCH_FAIL;
+      }
+    }
+  }
+  if (pat.length() < kBMMinPatternLength) {
+    return SEARCH_SHORT;
+  }
+  algorithm = SIMPLE_SEARCH;
+  return SEARCH_LONG;
+}
+
+
+// Dispatch long needle searches to different algorithms.
+template <typename SubjectChar, typename PatternChar>
+static int ComplexIndexOf(Vector<const SubjectChar> sub,
+                          Vector<const PatternChar> pat,
+                          int start_index) {
+  ASSERT(pat.length() >= kBMMinPatternLength);
+  // Try algorithms in order of increasing setup cost and expected performance.
+  bool complete;
+  int idx = start_index;
+  switch (algorithm) {
+    case SIMPLE_SEARCH:
+      idx = SimpleIndexOf(sub, pat, idx, &complete);
+      if (complete) return idx;
+      BoyerMoorePopulateBadCharTable(pat);
+      algorithm = BOYER_MOORE_HORSPOOL;
+      // FALLTHROUGH.
+    case BOYER_MOORE_HORSPOOL:
+      idx = BoyerMooreHorspool(sub, pat, idx, &complete);
+      if (complete) return idx;
+      // Build the Good Suffix table and continue searching.
+      BoyerMoorePopulateGoodSuffixTable(pat);
+      algorithm = BOYER_MOORE;
+      // FALLTHROUGH.
+    case BOYER_MOORE:
+      return BoyerMooreIndexOf(sub, pat, idx);
+  }
+  UNREACHABLE();
+  return -1;
+}
+
+
+// Dispatch to different search strategies for a single search.
+// If searching multiple times for the same needle, the search
+// strategy should be computed only once and then used to dispatch
+// directly to the matching loop.
+template <typename SubjectChar, typename PatternChar>
+static int StringSearch(Vector<const SubjectChar> sub,
+                        Vector<const PatternChar> pat,
+                        int start_index) {
+  bool ascii_subject = (sizeof(SubjectChar) == 1);
+  StringSearchStrategy strategy = InitializeStringSearch(pat, ascii_subject);
+  switch (strategy) {
+    case SEARCH_FAIL: return -1;
+    case SEARCH_SHORT: return SimpleIndexOf(sub, pat, start_index);
+    case SEARCH_LONG: return ComplexIndexOf(sub, pat, start_index);
+  }
+  UNREACHABLE();
+  return -1;
+}
+
+}}  // namespace v8::internal
+
+#endif  // V8_STRING_SEARCH_H_
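
A minimal usage sketch for the new header's templated entry point (the
string literals are illustrative; Vector is V8's internal pointer-plus-
length wrapper):

    const char* haystack = "hello, world";
    const char* needle = "world";
    int index = StringSearch(Vector<const char>(haystack, 12),
                             Vector<const char>(needle, 5),
                             0);  // start_index
    // index == 7; a result of -1 means no occurrence at or after
    // start_index.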
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 7a490d3..34989d37 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -1222,23 +1222,23 @@
 Object* CallStubCompiler::CompileCustomCall(int generator_id,
                                             Object* object,
                                             JSObject* holder,
+                                            JSGlobalPropertyCell* cell,
                                             JSFunction* function,
-                                            String* fname,
-                                            CheckType check) {
-    ASSERT(generator_id >= 0 && generator_id < kNumCallGenerators);
-    switch (generator_id) {
-#define CALL_GENERATOR_CASE(ignored1, ignored2, name)          \
-      case k##name##CallGenerator:                             \
-        return CallStubCompiler::Compile##name##Call(object,   \
-                                                     holder,   \
-                                                     function, \
-                                                     fname,    \
-                                                     check);
-      CUSTOM_CALL_IC_GENERATORS(CALL_GENERATOR_CASE)
+                                            String* fname) {
+  ASSERT(generator_id >= 0 && generator_id < kNumCallGenerators);
+  switch (generator_id) {
+#define CALL_GENERATOR_CASE(ignored1, ignored2, ignored3, name) \
+    case k##name##CallGenerator:                                \
+      return CallStubCompiler::Compile##name##Call(object,      \
+                                                   holder,      \
+                                                   cell,        \
+                                                   function,    \
+                                                   fname);
+    CUSTOM_CALL_IC_GENERATORS(CALL_GENERATOR_CASE)
 #undef CALL_GENERATOR_CASE
-    }
-    UNREACHABLE();
-    return Heap::undefined_value();
+  }
+  UNREACHABLE();
+  return Heap::undefined_value();
 }
 
 
diff --git a/src/stub-cache.h b/src/stub-cache.h
index bf14a4f..388bb52 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -612,21 +612,29 @@
 // Installation of custom call generators for the selected builtins is
 // handled by the bootstrapper.
 //
-// Each entry has a name of a global function (lowercased), a name of
-// a builtin function on its instance prototype (the one the generator
-// is set for), and a name of a generator itself (used to build ids
-// and generator function names).
-#define CUSTOM_CALL_IC_GENERATORS(V)      \
-  V(array, push, ArrayPush)               \
-  V(array, pop, ArrayPop)                 \
-  V(string, charCodeAt, StringCharCodeAt) \
-  V(string, charAt, StringCharAt)
+// Each entry has a name of a global function (lowercased), a flag
+// controlling whether the generator is set on the function itself or
+// on its instance prototype, a name of a builtin function on the
+// function or its instance prototype (the one the generator is set
+// for), and a name of a generator itself (used to build ids and
+// generator function names).
+#define CUSTOM_CALL_IC_GENERATORS(V)                          \
+  V(array, INSTANCE_PROTOTYPE, push, ArrayPush)               \
+  V(array, INSTANCE_PROTOTYPE, pop, ArrayPop)                 \
+  V(string, INSTANCE_PROTOTYPE, charCodeAt, StringCharCodeAt) \
+  V(string, INSTANCE_PROTOTYPE, charAt, StringCharAt)         \
+  V(string, FUNCTION, fromCharCode, StringFromCharCode)
 
 
 class CallStubCompiler: public StubCompiler {
  public:
+  enum CustomGeneratorOwner {
+    FUNCTION,
+    INSTANCE_PROTOTYPE
+  };
+
   enum {
-#define DECLARE_CALL_GENERATOR_ID(ignored1, ignored2, name) \
+#define DECLARE_CALL_GENERATOR_ID(ignored1, ignored2, ignored3, name) \
     k##name##CallGenerator,
     CUSTOM_CALL_IC_GENERATORS(DECLARE_CALL_GENERATOR_ID)
 #undef DECLARE_CALL_GENERATOR_ID
@@ -656,20 +664,21 @@
                             JSFunction* function,
                             String* name);
 
-  // Compiles a custom call constant IC using the generator with given id.
+  // Compiles a custom call constant/global IC using the generator
+  // with the given id. For constant calls, cell is NULL.
   Object* CompileCustomCall(int generator_id,
                             Object* object,
                             JSObject* holder,
+                            JSGlobalPropertyCell* cell,
                             JSFunction* function,
-                            String* name,
-                            CheckType check);
+                            String* name);
 
-#define DECLARE_CALL_GENERATOR(ignored1, ignored2, name) \
-  Object* Compile##name##Call(Object* object,            \
-                              JSObject* holder,          \
-                              JSFunction* function,      \
-                              String* fname,             \
-                              CheckType check);
+#define DECLARE_CALL_GENERATOR(ignored1, ignored2, ignored3, name) \
+  Object* Compile##name##Call(Object* object,                      \
+                              JSObject* holder,                    \
+                              JSGlobalPropertyCell* cell,          \
+                              JSFunction* function,                \
+                              String* fname);
   CUSTOM_CALL_IC_GENERATORS(DECLARE_CALL_GENERATOR)
 #undef DECLARE_CALL_GENERATOR
 
@@ -689,6 +698,17 @@
 
   void GenerateNameCheck(String* name, Label* miss);
 
+  void GenerateGlobalReceiverCheck(JSObject* object,
+                                   JSObject* holder,
+                                   String* name,
+                                   Label* miss);
+
+  // Generates code to load the function from the cell, checking that
+  // it still contains the same function.
+  void GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
+                                    JSFunction* function,
+                                    Label* miss);
+
   // Generates a jump to CallIC miss stub. Returns Failure if the jump cannot
   // be generated.
   Object* GenerateMissBranch();
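
In JavaScript terms, the new CustomGeneratorOwner flag distinguishes
builtins reached through an instance from builtins called on the
constructor itself. A sketch of the two call shapes covered by the
table above:

  // INSTANCE_PROTOTYPE entries: the generator is keyed to a method on
  // the instance prototype, e.g. String.prototype.charAt.
  "abc".charAt(1);            // receiver is a string instance
  [1, 2, 3].push(4);          // receiver is an array instance

  // FUNCTION entries: the generator is keyed to the function itself,
  // so the callee is loaded through a global property cell.
  String.fromCharCode(0x41);  // receiver is the String function
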
diff --git a/src/version.cc b/src/version.cc
index 0af1795..f105cbd 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     2
 #define MINOR_VERSION     4
-#define BUILD_NUMBER      2
+#define BUILD_NUMBER      3
 #define PATCH_LEVEL       0
 #define CANDIDATE_VERSION false
 
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 9ad94ce..9318bb8 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -829,6 +829,7 @@
 
 
 void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
+  WriteRecordedPositions();
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   // 1110 1000 #32-bit disp.
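
For context: flushing recorded positions before emitting the call
ensures the relocation information maps the call site to its source
position. A rough JavaScript view of the observable effect, assuming a
debugger is attached:

  function callee() { debugger; }  // breaking here should show the
  function caller() { callee(); }  // caller frame at the call
  caller();                        // expression, not past it
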
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index ccd0392..40e1e35 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -507,7 +507,7 @@
       int context_chain_length =
           scope()->ContextChainLength(slot->var()->scope());
       __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -568,20 +568,17 @@
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ movq(rbx,
-                  CodeGenerator::ContextOperand(rsi, Context::FCONTEXT_INDEX));
+          __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
           __ cmpq(rbx, rsi);
           __ Check(equal, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
           __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
-          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
-                  kScratchRegister);
+          __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
           // No write barrier since the hole value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
-                  result_register());
+          __ movq(ContextOperand(rsi, slot->index()), result_register());
           int offset = Context::SlotOffset(slot->index());
           __ movq(rbx, rsi);
           __ RecordWrite(rbx, offset, result_register(), rcx);
@@ -749,11 +746,10 @@
   __ bind(&done_convert);
   __ push(rax);
 
-  // TODO(kasperl): Check cache validity in generated code. This is a
-  // fast case for the JSObject::IsSimpleEnum cache validity
-  // checks. If we cannot guarantee cache validity, call the runtime
-  // system to check cache validity or get the property names in a
-  // fixed array.
+  // BUG(867): Check cache validity in generated code. This is a fast
+  // case for the JSObject::IsSimpleEnum cache validity checks. If we
+  // cannot guarantee cache validity, call the runtime system to check
+  // cache validity or get the property names in a fixed array.
 
   // Get the set of properties to enumerate.
   __ push(rax);  // Duplicate the enumerable object on the stack.
@@ -881,6 +877,152 @@
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register context = rsi;
+  Register temp = rdx;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
+                Immediate(0));
+        __ j(not_equal, slow);
+      }
+      // Load next context in chain.
+      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering rsi.
+      context = temp;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.  If we have reached an eval scope, we check
+    // all extensions from this point.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s != NULL && s->is_eval_scope()) {
+    // Loop up the context chain.  There is no frame effect so it is
+    // safe to use raw labels here.
+    Label next, fast;
+    if (!context.is(temp)) {
+      __ movq(temp, context);
+    }
+    // Load map for comparison into register, outside loop.
+    __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
+    __ bind(&next);
+    // Terminate at global context.
+    __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
+    __ j(equal, &fast);
+    // Check that extension is NULL.
+    __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
+    __ j(not_equal, slow);
+    // Load next context in chain.
+    __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
+    __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
+    __ jmp(&next);
+    __ bind(&fast);
+  }
+
+  // All extension objects were empty and it is safe to use a global
+  // load IC call.
+  __ movq(rax, CodeGenerator::GlobalObject());
+  __ Move(rcx, slot->var()->name());
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  __ call(ic, mode);
+}
+
+
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
+    Slot* slot,
+    Label* slow) {
+  ASSERT(slot->type() == Slot::CONTEXT);
+  Register context = rsi;
+  Register temp = rbx;
+
+  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
+                Immediate(0));
+        __ j(not_equal, slow);
+      }
+      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering rsi.
+      context = temp;
+    }
+  }
+  // Check that last extension is NULL.
+  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
+  __ j(not_equal, slow);
+  __ movq(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
+  return ContextOperand(temp, slot->index());
+}
+
+
+void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow,
+    Label* done) {
+  // Generate fast-case code for variables that might be shadowed by
+  // eval-introduced variables.  Eval is used a lot without
+  // introducing variables.  In those cases, we do not want to
+  // perform a runtime call for all variables in the scope
+  // containing the eval.
+  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+    __ jmp(done);
+  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
+    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
+    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
+    if (potential_slot != NULL) {
+      // Generate fast case for locals that rewrite to slots.
+      __ movq(rax,
+              ContextSlotOperandCheckExtensions(potential_slot, slow));
+      if (potential_slot->var()->mode() == Variable::CONST) {
+        __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+        __ j(not_equal, done);
+        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+      }
+      __ jmp(done);
+    } else if (rewrite != NULL) {
+      // Generate fast case for calls of an argument function.
+      Property* property = rewrite->AsProperty();
+      if (property != NULL) {
+        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+        Literal* key_literal = property->key()->AsLiteral();
+        if (obj_proxy != NULL &&
+            key_literal != NULL &&
+            obj_proxy->IsArguments() &&
+            key_literal->handle()->IsSmi()) {
+          // Load the arguments object if there are no eval-introduced
+          // variables. Then load the argument from the arguments
+          // object using a keyed load.
+          __ movq(rdx,
+                  ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
+                                                    slow));
+          __ Move(rax, key_literal->handle());
+          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          __ call(ic, RelocInfo::CODE_TARGET);
+          __ jmp(done);
+        }
+      }
+    }
+  }
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -904,10 +1046,19 @@
     Apply(context, rax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ push(rsi);  // Context.
     __ Push(var->name());
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
+    __ bind(&done);
+
     Apply(context, rax);
 
   } else if (slot != NULL) {
@@ -1713,15 +1864,42 @@
     EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
   } else if (var != NULL && var->slot() != NULL &&
              var->slot()->type() == Slot::LOOKUP) {
-    // Call to a lookup slot (dynamically introduced variable).  Call
-    // the runtime to find the function to call (returned in rax) and
-    // the object holding it (returned in rdx).
+    // Call to a lookup slot (dynamically introduced variable).
+    Label slow, done;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    EmitDynamicLoadFromSlotFastCase(var->slot(),
+                                    NOT_INSIDE_TYPEOF,
+                                    &slow,
+                                    &done);
+
+    __ bind(&slow);
+    // Call the runtime to find the function to call (returned in rax)
+    // and the object holding it (returned in rdx).
     __ push(context_register());
     __ Push(var->name());
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     __ push(rax);  // Function.
     __ push(rdx);  // Receiver.
+
+    // If fast case code has been generated, emit code to push the
+    // function and receiver and have the slow path jump around this
+    // code.
+    if (done.is_linked()) {
+      Label call;
+      __ jmp(&call);
+      __ bind(&done);
+      // Push function.
+      __ push(rax);
+      // Push global receiver.
+      __ movq(rbx, CodeGenerator::GlobalObject());
+      __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
+      __ bind(&call);
+    }
+
     EmitCallWithStub(expr);
+
   } else if (fun->AsProperty() != NULL) {
     // Call to an object property.
     Property* prop = fun->AsProperty();
@@ -2522,12 +2700,11 @@
   Register key = rax;
   Register cache = rbx;
   Register tmp = rcx;
-  __ movq(cache, CodeGenerator::ContextOperand(rsi, Context::GLOBAL_INDEX));
+  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
   __ movq(cache,
           FieldOperand(cache, GlobalObject::kGlobalContextOffset));
   __ movq(cache,
-          CodeGenerator::ContextOperand(
-              cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ movq(cache,
           FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
 
@@ -3001,9 +3178,19 @@
   } else if (proxy != NULL &&
              proxy->var()->slot() != NULL &&
              proxy->var()->slot()->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate code for loading from variables potentially shadowed
+    // by eval-introduced variables.
+    Slot* slot = proxy->var()->slot();
+    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+
+    __ bind(&slow);
     __ push(rsi);
     __ Push(proxy->name());
     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+    __ bind(&done);
+
     if (where == kStack) __ push(rax);
   } else {
     // This expression cannot throw a reference error at the top level.
@@ -3243,7 +3430,7 @@
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ movq(dst, CodeGenerator::ContextOperand(rsi, context_index));
+  __ movq(dst, ContextOperand(rsi, context_index));
 }
 
 
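
The fast case generated above matters for code like the following
minimal sketch, where the mere presence of eval forces x into a LOOKUP
slot even though, at run time, no shadowing variable is ever
introduced:

  var x = 42;
  function f() {
    eval("0");   // every name in f is now potentially eval-shadowed
    return x;    // DYNAMIC_GLOBAL: the fast path issues a global load
                 // IC call and only falls back to the runtime's
                 // LoadContextSlot if a context extension shows up
  }
  assertEquals(42, f());
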
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index f500ce6..765a90c 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -821,6 +821,59 @@
 }
 
 
+void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
+                                                   JSObject* holder,
+                                                   String* name,
+                                                   Label* miss) {
+  ASSERT(holder->IsGlobalObject());
+
+  // Get the number of arguments.
+  const int argc = arguments().immediate();
+
+  // Get the receiver from the stack.
+  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+  // If the object is the holder then we know that it's a global
+  // object which can only happen for contextual calls. In this case,
+  // the receiver cannot be a smi.
+  if (object != holder) {
+    __ JumpIfSmi(rdx, miss);
+  }
+
+  // Check that the maps haven't changed.
+  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
+}
+
+
+void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
+                                                    JSFunction* function,
+                                                    Label* miss) {
+  // Get the value from the cell.
+  __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
+  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
+
+  // Check that the cell contains the same function.
+  if (Heap::InNewSpace(function)) {
+    // We can't embed a pointer to a function in new space so we have
+    // to verify that the shared function info is unchanged. This has
+    // the nice side effect that multiple closures based on the same
+    // function can all use this call IC. Before we load through the
+    // function, we have to verify that it still is a function.
+    __ JumpIfSmi(rdi, miss);
+    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
+    __ j(not_equal, miss);
+
+    // Check the shared function info. Make sure it hasn't changed.
+    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
+    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
+    __ j(not_equal, miss);
+  } else {
+    __ Cmp(rdi, Handle<JSFunction>(function));
+    __ j(not_equal, miss);
+  }
+}
+
+
 Object* CallStubCompiler::GenerateMissBranch() {
   Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
   if (obj->IsFailure()) return obj;
@@ -847,12 +900,10 @@
   SharedFunctionInfo* function_info = function->shared();
   if (function_info->HasCustomCallGenerator()) {
     const int id = function_info->custom_call_generator_id();
-    Object* result =
-        CompileCustomCall(id, object, holder, function, name, check);
+    Object* result = CompileCustomCall(
+        id, object, holder, NULL, function, name);
     // undefined means bail out to regular compiler.
-    if (!result->IsUndefined()) {
-      return result;
-    }
+    if (!result->IsUndefined()) return result;
   }
 
   Label miss_in_smi_check;
@@ -1043,9 +1094,9 @@
 
 Object* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                JSObject* holder,
+                                               JSGlobalPropertyCell* cell,
                                                JSFunction* function,
-                                               String* name,
-                                               CheckType check) {
+                                               String* name) {
   // ----------- S t a t e -------------
   //  -- rcx                 : name
   //  -- rsp[0]              : return address
@@ -1053,12 +1104,9 @@
   //  -- ...
   //  -- rsp[(argc + 1) * 8] : receiver
   // -----------------------------------
-  ASSERT(check == RECEIVER_MAP_CHECK);
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss;
 
@@ -1204,9 +1252,9 @@
 
 Object* CallStubCompiler::CompileArrayPopCall(Object* object,
                                               JSObject* holder,
+                                              JSGlobalPropertyCell* cell,
                                               JSFunction* function,
-                                              String* name,
-                                              CheckType check) {
+                                              String* name) {
   // ----------- S t a t e -------------
   //  -- rcx                 : name
   //  -- rsp[0]              : return address
@@ -1214,12 +1262,9 @@
   //  -- ...
   //  -- rsp[(argc + 1) * 8] : receiver
   // -----------------------------------
-  ASSERT(check == RECEIVER_MAP_CHECK);
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray()) {
-    return Heap::undefined_value();
-  }
+  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
 
   Label miss, return_undefined, call_builtin;
 
@@ -1289,9 +1334,9 @@
 
 Object* CallStubCompiler::CompileStringCharAtCall(Object* object,
                                                   JSObject* holder,
+                                                  JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
-                                                  String* name,
-                                                  CheckType check) {
+                                                  String* name) {
   // ----------- S t a t e -------------
   //  -- rcx                 : function name
   //  -- rsp[0]              : return address
@@ -1301,7 +1346,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1358,11 +1403,12 @@
 }
 
 
-Object* CallStubCompiler::CompileStringCharCodeAtCall(Object* object,
-                                                      JSObject* holder,
-                                                      JSFunction* function,
-                                                      String* name,
-                                                      CheckType check) {
+Object* CallStubCompiler::CompileStringCharCodeAtCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
   // ----------- S t a t e -------------
   //  -- rcx                 : function name
   //  -- rsp[0]              : return address
@@ -1372,7 +1418,7 @@
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString()) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1426,6 +1472,75 @@
 }
 
 
+Object* CallStubCompiler::CompileStringFromCharCodeCall(
+    Object* object,
+    JSObject* holder,
+    JSGlobalPropertyCell* cell,
+    JSFunction* function,
+    String* name) {
+  // ----------- S t a t e -------------
+  //  -- rcx                 : function name
+  //  -- rsp[0]              : return address
+  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
+  //  -- ...
+  //  -- rsp[(argc + 1) * 8] : receiver
+  // -----------------------------------
+
+  const int argc = arguments().immediate();
+
+  // If the object is not a JSObject or we got an unexpected number of
+  // arguments, bail out to the regular call.
+  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+
+  Label miss;
+  GenerateNameCheck(name, &miss);
+
+  if (cell == NULL) {
+    __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+
+    __ JumpIfSmi(rdx, &miss);
+
+    CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
+                    &miss);
+  } else {
+    ASSERT(cell->value() == function);
+    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+    GenerateLoadFunctionFromCell(cell, function, &miss);
+  }
+
+  // Load the char code argument.
+  Register code = rbx;
+  __ movq(code, Operand(rsp, 1 * kPointerSize));
+
+  // Check that the code is a smi.
+  Label slow;
+  __ JumpIfNotSmi(code, &slow);
+
+  // Convert the smi code to uint16.
+  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
+
+  StringCharFromCodeGenerator char_from_code_generator(code, rax);
+  char_from_code_generator.GenerateFast(masm());
+  __ ret(2 * kPointerSize);
+
+  ICRuntimeCallHelper call_helper;
+  char_from_code_generator.GenerateSlow(masm(), call_helper);
+
+  // Tail call the full function. We do not have to patch the receiver
+  // because the function makes no use of it.
+  __ bind(&slow);
+  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+  __ bind(&miss);
+  // rcx: function name.
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
+
+  // Return the generated code.
+  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+}
+
+
 Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                  JSObject* holder,
                                                  String* name) {
@@ -1498,7 +1613,6 @@
                                             JSFunction* function,
                                             String* name) {
   // ----------- S t a t e -------------
-  // -----------------------------------
   // rcx                 : function name
   // rsp[0]              : return address
   // rsp[8]              : argument argc
@@ -1506,6 +1620,17 @@
   // ...
   // rsp[argc * 8]       : argument 1
   // rsp[(argc + 1) * 8] : argument 0 = receiver
+  // -----------------------------------
+
+  SharedFunctionInfo* function_info = function->shared();
+  if (function_info->HasCustomCallGenerator()) {
+    const int id = function_info->custom_call_generator_id();
+    Object* result = CompileCustomCall(
+        id, object, holder, cell, function, name);
+    // undefined means bail out to regular compiler.
+    if (!result->IsUndefined()) return result;
+  }
+
   Label miss;
 
   GenerateNameCheck(name, &miss);
@@ -1513,42 +1638,9 @@
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  // Get the receiver from the stack.
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  GenerateGlobalReceiverCheck(object, holder, name, &miss);
 
-  // If the object is the holder then we know that it's a global
-  // object which can only happen for contextual calls. In this case,
-  // the receiver cannot be a smi.
-  if (object != holder) {
-    __ JumpIfSmi(rdx, &miss);
-  }
-
-  // Check that the maps haven't changed.
-  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, &miss);
-
-  // Get the value from the cell.
-  __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
-  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
-
-  // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
-    // We can't embed a pointer to a function in new space so we have
-    // to verify that the shared function info is unchanged. This has
-    // the nice side effect that multiple closures based on the same
-    // function can all use this call IC. Before we load through the
-    // function, we have to verify that it still is a function.
-    __ JumpIfSmi(rdi, &miss);
-    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
-    __ j(not_equal, &miss);
-
-    // Check the shared function info. Make sure it hasn't changed.
-    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
-    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
-    __ j(not_equal, &miss);
-  } else {
-    __ Cmp(rdi, Handle<JSFunction>(function));
-    __ j(not_equal, &miss);
-  }
+  GenerateLoadFunctionFromCell(cell, function, &miss);
 
   // Patch the receiver on the stack with the global proxy.
   if (object->IsGlobalObject()) {
diff --git a/test/mjsunit/array-indexing.js b/test/mjsunit/array-indexing.js
index 2322c54..7276742 100644
--- a/test/mjsunit/array-indexing.js
+++ b/test/mjsunit/array-indexing.js
@@ -26,41 +26,161 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 var array = [1,2,3,1,2,3,1,2,3,1,2,3];
+var undef_array = [0,,2,undefined,4,,6,undefined,8,,10];
+// Sparse array with length 42000.
+var sparse_array = [];
+sparse_array[100] = 3;
+sparse_array[200] = undefined;
+sparse_array[300] = 4;
+sparse_array[400] = 5;
+sparse_array[500] = 6;
+sparse_array[600] = 5;
+sparse_array[700] = 4;
+sparse_array[800] = undefined;
+sparse_array[900] = 3;
+sparse_array[41999] = "filler";
+
+var dense_object = { 0: 42, 1: 37, length: 2 };
+var sparse_object = { 0: 42, 100000: 37, length: 200000 };
+var funky_object = { 10: 42, 100000: 42, 100001: 37, length: 50000 };
+var infinite_object = { 10: 42, 100000: 37, length: Infinity };
 
 // ----------------------------------------------------------------------
 // Array.prototype.indexOf.
 // ----------------------------------------------------------------------
 
 // Negative cases.
-assertEquals([].indexOf(1), -1);
-assertEquals(array.indexOf(4), -1);
-assertEquals(array.indexOf(3, array.length), -1);
+assertEquals(-1, [].indexOf(1));
+assertEquals(-1, array.indexOf(4));
+assertEquals(-1, array.indexOf(3, array.length));
 
-assertEquals(array.indexOf(3), 2);
+assertEquals(2, array.indexOf(3));
 // Negative index out of range.
-assertEquals(array.indexOf(1, -17), 0);
+assertEquals(0, array.indexOf(1, -17));
+// Negative index in range.
-assertEquals(array.indexOf(1, -11), 3);
+assertEquals(3, array.indexOf(1, -11));
 // Index in range.
-assertEquals(array.indexOf(1, 1), 3);
-assertEquals(array.indexOf(1, 3), 3);
-assertEquals(array.indexOf(1, 4), 6);
+assertEquals(3, array.indexOf(1, 1));
+assertEquals(3, array.indexOf(1, 3));
+assertEquals(6, array.indexOf(1, 4));
+
+// Find undefined, not holes.
+assertEquals(3, undef_array.indexOf(undefined));
+assertEquals(3, undef_array.indexOf(undefined, 3));
+assertEquals(7, undef_array.indexOf(undefined, 4));
+assertEquals(7, undef_array.indexOf(undefined, 7));
+assertEquals(-1, undef_array.indexOf(undefined, 8));
+assertEquals(3, undef_array.indexOf(undefined, -11));
+assertEquals(3, undef_array.indexOf(undefined, -8));
+assertEquals(7, undef_array.indexOf(undefined, -7));
+assertEquals(7, undef_array.indexOf(undefined, -4));
+assertEquals(-1, undef_array.indexOf(undefined, -3));
+
+// Find in sparse array.
+assertEquals(100, sparse_array.indexOf(3));
+assertEquals(900, sparse_array.indexOf(3, 101));
+assertEquals(-1, sparse_array.indexOf(3, 901));
+assertEquals(100, sparse_array.indexOf(3, -42000));
+assertEquals(900, sparse_array.indexOf(3, 101 - 42000));
+assertEquals(-1, sparse_array.indexOf(3, 901 - 42000));
+
+assertEquals(300, sparse_array.indexOf(4));
+assertEquals(700, sparse_array.indexOf(4, 301));
+assertEquals(-1, sparse_array.indexOf(4, 701));
+assertEquals(300, sparse_array.indexOf(4, -42000));
+assertEquals(700, sparse_array.indexOf(4, 301 - 42000));
+assertEquals(-1, sparse_array.indexOf(4, 701 - 42000));
+
+assertEquals(200, sparse_array.indexOf(undefined));
+assertEquals(800, sparse_array.indexOf(undefined, 201));
+assertEquals(-1, sparse_array.indexOf(undefined, 801));
+assertEquals(200, sparse_array.indexOf(undefined, -42000));
+assertEquals(800, sparse_array.indexOf(undefined, 201 - 42000));
+assertEquals(-1, sparse_array.indexOf(undefined, 801 - 42000));
+
+// Find in non-arrays.
+assertEquals(0, Array.prototype.indexOf.call(dense_object, 42));
+assertEquals(1, Array.prototype.indexOf.call(dense_object, 37));
+assertEquals(-1, Array.prototype.indexOf.call(dense_object, 87));
+
+assertEquals(0, Array.prototype.indexOf.call(sparse_object, 42));
+assertEquals(100000, Array.prototype.indexOf.call(sparse_object, 37));
+assertEquals(-1, Array.prototype.indexOf.call(sparse_object, 87));
+
+assertEquals(10, Array.prototype.indexOf.call(funky_object, 42));
+assertEquals(-1, Array.prototype.indexOf.call(funky_object, 42, 15));
+assertEquals(-1, Array.prototype.indexOf.call(funky_object, 37));
+
+assertEquals(-1, Array.prototype.indexOf.call(infinite_object, 42));
 
 // ----------------------------------------------------------------------
 // Array.prototype.lastIndexOf.
 // ----------------------------------------------------------------------
 
 // Negative cases.
-assertEquals([].lastIndexOf(1), -1);
-assertEquals(array.lastIndexOf(1, -17), -1);
+assertEquals(-1, [].lastIndexOf(1));
+assertEquals(-1, array.lastIndexOf(1, -17));
 
-assertEquals(array.lastIndexOf(1), 9);
+assertEquals(9, array.lastIndexOf(1));
 // Index out of range.
-assertEquals(array.lastIndexOf(1, array.length), 9);
+assertEquals(9, array.lastIndexOf(1, array.length));
 // Index in range.
-assertEquals(array.lastIndexOf(1, 2), 0);
-assertEquals(array.lastIndexOf(1, 4), 3);
-assertEquals(array.lastIndexOf(1, 3), 3);
+assertEquals(0, array.lastIndexOf(1, 2));
+assertEquals(3, array.lastIndexOf(1, 4));
+assertEquals(3, array.lastIndexOf(1, 3));
 // Negative index in range.
-assertEquals(array.lastIndexOf(1, -11), 0);
+assertEquals(0, array.lastIndexOf(1, -11));
 
+// Find undefined, not holes.
+assertEquals(7, undef_array.lastIndexOf(undefined));
+assertEquals(-1, undef_array.lastIndexOf(undefined, 2));
+assertEquals(3, undef_array.lastIndexOf(undefined, 3));
+assertEquals(3, undef_array.lastIndexOf(undefined, 6));
+assertEquals(7, undef_array.lastIndexOf(undefined, 7));
+assertEquals(7, undef_array.lastIndexOf(undefined, -1));
+assertEquals(-1, undef_array.lastIndexOf(undefined, -9));
+assertEquals(3, undef_array.lastIndexOf(undefined, -8));
+assertEquals(3, undef_array.lastIndexOf(undefined, -5));
+assertEquals(7, undef_array.lastIndexOf(undefined, -4));
+
+// Find in sparse array.
+assertEquals(900, sparse_array.lastIndexOf(3));
+assertEquals(100, sparse_array.lastIndexOf(3, 899));
+assertEquals(-1, sparse_array.lastIndexOf(3, 99));
+assertEquals(900, sparse_array.lastIndexOf(3, -1));
+assertEquals(100, sparse_array.lastIndexOf(3, 899 - 42000));
+assertEquals(-1, sparse_array.lastIndexOf(3, 99 - 42000));
+
+assertEquals(700, sparse_array.lastIndexOf(4));
+assertEquals(300, sparse_array.lastIndexOf(4, 699));
+assertEquals(-1, sparse_array.lastIndexOf(4, 299));
+assertEquals(700, sparse_array.lastIndexOf(4, -1));
+assertEquals(300, sparse_array.lastIndexOf(4, 699 - 42000));
+assertEquals(-1, sparse_array.lastIndexOf(4, 299 - 42000));
+
+assertEquals(800, sparse_array.lastIndexOf(undefined));
+assertEquals(200, sparse_array.lastIndexOf(undefined, 799));
+assertEquals(-1, sparse_array.lastIndexOf(undefined, 199));
+assertEquals(800, sparse_array.lastIndexOf(undefined, -1));
+assertEquals(200, sparse_array.lastIndexOf(undefined, 799 - 42000));
+assertEquals(-1, sparse_array.lastIndexOf(undefined, 199 - 42000));
+
+// Find in non-arrays.
+assertEquals(0, Array.prototype.lastIndexOf.call(dense_object, 42));
+assertEquals(1, Array.prototype.lastIndexOf.call(dense_object, 37));
+assertEquals(-1, Array.prototype.lastIndexOf.call(dense_object, 87));
+
+assertEquals(0, Array.prototype.lastIndexOf.call(sparse_object, 42));
+assertEquals(100000, Array.prototype.lastIndexOf.call(sparse_object, 37));
+assertEquals(-1, Array.prototype.lastIndexOf.call(sparse_object, 87));
+
+assertEquals(10, Array.prototype.lastIndexOf.call(funky_object, 42, 15));
+assertEquals(10, Array.prototype.lastIndexOf.call(funky_object, 42));
+assertEquals(-1, Array.prototype.lastIndexOf.call(funky_object, 37));
+
+assertEquals(-1, Array.prototype.lastIndexOf.call(infinite_object, 42));
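
The hole-versus-undefined cases above come down to the ES5 HasProperty
check in the indexOf algorithm (15.4.4.14): holes are skipped, while a
stored undefined is a real element:

  assertEquals(3, [0, , 2, undefined].indexOf(undefined));  // element found
  assertEquals(-1, [0, , 2].indexOf(undefined));            // hole skipped
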
diff --git a/test/mjsunit/regress/regress-857.js b/test/mjsunit/regress/regress-857.js
new file mode 100644
index 0000000..183248d
--- /dev/null
+++ b/test/mjsunit/regress/regress-857.js
@@ -0,0 +1,37 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Make sure ES5 15.9.1.15 (ISO 8601 / RFC 3339) time zone offsets of
+// the form "+09:00" and "-09:00" are parsed as expected.
+assertEquals(1283326536000, Date.parse("2010-08-31T22:35:36-09:00"));
+assertEquals(1283261736000, Date.parse("2010-08-31T22:35:36+09:00"));
+assertEquals(1283326536000, Date.parse("2010-08-31T22:35:36.0-09:00"));
+assertEquals(1283261736000, Date.parse("2010-08-31T22:35:36.0+09:00"));
+// Colon-less time zone offsets are not conformant with ES5 15.9.1.15
+// but are nonetheless supported by V8.
+assertEquals(1283326536000, Date.parse("2010-08-31T22:35:36-0900"));
+assertEquals(1283261736000, Date.parse("2010-08-31T22:35:36+0900"));
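
The expected values follow directly from the sign convention: an offset
of -09:00 means the local time is nine hours behind UTC, so the epoch
value is the UTC parse plus nine hours (and symmetrically for +09:00):

  var utc = Date.parse("2010-08-31T22:35:36Z");  // 1283294136000
  assertEquals(utc + 9 * 3600 * 1000,
               Date.parse("2010-08-31T22:35:36-09:00"));
  assertEquals(utc - 9 * 3600 * 1000,
               Date.parse("2010-08-31T22:35:36+09:00"));
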
diff --git a/test/mjsunit/string-fromcharcode.js b/test/mjsunit/string-fromcharcode.js
new file mode 100644
index 0000000..7a2db5f
--- /dev/null
+++ b/test/mjsunit/string-fromcharcode.js
@@ -0,0 +1,89 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test String.fromCharCode.
+
+
+// Test various receivers and arguments passed to String.fromCharCode.
+
+Object.prototype.fromCharCode = function(x) { return this; };
+
+var fcc = String.fromCharCode;
+var fcc2 = fcc;
+
+function constFun(x) { return function(y) { return x; }; }
+
+function test(num) {
+  assertEquals(" ", String.fromCharCode(0x20));
+  assertEquals(" ", String.fromCharCode(0x20 + 0x10000));
+  assertEquals(" ", String.fromCharCode(0x20 - 0x10000));
+  assertEquals(" ", String.fromCharCode(0x20 + 0.5));
+
+  assertEquals("\u1234", String.fromCharCode(0x1234));
+  assertEquals("\u1234", String.fromCharCode(0x1234 + 0x10000));
+  assertEquals("\u1234", String.fromCharCode(0x1234 - 0x10000));
+  assertEquals("\u1234", String.fromCharCode(0x1234 + 0.5));
+
+  assertEquals("  ", String.fromCharCode(0x20, 0x20));
+  assertEquals("  ", String.fromCharCode(0x20 + 0.5, 0x20));
+
+  assertEquals(" ", fcc(0x20));
+  assertEquals(" ", fcc(0x20 + 0x10000));
+  assertEquals(" ", fcc(0x20 - 0x10000));
+  assertEquals(" ", fcc(0x20 + 0.5));
+
+  assertEquals("\u1234", fcc(0x1234));
+  assertEquals("\u1234", fcc(0x1234 + 0x10000));
+  assertEquals("\u1234", fcc(0x1234 - 0x10000));
+  assertEquals("\u1234", fcc(0x1234 + 0.5));
+
+  assertEquals("  ", fcc(0x20, 0x20));
+  assertEquals("  ", fcc(0x20 + 0.5, 0x20));
+
+  var receiver = (num < 5) ? String : (num < 9) ? "dummy" : 42;
+  fcc2 = (num < 5) ? fcc : (num < 9) ? constFun("dummy") : constFun(42);
+  var expected = (num < 5) ? " " : (num < 9) ? "dummy" : 42;
+  assertEquals(expected, receiver.fromCharCode(0x20));
+  assertEquals(expected, receiver.fromCharCode(0x20 - 0x10000));
+  assertEquals(expected, receiver.fromCharCode(0x20 + 0.5));
+  assertEquals(expected, fcc2(0x20));
+  assertEquals(expected, fcc2(0x20 - 0x10000));
+  assertEquals(expected, fcc2(0x20 + 0.5));
+}
+
+// Use loop to test the custom IC.
+for (var i = 0; i < 10; i++) {
+  test(i);
+}
+
+
+// Test that the custom IC works correctly when the map changes.
+for (var i = 0; i < 10; i++) {
+  var expected = (i < 5) ? " " : 42;
+  if (i == 5) String.fromCharCode = function() { return 42; };
+  assertEquals(expected, String.fromCharCode(0x20));
+}