Diffstat (limited to 'src/ia32/lithium-ia32.cc')
-rw-r--r--  src/ia32/lithium-ia32.cc  2128
1 file changed, 2128 insertions, 0 deletions
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
new file mode 100644
index 00000000..3b272d0b
--- /dev/null
+++ b/src/ia32/lithium-ia32.cc
@@ -0,0 +1,2128 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "ia32/lithium-ia32.h"
+#include "ia32/lithium-codegen-ia32.h"
+
+namespace v8 {
+namespace internal {
+
+#define DEFINE_COMPILE(type) \
+ void L##type::CompileToNative(LCodeGen* generator) { \
+ generator->Do##type(this); \
+ }
+LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
+#undef DEFINE_COMPILE
+
+LOsrEntry::LOsrEntry() {
+ for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
+ register_spills_[i] = NULL;
+ }
+ for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
+ double_register_spills_[i] = NULL;
+ }
+}
+
+
+void LOsrEntry::MarkSpilledRegister(int allocation_index,
+ LOperand* spill_operand) {
+ ASSERT(spill_operand->IsStackSlot());
+ ASSERT(register_spills_[allocation_index] == NULL);
+ register_spills_[allocation_index] = spill_operand;
+}
+
+
+void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
+ LOperand* spill_operand) {
+ ASSERT(spill_operand->IsDoubleStackSlot());
+ ASSERT(double_register_spills_[allocation_index] == NULL);
+ double_register_spills_[allocation_index] = spill_operand;
+}
+
+
+void LInstruction::PrintTo(StringStream* stream) const {
+ stream->Add("%s ", this->Mnemonic());
+ if (HasResult()) {
+ result()->PrintTo(stream);
+ stream->Add(" ");
+ }
+ PrintDataTo(stream);
+
+ if (HasEnvironment()) {
+ stream->Add(" ");
+ environment()->PrintTo(stream);
+ }
+
+ if (HasPointerMap()) {
+ stream->Add(" ");
+ pointer_map()->PrintTo(stream);
+ }
+}
+
+
+void LLabel::PrintDataTo(StringStream* stream) const {
+ LGap::PrintDataTo(stream);
+ LLabel* rep = replacement();
+ if (rep != NULL) {
+ stream->Add(" Dead block replaced with B%d", rep->block_id());
+ }
+}
+
+
+bool LParallelMove::IsRedundant() const {
+ for (int i = 0; i < move_operands_.length(); ++i) {
+ if (!move_operands_[i].IsRedundant()) return false;
+ }
+ return true;
+}
+
+
+void LParallelMove::PrintDataTo(StringStream* stream) const {
+ for (int i = move_operands_.length() - 1; i >= 0; --i) {
+ if (!move_operands_[i].IsEliminated()) {
+ LOperand* from = move_operands_[i].from();
+ LOperand* to = move_operands_[i].to();
+ if (from->Equals(to)) {
+ to->PrintTo(stream);
+ } else {
+ to->PrintTo(stream);
+ stream->Add(" = ");
+ from->PrintTo(stream);
+ }
+ stream->Add("; ");
+ }
+ }
+}
+
+
+bool LGap::IsRedundant() const {
+ for (int i = 0; i < 4; i++) {
+ if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+
+void LGap::PrintDataTo(StringStream* stream) const {
+ for (int i = 0; i < 4; i++) {
+ stream->Add("(");
+ if (parallel_moves_[i] != NULL) {
+ parallel_moves_[i]->PrintDataTo(stream);
+ }
+ stream->Add(") ");
+ }
+}
+
+
+const char* LArithmeticD::Mnemonic() const {
+ switch (op()) {
+ case Token::ADD: return "add-d";
+ case Token::SUB: return "sub-d";
+ case Token::MUL: return "mul-d";
+ case Token::DIV: return "div-d";
+ case Token::MOD: return "mod-d";
+ default:
+ UNREACHABLE();
+ return NULL;
+ }
+}
+
+
+const char* LArithmeticT::Mnemonic() const {
+ switch (op()) {
+ case Token::ADD: return "add-t";
+ case Token::SUB: return "sub-t";
+ case Token::MUL: return "mul-t";
+ case Token::MOD: return "mod-t";
+ case Token::DIV: return "div-t";
+ default:
+ UNREACHABLE();
+ return NULL;
+ }
+}
+
+
+void LBinaryOperation::PrintDataTo(StringStream* stream) const {
+ stream->Add("= ");
+ left()->PrintTo(stream);
+ stream->Add(" ");
+ right()->PrintTo(stream);
+}
+
+
+void LGoto::PrintDataTo(StringStream* stream) const {
+ stream->Add("B%d", block_id());
+}
+
+
+void LBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
+ input()->PrintTo(stream);
+}
+
+
+void LCmpIDAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if ");
+ left()->PrintTo(stream);
+ stream->Add(" %s ", Token::String(op()));
+ right()->PrintTo(stream);
+ stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LIsNullAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if ");
+ input()->PrintTo(stream);
+ stream->Add(is_strict() ? " === null" : " == null");
+ stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LIsObjectAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if is_object(");
+ input()->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LIsSmiAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if is_smi(");
+ input()->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if has_instance_type(");
+ input()->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if has_cached_array_index(");
+ input()->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
+void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if class_of_test(");
+ input()->PrintTo(stream);
+ stream->Add(", \"%o\") then B%d else B%d",
+ *hydrogen()->class_name(),
+ true_block_id(),
+ false_block_id());
+}
+
+
+void LTypeofIs::PrintDataTo(StringStream* stream) const {
+ input()->PrintTo(stream);
+ stream->Add(" == \"%s\"", *hydrogen()->type_literal()->ToCString());
+}
+
+
+void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) const {
+ stream->Add("if typeof ");
+ input()->PrintTo(stream);
+ stream->Add(" == \"%s\" then B%d else B%d",
+ *hydrogen()->type_literal()->ToCString(),
+ true_block_id(), false_block_id());
+}
+
+
+void LCallConstantFunction::PrintDataTo(StringStream* stream) const {
+ stream->Add("#%d / ", arity());
+}
+
+
+void LUnaryMathOperation::PrintDataTo(StringStream* stream) const {
+ stream->Add("/%s ", hydrogen()->OpName());
+ input()->PrintTo(stream);
+}
+
+
+void LCallKeyed::PrintDataTo(StringStream* stream) const {
+ stream->Add("[ecx] #%d / ", arity());
+}
+
+
+void LCallNamed::PrintDataTo(StringStream* stream) const {
+ SmartPointer<char> name_string = name()->ToCString();
+ stream->Add("%s #%d / ", *name_string, arity());
+}
+
+
+void LCallGlobal::PrintDataTo(StringStream* stream) const {
+ SmartPointer<char> name_string = name()->ToCString();
+ stream->Add("%s #%d / ", *name_string, arity());
+}
+
+
+void LCallKnownGlobal::PrintDataTo(StringStream* stream) const {
+ stream->Add("#%d / ", arity());
+}
+
+
+void LCallNew::PrintDataTo(StringStream* stream) const {
+ LUnaryOperation::PrintDataTo(stream);
+ stream->Add(" #%d / ", arity());
+}
+
+
+void LClassOfTest::PrintDataTo(StringStream* stream) const {
+ stream->Add("= class_of_test(");
+ input()->PrintTo(stream);
+ stream->Add(", \"%o\")", *hydrogen()->class_name());
+}
+
+
+void LUnaryOperation::PrintDataTo(StringStream* stream) const {
+ stream->Add("= ");
+ input()->PrintTo(stream);
+}
+
+
+void LAccessArgumentsAt::PrintDataTo(StringStream* stream) const {
+ arguments()->PrintTo(stream);
+
+ stream->Add(" length ");
+ length()->PrintTo(stream);
+
+ stream->Add(" index ");
+ index()->PrintTo(stream);
+}
+
+
+LChunk::LChunk(HGraph* graph)
+ : spill_slot_count_(0),
+ graph_(graph),
+ instructions_(32),
+ pointer_maps_(8),
+ inlined_closures_(1) {
+}
+
+
+void LChunk::Verify() const {
+ // TODO(twuerthinger): Implement verification for chunk.
+}
+
+
+int LChunk::GetNextSpillIndex(bool is_double) {
+  // A double-width value needs two spill slots; skip one so the pair stays
+  // together and the returned index is the second slot.
+ if (is_double) spill_slot_count_++;
+ return spill_slot_count_++;
+}
+
+
+LOperand* LChunk::GetNextSpillSlot(bool is_double) {
+ int index = GetNextSpillIndex(is_double);
+ if (is_double) {
+ return LDoubleStackSlot::Create(index);
+ } else {
+ return LStackSlot::Create(index);
+ }
+}
+
+
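+// Finds blocks that consist only of redundant gap moves and end in an
+// unconditional goto without a stack check. The label of such a block (if
+// it is itself redundant and not a loop header) is marked as replaced by
+// the label of the goto's target.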
+void LChunk::MarkEmptyBlocks() {
+ HPhase phase("Mark empty blocks", this);
+ for (int i = 0; i < graph()->blocks()->length(); ++i) {
+ HBasicBlock* block = graph()->blocks()->at(i);
+ int first = block->first_instruction_index();
+ int last = block->last_instruction_index();
+ LInstruction* first_instr = instructions()->at(first);
+ LInstruction* last_instr = instructions()->at(last);
+
+ LLabel* label = LLabel::cast(first_instr);
+ if (last_instr->IsGoto()) {
+ LGoto* goto_instr = LGoto::cast(last_instr);
+ if (!goto_instr->include_stack_check() &&
+ label->IsRedundant() &&
+ !label->is_loop_header()) {
+ bool can_eliminate = true;
+ for (int i = first + 1; i < last && can_eliminate; ++i) {
+ LInstruction* cur = instructions()->at(i);
+ if (cur->IsGap()) {
+ LGap* gap = LGap::cast(cur);
+ if (!gap->IsRedundant()) {
+ can_eliminate = false;
+ }
+ } else {
+ can_eliminate = false;
+ }
+ }
+
+ if (can_eliminate) {
+ label->set_replacement(GetLabel(goto_instr->block_id()));
+ }
+ }
+ }
+ }
+}
+
+
+void LStoreNamed::PrintDataTo(StringStream* stream) const {
+ object()->PrintTo(stream);
+ stream->Add(".");
+ stream->Add(*String::cast(*name())->ToCString());
+ stream->Add(" <- ");
+ value()->PrintTo(stream);
+}
+
+
+void LStoreKeyed::PrintDataTo(StringStream* stream) const {
+ object()->PrintTo(stream);
+ stream->Add("[");
+ key()->PrintTo(stream);
+ stream->Add("] <- ");
+ value()->PrintTo(stream);
+}
+
+
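+// Appends an instruction to the chunk together with a fresh gap. The gap
+// precedes control instructions and follows all others; the returned index
+// is the position of the instruction itself.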
+int LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
+ LGap* gap = new LGap(block);
+ int index = -1;
+ if (instr->IsControl()) {
+ instructions_.Add(gap);
+ index = instructions_.length();
+ instructions_.Add(instr);
+ } else {
+ index = instructions_.length();
+ instructions_.Add(instr);
+ instructions_.Add(gap);
+ }
+ if (instr->HasPointerMap()) {
+ pointer_maps_.Add(instr->pointer_map());
+ instr->pointer_map()->set_lithium_position(index);
+ }
+ return index;
+}
+
+
+LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
+ return LConstantOperand::Create(constant->id());
+}
+
+
+int LChunk::GetParameterStackSlot(int index) const {
+ // The receiver is at index 0, the first parameter at index 1, so we
+ // shift all parameter indexes down by the number of parameters, and
+ // make sure they end up negative so they are distinguishable from
+ // spill slots.
+ int result = index - graph()->info()->scope()->num_parameters() - 1;
+ ASSERT(result < 0);
+ return result;
+}
+
+
+// Returns the offset (in bytes, relative to ebp) of the parameter at
+// 'index' in the arguments stub; index -1 denotes the receiver.
+int LChunk::ParameterAt(int index) {
+ ASSERT(-1 <= index); // -1 is the receiver.
+ return (1 + graph()->info()->scope()->num_parameters() - index) *
+ kPointerSize;
+}
+
+
+LGap* LChunk::GetGapAt(int index) const {
+ return LGap::cast(instructions_[index]);
+}
+
+
+bool LChunk::IsGapAt(int index) const {
+ return instructions_[index]->IsGap();
+}
+
+
+int LChunk::NearestGapPos(int index) const {
+ while (!IsGapAt(index)) index--;
+ return index;
+}
+
+
+void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
+ GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
+}
+
+
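+// A node in the graph of parallel moves built by LGapResolver. Each node
+// wraps one operand; assigned_from() is the node whose operand supplies
+// this node's new value. Nodes with no incoming assignment count as
+// resolved from the start.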
+class LGapNode: public ZoneObject {
+ public:
+ explicit LGapNode(LOperand* operand)
+ : operand_(operand), resolved_(false), visited_id_(-1) { }
+
+ LOperand* operand() const { return operand_; }
+ bool IsResolved() const { return !IsAssigned() || resolved_; }
+ void MarkResolved() {
+ ASSERT(!IsResolved());
+ resolved_ = true;
+ }
+ int visited_id() const { return visited_id_; }
+ void set_visited_id(int id) {
+ ASSERT(id > visited_id_);
+ visited_id_ = id;
+ }
+
+ bool IsAssigned() const { return assigned_from_.is_set(); }
+ LGapNode* assigned_from() const { return assigned_from_.get(); }
+ void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
+
+ private:
+ LOperand* operand_;
+ SetOncePointer<LGapNode> assigned_from_;
+ bool resolved_;
+ int visited_id_;
+};
+
+
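+// Builds the move graph for one parallel move: redundant moves are dropped,
+// moves from constants are added directly to the result, and the remaining
+// moves are linked into LGapNodes so that cycles can be detected and later
+// broken using the marker operand.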
+LGapResolver::LGapResolver(const ZoneList<LMoveOperands>* moves,
+ LOperand* marker_operand)
+ : nodes_(4),
+ identified_cycles_(4),
+ result_(4),
+ marker_operand_(marker_operand),
+ next_visited_id_(0) {
+ for (int i = 0; i < moves->length(); ++i) {
+ LMoveOperands move = moves->at(i);
+ if (!move.IsRedundant()) RegisterMove(move);
+ }
+}
+
+
+const ZoneList<LMoveOperands>* LGapResolver::ResolveInReverseOrder() {
+ for (int i = 0; i < identified_cycles_.length(); ++i) {
+ ResolveCycle(identified_cycles_[i]);
+ }
+
+ int unresolved_nodes;
+ do {
+ unresolved_nodes = 0;
+ for (int j = 0; j < nodes_.length(); j++) {
+ LGapNode* node = nodes_[j];
+ if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
+ AddResultMove(node->assigned_from(), node);
+ node->MarkResolved();
+ }
+ if (!node->IsResolved()) ++unresolved_nodes;
+ }
+ } while (unresolved_nodes > 0);
+ return &result_;
+}
+
+
+void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
+ AddResultMove(from->operand(), to->operand());
+}
+
+
+void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
+ result_.Add(LMoveOperands(from, to));
+}
+
+
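+// Resolves a cycle of moves by routing it through the marker operand. The
+// recorded moves are meant to be consumed in reverse order (hence
+// ResolveInReverseOrder): reversed, they save the start operand into the
+// marker, shift each value one step along the cycle, and finally restore
+// the saved value from the marker.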
+void LGapResolver::ResolveCycle(LGapNode* start) {
+ ZoneList<LOperand*> circle_operands(8);
+ circle_operands.Add(marker_operand_);
+ LGapNode* cur = start;
+ do {
+ cur->MarkResolved();
+ circle_operands.Add(cur->operand());
+ cur = cur->assigned_from();
+ } while (cur != start);
+ circle_operands.Add(marker_operand_);
+
+ for (int i = circle_operands.length() - 1; i > 0; --i) {
+ LOperand* from = circle_operands[i];
+ LOperand* to = circle_operands[i - 1];
+ AddResultMove(from, to);
+ }
+}
+
+
+bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
+ ASSERT(a != b);
+ LGapNode* cur = a;
+ while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
+ cur->set_visited_id(visited_id);
+ cur = cur->assigned_from();
+ }
+
+ return cur == b;
+}
+
+
+bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
+ ASSERT(a != b);
+ return CanReach(a, b, next_visited_id_++);
+}
+
+
+void LGapResolver::RegisterMove(LMoveOperands move) {
+ if (move.from()->IsConstantOperand()) {
+    // Constant moves should come last in the machine code. Since the result
+    // list is consumed in reverse order, add them to it first.
+ AddResultMove(move.from(), move.to());
+ } else {
+ LGapNode* from = LookupNode(move.from());
+ LGapNode* to = LookupNode(move.to());
+ if (to->IsAssigned() && to->assigned_from() == from) {
+ move.Eliminate();
+ return;
+ }
+ ASSERT(!to->IsAssigned());
+ if (CanReach(from, to)) {
+      // This move introduces a cycle; remember it so it can be resolved later.
+ identified_cycles_.Add(from);
+ }
+ to->set_assigned_from(from);
+ }
+}
+
+
+LGapNode* LGapResolver::LookupNode(LOperand* operand) {
+ for (int i = 0; i < nodes_.length(); ++i) {
+ if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
+ }
+
+ // No node found => create a new one.
+ LGapNode* result = new LGapNode(operand);
+ nodes_.Add(result);
+ return result;
+}
+
+
+Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
+ return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
+}
+
+
+Representation LChunk::LookupLiteralRepresentation(
+ LConstantOperand* operand) const {
+ return graph_->LookupValue(operand->index())->representation();
+}
+
+
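+// Translates the hydrogen graph into an LChunk by visiting the basic blocks
+// in order; returns NULL if building is aborted.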
+LChunk* LChunkBuilder::Build() {
+ ASSERT(is_unused());
+ chunk_ = new LChunk(graph());
+ HPhase phase("Building chunk", chunk_);
+ status_ = BUILDING;
+ const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
+ for (int i = 0; i < blocks->length(); i++) {
+ HBasicBlock* next = NULL;
+ if (i < blocks->length() - 1) next = blocks->at(i + 1);
+ DoBasicBlock(blocks->at(i), next);
+ if (is_aborted()) return NULL;
+ }
+ status_ = DONE;
+ return chunk_;
+}
+
+
+void LChunkBuilder::Abort(const char* format, ...) {
+ if (FLAG_trace_bailout) {
+ SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
+ PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
+ va_list arguments;
+ va_start(arguments, format);
+ OS::VPrint(format, arguments);
+ va_end(arguments);
+ PrintF("\n");
+ }
+ status_ = ABORTED;
+}
+
+
+LRegister* LChunkBuilder::ToOperand(Register reg) {
+ return LRegister::Create(Register::ToAllocationIndex(reg));
+}
+
+
+LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
+ return new LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
+}
+
+
+LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
+ return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ XMMRegister::ToAllocationIndex(reg));
+}
+
+
+LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
+ return Use(value, ToUnallocated(fixed_register));
+}
+
+
+LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
+ return Use(value, ToUnallocated(reg));
+}
+
+
+LOperand* LChunkBuilder::UseRegister(HValue* value) {
+ return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+}
+
+
+LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
+ return Use(value,
+ new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ LUnallocated::USED_AT_START));
+}
+
+
+LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
+ return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+}
+
+
+LOperand* LChunkBuilder::Use(HValue* value) {
+ return Use(value, new LUnallocated(LUnallocated::NONE));
+}
+
+
+LOperand* LChunkBuilder::UseAtStart(HValue* value) {
+ return Use(value, new LUnallocated(LUnallocated::NONE,
+ LUnallocated::USED_AT_START));
+}
+
+
+LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
+ return value->IsConstant()
+ ? chunk_->DefineConstantOperand(HConstant::cast(value))
+ : Use(value);
+}
+
+
+LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
+ return value->IsConstant()
+ ? chunk_->DefineConstantOperand(HConstant::cast(value))
+ : UseAtStart(value);
+}
+
+
+LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
+ return value->IsConstant()
+ ? chunk_->DefineConstantOperand(HConstant::cast(value))
+ : UseRegister(value);
+}
+
+
+LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
+ return value->IsConstant()
+ ? chunk_->DefineConstantOperand(HConstant::cast(value))
+ : UseRegisterAtStart(value);
+}
+
+
+LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
+ if (value->EmitAtUses()) {
+ HInstruction* instr = HInstruction::cast(value);
+ VisitInstruction(instr);
+ }
+ allocator_->RecordUse(value, operand);
+ return operand;
+}
+
+
+LInstruction* LChunkBuilder::Define(LInstruction* instr) {
+ return Define(instr, new LUnallocated(LUnallocated::NONE));
+}
+
+
+LInstruction* LChunkBuilder::DefineAsRegister(LInstruction* instr) {
+ return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+}
+
+
+LInstruction* LChunkBuilder::DefineAsSpilled(LInstruction* instr, int index) {
+ return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+}
+
+
+LInstruction* LChunkBuilder::DefineSameAsAny(LInstruction* instr) {
+ return Define(instr, new LUnallocated(LUnallocated::SAME_AS_ANY_INPUT));
+}
+
+
+LInstruction* LChunkBuilder::DefineSameAsFirst(LInstruction* instr) {
+ return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+}
+
+
+LInstruction* LChunkBuilder::DefineFixed(LInstruction* instr, Register reg) {
+ return Define(instr, ToUnallocated(reg));
+}
+
+
+LInstruction* LChunkBuilder::DefineFixedDouble(LInstruction* instr,
+ XMMRegister reg) {
+ return Define(instr, ToUnallocated(reg));
+}
+
+
+LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
+ HEnvironment* hydrogen_env = current_block_->last_environment();
+ instr->set_environment(CreateEnvironment(hydrogen_env));
+ return instr;
+}
+
+
+LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
+ LInstruction* instr, int ast_id) {
+ ASSERT(instructions_pending_deoptimization_environment_ == NULL);
+ ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ instructions_pending_deoptimization_environment_ = instr;
+ pending_deoptimization_ast_id_ = ast_id;
+ return instr;
+}
+
+
+void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
+ instructions_pending_deoptimization_environment_ = NULL;
+ pending_deoptimization_ast_id_ = AstNode::kNoNumber;
+}
+
+
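+// Marks an instruction as a call: the call is recorded with the register
+// allocator and a pointer map is assigned for GC. If the hydrogen
+// instruction has side effects, the AST id of the following simulate is
+// remembered so that a lazy-deoptimization environment can be attached
+// later; otherwise (or for eagerly deoptimizing calls) an environment is
+// attached right away.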
+LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
+ HInstruction* hinstr,
+ CanDeoptimize can_deoptimize) {
+ allocator_->MarkAsCall();
+ instr = AssignPointerMap(instr);
+
+ if (hinstr->HasSideEffects()) {
+ ASSERT(hinstr->next()->IsSimulate());
+ HSimulate* sim = HSimulate::cast(hinstr->next());
+ instr = SetInstructionPendingDeoptimizationEnvironment(
+ instr, sim->ast_id());
+ }
+
+  // If the instruction does not have side effects, lazy deoptimization
+  // after the call will try to deoptimize to the point before the call.
+  // Thus we still need to attach an environment to this call even if
+  // the call sequence cannot deoptimize eagerly.
+ bool needs_environment =
+ (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || !hinstr->HasSideEffects();
+ if (needs_environment && !instr->HasEnvironment()) {
+ instr = AssignEnvironment(instr);
+ }
+
+ return instr;
+}
+
+
+LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
+ ASSERT(!instr->HasPointerMap());
+ instr->set_pointer_map(new LPointerMap(position_));
+ return instr;
+}
+
+
+LInstruction* LChunkBuilder::Define(LInstruction* instr, LUnallocated* result) {
+ allocator_->RecordDefinition(current_instruction_, result);
+ instr->set_result(result);
+ return instr;
+}
+
+
+LOperand* LChunkBuilder::Temp() {
+ LUnallocated* operand = new LUnallocated(LUnallocated::NONE);
+ allocator_->RecordTemporary(operand);
+ return operand;
+}
+
+
+LUnallocated* LChunkBuilder::TempRegister() {
+ LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ allocator_->RecordTemporary(operand);
+ return operand;
+}
+
+
+LOperand* LChunkBuilder::FixedTemp(Register reg) {
+ LUnallocated* operand = ToUnallocated(reg);
+ allocator_->RecordTemporary(operand);
+ return operand;
+}
+
+
+LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
+ LUnallocated* operand = ToUnallocated(reg);
+ allocator_->RecordTemporary(operand);
+ return operand;
+}
+
+
+LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
+ return new LLabel(instr->block());
+}
+
+
+LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
+ return AssignEnvironment(new LDeoptimize);
+}
+
+
+LInstruction* LChunkBuilder::DoBit(Token::Value op,
+ HBitwiseBinaryOperation* instr) {
+ ASSERT(instr->representation().IsInteger32());
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+
+ LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+ LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+ return DefineSameAsFirst(new LBitI(op, left, right));
+}
+
+
+LInstruction* LChunkBuilder::DoShift(Token::Value op,
+ HBitwiseBinaryOperation* instr) {
+ ASSERT(instr->representation().IsInteger32());
+ ASSERT(instr->OperandAt(0)->representation().IsInteger32());
+ ASSERT(instr->OperandAt(1)->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->OperandAt(0));
+
+ HValue* right_value = instr->OperandAt(1);
+ LOperand* right = NULL;
+ int constant_value = 0;
+ if (right_value->IsConstant()) {
+ HConstant* constant = HConstant::cast(right_value);
+ right = chunk_->DefineConstantOperand(constant);
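+    // On ia32 only the low five bits of the shift count are used.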
+ constant_value = constant->Integer32Value() & 0x1f;
+ } else {
+ right = UseFixed(right_value, ecx);
+ }
+
+  // Shift operations can only deoptimize on a logical shift right (SHR) by
+  // zero: in that case the result may not fit in a signed 32-bit integer,
+  // which only matters if some use does not truncate it to int32.
+ bool can_deopt = (op == Token::SHR && constant_value == 0);
+ if (can_deopt) {
+ bool can_truncate = true;
+ for (int i = 0; i < instr->uses()->length(); i++) {
+ if (!instr->uses()->at(i)->CheckFlag(HValue::kTruncatingToInt32)) {
+ can_truncate = false;
+ break;
+ }
+ }
+ can_deopt = !can_truncate;
+ }
+
+ LInstruction* result =
+ DefineSameAsFirst(new LShiftI(op, left, right, can_deopt));
+ if (can_deopt) AssignEnvironment(result);
+ return result;
+}
+
+
+LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
+ HArithmeticBinaryOperation* instr) {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseRegisterAtStart(instr->right());
+ LArithmeticD* result = new LArithmeticD(op, left, right);
+ return DefineSameAsFirst(result);
+}
+
+
+LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
+ HArithmeticBinaryOperation* instr) {
+ ASSERT(op == Token::ADD ||
+ op == Token::DIV ||
+ op == Token::MOD ||
+ op == Token::MUL ||
+ op == Token::SUB);
+ HValue* left = instr->left();
+ HValue* right = instr->right();
+ ASSERT(left->representation().IsTagged());
+ ASSERT(right->representation().IsTagged());
+ LOperand* left_operand = UseFixed(left, edx);
+ LOperand* right_operand = UseFixed(right, eax);
+ LInstruction* result = new LArithmeticT(op, left_operand, right_operand);
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
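+// Translates a single basic block. The incoming environment is taken from
+// the predecessor (copied if another successor may still use it) or, at a
+// join, updated in place with the block's phi values; afterwards every
+// instruction that is not emitted lazily at its uses is compiled.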
+void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
+ ASSERT(is_building());
+ current_block_ = block;
+ next_block_ = next_block;
+ if (block->IsStartBlock()) {
+ block->UpdateEnvironment(graph_->start_environment());
+ argument_count_ = 0;
+ } else if (block->predecessors()->length() == 1) {
+ // We have a single predecessor => copy environment and outgoing
+ // argument count from the predecessor.
+ ASSERT(block->phis()->length() == 0);
+ HBasicBlock* pred = block->predecessors()->at(0);
+ HEnvironment* last_environment = pred->last_environment();
+ ASSERT(last_environment != NULL);
+    // Only copy the environment if it is used again later.
+ if (pred->end()->SecondSuccessor() == NULL) {
+ ASSERT(pred->end()->FirstSuccessor() == block);
+ } else {
+ if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
+ pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
+ last_environment = last_environment->Copy();
+ }
+ }
+ block->UpdateEnvironment(last_environment);
+ ASSERT(pred->argument_count() >= 0);
+ argument_count_ = pred->argument_count();
+ } else {
+ // We are at a state join => process phis.
+ HBasicBlock* pred = block->predecessors()->at(0);
+    // No need to copy the environment; it cannot be used again later.
+ HEnvironment* last_environment = pred->last_environment();
+ for (int i = 0; i < block->phis()->length(); ++i) {
+ HPhi* phi = block->phis()->at(i);
+ last_environment->SetValueAt(phi->merged_index(), phi);
+ }
+ for (int i = 0; i < block->deleted_phis()->length(); ++i) {
+ last_environment->SetValueAt(block->deleted_phis()->at(i),
+ graph_->GetConstantUndefined());
+ }
+ block->UpdateEnvironment(last_environment);
+ // Pick up the outgoing argument count of one of the predecessors.
+ argument_count_ = pred->argument_count();
+ }
+ HInstruction* current = block->first();
+ int start = chunk_->instructions()->length();
+ while (current != NULL && !is_aborted()) {
+ if (FLAG_trace_environment) {
+ PrintF("Process instruction %d\n", current->id());
+ }
+ // Code for constants in registers is generated lazily.
+ if (!current->EmitAtUses()) {
+ VisitInstruction(current);
+ }
+ current = current->next();
+ }
+ int end = chunk_->instructions()->length() - 1;
+ if (end >= start) {
+ block->set_first_instruction_index(start);
+ block->set_last_instruction_index(end);
+ }
+ block->set_argument_count(argument_count_);
+ next_block_ = NULL;
+ current_block_ = NULL;
+}
+
+
+void LChunkBuilder::VisitInstruction(HInstruction* current) {
+ HInstruction* old_current = current_instruction_;
+ current_instruction_ = current;
+ allocator_->BeginInstruction();
+ if (current->has_position()) position_ = current->position();
+ LInstruction* instr = current->CompileToLithium(this);
+
+ if (instr != NULL) {
+ if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
+ instr = AssignPointerMap(instr);
+ }
+ if (FLAG_stress_environments && !instr->HasEnvironment()) {
+ instr = AssignEnvironment(instr);
+ }
+ if (current->IsBranch()) {
+ instr->set_hydrogen_value(HBranch::cast(current)->value());
+ } else {
+ instr->set_hydrogen_value(current);
+ }
+
+ int index = chunk_->AddInstruction(instr, current_block_);
+ allocator_->SummarizeInstruction(index);
+ } else {
+ // This instruction should be omitted.
+ allocator_->OmitInstruction();
+ }
+ current_instruction_ = old_current;
+}
+
+
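+// Writes the deoptimization translation for this environment and,
+// recursively, for its outer environments: one frame per environment,
+// followed by one entry per environment value. If a spilled copy of a
+// register or double register value is recorded, it is added as well and
+// marked as a duplicate.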
+void LEnvironment::WriteTranslation(LCodeGen* cgen,
+ Translation* translation) const {
+ if (this == NULL) return;
+
+ // The translation includes one command per value in the environment.
+ int translation_size = values()->length();
+ // The output frame height does not include the parameters.
+ int height = translation_size - parameter_count();
+
+ outer()->WriteTranslation(cgen, translation);
+ int closure_id = cgen->DefineDeoptimizationLiteral(closure());
+ translation->BeginFrame(ast_id(), closure_id, height);
+ for (int i = 0; i < translation_size; ++i) {
+ LOperand* value = values()->at(i);
+ // spilled_registers_ and spilled_double_registers_ are either
+ // both NULL or both set.
+ if (spilled_registers_ != NULL && value != NULL) {
+ if (value->IsRegister() &&
+ spilled_registers_[value->index()] != NULL) {
+ translation->MarkDuplicate();
+ cgen->AddToTranslation(translation,
+ spilled_registers_[value->index()],
+ HasTaggedValueAt(i));
+ } else if (value->IsDoubleRegister() &&
+ spilled_double_registers_[value->index()] != NULL) {
+ translation->MarkDuplicate();
+ cgen->AddToTranslation(translation,
+ spilled_double_registers_[value->index()],
+ false);
+ }
+ }
+
+ cgen->AddToTranslation(translation, value, HasTaggedValueAt(i));
+ }
+}
+
+
+void LEnvironment::PrintTo(StringStream* stream) const {
+ stream->Add("[id=%d|", ast_id());
+ stream->Add("[parameters=%d|", parameter_count());
+ stream->Add("[arguments_stack_height=%d|", arguments_stack_height());
+ for (int i = 0; i < values_.length(); ++i) {
+ if (i != 0) stream->Add(";");
+ if (values_[i] == NULL) {
+ stream->Add("[hole]");
+ } else {
+ values_[i]->PrintTo(stream);
+ }
+ }
+ stream->Add("]");
+}
+
+
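+// Builds an LEnvironment from a hydrogen environment, recursively creating
+// the outer environments first. Arguments objects get no operand, pushed
+// arguments become LArgument slots, and all other values are used with the
+// ANY policy so the register allocator may place them anywhere.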
+LEnvironment* LChunkBuilder::CreateEnvironment(HEnvironment* hydrogen_env) {
+ if (hydrogen_env == NULL) return NULL;
+
+ LEnvironment* outer = CreateEnvironment(hydrogen_env->outer());
+ int ast_id = hydrogen_env->ast_id();
+ ASSERT(ast_id != AstNode::kNoNumber);
+ int value_count = hydrogen_env->values()->length();
+ LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
+ int argument_index = 0;
+ for (int i = 0; i < value_count; ++i) {
+ HValue* value = hydrogen_env->values()->at(i);
+ LOperand* op = NULL;
+ if (value->IsArgumentsObject()) {
+ op = NULL;
+ } else if (value->IsPushArgument()) {
+ op = new LArgument(argument_index++);
+ } else {
+ op = UseOrConstant(value);
+ if (op->IsUnallocated()) {
+ LUnallocated* unalloc = LUnallocated::cast(op);
+ unalloc->set_policy(LUnallocated::ANY);
+ }
+ }
+ result->AddValue(op, value->representation());
+ }
+
+ return result;
+}
+
+
+LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
+ LInstruction* result = new LGoto(instr->FirstSuccessor()->block_id(),
+ instr->include_stack_check());
+ if (instr->include_stack_check()) result = AssignPointerMap(result);
+ return result;
+}
+
+
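+// A branch whose input is emitted at its uses is fused with the compare or
+// test that produces it (class-of, compare, is-smi, is-null, instance-of,
+// typeof-is, etc.); constant inputs become unconditional gotos. Otherwise a
+// generic LBranch tests the materialized value.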
+LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
+ HValue* v = instr->value();
+ HBasicBlock* first = instr->FirstSuccessor();
+ HBasicBlock* second = instr->SecondSuccessor();
+ ASSERT(first != NULL && second != NULL);
+ int first_id = first->block_id();
+ int second_id = second->block_id();
+
+ if (v->EmitAtUses()) {
+ if (v->IsClassOfTest()) {
+ HClassOfTest* compare = HClassOfTest::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ return new LClassOfTestAndBranch(UseTempRegister(compare->value()),
+ TempRegister(),
+ TempRegister(),
+ first_id,
+ second_id);
+ } else if (v->IsCompare()) {
+ HCompare* compare = HCompare::cast(v);
+ Token::Value op = compare->token();
+ HValue* left = compare->left();
+ HValue* right = compare->right();
+ if (left->representation().IsInteger32()) {
+ ASSERT(right->representation().IsInteger32());
+ return new LCmpIDAndBranch(op,
+ UseRegisterAtStart(left),
+ UseOrConstantAtStart(right),
+ first_id,
+ second_id,
+ false);
+ } else if (left->representation().IsDouble()) {
+ ASSERT(right->representation().IsDouble());
+ return new LCmpIDAndBranch(op,
+ UseRegisterAtStart(left),
+ UseRegisterAtStart(right),
+ first_id,
+ second_id,
+ true);
+ } else {
+ ASSERT(left->representation().IsTagged());
+ ASSERT(right->representation().IsTagged());
+ bool reversed = op == Token::GT || op == Token::LTE;
+ LOperand* left_operand = UseFixed(left, reversed ? eax : edx);
+ LOperand* right_operand = UseFixed(right, reversed ? edx : eax);
+ LInstruction* result = new LCmpTAndBranch(left_operand,
+ right_operand,
+ first_id,
+ second_id);
+ return MarkAsCall(result, instr);
+ }
+ } else if (v->IsIsSmi()) {
+ HIsSmi* compare = HIsSmi::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ return new LIsSmiAndBranch(Use(compare->value()),
+ first_id,
+ second_id);
+ } else if (v->IsHasInstanceType()) {
+ HHasInstanceType* compare = HHasInstanceType::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ return new LHasInstanceTypeAndBranch(UseRegisterAtStart(compare->value()),
+ TempRegister(),
+ first_id,
+ second_id);
+ } else if (v->IsHasCachedArrayIndex()) {
+ HHasCachedArrayIndex* compare = HHasCachedArrayIndex::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ return new LHasCachedArrayIndexAndBranch(
+ UseRegisterAtStart(compare->value()), first_id, second_id);
+ } else if (v->IsIsNull()) {
+ HIsNull* compare = HIsNull::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ // We only need a temp register for non-strict compare.
+ LOperand* temp = compare->is_strict() ? NULL : TempRegister();
+ return new LIsNullAndBranch(UseRegisterAtStart(compare->value()),
+ compare->is_strict(),
+ temp,
+ first_id,
+ second_id);
+ } else if (v->IsIsObject()) {
+ HIsObject* compare = HIsObject::cast(v);
+ ASSERT(compare->value()->representation().IsTagged());
+
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = TempRegister();
+ return new LIsObjectAndBranch(UseRegisterAtStart(compare->value()),
+ temp1,
+ temp2,
+ first_id,
+ second_id);
+ } else if (v->IsCompareJSObjectEq()) {
+ HCompareJSObjectEq* compare = HCompareJSObjectEq::cast(v);
+ return new LCmpJSObjectEqAndBranch(UseRegisterAtStart(compare->left()),
+ UseRegisterAtStart(compare->right()),
+ first_id,
+ second_id);
+ } else if (v->IsInstanceOf()) {
+ HInstanceOf* instance_of = HInstanceOf::cast(v);
+ LInstruction* result =
+ new LInstanceOfAndBranch(UseFixed(instance_of->left(), eax),
+ UseFixed(instance_of->right(), edx),
+ first_id,
+ second_id);
+ return MarkAsCall(result, instr);
+ } else if (v->IsTypeofIs()) {
+ HTypeofIs* typeof_is = HTypeofIs::cast(v);
+ return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()),
+ first_id,
+ second_id);
+ } else {
+ if (v->IsConstant()) {
+ if (HConstant::cast(v)->handle()->IsTrue()) {
+ return new LGoto(first_id);
+ } else if (HConstant::cast(v)->handle()->IsFalse()) {
+ return new LGoto(second_id);
+ }
+ }
+ Abort("Undefined compare before branch");
+ return NULL;
+ }
+ }
+ return new LBranch(UseRegisterAtStart(v), first_id, second_id);
+}
+
+
+LInstruction* LChunkBuilder::DoCompareMapAndBranch(
+ HCompareMapAndBranch* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ HBasicBlock* first = instr->FirstSuccessor();
+ HBasicBlock* second = instr->SecondSuccessor();
+ return new LCmpMapAndBranch(value,
+ instr->map(),
+ first->block_id(),
+ second->block_id());
+}
+
+
+LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
+ return DefineAsRegister(new LArgumentsLength(Use(length->value())));
+}
+
+
+LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
+ return DefineAsRegister(new LArgumentsElements);
+}
+
+
+LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
+ LInstruction* result =
+ new LInstanceOf(UseFixed(instr->left(), eax),
+ UseFixed(instr->right(), edx));
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
+ LOperand* function = UseFixed(instr->function(), edi);
+ LOperand* receiver = UseFixed(instr->receiver(), eax);
+ LOperand* length = UseRegisterAtStart(instr->length());
+ LOperand* elements = UseRegisterAtStart(instr->elements());
+ LInstruction* result = new LApplyArguments(function,
+ receiver,
+ length,
+ elements);
+ return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
+ ++argument_count_;
+ LOperand* argument = UseOrConstant(instr->argument());
+ return new LPushArgument(argument);
+}
+
+
+LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
+ return DefineAsRegister(new LGlobalObject);
+}
+
+
+LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
+ return DefineAsRegister(new LGlobalReceiver);
+}
+
+
+LInstruction* LChunkBuilder::DoCallConstantFunction(
+ HCallConstantFunction* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallConstantFunction, eax), instr);
+}
+
+
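+// Math.log, Math.sin and Math.cos are implemented as calls that take and
+// return xmm1. The other operations work directly on a register; abs,
+// floor, round and pow-half can deoptimize and therefore get an
+// environment.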
+LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
+ BuiltinFunctionId op = instr->op();
+ if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ LOperand* input = UseFixedDouble(instr->value(), xmm1);
+ LInstruction* result = new LUnaryMathOperation(input);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
+ } else {
+ LOperand* input = UseRegisterAtStart(instr->value());
+ LInstruction* result = new LUnaryMathOperation(input);
+ switch (op) {
+ case kMathAbs:
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ case kMathFloor:
+ return AssignEnvironment(DefineAsRegister(result));
+ case kMathRound:
+ return AssignEnvironment(DefineAsRegister(result));
+ case kMathSqrt:
+ return DefineSameAsFirst(result);
+ case kMathPowHalf:
+ return AssignEnvironment(DefineSameAsFirst(result));
+ default:
+ UNREACHABLE();
+ return NULL;
+ }
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
+ ASSERT(instr->key()->representation().IsTagged());
+ argument_count_ -= instr->argument_count();
+ UseFixed(instr->key(), ecx);
+ return MarkAsCall(DefineFixed(new LCallKeyed, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallNamed, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallGlobal, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallKnownGlobal, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
+ LOperand* constructor = UseFixed(instr->constructor(), edi);
+ argument_count_ -= instr->argument_count();
+ LInstruction* result = new LCallNew(constructor);
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallFunction, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallRuntime, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoShr(HShr* instr) {
+ return DoShift(Token::SHR, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoSar(HSar* instr) {
+ return DoShift(Token::SAR, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoShl(HShl* instr) {
+ return DoShift(Token::SHL, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoBitAnd(HBitAnd* instr) {
+ return DoBit(Token::BIT_AND, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
+ ASSERT(instr->value()->representation().IsInteger32());
+ ASSERT(instr->representation().IsInteger32());
+ return DefineSameAsFirst(new LBitNotI(UseRegisterAtStart(instr->value())));
+}
+
+
+LInstruction* LChunkBuilder::DoBitOr(HBitOr* instr) {
+ return DoBit(Token::BIT_OR, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoBitXor(HBitXor* instr) {
+ return DoBit(Token::BIT_XOR, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
+ if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::DIV, instr);
+ } else if (instr->representation().IsInteger32()) {
+ // The temporary operand is necessary to ensure that right is not allocated
+ // into edx.
+ FixedTemp(edx);
+ LOperand* value = UseFixed(instr->left(), eax);
+ LOperand* divisor = UseRegister(instr->right());
+ return AssignEnvironment(DefineFixed(new LDivI(value, divisor), eax));
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::DIV, instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoMod(HMod* instr) {
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ // The temporary operand is necessary to ensure that right is not allocated
+ // into edx.
+ FixedTemp(edx);
+ LOperand* value = UseFixed(instr->left(), eax);
+ LOperand* divisor = UseRegister(instr->right());
+ LInstruction* result = DefineFixed(new LModI(value, divisor), edx);
+ if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
+ instr->CheckFlag(HValue::kCanBeDivByZero)) {
+ result = AssignEnvironment(result);
+ }
+ return result;
+ } else if (instr->representation().IsTagged()) {
+ return DoArithmeticT(Token::MOD, instr);
+ } else {
+ ASSERT(instr->representation().IsDouble());
+ // We call a C function for double modulo. It can't trigger a GC.
+    // We need to use a fixed result register for the call.
+ // TODO(fschneider): Allow any register as input registers.
+ LOperand* left = UseFixedDouble(instr->left(), xmm1);
+ LOperand* right = UseFixedDouble(instr->right(), xmm2);
+ LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoMul(HMul* instr) {
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+ LOperand* right = UseOrConstant(instr->MostConstantOperand());
+ LOperand* temp = NULL;
+ if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ temp = TempRegister();
+ }
+ LMulI* mul = new LMulI(left, right, temp);
+ return AssignEnvironment(DefineSameAsFirst(mul));
+ } else if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::MUL, instr);
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::MUL, instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoSub(HSub* instr) {
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+ LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+ LSubI* sub = new LSubI(left, right);
+ LInstruction* result = DefineSameAsFirst(sub);
+ if (instr->CheckFlag(HValue::kCanOverflow)) {
+ result = AssignEnvironment(result);
+ }
+ return result;
+ } else if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::SUB, instr);
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::SUB, instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+ LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+ LAddI* add = new LAddI(left, right);
+ LInstruction* result = DefineSameAsFirst(add);
+ if (instr->CheckFlag(HValue::kCanOverflow)) {
+ result = AssignEnvironment(result);
+ }
+ return result;
+ } else if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::ADD, instr);
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::ADD, instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoPower(HPower* instr) {
+ ASSERT(instr->representation().IsDouble());
+ // We call a C function for double power. It can't trigger a GC.
+  // We need to use a fixed result register for the call.
+ Representation exponent_type = instr->right()->representation();
+ ASSERT(instr->left()->representation().IsDouble());
+ LOperand* left = UseFixedDouble(instr->left(), xmm1);
+ LOperand* right = exponent_type.IsDouble() ?
+ UseFixedDouble(instr->right(), xmm2) :
+ UseFixed(instr->right(), eax);
+ LPower* result = new LPower(left, right);
+ return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
+ CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoCompare(HCompare* instr) {
+ Token::Value op = instr->token();
+ if (instr->left()->representation().IsInteger32()) {
+ ASSERT(instr->right()->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseOrConstantAtStart(instr->right());
+ return DefineAsRegister(new LCmpID(op, left, right, false));
+ } else if (instr->left()->representation().IsDouble()) {
+ ASSERT(instr->right()->representation().IsDouble());
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseRegisterAtStart(instr->right());
+ return DefineAsRegister(new LCmpID(op, left, right, true));
+ } else {
+ bool reversed = (op == Token::GT || op == Token::LTE);
+ LOperand* left = UseFixed(instr->left(), reversed ? eax : edx);
+ LOperand* right = UseFixed(instr->right(), reversed ? edx : eax);
+ LInstruction* result = new LCmpT(left, right);
+ return MarkAsCall(DefineFixed(result, eax), instr);
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoCompareJSObjectEq(
+ HCompareJSObjectEq* instr) {
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseRegisterAtStart(instr->right());
+ LInstruction* result = new LCmpJSObjectEq(left, right);
+ return DefineAsRegister(result);
+}
+
+
+LInstruction* LChunkBuilder::DoIsNull(HIsNull* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
+
+ return DefineAsRegister(new LIsNull(value,
+ instr->is_strict()));
+}
+
+
+LInstruction* LChunkBuilder::DoIsObject(HIsObject* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegister(instr->value());
+
+ return DefineAsRegister(new LIsObject(value, TempRegister()));
+}
+
+
+LInstruction* LChunkBuilder::DoIsSmi(HIsSmi* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseAtStart(instr->value());
+
+ return DefineAsRegister(new LIsSmi(value));
+}
+
+
+LInstruction* LChunkBuilder::DoHasInstanceType(HHasInstanceType* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
+
+ return DefineAsRegister(new LHasInstanceType(value));
+}
+
+
+LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
+ HHasCachedArrayIndex* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegister(instr->value());
+
+ return DefineAsRegister(new LHasCachedArrayIndex(value));
+}
+
+
+LInstruction* LChunkBuilder::DoClassOfTest(HClassOfTest* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseTempRegister(instr->value());
+
+ return DefineSameAsFirst(new LClassOfTest(value, TempRegister()));
+}
+
+
+LInstruction* LChunkBuilder::DoArrayLength(HArrayLength* instr) {
+ LOperand* array = NULL;
+ LOperand* temporary = NULL;
+
+ if (instr->value()->IsLoadElements()) {
+ array = UseRegisterAtStart(instr->value());
+ } else {
+ array = UseRegister(instr->value());
+ temporary = TempRegister();
+ }
+
+ LInstruction* result = new LArrayLength(array, temporary);
+ return AssignEnvironment(DefineAsRegister(result));
+}
+
+
+LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
+ LOperand* object = UseRegister(instr->value());
+ LInstruction* result = new LValueOf(object, TempRegister());
+ return AssignEnvironment(DefineSameAsFirst(result));
+}
+
+
+LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
+ return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
+ Use(instr->length())));
+}
+
+
+LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
+ LOperand* value = UseFixed(instr->value(), eax);
+ return MarkAsCall(new LThrow(value), instr);
+}
+
+
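+// Handles representation changes between tagged, double and int32 values.
+// Conversions that can deoptimize are given an environment; conversions
+// that may allocate a heap number are given a pointer map.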
+LInstruction* LChunkBuilder::DoChange(HChange* instr) {
+ Representation from = instr->from();
+ Representation to = instr->to();
+ if (from.IsTagged()) {
+ if (to.IsDouble()) {
+ LOperand* value = UseRegister(instr->value());
+ LInstruction* res = new LNumberUntagD(value);
+ return AssignEnvironment(DefineAsRegister(res));
+ } else {
+ ASSERT(to.IsInteger32());
+ LOperand* value = UseRegister(instr->value());
+ bool needs_check = !instr->value()->type().IsSmi();
+ if (needs_check) {
+ LOperand* xmm_temp =
+ (instr->CanTruncateToInt32() && CpuFeatures::IsSupported(SSE3))
+ ? NULL
+ : FixedTemp(xmm1);
+ LInstruction* res = new LTaggedToI(value, xmm_temp);
+ return AssignEnvironment(DefineSameAsFirst(res));
+ } else {
+ return DefineSameAsFirst(new LSmiUntag(value, needs_check));
+ }
+ }
+ } else if (from.IsDouble()) {
+ if (to.IsTagged()) {
+ LOperand* value = UseRegister(instr->value());
+ LOperand* temp = TempRegister();
+
+ // Make sure that temp and result_temp are different registers.
+ LUnallocated* result_temp = TempRegister();
+ LInstruction* result = new LNumberTagD(value, temp);
+ return AssignPointerMap(Define(result, result_temp));
+ } else {
+ ASSERT(to.IsInteger32());
+ LOperand* value = UseRegister(instr->value());
+ return AssignEnvironment(DefineAsRegister(new LDoubleToI(value)));
+ }
+ } else if (from.IsInteger32()) {
+ if (to.IsTagged()) {
+ HValue* val = instr->value();
+ LOperand* value = UseRegister(val);
+ if (val->HasRange() && val->range()->IsInSmiRange()) {
+ return DefineSameAsFirst(new LSmiTag(value));
+ } else {
+ LInstruction* result = new LNumberTagI(value);
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ }
+ } else {
+ ASSERT(to.IsDouble());
+ return DefineAsRegister(new LInteger32ToDouble(Use(instr->value())));
+ }
+ }
+ UNREACHABLE();
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return AssignEnvironment(new LCheckSmi(value, zero));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* temp = TempRegister();
+ LInstruction* result = new LCheckInstanceType(value, temp);
+ return AssignEnvironment(result);
+}
+
+
+LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
+ LOperand* temp = TempRegister();
+ LInstruction* result =
+ new LCheckPrototypeMaps(temp,
+ instr->holder(),
+ instr->receiver_map());
+ return AssignEnvironment(result);
+}
+
+
+LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return AssignEnvironment(new LCheckSmi(value, not_zero));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return AssignEnvironment(new LCheckFunction(value));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LInstruction* result = new LCheckMap(value);
+ return AssignEnvironment(result);
+}
+
+
+LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
+ return new LReturn(UseFixed(instr->value(), eax));
+}
+
+
+LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
+ Representation r = instr->representation();
+ if (r.IsInteger32()) {
+ int32_t value = instr->Integer32Value();
+ return DefineAsRegister(new LConstantI(value));
+ } else if (r.IsDouble()) {
+ double value = instr->DoubleValue();
+ return DefineAsRegister(new LConstantD(value));
+ } else if (r.IsTagged()) {
+ return DefineAsRegister(new LConstantT(instr->handle()));
+ } else {
+    Abort("unsupported constant representation");
+ return NULL;
+ }
+}
+
+
+LInstruction* LChunkBuilder::DoLoadGlobal(HLoadGlobal* instr) {
+ LInstruction* result = new LLoadGlobal;
+ return instr->check_hole_value()
+ ? AssignEnvironment(DefineAsRegister(result))
+ : DefineAsRegister(result);
+}
+
+
+LInstruction* LChunkBuilder::DoStoreGlobal(HStoreGlobal* instr) {
+ return new LStoreGlobal(UseRegisterAtStart(instr->value()));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
+ return DefineAsRegister(
+ new LLoadNamedField(UseRegisterAtStart(instr->object())));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
+ LOperand* object = UseFixed(instr->object(), eax);
+ LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), eax);
+ return MarkAsCall(result, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
+ LOperand* input = UseRegisterAtStart(instr->value());
+ return DefineSameAsFirst(new LLoadElements(input));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
+ HLoadKeyedFastElement* instr) {
+ Representation r = instr->representation();
+ LOperand* obj = UseRegisterAtStart(instr->object());
+ ASSERT(instr->key()->representation().IsInteger32());
+ LOperand* key = UseRegisterAtStart(instr->key());
+ LOperand* load_result = NULL;
+  // Loading a double needs an extra temp register because the result is
+  // converted from a heap number into a double register.
+ if (r.IsDouble()) load_result = TempRegister();
+ LInstruction* result = new LLoadKeyedFastElement(obj,
+ key,
+ load_result);
+ if (r.IsDouble()) {
+ result = DefineAsRegister(result);
+ } else {
+ result = DefineSameAsFirst(result);
+ }
+ return AssignEnvironment(result);
+}
+
+
+LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
+ LOperand* object = UseFixed(instr->object(), edx);
+ LOperand* key = UseFixed(instr->key(), eax);
+
+ LInstruction* result =
+ DefineFixed(new LLoadKeyedGeneric(object, key), eax);
+ return MarkAsCall(result, instr);
+}
+
+
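+// Operands the store may clobber are marked as temps: the object register is
+// always reused, while key and value only need temps when a write barrier has
+// to be emitted.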
+LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
+ HStoreKeyedFastElement* instr) {
+ bool needs_write_barrier = instr->NeedsWriteBarrier();
+ ASSERT(instr->value()->representation().IsTagged());
+ ASSERT(instr->object()->representation().IsTagged());
+ ASSERT(instr->key()->representation().IsInteger32());
+
+ LOperand* obj = UseTempRegister(instr->object());
+ LOperand* val = needs_write_barrier
+ ? UseTempRegister(instr->value())
+ : UseRegisterAtStart(instr->value());
+ LOperand* key = needs_write_barrier
+ ? UseTempRegister(instr->key())
+ : UseRegisterOrConstantAtStart(instr->key());
+
+ return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
+}
+
+
+LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
+ LOperand* obj = UseFixed(instr->object(), edx);
+ LOperand* key = UseFixed(instr->key(), ecx);
+ LOperand* val = UseFixed(instr->value(), eax);
+
+ ASSERT(instr->object()->representation().IsTagged());
+ ASSERT(instr->key()->representation().IsTagged());
+ ASSERT(instr->value()->representation().IsTagged());
+
+ return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+}
+
+
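+// Smi values never need a write barrier because they are encoded as tagged
+// immediates rather than heap pointers.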
+LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
+ bool needs_write_barrier = !instr->value()->type().IsSmi();
+
+ LOperand* obj = needs_write_barrier
+ ? UseTempRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+
+ LOperand* val = needs_write_barrier
+ ? UseTempRegister(instr->value())
+ : UseRegister(instr->value());
+
+  // We only need a scratch register if we have a write barrier or if we
+  // store into the properties array (as opposed to an in-object property).
+ LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
+ ? TempRegister() : NULL;
+
+ return new LStoreNamedField(obj,
+ instr->name(),
+ val,
+ instr->is_in_object(),
+ instr->offset(),
+ temp,
+ needs_write_barrier,
+ instr->transition());
+}
+
+
+LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
+ LOperand* obj = UseFixed(instr->object(), edx);
+ LOperand* val = UseFixed(instr->value(), eax);
+
+ LInstruction* result = new LStoreNamedGeneric(obj, instr->name(), val);
+ return MarkAsCall(result, instr);
+}
+
+
+LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
+ return MarkAsCall(DefineFixed(new LArrayLiteral, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
+ return MarkAsCall(DefineFixed(new LObjectLiteral, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
+ return MarkAsCall(DefineFixed(new LRegExpLiteral, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
+ return MarkAsCall(DefineFixed(new LFunctionLiteral, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
+ LInstruction* result = new LDeleteProperty(Use(instr->object()),
+ UseOrConstant(instr->key()));
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
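+// An OSR entry captures the full environment at its ast id so optimized code
+// can be entered at this point via on-stack replacement.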
+LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
+ allocator_->MarkAsOsrEntry();
+ current_block_->last_environment()->set_ast_id(instr->ast_id());
+ return AssignEnvironment(new LOsrEntry);
+}
+
+
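+// Parameters are not register allocated; they live in fixed spill slots
+// assigned by the chunk.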
+LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
+ int spill_index = chunk()->GetParameterStackSlot(instr->index());
+ return DefineAsSpilled(new LParameter, spill_index);
+}
+
+
+LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
+ int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
+ return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+}
+
+
+LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallStub, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
+  // There are no real uses of the arguments object; any use that cannot be
+  // handled directly causes a bailout, so no instruction is needed here.
+ return NULL;
+}
+
+
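+// The length is marked as a temp because it may be clobbered while computing
+// the element address; the environment allows a deoptimization when the index
+// is out of range.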
+LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
+ LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* length = UseTempRegister(instr->length());
+ LOperand* index = Use(instr->index());
+ LInstruction* result = new LAccessArgumentsAt(arguments, length, index);
+ return DefineAsRegister(AssignEnvironment(result));
+}
+
+
+LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
+ LInstruction* result = new LTypeof(Use(instr->value()));
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
+LInstruction* LChunkBuilder::DoTypeofIs(HTypeofIs* instr) {
+ return DefineSameAsFirst(new LTypeofIs(UseRegister(instr->value())));
+}
+
+
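+// A simulate rebuilds the HEnvironment that describes the JavaScript state at
+// the given ast id. It only emits code when a preceding call still needs a
+// lazy bailout to capture that environment.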
+LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
+ HEnvironment* env = current_block_->last_environment();
+ ASSERT(env != NULL);
+
+ env->set_ast_id(instr->ast_id());
+
+ env->Drop(instr->pop_count());
+ for (int i = 0; i < instr->values()->length(); ++i) {
+ HValue* value = instr->values()->at(i);
+ if (instr->HasAssignedIndexAt(i)) {
+ env->Bind(instr->GetAssignedIndexAt(i), value);
+ } else {
+ env->Push(value);
+ }
+ }
+
+ if (FLAG_trace_environment) {
+ PrintF("Reconstructed environment ast_id=%d, instr_id=%d\n",
+ instr->ast_id(),
+ instr->id());
+ env->PrintToStd();
+ }
+ ASSERT(env->values()->length() == instr->environment_height());
+
+  // If there is an instruction pending a deoptimization environment, create a
+  // lazy bailout instruction to capture the environment.
+ if (pending_deoptimization_ast_id_ == instr->ast_id()) {
+ LInstruction* result = new LLazyBailout;
+ result = AssignEnvironment(result);
+ instructions_pending_deoptimization_environment_->
+ set_deoptimization_environment(result->environment());
+ ClearInstructionPendingDeoptimizationEnvironment();
+ return result;
+ }
+
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
+ return MarkAsCall(new LStackCheck, instr);
+}
+
+
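+// Entering an inlined function only updates the builder's environment with a
+// copy made for the inlined closure; no Lithium instruction is emitted.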
+LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
+ HEnvironment* outer = current_block_->last_environment();
+ HConstant* undefined = graph()->GetConstantUndefined();
+ HEnvironment* inner = outer->CopyForInlining(instr->closure(),
+ instr->function(),
+ false,
+ undefined);
+ current_block_->UpdateEnvironment(inner);
+ chunk_->AddInlinedClosure(instr->closure());
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
+ HEnvironment* outer = current_block_->last_environment()->outer();
+ current_block_->UpdateEnvironment(outer);
+ return NULL;
+}
+
+
+void LPointerMap::RecordPointer(LOperand* op) {
+ // Do not record arguments as pointers.
+ if (op->IsStackSlot() && op->index() < 0) return;
+ ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
+ pointer_operands_.Add(op);
+}
+
+
+void LPointerMap::PrintTo(StringStream* stream) const {
+ stream->Add("{");
+ for (int i = 0; i < pointer_operands_.length(); ++i) {
+ if (i != 0) stream->Add(";");
+ pointer_operands_[i]->PrintTo(stream);
+ }
+ stream->Add("} @%d", position());
+}
+
+} } // namespace v8::internal