/*
 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "LLIntData.h"

#include "ArithProfile.h"
#include "BytecodeConventions.h"
#include "CodeBlock.h"
#include "CodeType.h"
#include "Instruction.h"
#include "JSScope.h"
#include "LLIntCLoop.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "Opcode.h"
#include "PropertyOffset.h"
#include "ShadowChicken.h"
#include "WriteBarrier.h"

#define STATIC_ASSERT(cond) static_assert(cond, "LLInt assumes " #cond)

namespace JSC {

namespace LLInt {

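// Dispatch tables shared by the whole process. s_exceptionInstructions is a
// dummy instruction stream used when an exception must be thrown from
// bytecode; the three opcode maps hold the machine-code entry points for each
// opcode in its narrow, 16-bit wide, and 32-bit wide encodings.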
uint8_t Data::s_exceptionInstructions[maxOpcodeLength + 1] = { };
Opcode g_opcodeMap[numOpcodeIDs] = { };
Opcode g_opcodeMapWide16[numOpcodeIDs] = { };
Opcode g_opcodeMapWide32[numOpcodeIDs] = { };

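// llint_entry is generated by offlineasm from LowLevelInterpreter.asm. When
// called with the table pointers below, it does not execute bytecode; it only
// populates the three opcode maps with the label addresses of each opcode's
// implementation.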
#if !ENABLE(C_LOOP)
extern "C" void llint_entry(void*, void*, void*);
#endif

void initialize()
{
#if ENABLE(C_LOOP)
    CLoop::initialize();

#else // !ENABLE(C_LOOP)
    llint_entry(&g_opcodeMap, &g_opcodeMapWide16, &g_opcodeMapWide32);

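    // Sign each entry point with BytecodePtrTag so that targets which
    // authenticate code pointers (e.g. ARM64E) can verify them at dispatch;
    // elsewhere tagCodePtr() amounts to a no-op.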
    for (int i = 0; i < numOpcodeIDs; ++i) {
        g_opcodeMap[i] = tagCodePtr(g_opcodeMap[i], BytecodePtrTag);
        g_opcodeMapWide16[i] = tagCodePtr(g_opcodeMapWide16[i], BytecodePtrTag);
        g_opcodeMapWide32[i] = tagCodePtr(g_opcodeMapWide32[i], BytecodePtrTag);
    }

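    // Point every slot of the dummy instruction stream at the throw
    // trampoline, so that resuming execution anywhere inside it throws; the
    // ASSERT checks that the trampoline's opcode value still fits in a
    // uint8_t slot.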
    ASSERT(llint_throw_from_slow_path_trampoline < UINT8_MAX);
    for (int i = 0; i < maxOpcodeLength + 1; ++i)
        Data::s_exceptionInstructions[i] = llint_throw_from_slow_path_trampoline;
#endif // ENABLE(C_LOOP)
}

IGNORE_WARNINGS_BEGIN("missing-noreturn")
void Data::performAssertions(VM& vm)
{
    UNUSED_PARAM(vm);

    // Assertions to match LowLevelInterpreter.asm. If you change any of this
    // code, be prepared to change LowLevelInterpreter.asm as well!!

#if USE(JSVALUE64)
    const ptrdiff_t CallFrameHeaderSlots = 5;
#else // !USE(JSVALUE64), i.e. the 32-bit version
    const ptrdiff_t CallFrameHeaderSlots = 4;
#endif
    const ptrdiff_t MachineRegisterSize = sizeof(CPURegister);
    const ptrdiff_t SlotSize = 8;
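    // Worked out: a Register slot always holds a full 8-byte JSValue, so
    // SlotSize is 8 on both targets, while CallerFrameAndPC is two machine
    // registers, i.e. two slots on 64-bit but a single slot on 32-bit, which
    // is why the header is 5 slots in one case and 4 in the other.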

    STATIC_ASSERT(sizeof(Register) == SlotSize);
    STATIC_ASSERT(CallFrame::headerSizeInRegisters == CallFrameHeaderSlots);

    ASSERT(!CallFrame::callerFrameOffset());
    STATIC_ASSERT(CallerFrameAndPC::sizeInRegisters == (MachineRegisterSize * 2) / SlotSize);
    ASSERT(CallFrame::returnPCOffset() == CallFrame::callerFrameOffset() + MachineRegisterSize);
    ASSERT(CallFrameSlot::codeBlock * sizeof(Register) == CallFrame::returnPCOffset() + MachineRegisterSize);
    STATIC_ASSERT(CallFrameSlot::callee * sizeof(Register) == CallFrameSlot::codeBlock * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrameSlot::argumentCount * sizeof(Register) == CallFrameSlot::callee * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrameSlot::thisArgument * sizeof(Register) == CallFrameSlot::argumentCount * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrame::headerSizeInRegisters == CallFrameSlot::thisArgument);

    ASSERT(CallFrame::argumentOffsetIncludingThis(0) == CallFrameSlot::thisArgument);
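    // On 64-bit the asserted header layout, in Register slots, is:
    //   0: callerFrame, 1: returnPC (together CallerFrameAndPC),
    //   2: codeBlock, 3: callee, 4: argumentCount,
    //   5: thisArgument, the first argument slot and headerSizeInRegisters.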

#if CPU(BIG_ENDIAN)
    STATIC_ASSERT(TagOffset == 0);
    STATIC_ASSERT(PayloadOffset == 4);
#else
    STATIC_ASSERT(TagOffset == 4);
    STATIC_ASSERT(PayloadOffset == 0);
#endif
#if USE(JSVALUE32_64)
    STATIC_ASSERT(JSValue::Int32Tag == static_cast<unsigned>(-1));
    STATIC_ASSERT(JSValue::BooleanTag == static_cast<unsigned>(-2));
    STATIC_ASSERT(JSValue::NullTag == static_cast<unsigned>(-3));
    STATIC_ASSERT(JSValue::UndefinedTag == static_cast<unsigned>(-4));
    STATIC_ASSERT(JSValue::CellTag == static_cast<unsigned>(-5));
    STATIC_ASSERT(JSValue::EmptyValueTag == static_cast<unsigned>(-6));
    STATIC_ASSERT(JSValue::DeletedValueTag == static_cast<unsigned>(-7));
    STATIC_ASSERT(JSValue::LowestTag == static_cast<unsigned>(-7));
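    // In the 32-bit representation a JSValue is a (tag, payload) pair. The
    // immediate tags count down from -1 so that, as the asserts above encode,
    // any tag word at or above LowestTag is an immediate and anything below
    // it can be interpreted as the high half of a double.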
#else
    STATIC_ASSERT(TagBitTypeOther == 0x2);
    STATIC_ASSERT(TagBitBool == 0x4);
    STATIC_ASSERT(TagBitUndefined == 0x8);
    STATIC_ASSERT(ValueEmpty == 0x0);
    STATIC_ASSERT(ValueFalse == (TagBitTypeOther | TagBitBool));
    STATIC_ASSERT(ValueTrue == (TagBitTypeOther | TagBitBool | 1));
    STATIC_ASSERT(ValueUndefined == (TagBitTypeOther | TagBitUndefined));
    STATIC_ASSERT(ValueNull == TagBitTypeOther);
#endif
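    // These bits compose into the immediate encodings the LLInt hard-codes:
    // ValueNull = 0x2, ValueFalse = 0x6, ValueTrue = 0x7, and
    // ValueUndefined = 0xA.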

#if ENABLE(C_LOOP)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 1);
#elif USE(JSVALUE32_64)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 1);
#elif (CPU(X86_64) && !OS(WINDOWS)) || CPU(ARM64)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 4);
#elif (CPU(X86_64) && OS(WINDOWS))
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 4);
#endif

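    // Check that a WriteBarrier<JSCell> is just a raw cell pointer at offset
    // zero, since the LLInt stores to barriered fields as if they were plain
    // pointers.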
    ASSERT(!(reinterpret_cast<ptrdiff_t>((reinterpret_cast<WriteBarrier<JSCell>*>(0x4000)->slot())) - 0x4000));

    // FIXME: make these assertions less horrible.
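    // These pin down the WTF::Vector field layout the asm relies on: the
    // buffer pointer at offset 0 and the 32-bit size at offset
    // sizeof(void*) + sizeof(uint32_t), i.e. just past the capacity field.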
#if !ASSERT_DISABLED
    Vector<int> testVector;
    testVector.resize(42);
    ASSERT(bitwise_cast<uint32_t*>(&testVector)[sizeof(void*) / sizeof(uint32_t) + 1] == 42);
    ASSERT(bitwise_cast<int**>(&testVector)[0] == testVector.begin());
#endif

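    // Each block below pushes one observation pattern through the runtime
    // ArithProfile paths and checks that the resulting bits match the
    // precomputed constants that the LLInt loads when profiling binary ops.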
    {
        ArithProfile arithProfile;
        arithProfile.lhsSawInt32();
        arithProfile.rhsSawInt32();
        ASSERT(arithProfile.bits() == ArithProfile::observedBinaryIntInt().bits());
        STATIC_ASSERT(ArithProfile::observedBinaryIntInt().lhsObservedType().isOnlyInt32());
        STATIC_ASSERT(ArithProfile::observedBinaryIntInt().rhsObservedType().isOnlyInt32());
    }
    {
        ArithProfile arithProfile;
        arithProfile.lhsSawNumber();
        arithProfile.rhsSawInt32();
        ASSERT(arithProfile.bits() == ArithProfile::observedBinaryNumberInt().bits());
        STATIC_ASSERT(ArithProfile::observedBinaryNumberInt().lhsObservedType().isOnlyNumber());
        STATIC_ASSERT(ArithProfile::observedBinaryNumberInt().rhsObservedType().isOnlyInt32());
    }
    {
        ArithProfile arithProfile;
        arithProfile.lhsSawNumber();
        arithProfile.rhsSawNumber();
        ASSERT(arithProfile.bits() == ArithProfile::observedBinaryNumberNumber().bits());
        STATIC_ASSERT(ArithProfile::observedBinaryNumberNumber().lhsObservedType().isOnlyNumber());
        STATIC_ASSERT(ArithProfile::observedBinaryNumberNumber().rhsObservedType().isOnlyNumber());
    }
    {
        ArithProfile arithProfile;
        arithProfile.lhsSawInt32();
        arithProfile.rhsSawNumber();
        ASSERT(arithProfile.bits() == ArithProfile::observedBinaryIntNumber().bits());
        STATIC_ASSERT(ArithProfile::observedBinaryIntNumber().lhsObservedType().isOnlyInt32());
        STATIC_ASSERT(ArithProfile::observedBinaryIntNumber().rhsObservedType().isOnlyNumber());
    }
}
IGNORE_WARNINGS_END

} } // namespace JSC::LLInt