1/*
2 * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "DFGCommonData.h"
28
29#if ENABLE(DFG_JIT)
30
31#include "CodeBlock.h"
32#include "DFGNode.h"
33#include "DFGPlan.h"
34#include "InlineCallFrame.h"
35#include "JSCInlines.h"
36#include "TrackedReferences.h"
37#include "VM.h"
38
39#include <wtf/NeverDestroyed.h>
40
41namespace JSC { namespace DFG {
42
43void CommonData::notifyCompilingStructureTransition(Plan& plan, CodeBlock* codeBlock, Node* node)
44{
45 plan.transitions().addLazily(
46 codeBlock,
47 node->origin.semantic.codeOriginOwner(),
48 node->transition()->previous.get(),
49 node->transition()->next.get());
50}
51
52CallSiteIndex CommonData::addCodeOrigin(CodeOrigin codeOrigin)
53{
54 if (codeOrigins.isEmpty()
55 || codeOrigins.last() != codeOrigin)
56 codeOrigins.append(codeOrigin);
57 unsigned index = codeOrigins.size() - 1;
58 ASSERT(codeOrigins[index] == codeOrigin);
59 return CallSiteIndex(index);
60}
61
62CallSiteIndex CommonData::addUniqueCallSiteIndex(CodeOrigin codeOrigin)
63{
64 if (callSiteIndexFreeList.size())
65 return CallSiteIndex(callSiteIndexFreeList.takeAny());
66
67 codeOrigins.append(codeOrigin);
68 unsigned index = codeOrigins.size() - 1;
69 ASSERT(codeOrigins[index] == codeOrigin);
70 return CallSiteIndex(index);
71}
72
73CallSiteIndex CommonData::lastCallSite() const
74{
75 RELEASE_ASSERT(codeOrigins.size());
76 return CallSiteIndex(codeOrigins.size() - 1);
77}
78
79void CommonData::removeCallSiteIndex(CallSiteIndex callSite)
80{
81 RELEASE_ASSERT(callSite.bits() < codeOrigins.size());
82 callSiteIndexFreeList.add(callSite.bits());
83}
84
85void CommonData::shrinkToFit()
86{
87 codeOrigins.shrinkToFit();
88 weakReferences.shrinkToFit();
89 transitions.shrinkToFit();
90 catchEntrypoints.shrinkToFit();
91}
92
// Guards pcCodeBlockMap(); must be held before touching the map.
static Lock pcCodeBlockMapLock;
// Process-wide, lazily-created map from a VM-trap breakpoint pc to the
// CodeBlock that installed it. The AbstractLocker& parameter is unused at
// runtime; it exists purely to prove the caller holds pcCodeBlockMapLock.
inline HashMap<void*, CodeBlock*>& pcCodeBlockMap(AbstractLocker&)
{
    static NeverDestroyed<HashMap<void*, CodeBlock*>> pcCodeBlockMap;
    return pcCodeBlockMap;
}
99
bool CommonData::invalidate()
{
    // Returns true if this call performed the invalidation, false if the code
    // had already been invalidated earlier.
    if (!isStillValid)
        return false;

    // If VM-trap breakpoints were installed over the invalidation points,
    // un-register their pcs from the global map first, so a trap at one of
    // those addresses can no longer be attributed to this code block.
    if (UNLIKELY(hasVMTrapsBreakpointsInstalled)) {
        LockHolder locker(pcCodeBlockMapLock);
        auto& map = pcCodeBlockMap(locker);
        for (auto& jumpReplacement : jumpReplacements)
            map.remove(jumpReplacement.dataLocation());
        hasVMTrapsBreakpointsInstalled = false;
    }

    // Fire every jump replacement, rewriting each invalidation point.
    // Iterates in reverse; preserve this order (NOTE(review): forward order
    // looks equivalent, but firing patches code so keep it as-is).
    for (unsigned i = jumpReplacements.size(); i--;)
        jumpReplacements[i].fire();
    isStillValid = false;
    return true;
}
118
CommonData::~CommonData()
{
    // If trap breakpoints are still installed, scrub their pcs from the
    // global map so a later trap cannot look up this dead CommonData's owner.
    if (UNLIKELY(hasVMTrapsBreakpointsInstalled)) {
        LockHolder locker(pcCodeBlockMapLock);
        auto& map = pcCodeBlockMap(locker);
        for (auto& jumpReplacement : jumpReplacements)
            map.remove(jumpReplacement.dataLocation());
    }
}
128
void CommonData::installVMTrapBreakpoints(CodeBlock* owner)
{
    // Turn every invalidation-point jump replacement into a trap breakpoint
    // and record, under the global lock, which CodeBlock each pc belongs to.
    LockHolder locker(pcCodeBlockMapLock);
    // Already-invalidated code, or code whose breakpoints are already in
    // place, needs no work.
    if (!isStillValid || hasVMTrapsBreakpointsInstalled)
        return;
    hasVMTrapsBreakpointsInstalled = true;

    auto& map = pcCodeBlockMap(locker);
#if !defined(NDEBUG)
    // We need to be able to handle more than one invalidation point at the same pc
    // but we want to make sure we don't forget to remove a pc from the map.
    HashSet<void*> newReplacements;
#endif
    for (auto& jumpReplacement : jumpReplacements) {
        jumpReplacement.installVMTrapBreakpoint();
        void* source = jumpReplacement.dataLocation();
        auto result = map.add(source, owner);
        UNUSED_PARAM(result);
#if !defined(NDEBUG)
        // A non-new entry is only acceptable if we ourselves added it during
        // this very loop (two invalidation points sharing one pc).
        ASSERT(result.isNewEntry || newReplacements.contains(source));
        newReplacements.add(source);
#endif
    }
}
153
154CodeBlock* codeBlockForVMTrapPC(void* pc)
155{
156 ASSERT(isJITPC(pc));
157 LockHolder locker(pcCodeBlockMapLock);
158 auto& map = pcCodeBlockMap(locker);
159 auto result = map.find(pc);
160 if (result == map.end())
161 return nullptr;
162 return result->value;
163}
164
165bool CommonData::isVMTrapBreakpoint(void* address)
166{
167 if (!isStillValid)
168 return false;
169 for (unsigned i = jumpReplacements.size(); i--;) {
170 if (address == jumpReplacements[i].dataLocation())
171 return true;
172 }
173 return false;
174}
175
void CommonData::validateReferences(const TrackedReferences& trackedReferences)
{
    // Verify that every GC-visible value this compilation embeds (constants in
    // recoveries, baseline code blocks, callee constants, watchpoint keys) is
    // known to the tracked-references set.
    if (InlineCallFrameSet* set = inlineCallFrames.get()) {
        for (InlineCallFrame* inlineCallFrame : *set) {
            // Constants baked into argument recoveries.
            for (ValueRecovery& recovery : inlineCallFrame->argumentsWithFixup) {
                if (recovery.isConstant())
                    trackedReferences.check(recovery.constant());
            }

            // The baseline code block the inline frame would exit to.
            if (CodeBlock* baselineCodeBlock = inlineCallFrame->baselineCodeBlock.get())
                trackedReferences.check(baselineCodeBlock);

            // A callee known at compile time.
            if (inlineCallFrame->calleeRecovery.isConstant())
                trackedReferences.check(inlineCallFrame->calleeRecovery.constant());
        }
    }

    // Keys of adaptive structure watchpoints also reference heap cells.
    for (AdaptiveStructureWatchpoint* watchpoint : adaptiveStructureWatchpoints)
        watchpoint->key().validateReferences(trackedReferences);
}
196
197void CommonData::finalizeCatchEntrypoints()
198{
199 std::sort(catchEntrypoints.begin(), catchEntrypoints.end(),
200 [] (const CatchEntrypointData& a, const CatchEntrypointData& b) { return a.bytecodeIndex < b.bytecodeIndex; });
201
202#if !ASSERT_DISABLED
203 for (unsigned i = 0; i + 1 < catchEntrypoints.size(); ++i)
204 ASSERT(catchEntrypoints[i].bytecodeIndex <= catchEntrypoints[i + 1].bytecodeIndex);
205#endif
206}
207
void CommonData::clearWatchpoints()
{
    // Drop every watchpoint collection registered for this compilation.
    // NOTE(review): clearing presumably unregisters each watchpoint via its
    // destructor — confirm against the collection types before reordering.
    watchpoints.clear();
    adaptiveStructureWatchpoints.clear();
    adaptiveInferredPropertyValueWatchpoints.clear();
}
214
215} } // namespace JSC::DFG
216
217#endif // ENABLE(DFG_JIT)
218
219