1 | // Copyright 2012 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #include "src/frames.h" |
6 | |
7 | #include <memory> |
8 | #include <sstream> |
9 | |
10 | #include "src/base/bits.h" |
11 | #include "src/deoptimizer.h" |
12 | #include "src/frames-inl.h" |
13 | #include "src/ic/ic-stats.h" |
14 | #include "src/macro-assembler.h" |
15 | #include "src/objects/code.h" |
16 | #include "src/objects/slots.h" |
17 | #include "src/objects/smi.h" |
18 | #include "src/register-configuration.h" |
19 | #include "src/safepoint-table.h" |
20 | #include "src/snapshot/snapshot.h" |
21 | #include "src/string-stream.h" |
22 | #include "src/visitors.h" |
23 | #include "src/vm-state-inl.h" |
24 | #include "src/wasm/wasm-code-manager.h" |
25 | #include "src/wasm/wasm-engine.h" |
26 | #include "src/wasm/wasm-objects-inl.h" |
27 | #include "src/zone/zone-containers.h" |
28 | |
29 | namespace v8 { |
30 | namespace internal { |
31 | |
// Optional embedder-installed callback that maps a return-address slot to the
// actual return address; null until SetReturnAddressLocationResolver is used.
ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;
34 | |
35 | // Iterator that supports traversing the stack handlers of a |
36 | // particular frame. Needs to know the top of the handler chain. |
37 | class StackHandlerIterator { |
38 | public: |
39 | StackHandlerIterator(const StackFrame* frame, StackHandler* handler) |
40 | : limit_(frame->fp()), handler_(handler) { |
41 | // Make sure the handler has already been unwound to this frame. |
42 | DCHECK(frame->sp() <= handler->address()); |
43 | } |
44 | |
45 | StackHandler* handler() const { return handler_; } |
46 | |
47 | bool done() { return handler_ == nullptr || handler_->address() > limit_; } |
48 | void Advance() { |
49 | DCHECK(!done()); |
50 | handler_ = handler_->next(); |
51 | } |
52 | |
53 | private: |
54 | const Address limit_; |
55 | StackHandler* handler_; |
56 | }; |
57 | |
58 | |
59 | // ------------------------------------------------------------------------- |
60 | |
61 | |
// Initializes one singleton frame member per stack frame type (generated by
// STACK_FRAME_TYPE_LIST), each holding a back pointer to this iterator.
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr),
      can_access_heap_objects_(can_access_heap_objects) {}
#undef INITIALIZE_SINGLETON
70 | |
// Convenience constructor: iterate the current thread's stack.
StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}
73 | |
// Full heap access is allowed here (second argument true): this iterator is
// only used from contexts where the VM state is consistent.
StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}
78 | |
// Moves the iterator to the caller frame and unwinds the stack handlers that
// belong to the frame being left behind.
void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == nullptr);
}
100 | |
101 | |
// Re-initializes the iterator to the topmost exit frame recorded in the given
// thread's c_entry_fp, and restores the handler chain from the thread top.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}
109 | |
110 | |
111 | StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type, |
112 | StackFrame::State* state) { |
113 | StackFrame* result = SingletonFor(type); |
114 | DCHECK((!result) == (type == StackFrame::NONE)); |
115 | if (result) result->state_ = *state; |
116 | return result; |
117 | } |
118 | |
119 | |
// Maps a frame type to its singleton frame member via the generated case
// list; returns nullptr for NONE (and, defensively, for unknown values).
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return nullptr;

#undef FRAME_TYPE_CASE
}
135 | |
136 | // ------------------------------------------------------------------------- |
137 | |
138 | void JavaScriptFrameIterator::Advance() { |
139 | do { |
140 | iterator_.Advance(); |
141 | } while (!iterator_.done() && !iterator_.frame()->is_java_script()); |
142 | } |
143 | |
144 | // ------------------------------------------------------------------------- |
145 | |
// Positions the iterator on the first frame that is visible in stack traces
// (JS frames with debuggable functions, or wasm frames).
StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}
150 | |
151 | StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate, |
152 | StackFrame::Id id) |
153 | : StackTraceFrameIterator(isolate) { |
154 | while (!done() && frame()->id() != id) Advance(); |
155 | } |
156 | |
157 | void StackTraceFrameIterator::Advance() { |
158 | do { |
159 | iterator_.Advance(); |
160 | } while (!done() && !IsValidFrame(iterator_.frame())); |
161 | } |
162 | |
163 | bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const { |
164 | if (frame->is_java_script()) { |
165 | JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame); |
166 | if (!jsFrame->function()->IsJSFunction()) return false; |
167 | return jsFrame->function()->shared()->IsSubjectToDebugging(); |
168 | } |
169 | // apart from javascript, only wasm is valid |
170 | return frame->is_wasm(); |
171 | } |
172 | |
173 | // ------------------------------------------------------------------------- |
174 | |
175 | namespace { |
176 | |
// Heuristically decides whether {pc} is executing interpreter code: either it
// lies inside one of the interpreter entry/dispatch builtins, or (when
// interpreted_frames_native_stack is on) inside a per-function copy of the
// entry trampoline, found via a GC-safe inner-pointer code lookup.
// {state->fp} is read directly from raw stack memory, so this must stay
// signal-safe with respect to heap object access.
bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Code interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  if (interpreter_entry_trampoline->contains(pc) ||
      interpreter_bytecode_advance->contains(pc) ||
      interpreter_bytecode_dispatch->contains(pc)) {
    return true;
  } else if (FLAG_interpreted_frames_native_stack) {
    intptr_t marker = Memory<intptr_t>(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset,
        kSystemPointerSize);
    Object maybe_function = Object(
        Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
    // There's no need to run a full ContainsSlow if we know the frame can't be
    // an InterpretedFrame, so we do these fast checks first.
    if (StackFrame::IsTypeMarker(marker) || maybe_function->IsSmi()) {
      return false;
    } else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) {
      return false;
    }
    interpreter_entry_trampoline =
        isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}
212 | |
// Reads a word from {address} with ASan disabled: during safe stack
// iteration we may probe stack memory that ASan considers poisoned.
DISABLE_ASAN Address ReadMemoryAt(Address address) {
  return Memory<Address>(address);
}
216 | |
217 | } // namespace |
218 | |
// Constructs a profiler-safe iterator over [sp, js_entry_sp). The VM may
// have been interrupted at an arbitrary instruction, so every memory read
// below is validated against the stack bounds before use, and heap access
// is disallowed (second base-constructor argument false).
SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  bool advance_frame = true;

  Address fast_c_fp = isolate->isolate_data()->fast_c_call_caller_fp();
  // 'Fast C calls' are a special type of C call where we call directly from JS
  // to C without an exit frame in between. The CEntryStub is responsible for
  // setting Isolate::c_entry_fp, meaning that it won't be set for fast C calls.
  // To keep the stack iterable, we store the FP and PC of the caller of the
  // fast C call on the isolate. This is guaranteed to be the topmost JS frame,
  // because fast C calls cannot call back into JS. We start iterating the stack
  // from this topmost JS frame.
  if (fast_c_fp) {
    DCHECK_NE(kNullAddress, isolate->isolate_data()->fast_c_call_caller_pc());
    type = StackFrame::Type::OPTIMIZED;
    top_frame_type_ = type;
    state.fp = fast_c_fp;
    state.sp = sp;
    state.pc_address = isolate->isolate_data()->fast_c_call_caller_pc_address();
    advance_frame = false;
  } else if (IsValidTop(top)) {
    // The thread top records a valid exit frame; start from there.
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));

    // If the top of stack is a return address to the interpreter trampoline,
    // then we are likely in a bytecode handler with elided frame. In that
    // case, set the PC properly and make sure we do not drop the frame.
    if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kSystemPointerSize);
      Address tos = ReadMemoryAt(sp);
      if (IsInterpreterFramePc(isolate, tos, &state)) {
        state.pc_address = reinterpret_cast<Address*>(sp);
        advance_frame = false;
      }
    }

    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    // kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
      // We only keep the top frame if we believe it to be interpreted frame.
      if (type != StackFrame::INTERPRETED) {
        advance_frame = true;
      }
    } else {
      // Mark the frame as OPTIMIZED if we cannot determine its type.
      // We chose OPTIMIZED rather than INTERPRETED because it's closer to
      // the original value of StackFrame::JAVA_SCRIPT here, in that JAVA_SCRIPT
      // referred to full-codegen frames (now removed from the tree), and
      // OPTIMIZED refers to turbofan frames, both of which are generated
      // code. INTERPRETED frames refer to bytecode.
      // The frame anyways will be skipped.
      type = StackFrame::OPTIMIZED;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    // No usable starting point; leave the iterator done().
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (advance_frame && frame_) Advance();
}
302 | |
303 | |
304 | bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const { |
305 | Address c_entry_fp = Isolate::c_entry_fp(top); |
306 | if (!IsValidExitFrame(c_entry_fp)) return false; |
307 | // There should be at least one JS_ENTRY stack handler. |
308 | Address handler = Isolate::handler(top); |
309 | if (handler == kNullAddress) return false; |
310 | // Check that there are no js frames on top of the native frames. |
311 | return c_entry_fp < handler; |
312 | } |
313 | |
314 | |
// Advances exactly one frame, validating pointers before and after; sets
// frame_ to nullptr (done) on any sign of a corrupt or non-monotonic stack.
void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = nullptr;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack;
  // caller frames must be at strictly higher addresses.
  if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
    frame_ = nullptr;
  }
}
336 | |
337 | |
338 | bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const { |
339 | return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp()); |
340 | } |
341 | |
342 | |
// Validates that computing the caller state of {frame} is safe: entry frames
// must point at a valid exit frame, and adaptor frames must carry a Smi
// argument count. Finally verifies the computed caller state itself.
bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp =
        Memory<Address>(frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on stack as Smi. We need to check
    // that it really is a Smi.
    Object number_of_args =
        reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}
366 | |
367 | |
// Checks whether {fp} plausibly points at an exit frame: fp and the derived
// sp must lie within the stack bounds and the recorded pc must be non-null.
bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != kNullAddress;
}
377 | |
378 | |
// Advances to the next JS, wasm, or exit frame, tracking external callback
// scopes so that exit frames can report the callback entry point.
void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScope's that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script() || frame_->is_wasm()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}
409 | |
410 | |
411 | // ------------------------------------------------------------------------- |
412 | |
namespace {
// Looks up the Code object containing {pc} through the isolate's
// inner-pointer-to-code cache.
Code GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}
}  // namespace
418 | |
419 | Code StackFrame::LookupCode() const { |
420 | Code result = GetContainingCode(isolate(), pc()); |
421 | DCHECK_GE(pc(), result->InstructionStart()); |
422 | DCHECK_LT(pc(), result->InstructionEnd()); |
423 | return result; |
424 | } |
425 | |
// Visits the code object holding *pc_address as a root; if the GC moved the
// code, rewrites the pc (and constant pool pointer) to the new location,
// preserving the offset into the instruction stream.
void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code holder) {
  Address pc = *pc_address;
  DCHECK(holder->GetHeap()->GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->InstructionStart());
  Object code = holder;
  v->VisitRootPointer(Root::kTop, nullptr, FullObjectSlot(&code));
  // If the visitor did not move the code object, nothing needs fixing up.
  if (code == holder) return;
  holder = Code::unchecked_cast(code);
  pc = holder->InstructionStart() + pc_offset;
  *pc_address = pc;
  if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
    *constant_pool_address = holder->constant_pool();
  }
}
441 | |
442 | |
// Installs the embedder's return-address resolver; may only be set once.
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}
448 | |
// Determines the type of the frame described by {state}. When heap access is
// disallowed (profiler signal context) only conservative raw-memory checks
// are used; otherwise the pc is resolved against wasm code and Code objects.
// Falls through to the explicit type marker when no code-based answer exists.
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kSystemPointerSize);
  intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset,
        kSystemPointerSize);
    Object maybe_function = Object(
        Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
    if (!StackFrame::IsTypeMarker(marker)) {
      if (maybe_function->IsSmi()) {
        return NATIVE;
      } else if (IsInterpreterFramePc(iterator->isolate(), *(state->pc_address),
                                      state)) {
        return INTERPRETED;
      } else {
        return OPTIMIZED;
      }
    }
  } else {
    Address pc = *(state->pc_address);
    // If the {pc} does not point into WebAssembly code we can rely on the
    // returned {wasm_code} to be null and fall back to {GetContainingCode}.
    wasm::WasmCodeRefScope code_ref_scope;
    wasm::WasmCode* wasm_code =
        iterator->isolate()->wasm_engine()->code_manager()->LookupCode(pc);
    if (wasm_code != nullptr) {
      switch (wasm_code->kind()) {
        case wasm::WasmCode::kFunction:
          return WASM_COMPILED;
        case wasm::WasmCode::kWasmToJsWrapper:
          return WASM_TO_JS;
        case wasm::WasmCode::kRuntimeStub:
          // Some stubs, like e.g. {WasmCode::kWasmCompileLazy} build their own
          // specialized frame which already carries a type marker.
          // TODO(mstarzinger): This is only needed for the case where embedded
          // builtins are disabled. It can be removed once all non-embedded
          // builtins are gone.
          if (StackFrame::IsTypeMarker(marker)) break;
          return STUB;
        case wasm::WasmCode::kInterpreterEntry:
          return WASM_INTERPRETER_ENTRY;
        default:
          UNREACHABLE();
      }
    } else {
      // Look up the code object to figure out the type of the stack frame.
      Code code_obj = GetContainingCode(iterator->isolate(), pc);
      if (!code_obj.is_null()) {
        switch (code_obj->kind()) {
          case Code::BUILTIN:
            if (StackFrame::IsTypeMarker(marker)) break;
            if (code_obj->is_interpreter_trampoline_builtin()) {
              return INTERPRETED;
            }
            if (code_obj->is_turbofanned()) {
              // TODO(bmeurer): We treat frames for BUILTIN Code objects as
              // OptimizedFrame for now (all the builtins with JavaScript
              // linkage are actually generated with TurboFan currently, so
              // this is sound).
              return OPTIMIZED;
            }
            return BUILTIN;
          case Code::OPTIMIZED_FUNCTION:
            return OPTIMIZED;
          case Code::WASM_FUNCTION:
            return WASM_COMPILED;
          case Code::WASM_TO_JS_FUNCTION:
            return WASM_TO_JS;
          case Code::JS_TO_WASM_FUNCTION:
            return JS_TO_WASM;
          case Code::WASM_INTERPRETER_ENTRY:
            return WASM_INTERPRETER_ENTRY;
          case Code::C_WASM_ENTRY:
            return C_WASM_ENTRY;
          default:
            // All other types should have an explicit marker
            break;
        }
      } else {
        return NATIVE;
      }
    }
  }
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type candidate = StackFrame::MarkerToType(marker);
  switch (candidate) {
    case ENTRY:
    case CONSTRUCT_ENTRY:
    case EXIT:
    case BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case BUILTIN_EXIT:
    case STUB:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM_COMPILED:
    case WASM_COMPILE_LAZY:
      return candidate;
    case JS_TO_WASM:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NATIVE;
  }
}
573 | |
574 | |
#ifdef DEBUG
// Debug-only accessor: whether the owning iterator may touch heap objects.
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif
580 | |
581 | |
// Fills {state} with the caller's frame state and classifies its type.
StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}
586 | |
587 | |
// On this configuration the frame pointer carries no padding adjustment.
Address StackFrame::UnpaddedFP() const {
  return fp();
}
591 | |
592 | Code NativeFrame::unchecked_code() const { return Code(); } |
593 | |
// Reconstructs the caller's state from the standard caller fp/pc slots;
// native frames carry no callee pc or constant pool.
void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}
602 | |
// Entry frames are always created by the JSEntry builtin.
Code EntryFrame::unchecked_code() const {
  return isolate()->heap()->builtin(Builtins::kJSEntry);
}
606 | |
607 | |
// For entry frames the caller state computation is shared with
// GetCallerState; the computed type is simply discarded here.
void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}
611 | |
612 | |
613 | StackFrame::Type EntryFrame::GetCallerState(State* state) const { |
614 | const int offset = EntryFrameConstants::kCallerFPOffset; |
615 | Address fp = Memory<Address>(this->fp() + offset); |
616 | return ExitFrame::GetStateForFramePointer(fp, state); |
617 | } |
618 | |
// Construct entry frames are always created by the JSConstructEntry builtin.
Code ConstructEntryFrame::unchecked_code() const {
  return isolate()->heap()->builtin(Builtins::kJSConstructEntry);
}
622 | |
623 | Code ExitFrame::unchecked_code() const { return Code(); } |
624 | |
// Reconstructs the caller's state from the exit frame's fixed slots.
void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}
637 | |
638 | |
// GC root visiting for exit frames: only the pc needs fixing up here.
void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
644 | |
645 | |
// The caller's sp is at a fixed offset from the exit frame's fp.
Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}
649 | |
650 | |
651 | StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) { |
652 | if (fp == 0) return NONE; |
653 | Address sp = ComputeStackPointer(fp); |
654 | FillState(fp, sp, state); |
655 | DCHECK_NE(*state->pc_address, kNullAddress); |
656 | |
657 | return ComputeFrameType(fp); |
658 | } |
659 | |
StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object marker(Memory<Address>(fp + offset));

  // A non-Smi marker slot means the frame did not record a type; treat it as
  // a plain exit frame.
  if (!marker->IsSmi()) {
    return EXIT;
  }

  intptr_t marker_int = bit_cast<intptr_t>(marker);

  // Undo the Smi tagging (shift by one) to recover the raw type value.
  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}
679 | |
// Reads the saved sp from the exit frame's dedicated slot.
Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset,
                             kSystemPointerSize);
  return Memory<Address>(fp + ExitFrameConstants::kSPOffset);
}
685 | |
// Populates {state} for an exit frame at fp/sp; the pc is the return address
// stored just below sp.
void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub). ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}
698 | |
// The builtin's target function, read from the frame's target slot.
JSFunction BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}
702 | |
703 | Object BuiltinExitFrame::receiver() const { return receiver_slot_object(); } |
704 | |
// A construct call records a non-undefined new.target in its slot.
bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}
708 | |
709 | Object BuiltinExitFrame::GetParameter(int i) const { |
710 | DCHECK(i >= 0 && i < ComputeParametersCount()); |
711 | int offset = |
712 | BuiltinExitFrameConstants::kFirstArgumentOffset + i * kSystemPointerSize; |
713 | return Object(Memory<Address>(fp() + offset)); |
714 | } |
715 | |
716 | int BuiltinExitFrame::ComputeParametersCount() const { |
717 | Object argc_slot = argc_slot_object(); |
718 | DCHECK(argc_slot->IsSmi()); |
719 | // Argc also counts the receiver, target, new target, and argc itself as args, |
720 | // therefore the real argument count is argc - 4. |
721 | int argc = Smi::ToInt(argc_slot) - 4; |
722 | DCHECK_GE(argc, 0); |
723 | return argc; |
724 | } |
725 | |
726 | namespace { |
727 | void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode, |
728 | int index) { |
729 | accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: " , index); |
730 | } |
731 | |
// Maps a frame type to its human-readable name via the generated type list.
const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
743 | } // namespace |
744 | |
// Default frame printer: emits the index, the frame type name, and the pc.
void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n" , reinterpret_cast<void*>(pc()));
}
752 | |
// Prints a builtin exit frame: index, function (with "new " when it is a
// construct call), the receiver and all real arguments.
void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object receiver = this->receiver();
  JSFunction function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: " );
  Code code;
  if (IsConstructor()) accumulator->Add("new " );
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o" , receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o" , GetParameter(i));
  }

  accumulator->Add(")\n\n" );
}
776 | |
777 | Address StandardFrame::GetExpressionAddress(int n) const { |
778 | const int offset = StandardFrameConstants::kExpressionsOffset; |
779 | return fp() + offset - n * kSystemPointerSize; |
780 | } |
781 | |
782 | Address InterpretedFrame::GetExpressionAddress(int n) const { |
783 | const int offset = InterpreterFrameConstants::kExpressionsOffset; |
784 | return fp() + offset - n * kSystemPointerSize; |
785 | } |
786 | |
Script StandardFrame::script() const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
  // Unreachable, but some compilers require a return value.
  return Script();
}
792 | |
// Generic frames have no receiver; subclasses override where applicable.
Object StandardFrame::receiver() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}
796 | |
// Generic frames have no context; subclasses override where applicable.
Object StandardFrame::context() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}
800 | |
// Maps the frame's pc to a source position via the containing code object.
int StandardFrame::position() const {
  AbstractCode code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code->InstructionStart());
  return code->SourcePosition(code_offset);
}
806 | |
807 | int StandardFrame::ComputeExpressionsCount() const { |
808 | Address base = GetExpressionAddress(0); |
809 | Address limit = sp() - kSystemPointerSize; |
810 | DCHECK(base >= limit); // stack grows downwards |
811 | // Include register-allocated locals in number of expressions. |
812 | return static_cast<int>((base - limit) / kSystemPointerSize); |
813 | } |
814 | |
Object StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters; subclasses with real
  // parameters (e.g. JavaScriptFrame) override this method.
  UNREACHABLE();
}
819 | |
820 | int StandardFrame::ComputeParametersCount() const { return 0; } |
821 | |
822 | void StandardFrame::ComputeCallerState(State* state) const { |
823 | state->sp = caller_sp(); |
824 | state->fp = caller_fp(); |
825 | state->pc_address = ResolveReturnAddressLocation( |
826 | reinterpret_cast<Address*>(ComputePCAddress(fp()))); |
827 | state->callee_pc_address = pc_address(); |
828 | state->constant_pool_address = |
829 | reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp())); |
830 | } |
831 | |
832 | |
833 | bool StandardFrame::IsConstructor() const { return false; } |
834 | |
void StandardFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method
  // (e.g. JavaScriptFrame::Summarize, OptimizedFrame::Summarize).
  UNREACHABLE();
}
839 | |
840 | void StandardFrame::IterateCompiledFrame(RootVisitor* v) const { |
841 | // Make sure that we're not doing "safe" stack frame iteration. We cannot |
842 | // possibly find pointers in optimized frames in that state. |
843 | DCHECK(can_access_heap_objects()); |
844 | |
845 | // Find the code and compute the safepoint information. |
846 | Address inner_pointer = pc(); |
847 | const wasm::WasmCode* wasm_code = |
848 | isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer); |
849 | SafepointEntry safepoint_entry; |
850 | uint32_t stack_slots; |
851 | Code code; |
852 | bool has_tagged_params = false; |
853 | uint32_t tagged_parameter_slots = 0; |
854 | if (wasm_code != nullptr) { |
855 | SafepointTable table(wasm_code->instruction_start(), |
856 | wasm_code->safepoint_table_offset(), |
857 | wasm_code->stack_slots()); |
858 | safepoint_entry = table.FindEntry(inner_pointer); |
859 | stack_slots = wasm_code->stack_slots(); |
860 | has_tagged_params = wasm_code->kind() != wasm::WasmCode::kFunction; |
861 | tagged_parameter_slots = wasm_code->tagged_parameter_slots(); |
862 | } else { |
863 | InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry = |
864 | isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer); |
865 | if (!entry->safepoint_entry.is_valid()) { |
866 | entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer); |
867 | DCHECK(entry->safepoint_entry.is_valid()); |
868 | } else { |
869 | DCHECK(entry->safepoint_entry.Equals( |
870 | entry->code->GetSafepointEntry(inner_pointer))); |
871 | } |
872 | |
873 | code = entry->code; |
874 | safepoint_entry = entry->safepoint_entry; |
875 | stack_slots = code->stack_slots(); |
876 | has_tagged_params = code->has_tagged_params(); |
877 | } |
878 | uint32_t slot_space = stack_slots * kSystemPointerSize; |
879 | |
880 | // Determine the fixed header and spill slot area size. |
881 | int = StandardFrameConstants::kFixedFrameSizeFromFp; |
882 | intptr_t marker = |
883 | Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset); |
884 | if (StackFrame::IsTypeMarker(marker)) { |
885 | StackFrame::Type candidate = StackFrame::MarkerToType(marker); |
886 | switch (candidate) { |
887 | case ENTRY: |
888 | case CONSTRUCT_ENTRY: |
889 | case EXIT: |
890 | case BUILTIN_CONTINUATION: |
891 | case JAVA_SCRIPT_BUILTIN_CONTINUATION: |
892 | case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: |
893 | case BUILTIN_EXIT: |
894 | case ARGUMENTS_ADAPTOR: |
895 | case STUB: |
896 | case INTERNAL: |
897 | case CONSTRUCT: |
898 | case JS_TO_WASM: |
899 | case C_WASM_ENTRY: |
900 | frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp; |
901 | break; |
902 | case WASM_TO_JS: |
903 | case WASM_COMPILED: |
904 | case WASM_INTERPRETER_ENTRY: |
905 | case WASM_COMPILE_LAZY: |
906 | frame_header_size = WasmCompiledFrameConstants::kFixedFrameSizeFromFp; |
907 | break; |
908 | case OPTIMIZED: |
909 | case INTERPRETED: |
910 | case BUILTIN: |
911 | // These frame types have a context, but they are actually stored |
912 | // in the place on the stack that one finds the frame type. |
913 | UNREACHABLE(); |
914 | break; |
915 | case NATIVE: |
916 | case NONE: |
917 | case NUMBER_OF_TYPES: |
918 | case MANUAL: |
919 | UNREACHABLE(); |
920 | break; |
921 | } |
922 | } |
923 | slot_space -= |
924 | (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp); |
925 | |
926 | FullObjectSlot (&Memory<Address>(fp() - frame_header_size)); |
927 | FullObjectSlot ( |
928 | &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize)); |
929 | FullObjectSlot parameters_base(&Memory<Address>(sp())); |
930 | FullObjectSlot parameters_limit(frame_header_base.address() - slot_space); |
931 | |
932 | // Skip saved double registers. |
933 | if (safepoint_entry.has_doubles()) { |
934 | // Number of doubles not known at snapshot time. |
935 | DCHECK(!isolate()->serializer_enabled()); |
936 | parameters_base += |
937 | RegisterConfiguration::Default()->num_allocatable_double_registers() * |
938 | kDoubleSize / kSystemPointerSize; |
939 | } |
940 | |
941 | // Visit the registers that contain pointers if any. |
942 | if (safepoint_entry.HasRegisters()) { |
943 | for (int i = kNumSafepointRegisters - 1; i >=0; i--) { |
944 | if (safepoint_entry.HasRegisterAt(i)) { |
945 | int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i); |
946 | v->VisitRootPointer(Root::kTop, nullptr, |
947 | parameters_base + reg_stack_index); |
948 | } |
949 | } |
950 | // Skip the words containing the register values. |
951 | parameters_base += kNumSafepointRegisters; |
952 | } |
953 | |
954 | // We're done dealing with the register bits. |
955 | uint8_t* safepoint_bits = safepoint_entry.bits(); |
956 | safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2; |
957 | |
958 | // Visit the rest of the parameters if they are tagged. |
959 | if (has_tagged_params) { |
960 | v->VisitRootPointers(Root::kTop, nullptr, parameters_base, |
961 | parameters_limit); |
962 | } |
963 | |
964 | #ifdef V8_COMPRESS_POINTERS |
965 | Address isolate_root = isolate()->isolate_root(); |
966 | #endif |
967 | // Visit pointer spill slots and locals. |
968 | for (unsigned index = 0; index < stack_slots; index++) { |
969 | int byte_index = index >> kBitsPerByteLog2; |
970 | int bit_index = index & (kBitsPerByte - 1); |
971 | if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) { |
972 | FullObjectSlot spill_slot = parameters_limit + index; |
973 | #ifdef V8_COMPRESS_POINTERS |
974 | // Spill slots may contain compressed values in which case the upper |
975 | // 32-bits will contain zeros. In order to simplify handling of such |
976 | // slots in GC we ensure that the slot always contains full value. |
977 | |
978 | // The spill slot may actually contain weak references so we load/store |
979 | // values using spill_slot.location() in order to avoid dealing with |
980 | // FullMaybeObjectSlots here. |
981 | Tagged_t compressed_value = static_cast<Tagged_t>(*spill_slot.location()); |
982 | if (!HAS_SMI_TAG(compressed_value)) { |
983 | // We don't need to update smi values. |
984 | *spill_slot.location() = |
985 | DecompressTaggedPointer<OnHeapAddressKind::kIsolateRoot>( |
986 | isolate_root, compressed_value); |
987 | } |
988 | #endif |
989 | v->VisitRootPointer(Root::kTop, nullptr, spill_slot); |
990 | } |
991 | } |
992 | |
993 | // Visit tagged parameters that have been passed to the function of this |
994 | // frame. Conceptionally these parameters belong to the parent frame. However, |
995 | // the exact count is only known by this frame (in the presence of tail calls, |
996 | // this information cannot be derived from the call site). |
997 | if (tagged_parameter_slots > 0) { |
998 | FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp())); |
999 | FullObjectSlot tagged_parameter_limit = |
1000 | tagged_parameter_base + tagged_parameter_slots; |
1001 | |
1002 | v->VisitRootPointers(Root::kTop, nullptr, tagged_parameter_base, |
1003 | tagged_parameter_limit); |
1004 | } |
1005 | |
1006 | // For the off-heap code cases, we can skip this. |
1007 | if (!code.is_null()) { |
1008 | // Visit the return address in the callee and incoming arguments. |
1009 | IteratePc(v, pc_address(), constant_pool_address(), code); |
1010 | } |
1011 | |
1012 | // If this frame has JavaScript ABI, visit the context (in stub and JS |
1013 | // frames) and the function (in JS frames). If it has WebAssembly ABI, visit |
1014 | // the instance object. |
1015 | v->VisitRootPointers(Root::kTop, nullptr, frame_header_base, |
1016 | frame_header_limit); |
1017 | } |
1018 | |
1019 | void StubFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); } |
1020 | |
1021 | Code StubFrame::unchecked_code() const { |
1022 | return isolate()->FindCodeObject(pc()); |
1023 | } |
1024 | |
1025 | |
1026 | Address StubFrame::GetCallerStackPointer() const { |
1027 | return fp() + ExitFrameConstants::kCallerSPOffset; |
1028 | } |
1029 | |
1030 | int StubFrame::LookupExceptionHandlerInTable(int* stack_slots) { |
1031 | Code code = LookupCode(); |
1032 | DCHECK(code->is_turbofanned()); |
1033 | DCHECK_EQ(code->kind(), Code::BUILTIN); |
1034 | HandlerTable table(code); |
1035 | int pc_offset = static_cast<int>(pc() - code->InstructionStart()); |
1036 | *stack_slots = code->stack_slots(); |
1037 | return table.LookupReturn(pc_offset); |
1038 | } |
1039 | |
1040 | void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); } |
1041 | |
1042 | void JavaScriptFrame::SetParameterValue(int index, Object value) const { |
1043 | Memory<Address>(GetParameterSlot(index)) = value->ptr(); |
1044 | } |
1045 | |
1046 | bool JavaScriptFrame::IsConstructor() const { |
1047 | Address fp = caller_fp(); |
1048 | if (has_adapted_arguments()) { |
1049 | // Skip the arguments adaptor frame and look at the real caller. |
1050 | fp = Memory<Address>(fp + StandardFrameConstants::kCallerFPOffset); |
1051 | } |
1052 | return IsConstructFrame(fp); |
1053 | } |
1054 | |
1055 | |
1056 | bool JavaScriptFrame::HasInlinedFrames() const { |
1057 | std::vector<SharedFunctionInfo> functions; |
1058 | GetFunctions(&functions); |
1059 | return functions.size() > 1; |
1060 | } |
1061 | |
1062 | Code JavaScriptFrame::unchecked_code() const { return function()->code(); } |
1063 | |
1064 | int OptimizedFrame::ComputeParametersCount() const { |
1065 | Code code = LookupCode(); |
1066 | if (code->kind() == Code::BUILTIN) { |
1067 | return static_cast<int>( |
1068 | Memory<intptr_t>(fp() + OptimizedBuiltinFrameConstants::kArgCOffset)); |
1069 | } else { |
1070 | return JavaScriptFrame::ComputeParametersCount(); |
1071 | } |
1072 | } |
1073 | |
1074 | Address JavaScriptFrame::GetCallerStackPointer() const { |
1075 | return fp() + StandardFrameConstants::kCallerSPOffset; |
1076 | } |
1077 | |
1078 | void JavaScriptFrame::GetFunctions( |
1079 | std::vector<SharedFunctionInfo>* functions) const { |
1080 | DCHECK(functions->empty()); |
1081 | functions->push_back(function()->shared()); |
1082 | } |
1083 | |
1084 | void JavaScriptFrame::GetFunctions( |
1085 | std::vector<Handle<SharedFunctionInfo>>* functions) const { |
1086 | DCHECK(functions->empty()); |
1087 | std::vector<SharedFunctionInfo> raw_functions; |
1088 | GetFunctions(&raw_functions); |
1089 | for (const auto& raw_function : raw_functions) { |
1090 | functions->push_back( |
1091 | Handle<SharedFunctionInfo>(raw_function, function()->GetIsolate())); |
1092 | } |
1093 | } |
1094 | |
1095 | void JavaScriptFrame::Summarize(std::vector<FrameSummary>* functions) const { |
1096 | DCHECK(functions->empty()); |
1097 | Code code = LookupCode(); |
1098 | int offset = static_cast<int>(pc() - code->InstructionStart()); |
1099 | AbstractCode abstract_code = AbstractCode::cast(code); |
1100 | Handle<FixedArray> params = GetParameters(); |
1101 | FrameSummary::JavaScriptFrameSummary summary( |
1102 | isolate(), receiver(), function(), abstract_code, offset, IsConstructor(), |
1103 | *params); |
1104 | functions->push_back(summary); |
1105 | } |
1106 | |
1107 | JSFunction JavaScriptFrame::function() const { |
1108 | return JSFunction::cast(function_slot_object()); |
1109 | } |
1110 | |
Object JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  // Hence this returns either a JSFunction or the arguments-marker sentinel,
  // without a checked cast.
  DCHECK(function_slot_object()->IsJSFunction() ||
         ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
  return function_slot_object();
}
1119 | |
1120 | Object JavaScriptFrame::receiver() const { return GetParameter(-1); } |
1121 | |
1122 | Object JavaScriptFrame::context() const { |
1123 | const int offset = StandardFrameConstants::kContextOffset; |
1124 | Object maybe_result(Memory<Address>(fp() + offset)); |
1125 | DCHECK(!maybe_result->IsSmi()); |
1126 | return maybe_result; |
1127 | } |
1128 | |
1129 | Script JavaScriptFrame::script() const { |
1130 | return Script::cast(function()->shared()->script()); |
1131 | } |
1132 | |
1133 | int JavaScriptFrame::LookupExceptionHandlerInTable( |
1134 | int* stack_depth, HandlerTable::CatchPrediction* prediction) { |
1135 | DCHECK(!LookupCode()->has_handler_table()); |
1136 | DCHECK(!LookupCode()->is_optimized_code()); |
1137 | return -1; |
1138 | } |
1139 | |
1140 | void JavaScriptFrame::PrintFunctionAndOffset(JSFunction function, |
1141 | AbstractCode code, int code_offset, |
1142 | FILE* file, |
1143 | bool print_line_number) { |
1144 | PrintF(file, "%s" , function->IsOptimized() ? "*" : "~" ); |
1145 | function->PrintName(file); |
1146 | PrintF(file, "+%d" , code_offset); |
1147 | if (print_line_number) { |
1148 | SharedFunctionInfo shared = function->shared(); |
1149 | int source_pos = code->SourcePosition(code_offset); |
1150 | Object maybe_script = shared->script(); |
1151 | if (maybe_script->IsScript()) { |
1152 | Script script = Script::cast(maybe_script); |
1153 | int line = script->GetLineNumber(source_pos) + 1; |
1154 | Object script_name_raw = script->name(); |
1155 | if (script_name_raw->IsString()) { |
1156 | String script_name = String::cast(script->name()); |
1157 | std::unique_ptr<char[]> c_script_name = |
1158 | script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); |
1159 | PrintF(file, " at %s:%d" , c_script_name.get(), line); |
1160 | } else { |
1161 | PrintF(file, " at <unknown>:%d" , line); |
1162 | } |
1163 | } else { |
1164 | PrintF(file, " at <unknown>:<unknown>" ); |
1165 | } |
1166 | } |
1167 | } |
1168 | |
1169 | void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args, |
1170 | bool print_line_number) { |
1171 | // constructor calls |
1172 | DisallowHeapAllocation no_allocation; |
1173 | JavaScriptFrameIterator it(isolate); |
1174 | while (!it.done()) { |
1175 | if (it.frame()->is_java_script()) { |
1176 | JavaScriptFrame* frame = it.frame(); |
1177 | if (frame->IsConstructor()) PrintF(file, "new " ); |
1178 | JSFunction function = frame->function(); |
1179 | int code_offset = 0; |
1180 | if (frame->is_interpreted()) { |
1181 | InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame); |
1182 | code_offset = iframe->GetBytecodeOffset(); |
1183 | } else { |
1184 | Code code = frame->unchecked_code(); |
1185 | code_offset = static_cast<int>(frame->pc() - code->InstructionStart()); |
1186 | } |
1187 | PrintFunctionAndOffset(function, function->abstract_code(), code_offset, |
1188 | file, print_line_number); |
1189 | if (print_args) { |
1190 | // function arguments |
1191 | // (we are intentionally only printing the actually |
1192 | // supplied parameters, not all parameters required) |
1193 | PrintF(file, "(this=" ); |
1194 | frame->receiver()->ShortPrint(file); |
1195 | const int length = frame->ComputeParametersCount(); |
1196 | for (int i = 0; i < length; i++) { |
1197 | PrintF(file, ", " ); |
1198 | frame->GetParameter(i)->ShortPrint(file); |
1199 | } |
1200 | PrintF(file, ")" ); |
1201 | } |
1202 | break; |
1203 | } |
1204 | it.Advance(); |
1205 | } |
1206 | } |
1207 | |
1208 | void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction function, |
1209 | AbstractCode code, |
1210 | int code_offset) { |
1211 | auto ic_stats = ICStats::instance(); |
1212 | ICInfo& ic_info = ic_stats->Current(); |
1213 | SharedFunctionInfo shared = function->shared(); |
1214 | |
1215 | ic_info.function_name = ic_stats->GetOrCacheFunctionName(function); |
1216 | ic_info.script_offset = code_offset; |
1217 | |
1218 | int source_pos = code->SourcePosition(code_offset); |
1219 | Object maybe_script = shared->script(); |
1220 | if (maybe_script->IsScript()) { |
1221 | Script script = Script::cast(maybe_script); |
1222 | ic_info.line_num = script->GetLineNumber(source_pos) + 1; |
1223 | ic_info.script_name = ic_stats->GetOrCacheScriptName(script); |
1224 | } |
1225 | } |
1226 | |
1227 | Object JavaScriptFrame::GetParameter(int index) const { |
1228 | return Object(Memory<Address>(GetParameterSlot(index))); |
1229 | } |
1230 | |
int JavaScriptFrame::ComputeParametersCount() const {
  // Reading the count touches the heap (function -> shared info), so this
  // must not run during GC or "safe" stack iteration.
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
  return function()->shared()->internal_formal_parameter_count();
}
1236 | |
1237 | Handle<FixedArray> JavaScriptFrame::GetParameters() const { |
1238 | if (V8_LIKELY(!FLAG_detailed_error_stack_trace)) { |
1239 | return isolate()->factory()->empty_fixed_array(); |
1240 | } |
1241 | int param_count = ComputeParametersCount(); |
1242 | Handle<FixedArray> parameters = |
1243 | isolate()->factory()->NewFixedArray(param_count); |
1244 | for (int i = 0; i < param_count; i++) { |
1245 | parameters->set(i, GetParameter(i)); |
1246 | } |
1247 | |
1248 | return parameters; |
1249 | } |
1250 | |
1251 | int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const { |
1252 | // Assert that the first allocatable register is also the argument count |
1253 | // register. |
1254 | DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0), |
1255 | kJavaScriptCallArgCountRegister.code()); |
1256 | Object argc_object( |
1257 | Memory<Address>(fp() + BuiltinContinuationFrameConstants::kArgCOffset)); |
1258 | return Smi::ToInt(argc_object); |
1259 | } |
1260 | |
1261 | intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const { |
1262 | Address height_slot = |
1263 | fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize; |
1264 | intptr_t height = Smi::ToInt(Smi(Memory<Address>(height_slot))); |
1265 | return height; |
1266 | } |
1267 | |
1268 | Object JavaScriptBuiltinContinuationFrame::context() const { |
1269 | return Object(Memory<Address>( |
1270 | fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset)); |
1271 | } |
1272 | |
1273 | void JavaScriptBuiltinContinuationWithCatchFrame::SetException( |
1274 | Object exception) { |
1275 | Address exception_argument_slot = |
1276 | fp() + JavaScriptFrameConstants::kLastParameterOffset + |
1277 | kSystemPointerSize; // Skip over return value slot. |
1278 | |
1279 | // Only allow setting exception if previous value was the hole. |
1280 | CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(), |
1281 | Object(Memory<Address>(exception_argument_slot))); |
1282 | Memory<Address>(exception_argument_slot) = exception->ptr(); |
1283 | } |
1284 | |
// Builds a summary describing one (possibly inlined) JavaScript activation;
// all heap values are handlified against |isolate|.
FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
    Isolate* isolate, Object receiver, JSFunction function,
    AbstractCode abstract_code, int code_offset, bool is_constructor,
    FixedArray parameters)
    : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
      receiver_(receiver, isolate),
      function_(function, isolate),
      abstract_code_(abstract_code, isolate),
      code_offset_(code_offset),
      is_constructor_(is_constructor),
      parameters_(parameters, isolate) {
  // Summaries never reference optimized code directly; optimized frames are
  // summarized via the deoptimization translation (see
  // OptimizedFrame::Summarize).
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION);
}
1299 | |
1300 | void FrameSummary::EnsureSourcePositionsAvailable() { |
1301 | if (IsJavaScript()) { |
1302 | java_script_summary_.EnsureSourcePositionsAvailable(); |
1303 | } |
1304 | } |
1305 | |
void FrameSummary::JavaScriptFrameSummary::EnsureSourcePositionsAvailable() {
  // Delegates to SharedFunctionInfo::EnsureSourcePositionsAvailable for this
  // frame's shared function info.
  Handle<SharedFunctionInfo> shared(function()->shared(), isolate());
  SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
}
1310 | |
1311 | bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const { |
1312 | return function()->shared()->IsSubjectToDebugging(); |
1313 | } |
1314 | |
1315 | int FrameSummary::JavaScriptFrameSummary::SourcePosition() const { |
1316 | return abstract_code()->SourcePosition(code_offset()); |
1317 | } |
1318 | |
1319 | int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const { |
1320 | return abstract_code()->SourceStatementPosition(code_offset()); |
1321 | } |
1322 | |
1323 | Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const { |
1324 | return handle(function_->shared()->script(), isolate()); |
1325 | } |
1326 | |
1327 | Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const { |
1328 | return JSFunction::GetDebugName(function_); |
1329 | } |
1330 | |
1331 | Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const { |
1332 | return handle(function_->context()->native_context(), isolate()); |
1333 | } |
1334 | |
// Base summary for wasm frames. |kind| distinguishes compiled vs interpreted;
// |at_to_number_conversion| feeds into SourcePosition() lookup below.
FrameSummary::WasmFrameSummary::WasmFrameSummary(
    Isolate* isolate, FrameSummary::Kind kind,
    Handle<WasmInstanceObject> instance, bool at_to_number_conversion)
    : FrameSummaryBase(isolate, kind),
      wasm_instance_(instance),
      at_to_number_conversion_(at_to_number_conversion) {}
1341 | |
1342 | Handle<Object> FrameSummary::WasmFrameSummary::receiver() const { |
1343 | return wasm_instance_->GetIsolate()->global_proxy(); |
1344 | } |
1345 | |
// Dispatches a WasmFrameSummary accessor to the concrete compiled or
// interpreted subclass, based on the summary's kind.
#define WASM_SUMMARY_DISPATCH(type, name)                                    \
  type FrameSummary::WasmFrameSummary::name() const {                        \
    DCHECK(kind() == Kind::WASM_COMPILED || kind() == Kind::WASM_INTERPRETED); \
    return kind() == Kind::WASM_COMPILED                                     \
               ? static_cast<const WasmCompiledFrameSummary*>(this)->name()  \
               : static_cast<const WasmInterpretedFrameSummary*>(this)       \
                     ->name();                                               \
  }

// Accessors that both wasm summary flavors implement.
WASM_SUMMARY_DISPATCH(uint32_t, function_index)
WASM_SUMMARY_DISPATCH(int, byte_offset)

#undef WASM_SUMMARY_DISPATCH
1359 | |
1360 | int FrameSummary::WasmFrameSummary::SourcePosition() const { |
1361 | Handle<WasmModuleObject> module_object(wasm_instance()->module_object(), |
1362 | isolate()); |
1363 | return WasmModuleObject::GetSourcePosition(module_object, function_index(), |
1364 | byte_offset(), |
1365 | at_to_number_conversion()); |
1366 | } |
1367 | |
1368 | Handle<Script> FrameSummary::WasmFrameSummary::script() const { |
1369 | return handle(wasm_instance()->module_object()->script(), |
1370 | wasm_instance()->GetIsolate()); |
1371 | } |
1372 | |
1373 | Handle<String> FrameSummary::WasmFrameSummary::FunctionName() const { |
1374 | Handle<WasmModuleObject> module_object(wasm_instance()->module_object(), |
1375 | isolate()); |
1376 | return WasmModuleObject::GetFunctionName(isolate(), module_object, |
1377 | function_index()); |
1378 | } |
1379 | |
1380 | Handle<Context> FrameSummary::WasmFrameSummary::native_context() const { |
1381 | return handle(wasm_instance()->native_context(), isolate()); |
1382 | } |
1383 | |
// Summary of a frame executing compiled wasm code; |code_offset| is the pc
// offset within |code|.
FrameSummary::WasmCompiledFrameSummary::WasmCompiledFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code,
    int code_offset, bool at_to_number_conversion)
    : WasmFrameSummary(isolate, WASM_COMPILED, instance,
                       at_to_number_conversion),
      code_(code),
      code_offset_(code_offset) {}
1391 | |
1392 | uint32_t FrameSummary::WasmCompiledFrameSummary::function_index() const { |
1393 | return code()->index(); |
1394 | } |
1395 | |
1396 | int FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition( |
1397 | const wasm::WasmCode* code, int offset) { |
1398 | int position = 0; |
1399 | // Subtract one because the current PC is one instruction after the call site. |
1400 | offset--; |
1401 | for (SourcePositionTableIterator iterator(code->source_positions()); |
1402 | !iterator.done() && iterator.code_offset() <= offset; |
1403 | iterator.Advance()) { |
1404 | position = iterator.source_position().ScriptOffset(); |
1405 | } |
1406 | return position; |
1407 | } |
1408 | |
1409 | int FrameSummary::WasmCompiledFrameSummary::byte_offset() const { |
1410 | return GetWasmSourcePosition(code_, code_offset()); |
1411 | } |
1412 | |
// Summary of a frame running in the wasm interpreter; interpreted frames are
// never at a ToNumber conversion, hence the hard-coded false.
FrameSummary::WasmInterpretedFrameSummary::WasmInterpretedFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
    uint32_t function_index, int byte_offset)
    : WasmFrameSummary(isolate, WASM_INTERPRETED, instance, false),
      function_index_(function_index),
      byte_offset_(byte_offset) {}
1419 | |
FrameSummary::~FrameSummary() {
  // FrameSummary is a tagged union; manually destroy the active variant.
  // The variant list is supplied by FRAME_SUMMARY_VARIANTS.
#define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
  case kind: \
    field.~type(); \
    break;
  switch (base_.kind()) {
    FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR)
    default:
      UNREACHABLE();
  }
#undef FRAME_SUMMARY_DESTR
}
1432 | |
1433 | FrameSummary FrameSummary::GetTop(const StandardFrame* frame) { |
1434 | std::vector<FrameSummary> frames; |
1435 | frame->Summarize(&frames); |
1436 | DCHECK_LT(0, frames.size()); |
1437 | return frames.back(); |
1438 | } |
1439 | |
1440 | FrameSummary FrameSummary::GetBottom(const StandardFrame* frame) { |
1441 | return Get(frame, 0); |
1442 | } |
1443 | |
1444 | FrameSummary FrameSummary::GetSingle(const StandardFrame* frame) { |
1445 | std::vector<FrameSummary> frames; |
1446 | frame->Summarize(&frames); |
1447 | DCHECK_EQ(1, frames.size()); |
1448 | return frames.front(); |
1449 | } |
1450 | |
1451 | FrameSummary FrameSummary::Get(const StandardFrame* frame, int index) { |
1452 | DCHECK_LE(0, index); |
1453 | std::vector<FrameSummary> frames; |
1454 | frame->Summarize(&frames); |
1455 | DCHECK_GT(frames.size(), index); |
1456 | return frames[index]; |
1457 | } |
1458 | |
// Dispatches a FrameSummary accessor to the active union variant's
// implementation, based on the summary's kind.
#define FRAME_SUMMARY_DISPATCH(ret, name) \
  ret FrameSummary::name() const { \
    switch (base_.kind()) { \
      case JAVA_SCRIPT: \
        return java_script_summary_.name(); \
      case WASM_COMPILED: \
        return wasm_compiled_summary_.name(); \
      case WASM_INTERPRETED: \
        return wasm_interpreted_summary_.name(); \
      default: \
        UNREACHABLE(); \
        return ret{}; \
    } \
  }

// Accessors shared by all summary variants.
FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
FRAME_SUMMARY_DISPATCH(int, code_offset)
FRAME_SUMMARY_DISPATCH(bool, is_constructor)
FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging)
FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
FRAME_SUMMARY_DISPATCH(int, SourcePosition)
FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition)
FRAME_SUMMARY_DISPATCH(Handle<String>, FunctionName)
FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context)

#undef FRAME_SUMMARY_DISPATCH
1485 | |
// Produces one FrameSummary per (inlined) JS activation in this optimized
// frame by walking the deoptimization translation, bottom-to-top.
void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const {
  DCHECK(frames->empty());
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return JavaScriptFrame::Summarize(frames);
  }

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    CHECK(data.is_null());
    FATAL("Missing deoptimization information for OptimizedFrame::Summarize." );
  }

  // Prepare iteration over translation. Note that the below iteration might
  // materialize objects without storing them back to the Isolate, this will
  // lead to objects being re-materialized again for each summary.
  TranslatedState translated(this);
  translated.Prepare(fp());

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  for (auto it = translated.begin(); it != translated.end(); it++) {
    if (it->kind() == TranslatedFrame::kInterpretedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
        it->kind() ==
            TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
      Handle<SharedFunctionInfo> shared_info = it->shared_info();

      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      TranslatedFrame::iterator translated_values = it->begin();

      // Get or materialize the correct function in the optimized frame.
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(translated_values->GetValue());
      translated_values++;

      // Get or materialize the correct receiver in the optimized frame.
      Handle<Object> receiver = translated_values->GetValue();
      translated_values++;

      // Determine the underlying code object and the position within it from
      // the translation corresponding to the frame type in question.
      Handle<AbstractCode> abstract_code;
      unsigned code_offset;
      if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
          it->kind() ==
              TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
        // Continuations resume at the builtin's entry, so offset 0.
        code_offset = 0;
        abstract_code =
            handle(AbstractCode::cast(isolate()->builtins()->builtin(
                       Builtins::GetBuiltinFromBailoutId(it->node_id()))),
                   isolate());
      } else {
        DCHECK_EQ(it->kind(), TranslatedFrame::kInterpretedFunction);
        code_offset = it->node_id().ToInt();  // Points to current bytecode.
        abstract_code = handle(shared_info->abstract_code(), isolate());
      }

      // Append full summary of the encountered JS frame.
      Handle<FixedArray> params = GetParameters();
      FrameSummary::JavaScriptFrameSummary summary(
          isolate(), *receiver, *function, *abstract_code, code_offset,
          is_constructor, *params);
      frames->push_back(summary);
      is_constructor = false;
    } else if (it->kind() == TranslatedFrame::kConstructStub) {
      // The next encountered JS frame will be marked as a constructor call.
      DCHECK(!is_constructor);
      is_constructor = true;
    }
  }
}
1565 | |
1566 | |
1567 | int OptimizedFrame::LookupExceptionHandlerInTable( |
1568 | int* stack_slots, HandlerTable::CatchPrediction* prediction) { |
1569 | // We cannot perform exception prediction on optimized code. Instead, we need |
1570 | // to use FrameSummary to find the corresponding code offset in unoptimized |
1571 | // code to perform prediction there. |
1572 | DCHECK_NULL(prediction); |
1573 | Code code = LookupCode(); |
1574 | HandlerTable table(code); |
1575 | int pc_offset = static_cast<int>(pc() - code->InstructionStart()); |
1576 | if (stack_slots) *stack_slots = code->stack_slots(); |
1577 | |
1578 | // When the return pc has been replaced by a trampoline there won't be |
1579 | // a handler for this trampoline. Thus we need to use the return pc that |
1580 | // _used to be_ on the stack to get the right ExceptionHandler. |
1581 | if (code->kind() == Code::OPTIMIZED_FUNCTION && |
1582 | code->marked_for_deoptimization()) { |
1583 | SafepointTable safepoints(code); |
1584 | pc_offset = safepoints.find_return_pc(pc_offset); |
1585 | } |
1586 | return table.LookupReturn(pc_offset); |
1587 | } |
1588 | |
1589 | DeoptimizationData OptimizedFrame::GetDeoptimizationData( |
1590 | int* deopt_index) const { |
1591 | DCHECK(is_optimized()); |
1592 | |
1593 | JSFunction opt_function = function(); |
1594 | Code code = opt_function->code(); |
1595 | |
1596 | // The code object may have been replaced by lazy deoptimization. Fall |
1597 | // back to a slow search in this case to find the original optimized |
1598 | // code object. |
1599 | if (!code->contains(pc())) { |
1600 | code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc()); |
1601 | } |
1602 | DCHECK(!code.is_null()); |
1603 | DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
1604 | |
1605 | SafepointEntry safepoint_entry = code->GetSafepointEntry(pc()); |
1606 | if (safepoint_entry.has_deoptimization_index()) { |
1607 | *deopt_index = safepoint_entry.deoptimization_index(); |
1608 | return DeoptimizationData::cast(code->deoptimization_data()); |
1609 | } |
1610 | *deopt_index = Safepoint::kNoDeoptimizationIndex; |
1611 | return DeoptimizationData(); |
1612 | } |
1613 | |
1614 | Object OptimizedFrame::receiver() const { |
1615 | Code code = LookupCode(); |
1616 | if (code->kind() == Code::BUILTIN) { |
1617 | Address argc_ptr = fp() + OptimizedBuiltinFrameConstants::kArgCOffset; |
1618 | intptr_t argc = *reinterpret_cast<intptr_t*>(argc_ptr); |
1619 | intptr_t args_size = |
1620 | (StandardFrameConstants::kFixedSlotCountAboveFp + argc) * |
1621 | kSystemPointerSize; |
1622 | Address receiver_ptr = fp() + args_size; |
1623 | return *FullObjectSlot(receiver_ptr); |
1624 | } else { |
1625 | return JavaScriptFrame::receiver(); |
1626 | } |
1627 | } |
1628 | |
// Collects the SharedFunctionInfo of every (possibly inlined) JS function
// represented by this optimized frame by decoding its deoptimization
// translation. Results are appended innermost-first.
void OptimizedFrame::GetFunctions(
    std::vector<SharedFunctionInfo>* functions) const {
  DCHECK(functions->empty());
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  // Raw object fields are read below; no GC may move them meanwhile.
  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
  DCHECK(!data.is_null());
  DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
  FixedArray const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  // A translation starts with BEGIN followed by three counts; only the JS
  // frame count is needed here.
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, opcode);
  it.Next();  // Skip frame count.
  int jsframe_count = it.Next();
  it.Next();  // Skip update feedback count.

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::INTERPRETED_FRAME ||
        opcode == Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME ||
        opcode ==
            Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) {
      it.Next();  // Skip bailout id.
      jsframe_count--;

      // The second operand of the frame points to the function.
      Object shared = literal_array->get(it.Next());
      functions->push_back(SharedFunctionInfo::cast(shared));

      // Skip over remaining operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode) - 2);
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
}
1679 | |
1680 | int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) { |
1681 | return StandardFrameConstants::kCallerSPOffset - |
1682 | ((slot_index + 1) * kSystemPointerSize); |
1683 | } |
1684 | |
1685 | Object OptimizedFrame::StackSlotAt(int index) const { |
1686 | return Object(Memory<Address>(fp() + StackSlotOffsetRelativeToFp(index))); |
1687 | } |
1688 | |
1689 | int InterpretedFrame::position() const { |
1690 | AbstractCode code = AbstractCode::cast(GetBytecodeArray()); |
1691 | int code_offset = GetBytecodeOffset(); |
1692 | return code->SourcePosition(code_offset); |
1693 | } |
1694 | |
1695 | int InterpretedFrame::LookupExceptionHandlerInTable( |
1696 | int* context_register, HandlerTable::CatchPrediction* prediction) { |
1697 | HandlerTable table(GetBytecodeArray()); |
1698 | return table.LookupRange(GetBytecodeOffset(), context_register, prediction); |
1699 | } |
1700 | |
1701 | int InterpretedFrame::GetBytecodeOffset() const { |
1702 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; |
1703 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, |
1704 | InterpreterFrameConstants::kExpressionsOffset - |
1705 | index * kSystemPointerSize); |
1706 | int raw_offset = Smi::ToInt(GetExpression(index)); |
1707 | return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag; |
1708 | } |
1709 | |
1710 | int InterpretedFrame::GetBytecodeOffset(Address fp) { |
1711 | const int offset = InterpreterFrameConstants::kExpressionsOffset; |
1712 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; |
1713 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, |
1714 | InterpreterFrameConstants::kExpressionsOffset - |
1715 | index * kSystemPointerSize); |
1716 | Address expression_offset = fp + offset - index * kSystemPointerSize; |
1717 | int raw_offset = Smi::ToInt(Object(Memory<Address>(expression_offset))); |
1718 | return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag; |
1719 | } |
1720 | |
1721 | void InterpretedFrame::PatchBytecodeOffset(int new_offset) { |
1722 | const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex; |
1723 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp, |
1724 | InterpreterFrameConstants::kExpressionsOffset - |
1725 | index * kSystemPointerSize); |
1726 | int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag; |
1727 | SetExpression(index, Smi::FromInt(raw_offset)); |
1728 | } |
1729 | |
1730 | BytecodeArray InterpretedFrame::GetBytecodeArray() const { |
1731 | const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex; |
1732 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp, |
1733 | InterpreterFrameConstants::kExpressionsOffset - |
1734 | index * kSystemPointerSize); |
1735 | return BytecodeArray::cast(GetExpression(index)); |
1736 | } |
1737 | |
1738 | void InterpretedFrame::PatchBytecodeArray(BytecodeArray bytecode_array) { |
1739 | const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex; |
1740 | DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp, |
1741 | InterpreterFrameConstants::kExpressionsOffset - |
1742 | index * kSystemPointerSize); |
1743 | SetExpression(index, bytecode_array); |
1744 | } |
1745 | |
1746 | Object InterpretedFrame::ReadInterpreterRegister(int register_index) const { |
1747 | const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex; |
1748 | DCHECK_EQ(InterpreterFrameConstants::kRegisterFileFromFp, |
1749 | InterpreterFrameConstants::kExpressionsOffset - |
1750 | index * kSystemPointerSize); |
1751 | return GetExpression(index + register_index); |
1752 | } |
1753 | |
1754 | void InterpretedFrame::WriteInterpreterRegister(int register_index, |
1755 | Object value) { |
1756 | const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex; |
1757 | DCHECK_EQ(InterpreterFrameConstants::kRegisterFileFromFp, |
1758 | InterpreterFrameConstants::kExpressionsOffset - |
1759 | index * kSystemPointerSize); |
1760 | return SetExpression(index + register_index, value); |
1761 | } |
1762 | |
1763 | void InterpretedFrame::Summarize(std::vector<FrameSummary>* functions) const { |
1764 | DCHECK(functions->empty()); |
1765 | AbstractCode abstract_code = AbstractCode::cast(GetBytecodeArray()); |
1766 | Handle<FixedArray> params = GetParameters(); |
1767 | FrameSummary::JavaScriptFrameSummary summary( |
1768 | isolate(), receiver(), function(), abstract_code, GetBytecodeOffset(), |
1769 | IsConstructor(), *params); |
1770 | functions->push_back(summary); |
1771 | } |
1772 | |
1773 | int ArgumentsAdaptorFrame::ComputeParametersCount() const { |
1774 | return Smi::ToInt(GetExpression(0)); |
1775 | } |
1776 | |
1777 | Code ArgumentsAdaptorFrame::unchecked_code() const { |
1778 | return isolate()->builtins()->builtin( |
1779 | Builtins::kArgumentsAdaptorTrampoline); |
1780 | } |
1781 | |
1782 | int BuiltinFrame::ComputeParametersCount() const { |
1783 | return Smi::ToInt(GetExpression(0)); |
1784 | } |
1785 | |
1786 | void BuiltinFrame::PrintFrameKind(StringStream* accumulator) const { |
1787 | accumulator->Add("builtin frame: " ); |
1788 | } |
1789 | |
1790 | Address InternalFrame::GetCallerStackPointer() const { |
1791 | // Internal frames have no arguments. The stack pointer of the |
1792 | // caller is at a fixed offset from the frame pointer. |
1793 | return fp() + StandardFrameConstants::kCallerSPOffset; |
1794 | } |
1795 | |
1796 | Code InternalFrame::unchecked_code() const { return Code(); } |
1797 | |
// Prints a description of this wasm frame: script name, function index and
// name, pc (absolute and relative to the code start), and source position
// (absolute and relative to the function start).
void WasmCompiledFrame::Print(StringStream* accumulator, PrintMode mode,
                              int index) const {
  PrintIndex(accumulator, mode, index);
  accumulator->Add("WASM [" );
  accumulator->PrintName(script()->name());
  // Look up the wasm code object containing pc() so the pc can be printed as
  // an offset into it.
  Address instruction_start = isolate()
                                  ->wasm_engine()
                                  ->code_manager()
                                  ->LookupCode(pc())
                                  ->instruction_start();
  Vector<const uint8_t> raw_func_name =
      module_object()->GetRawFunctionName(function_index());
  // Copy the (not NUL-terminated) raw name into a bounded local buffer,
  // truncating to kMaxPrintedFunctionName characters.
  const int kMaxPrintedFunctionName = 64;
  char func_name[kMaxPrintedFunctionName + 1];
  int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
  memcpy(func_name, raw_func_name.start(), func_name_len);
  func_name[func_name_len] = '\0';
  int pos = position();
  const wasm::WasmModule* module = wasm_instance()->module_object()->module();
  int func_index = function_index();
  int func_code_offset = module->functions[func_index].code.offset();
  accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n" ,
                   func_index, func_name, reinterpret_cast<void*>(pc()),
                   static_cast<int>(pc() - instruction_start), pos,
                   pos - func_code_offset);
  if (mode != OVERVIEW) accumulator->Add("\n" );
}
1825 | |
1826 | Code WasmCompiledFrame::unchecked_code() const { |
1827 | return isolate()->FindCodeObject(pc()); |
1828 | } |
1829 | |
1830 | void WasmCompiledFrame::Iterate(RootVisitor* v) const { |
1831 | IterateCompiledFrame(v); |
1832 | } |
1833 | |
1834 | Address WasmCompiledFrame::GetCallerStackPointer() const { |
1835 | return fp() + ExitFrameConstants::kCallerSPOffset; |
1836 | } |
1837 | |
1838 | wasm::WasmCode* WasmCompiledFrame::wasm_code() const { |
1839 | return isolate()->wasm_engine()->code_manager()->LookupCode(pc()); |
1840 | } |
1841 | |
1842 | WasmInstanceObject WasmCompiledFrame::wasm_instance() const { |
1843 | const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset; |
1844 | Object instance(Memory<Address>(fp() + offset)); |
1845 | return WasmInstanceObject::cast(instance); |
1846 | } |
1847 | |
1848 | WasmModuleObject WasmCompiledFrame::module_object() const { |
1849 | return wasm_instance()->module_object(); |
1850 | } |
1851 | |
1852 | uint32_t WasmCompiledFrame::function_index() const { |
1853 | return FrameSummary::GetSingle(this).AsWasmCompiled().function_index(); |
1854 | } |
1855 | |
1856 | Script WasmCompiledFrame::script() const { return module_object()->script(); } |
1857 | |
1858 | int WasmCompiledFrame::position() const { |
1859 | return FrameSummary::GetSingle(this).SourcePosition(); |
1860 | } |
1861 | |
1862 | void WasmCompiledFrame::Summarize(std::vector<FrameSummary>* functions) const { |
1863 | DCHECK(functions->empty()); |
1864 | // The {WasmCode*} escapes this scope via the {FrameSummary}, which is fine, |
1865 | // since this code object is part of our stack. |
1866 | wasm::WasmCodeRefScope code_ref_scope; |
1867 | wasm::WasmCode* code = wasm_code(); |
1868 | int offset = static_cast<int>(pc() - code->instruction_start()); |
1869 | Handle<WasmInstanceObject> instance(wasm_instance(), isolate()); |
1870 | FrameSummary::WasmCompiledFrameSummary summary( |
1871 | isolate(), instance, code, offset, at_to_number_conversion()); |
1872 | functions->push_back(summary); |
1873 | } |
1874 | |
// Returns true if this frame is currently suspended at the ToNumber
// conversion inside a wasm-to-JS wrapper call (as opposed to the imported
// call itself).
bool WasmCompiledFrame::at_to_number_conversion() const {
  // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
  // ToNumber conversion call.
  wasm::WasmCode* code =
      callee_pc() != kNullAddress
          ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc())
          : nullptr;
  if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
  // Map the callee pc back to a source position within the wrapper.
  int offset = static_cast<int>(callee_pc() - code->instruction_start());
  int pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
      code, offset);
  DCHECK(pos == 0 || pos == 1);
  // The imported call has position 0, ToNumber has position 1.
  return !!pos;
}
1890 | |
1891 | int WasmCompiledFrame::LookupExceptionHandlerInTable(int* stack_slots) { |
1892 | DCHECK_NOT_NULL(stack_slots); |
1893 | wasm::WasmCode* code = |
1894 | isolate()->wasm_engine()->code_manager()->LookupCode(pc()); |
1895 | if (!code->IsAnonymous() && code->handler_table_offset() > 0) { |
1896 | HandlerTable table(code->instruction_start(), code->handler_table_offset()); |
1897 | int pc_offset = static_cast<int>(pc() - code->instruction_start()); |
1898 | *stack_slots = static_cast<int>(code->stack_slots()); |
1899 | return table.LookupReturn(pc_offset); |
1900 | } |
1901 | return -1; |
1902 | } |
1903 | |
1904 | void WasmInterpreterEntryFrame::Iterate(RootVisitor* v) const { |
1905 | IterateCompiledFrame(v); |
1906 | } |
1907 | |
1908 | void WasmInterpreterEntryFrame::Print(StringStream* accumulator, PrintMode mode, |
1909 | int index) const { |
1910 | PrintIndex(accumulator, mode, index); |
1911 | accumulator->Add("WASM INTERPRETER ENTRY [" ); |
1912 | Script script = this->script(); |
1913 | accumulator->PrintName(script->name()); |
1914 | accumulator->Add("]" ); |
1915 | if (mode != OVERVIEW) accumulator->Add("\n" ); |
1916 | } |
1917 | |
1918 | void WasmInterpreterEntryFrame::Summarize( |
1919 | std::vector<FrameSummary>* functions) const { |
1920 | Handle<WasmInstanceObject> instance(wasm_instance(), isolate()); |
1921 | std::vector<std::pair<uint32_t, int>> interpreted_stack = |
1922 | instance->debug_info()->GetInterpretedStack(fp()); |
1923 | |
1924 | for (auto& e : interpreted_stack) { |
1925 | FrameSummary::WasmInterpretedFrameSummary summary(isolate(), instance, |
1926 | e.first, e.second); |
1927 | functions->push_back(summary); |
1928 | } |
1929 | } |
1930 | |
1931 | Code WasmInterpreterEntryFrame::unchecked_code() const { return Code(); } |
1932 | |
1933 | WasmInstanceObject WasmInterpreterEntryFrame::wasm_instance() const { |
1934 | const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset; |
1935 | Object instance(Memory<Address>(fp() + offset)); |
1936 | return WasmInstanceObject::cast(instance); |
1937 | } |
1938 | |
1939 | WasmDebugInfo WasmInterpreterEntryFrame::debug_info() const { |
1940 | return wasm_instance()->debug_info(); |
1941 | } |
1942 | |
1943 | WasmModuleObject WasmInterpreterEntryFrame::module_object() const { |
1944 | return wasm_instance()->module_object(); |
1945 | } |
1946 | |
1947 | Script WasmInterpreterEntryFrame::script() const { |
1948 | return module_object()->script(); |
1949 | } |
1950 | |
1951 | int WasmInterpreterEntryFrame::position() const { |
1952 | return FrameSummary::GetBottom(this).AsWasmInterpreted().SourcePosition(); |
1953 | } |
1954 | |
1955 | Object WasmInterpreterEntryFrame::context() const { |
1956 | return wasm_instance()->native_context(); |
1957 | } |
1958 | |
1959 | Address WasmInterpreterEntryFrame::GetCallerStackPointer() const { |
1960 | return fp() + ExitFrameConstants::kCallerSPOffset; |
1961 | } |
1962 | |
1963 | Code WasmCompileLazyFrame::unchecked_code() const { return Code(); } |
1964 | |
1965 | WasmInstanceObject WasmCompileLazyFrame::wasm_instance() const { |
1966 | return WasmInstanceObject::cast(*wasm_instance_slot()); |
1967 | } |
1968 | |
1969 | FullObjectSlot WasmCompileLazyFrame::wasm_instance_slot() const { |
1970 | const int offset = WasmCompileLazyFrameConstants::kWasmInstanceOffset; |
1971 | return FullObjectSlot(&Memory<Address>(fp() + offset)); |
1972 | } |
1973 | |
1974 | void WasmCompileLazyFrame::Iterate(RootVisitor* v) const { |
1975 | const int = WasmCompileLazyFrameConstants::kFixedFrameSizeFromFp; |
1976 | FullObjectSlot base(&Memory<Address>(sp())); |
1977 | FullObjectSlot limit(&Memory<Address>(fp() - header_size)); |
1978 | v->VisitRootPointers(Root::kTop, nullptr, base, limit); |
1979 | v->VisitRootPointer(Root::kTop, nullptr, wasm_instance_slot()); |
1980 | } |
1981 | |
1982 | Address WasmCompileLazyFrame::GetCallerStackPointer() const { |
1983 | return fp() + WasmCompileLazyFrameConstants::kCallerSPOffset; |
1984 | } |
1985 | |
1986 | namespace { |
1987 | |
1988 | void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo shared, |
1989 | Code code) { |
1990 | if (FLAG_max_stack_trace_source_length != 0 && !code.is_null()) { |
1991 | std::ostringstream os; |
1992 | os << "--------- s o u r c e c o d e ---------\n" |
1993 | << SourceCodeOf(shared, FLAG_max_stack_trace_source_length) |
1994 | << "\n-----------------------------------------\n" ; |
1995 | accumulator->Add(os.str().c_str()); |
1996 | } |
1997 | } |
1998 | |
1999 | } // namespace |
2000 | |
2001 | |
// Prints a human-readable description of this JS frame to the accumulator:
// function and receiver, script name and line, parameters, and (in
// non-OVERVIEW modes) heap-allocated locals, the expression stack, and an
// excerpt of the function source.
void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  // Source positions must be available before we disallow allocation below.
  Handle<SharedFunctionInfo> shared = handle(function()->shared(), isolate());
  SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);

  DisallowHeapAllocation no_gc;
  Object receiver = this->receiver();
  JSFunction function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  PrintFrameKind(accumulator);
  Code code;
  if (IsConstructor()) accumulator->Add("new " );
  // PrintFunction also looks up the code object for this frame (out param),
  // which is used by PrintFunctionSource below.
  accumulator->PrintFunction(function, receiver, &code);
  accumulator->Add(" [%p]" , function);

  // Get scope information for nicer output, if possible. If code is nullptr, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  ScopeInfo scope_info = shared->scope_info();
  Object script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script script = Script::cast(script_obj);
    accumulator->Add(" [" );
    accumulator->PrintName(script->name());

    if (is_interpreted()) {
      // For interpreted frames the exact line can be computed from the
      // current bytecode offset.
      const InterpretedFrame* iframe =
          reinterpret_cast<const InterpretedFrame*>(this);
      BytecodeArray bytecodes = iframe->GetBytecodeArray();
      int offset = iframe->GetBytecodeOffset();
      int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [bytecode=%p offset=%d]" , line,
                       reinterpret_cast<void*>(bytecodes.ptr()), offset);
    } else {
      // Otherwise only the function's start line is known; the "~" marks it
      // as approximate.
      int function_start_pos = shared->StartPosition();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d] [pc=%p]" , line, reinterpret_cast<void*>(pc()));
    }
  }

  accumulator->Add("(this=%o" , receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add("," );
    accumulator->Add("%o" , GetParameter(i));
  }

  accumulator->Add(")" );
  if (mode == OVERVIEW) {
    accumulator->Add("\n" );
    return;
  }
  if (is_optimized()) {
    // Locals of optimized frames are not materialized; just dump the source.
    accumulator->Add(" {\n// optimized frame\n" );
    PrintFunctionSource(accumulator, *shared, code);
    accumulator->Add("}\n" );
    return;
  }
  accumulator->Add(" {\n" );

  // Compute the number of locals and expression stack elements.
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Try to get hold of the context of this frame.
  Context context;
  if (this->context()->IsContext()) {
    context = Context::cast(this->context());
    // Skip with-contexts; the locals live in the enclosing function context.
    while (context->IsWithContext()) {
      context = context->previous();
      DCHECK(!context.is_null());
    }
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add(" // heap-allocated locals\n" );
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add(" var " );
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = " );
    if (!context.is_null()) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o" , context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?" );
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?" );
    }
    accumulator->Add("\n" );
  }

  // Print the expression stack.
  if (0 < expressions_count) {
    accumulator->Add(" // expression stack (top to bottom)\n" );
  }
  for (int i = expressions_count - 1; i >= 0; i--) {
    accumulator->Add(" [%02d] : %o\n" , i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, *shared, code);

  accumulator->Add("}\n\n" );
}
2117 | |
2118 | |
2119 | void ArgumentsAdaptorFrame::Print(StringStream* accumulator, |
2120 | PrintMode mode, |
2121 | int index) const { |
2122 | int actual = ComputeParametersCount(); |
2123 | int expected = -1; |
2124 | JSFunction function = this->function(); |
2125 | expected = function->shared()->internal_formal_parameter_count(); |
2126 | |
2127 | PrintIndex(accumulator, mode, index); |
2128 | accumulator->Add("arguments adaptor frame: %d->%d" , actual, expected); |
2129 | if (mode == OVERVIEW) { |
2130 | accumulator->Add("\n" ); |
2131 | return; |
2132 | } |
2133 | accumulator->Add(" {\n" ); |
2134 | |
2135 | // Print actual arguments. |
2136 | if (actual > 0) accumulator->Add(" // actual arguments\n" ); |
2137 | for (int i = 0; i < actual; i++) { |
2138 | accumulator->Add(" [%02d] : %o" , i, GetParameter(i)); |
2139 | if (expected != -1 && i >= expected) { |
2140 | accumulator->Add(" // not passed to callee" ); |
2141 | } |
2142 | accumulator->Add("\n" ); |
2143 | } |
2144 | |
2145 | accumulator->Add("}\n\n" ); |
2146 | } |
2147 | |
2148 | void EntryFrame::Iterate(RootVisitor* v) const { |
2149 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); |
2150 | } |
2151 | |
2152 | void StandardFrame::IterateExpressions(RootVisitor* v) const { |
2153 | const int offset = StandardFrameConstants::kLastObjectOffset; |
2154 | FullObjectSlot base(&Memory<Address>(sp())); |
2155 | FullObjectSlot limit(&Memory<Address>(fp() + offset) + 1); |
2156 | v->VisitRootPointers(Root::kTop, nullptr, base, limit); |
2157 | } |
2158 | |
2159 | void JavaScriptFrame::Iterate(RootVisitor* v) const { |
2160 | IterateExpressions(v); |
2161 | IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); |
2162 | } |
2163 | |
2164 | void InternalFrame::Iterate(RootVisitor* v) const { |
2165 | Code code = LookupCode(); |
2166 | IteratePc(v, pc_address(), constant_pool_address(), code); |
2167 | // Internal frames typically do not receive any arguments, hence their stack |
2168 | // only contains tagged pointers. |
2169 | // We are misusing the has_tagged_params flag here to tell us whether |
2170 | // the full stack frame contains only tagged pointers or only raw values. |
2171 | // This is used for the WasmCompileLazy builtin, where we actually pass |
2172 | // untagged arguments and also store untagged values on the stack. |
2173 | if (code->has_tagged_params()) IterateExpressions(v); |
2174 | } |
2175 | |
2176 | // ------------------------------------------------------------------------- |
2177 | |
2178 | namespace { |
2179 | |
2180 | uint32_t PcAddressForHashing(Isolate* isolate, Address address) { |
2181 | if (InstructionStream::PcIsOffHeap(isolate, address)) { |
2182 | // Ensure that we get predictable hashes for addresses in embedded code. |
2183 | return EmbeddedData::FromBlob(isolate).AddressForHashing(address); |
2184 | } |
2185 | return ObjectAddressForHashing(address); |
2186 | } |
2187 | |
2188 | } // namespace |
2189 | |
// Maps an inner pointer (pc) to its Code object through a small
// direct-mapped cache backed by GcSafeFindCodeForInnerPointer. The update
// order below is significant for signal safety; see the comment inside.
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
  uint32_t hash =
      ComputeUnseededHash(PcAddressForHashing(isolate_, inner_pointer));
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    // Cache hit; verify the cached code object in debug builds.
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code ==
           isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code =
        isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}
2214 | } // namespace internal |
2215 | } // namespace v8 |
2216 | |