5 #include "src/libsampler/sampler.h" 7 #if V8_OS_POSIX && !V8_OS_CYGWIN && !V8_OS_FUCHSIA 17 #if !V8_OS_QNX && !V8_OS_AIX 18 #include <sys/syscall.h> 22 #include <mach/mach.h> 31 #elif V8_OS_WIN || V8_OS_CYGWIN 33 #include "src/base/win32-headers.h" 37 #include <zircon/process.h> 38 #include <zircon/syscalls.h> 39 #include <zircon/syscalls/debug.h> 40 #include <zircon/types.h> 43 #if defined(ZX_THREAD_STATE_REGSET0) 44 #define ZX_THREAD_STATE_GENERAL_REGS ZX_THREAD_STATE_REGSET0 45 zx_status_t zx_thread_read_state(zx_handle_t h,
uint32_t k,
void* b,
size_t l) {
47 return zx_thread_read_state(h, k, b, static_cast<uint32_t>(l),
50 #if defined(__x86_64__) 51 typedef zx_x86_64_general_regs_t zx_thread_state_general_regs_t;
53 typedef zx_arm64_general_regs_t zx_thread_state_general_regs_t;
55 #endif // !defined(ZX_THREAD_STATE_GENERAL_REGS) 63 #include "src/base/atomic-utils.h" 64 #include "src/base/hashmap.h" 65 #include "src/base/platform/platform.h" 67 #if V8_OS_ANDROID && !defined(__BIONIC_HAVE_UCONTEXT_T) 78 typedef struct sigcontext mcontext_t;
80 typedef struct ucontext {
82 struct ucontext* uc_link;
84 mcontext_t uc_mcontext;
88 #elif defined(__aarch64__) 90 typedef struct sigcontext mcontext_t;
92 typedef struct ucontext {
94 struct ucontext *uc_link;
96 mcontext_t uc_mcontext;
100 #elif defined(__mips__) 123 typedef struct ucontext {
125 struct ucontext* uc_link;
127 mcontext_t uc_mcontext;
131 #elif defined(__i386__) 140 typedef uint32_t kernel_sigset_t[2];
141 typedef struct ucontext {
143 struct ucontext* uc_link;
145 mcontext_t uc_mcontext;
148 enum { REG_EBP = 6, REG_ESP = 7, REG_EIP = 14 };
150 #elif defined(__x86_64__) 155 uint64_t __reserved1[8];
158 typedef struct ucontext {
160 struct ucontext *uc_link;
162 mcontext_t uc_mcontext;
165 enum { REG_RBP = 10, REG_RSP = 15, REG_RIP = 16 };
168 #endif // V8_OS_ANDROID && !defined(__BIONIC_HAVE_UCONTEXT_T) 176 #if defined(USE_SIGNALS) 177 typedef std::vector<Sampler*> SamplerList;
178 typedef SamplerList::iterator SamplerListIterator;
179 typedef std::atomic_bool AtomicMutex;
// RAII acquisition of an AtomicMutex (std::atomic_bool) flag. In blocking
// mode the constructor spins until the flag is acquired; in non-blocking
// mode it makes a single attempt and records the outcome in is_success().
// The destructor releases the flag only if this guard actually acquired it.
class AtomicGuard {
 public:
  explicit AtomicGuard(std::atomic_bool* atomic, bool is_blocking = true)
      : atomic_(atomic) {
    for (;;) {
      // |expected| must be re-initialized on every attempt:
      // compare_exchange_weak overwrites it with the current value on
      // failure (and may also fail spuriously).
      bool expected = false;
      is_success_ = atomic_->compare_exchange_weak(expected, true);
      if (is_success_ || !is_blocking) break;
    }
  }

  ~AtomicGuard() {
    if (is_success_) atomic_->store(false);
  }

  // Whether the flag was acquired (always true after a blocking acquire).
  bool is_success() const { return is_success_; }

 private:
  std::atomic_bool* const atomic_;  // The flag being guarded; not owned.
  bool is_success_ = false;
};
// Converts a pthread_t into the opaque pointer key format expected by
// base::HashMap.
void* ThreadKey(pthread_t thread_id) {
  const auto key = reinterpret_cast<void*>(thread_id);
  return key;
}
// Hashes a pthread_t for base::HashMap. On Mac pthread_t is a pointer type,
// so it is first narrowed through intptr_t; elsewhere it is integral.
uint32_t ThreadHash(pthread_t thread_id) {
#if V8_OS_MACOSX
  return static_cast<uint32_t>(reinterpret_cast<intptr_t>(thread_id));
#else
  return static_cast<uint32_t>(thread_id);
#endif
}
217 #endif // USE_SIGNALS 221 #if defined(USE_SIGNALS) 223 class Sampler::PlatformData {
225 PlatformData() : vm_tid_(pthread_self()) {}
226 pthread_t vm_tid()
const {
return vm_tid_; }
232 class SamplerManager {
234 SamplerManager() : sampler_map_() {}
236 void AddSampler(Sampler* sampler) {
237 AtomicGuard atomic_guard(&samplers_access_counter_);
238 DCHECK(sampler->IsActive() || !sampler->IsRegistered());
240 pthread_t thread_id = sampler->platform_data()->vm_tid();
241 base::HashMap::Entry* entry =
242 sampler_map_.LookupOrInsert(ThreadKey(thread_id),
243 ThreadHash(thread_id));
244 DCHECK_NOT_NULL(entry);
245 if (entry->value ==
nullptr) {
246 SamplerList* samplers =
new SamplerList();
247 samplers->push_back(sampler);
248 entry->value = samplers;
250 SamplerList* samplers =
reinterpret_cast<SamplerList*
>(entry->value);
252 for (SamplerListIterator iter = samplers->begin();
253 iter != samplers->end(); ++iter) {
254 if (*iter == sampler) {
260 samplers->push_back(sampler);
265 void RemoveSampler(Sampler* sampler) {
266 AtomicGuard atomic_guard(&samplers_access_counter_);
267 DCHECK(sampler->IsActive() || sampler->IsRegistered());
269 pthread_t thread_id = sampler->platform_data()->vm_tid();
270 void* thread_key = ThreadKey(thread_id);
271 uint32_t thread_hash = ThreadHash(thread_id);
272 base::HashMap::Entry* entry = sampler_map_.Lookup(thread_key, thread_hash);
273 DCHECK_NOT_NULL(entry);
274 SamplerList* samplers =
reinterpret_cast<SamplerList*
>(entry->value);
275 for (SamplerListIterator iter = samplers->begin(); iter != samplers->end();
277 if (*iter == sampler) {
278 samplers->erase(iter);
282 if (samplers->empty()) {
283 sampler_map_.Remove(thread_key, thread_hash);
288 #if defined(USE_SIGNALS) 290 AtomicGuard atomic_guard(&SamplerManager::samplers_access_counter_,
false);
291 if (!atomic_guard.is_success())
return;
292 pthread_t thread_id = pthread_self();
293 base::HashMap::Entry* entry =
294 sampler_map_.Lookup(ThreadKey(thread_id), ThreadHash(thread_id));
296 SamplerList& samplers = *
static_cast<SamplerList*
>(entry->value);
298 for (
size_t i = 0;
i < samplers.size(); ++
i) {
299 Sampler* sampler = samplers[
i];
300 Isolate* isolate = sampler->isolate();
302 if (isolate ==
nullptr || !isolate->IsInUse())
continue;
303 if (v8::Locker::IsActive() && !Locker::IsLocked(isolate))
continue;
304 sampler->SampleStack(state);
309 static SamplerManager* instance() {
return instance_.Pointer(); }
312 base::HashMap sampler_map_;
313 static AtomicMutex samplers_access_counter_;
314 static base::LazyInstance<SamplerManager>::type instance_;
317 AtomicMutex SamplerManager::samplers_access_counter_;
318 base::LazyInstance<SamplerManager>::type SamplerManager::instance_ =
319 LAZY_INSTANCE_INITIALIZER;
321 #elif V8_OS_WIN || V8_OS_CYGWIN 327 class Sampler::PlatformData {
335 : profiled_thread_(OpenThread(THREAD_GET_CONTEXT |
336 THREAD_SUSPEND_RESUME |
337 THREAD_QUERY_INFORMATION,
339 GetCurrentThreadId())) {}
342 if (profiled_thread_ !=
nullptr) {
343 CloseHandle(profiled_thread_);
344 profiled_thread_ =
nullptr;
348 HANDLE profiled_thread() {
return profiled_thread_; }
351 HANDLE profiled_thread_;
356 class Sampler::PlatformData {
359 zx_handle_duplicate(zx_thread_self(), ZX_RIGHT_SAME_RIGHTS,
363 if (profiled_thread_ != ZX_HANDLE_INVALID) {
364 zx_handle_close(profiled_thread_);
365 profiled_thread_ = ZX_HANDLE_INVALID;
369 zx_handle_t profiled_thread() {
return profiled_thread_; }
372 zx_handle_t profiled_thread_ = ZX_HANDLE_INVALID;
375 #endif // USE_SIGNALS 378 #if defined(USE_SIGNALS) 379 class SignalHandler {
381 static void SetUp() {
if (!mutex_) mutex_ =
new base::Mutex(); }
382 static void TearDown() {
387 static void IncreaseSamplerCount() {
388 base::MutexGuard lock_guard(mutex_);
389 if (++client_count_ == 1) Install();
392 static void DecreaseSamplerCount() {
393 base::MutexGuard lock_guard(mutex_);
394 if (--client_count_ == 0) Restore();
397 static bool Installed() {
398 base::MutexGuard lock_guard(mutex_);
399 return signal_handler_installed_;
403 static void Install() {
405 sa.sa_sigaction = &HandleProfilerSignal;
406 sigemptyset(&sa.sa_mask);
408 sa.sa_flags = SA_SIGINFO;
410 sa.sa_flags = SA_RESTART | SA_SIGINFO;
412 signal_handler_installed_ =
413 (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
416 static void Restore() {
417 if (signal_handler_installed_) {
418 sigaction(SIGPROF, &old_signal_handler_,
nullptr);
419 signal_handler_installed_ =
false;
423 static void FillRegisterState(
void* context, RegisterState* regs);
424 static void HandleProfilerSignal(
int signal, siginfo_t* info,
void* context);
427 static base::Mutex* mutex_;
428 static int client_count_;
429 static bool signal_handler_installed_;
430 static struct sigaction old_signal_handler_;
433 base::Mutex* SignalHandler::mutex_ =
nullptr;
434 int SignalHandler::client_count_ = 0;
435 struct sigaction SignalHandler::old_signal_handler_;
436 bool SignalHandler::signal_handler_installed_ = false;
439 void SignalHandler::HandleProfilerSignal(int signal, siginfo_t* info,
442 if (signal != SIGPROF)
return;
444 FillRegisterState(context, &state);
445 SamplerManager::instance()->DoSample(state);
448 void SignalHandler::FillRegisterState(
void* context, RegisterState* state) {
450 ucontext_t* ucontext =
reinterpret_cast<ucontext_t*
>(context);
451 #if !(V8_OS_OPENBSD || (V8_OS_LINUX && (V8_HOST_ARCH_PPC || V8_HOST_ARCH_S390))) 452 mcontext_t& mcontext = ucontext->uc_mcontext;
455 #if V8_HOST_ARCH_IA32 456 state->pc =
reinterpret_cast<void*
>(mcontext.gregs[REG_EIP]);
457 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[REG_ESP]);
458 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[REG_EBP]);
459 #elif V8_HOST_ARCH_X64 460 state->pc =
reinterpret_cast<void*
>(mcontext.gregs[REG_RIP]);
461 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[REG_RSP]);
462 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[REG_RBP]);
463 #elif V8_HOST_ARCH_ARM 464 #if V8_LIBC_GLIBC && !V8_GLIBC_PREREQ(2, 4) 467 state->pc =
reinterpret_cast<void*
>(mcontext.gregs[R15]);
468 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[R13]);
469 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[R11]);
471 state->pc =
reinterpret_cast<void*
>(mcontext.arm_pc);
472 state->sp =
reinterpret_cast<void*
>(mcontext.arm_sp);
473 state->fp =
reinterpret_cast<void*
>(mcontext.arm_fp);
474 #endif // V8_LIBC_GLIBC && !V8_GLIBC_PREREQ(2, 4) 475 #elif V8_HOST_ARCH_ARM64 476 state->pc =
reinterpret_cast<void*
>(mcontext.pc);
477 state->sp =
reinterpret_cast<void*
>(mcontext.sp);
479 state->fp =
reinterpret_cast<void*
>(mcontext.regs[29]);
480 #elif V8_HOST_ARCH_MIPS 481 state->pc =
reinterpret_cast<void*
>(mcontext.pc);
482 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[29]);
483 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[30]);
484 #elif V8_HOST_ARCH_MIPS64 485 state->pc =
reinterpret_cast<void*
>(mcontext.pc);
486 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[29]);
487 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[30]);
488 #elif V8_HOST_ARCH_PPC 490 state->pc =
reinterpret_cast<void*
>(ucontext->uc_mcontext.regs->nip);
492 reinterpret_cast<void*
>(ucontext->uc_mcontext.regs->gpr[PT_R1]);
494 reinterpret_cast<void*
>(ucontext->uc_mcontext.regs->gpr[PT_R31]);
497 state->pc =
reinterpret_cast<void*
>(ucontext->uc_mcontext.gp_regs[32]);
498 state->sp =
reinterpret_cast<void*
>(ucontext->uc_mcontext.gp_regs[1]);
499 state->fp =
reinterpret_cast<void*
>(ucontext->uc_mcontext.gp_regs[31]);
501 #elif V8_HOST_ARCH_S390 502 #if V8_TARGET_ARCH_32_BIT 506 reinterpret_cast<void*
>(ucontext->uc_mcontext.psw.addr & 0x7FFFFFFF);
508 state->pc =
reinterpret_cast<void*
>(ucontext->uc_mcontext.psw.addr);
509 #endif // V8_TARGET_ARCH_32_BIT 510 state->sp =
reinterpret_cast<void*
>(ucontext->uc_mcontext.gregs[15]);
511 state->fp =
reinterpret_cast<void*
>(ucontext->uc_mcontext.gregs[11]);
512 #endif // V8_HOST_ARCH_* 516 state->pc =
reinterpret_cast<void*
>(mcontext->__ss.__rip);
517 state->sp =
reinterpret_cast<void*
>(mcontext->__ss.__rsp);
518 state->fp =
reinterpret_cast<void*
>(mcontext->__ss.__rbp);
519 #else // !__DARWIN_UNIX03 520 state->pc =
reinterpret_cast<void*
>(mcontext->ss.rip);
521 state->sp =
reinterpret_cast<void*
>(mcontext->ss.rsp);
522 state->fp =
reinterpret_cast<void*
>(mcontext->ss.rbp);
523 #endif // __DARWIN_UNIX03 524 #elif V8_HOST_ARCH_IA32 526 state->pc =
reinterpret_cast<void*
>(mcontext->__ss.__eip);
527 state->sp =
reinterpret_cast<void*
>(mcontext->__ss.__esp);
528 state->fp =
reinterpret_cast<void*
>(mcontext->__ss.__ebp);
529 #else // !__DARWIN_UNIX03 530 state->pc =
reinterpret_cast<void*
>(mcontext->ss.eip);
531 state->sp =
reinterpret_cast<void*
>(mcontext->ss.esp);
532 state->fp =
reinterpret_cast<void*
>(mcontext->ss.ebp);
533 #endif // __DARWIN_UNIX03 534 #endif // V8_HOST_ARCH_IA32 536 #if V8_HOST_ARCH_IA32 537 state->pc =
reinterpret_cast<void*
>(mcontext.mc_eip);
538 state->sp =
reinterpret_cast<void*
>(mcontext.mc_esp);
539 state->fp =
reinterpret_cast<void*
>(mcontext.mc_ebp);
540 #elif V8_HOST_ARCH_X64 541 state->pc =
reinterpret_cast<void*
>(mcontext.mc_rip);
542 state->sp =
reinterpret_cast<void*
>(mcontext.mc_rsp);
543 state->fp =
reinterpret_cast<void*
>(mcontext.mc_rbp);
544 #elif V8_HOST_ARCH_ARM 545 state->pc =
reinterpret_cast<void*
>(mcontext.mc_r15);
546 state->sp =
reinterpret_cast<void*
>(mcontext.mc_r13);
547 state->fp =
reinterpret_cast<void*
>(mcontext.mc_r11);
548 #endif // V8_HOST_ARCH_* 550 #if V8_HOST_ARCH_IA32 551 state->pc =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_EIP]);
552 state->sp =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_ESP]);
553 state->fp =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_EBP]);
554 #elif V8_HOST_ARCH_X64 555 state->pc =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_RIP]);
556 state->sp =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_RSP]);
557 state->fp =
reinterpret_cast<void*
>(mcontext.__gregs[_REG_RBP]);
558 #endif // V8_HOST_ARCH_* 560 #if V8_HOST_ARCH_IA32 561 state->pc =
reinterpret_cast<void*
>(ucontext->sc_eip);
562 state->sp =
reinterpret_cast<void*
>(ucontext->sc_esp);
563 state->fp =
reinterpret_cast<void*
>(ucontext->sc_ebp);
564 #elif V8_HOST_ARCH_X64 565 state->pc =
reinterpret_cast<void*
>(ucontext->sc_rip);
566 state->sp =
reinterpret_cast<void*
>(ucontext->sc_rsp);
567 state->fp =
reinterpret_cast<void*
>(ucontext->sc_rbp);
568 #endif // V8_HOST_ARCH_* 570 state->pc =
reinterpret_cast<void*
>(mcontext.gregs[REG_PC]);
571 state->sp =
reinterpret_cast<void*
>(mcontext.gregs[REG_SP]);
572 state->fp =
reinterpret_cast<void*
>(mcontext.gregs[REG_FP]);
574 #if V8_HOST_ARCH_IA32 575 state->pc =
reinterpret_cast<void*
>(mcontext.cpu.eip);
576 state->sp =
reinterpret_cast<void*
>(mcontext.cpu.esp);
577 state->fp =
reinterpret_cast<void*
>(mcontext.cpu.ebp);
578 #elif V8_HOST_ARCH_ARM 579 state->pc =
reinterpret_cast<void*
>(mcontext.cpu.gpr[ARM_REG_PC]);
580 state->sp =
reinterpret_cast<void*
>(mcontext.cpu.gpr[ARM_REG_SP]);
581 state->fp =
reinterpret_cast<void*
>(mcontext.cpu.gpr[ARM_REG_FP]);
582 #endif // V8_HOST_ARCH_* 584 state->pc =
reinterpret_cast<void*
>(mcontext.jmp_context.iar);
585 state->sp =
reinterpret_cast<void*
>(mcontext.jmp_context.gpr[1]);
586 state->fp =
reinterpret_cast<void*
>(mcontext.jmp_context.gpr[31]);
590 #endif // USE_SIGNALS 593 void Sampler::SetUp() {
594 #if defined(USE_SIGNALS) 595 SignalHandler::SetUp();
600 void Sampler::TearDown() {
601 #if defined(USE_SIGNALS) 602 SignalHandler::TearDown();
606 Sampler::Sampler(Isolate* isolate)
607 : is_counting_samples_(false),
609 external_sample_count_(0),
612 has_processing_thread_(false),
615 data_ =
new PlatformData;
618 void Sampler::UnregisterIfRegistered() {
619 #if defined(USE_SIGNALS) 620 if (IsRegistered()) {
621 SamplerManager::instance()->RemoveSampler(
this);
622 SetRegistered(
false);
627 Sampler::~Sampler() {
629 DCHECK(!IsRegistered());
633 void Sampler::Start() {
636 #if defined(USE_SIGNALS) 637 SamplerManager::instance()->AddSampler(
this);
642 void Sampler::Stop() {
643 #if defined(USE_SIGNALS) 644 SamplerManager::instance()->RemoveSampler(
this);
648 SetRegistered(
false);
652 void Sampler::IncreaseProfilingDepth() {
653 base::Relaxed_AtomicIncrement(&profiling_, 1);
654 #if defined(USE_SIGNALS) 655 SignalHandler::IncreaseSamplerCount();
660 void Sampler::DecreaseProfilingDepth() {
661 #if defined(USE_SIGNALS) 662 SignalHandler::DecreaseSamplerCount();
664 base::Relaxed_AtomicIncrement(&profiling_, -1);
668 #if defined(USE_SIGNALS) 670 void Sampler::DoSample() {
671 if (!SignalHandler::Installed())
return;
672 if (!IsActive() && !IsRegistered()) {
673 SamplerManager::instance()->AddSampler(
this);
676 pthread_kill(platform_data()->vm_tid(), SIGPROF);
679 #elif V8_OS_WIN || V8_OS_CYGWIN 681 void Sampler::DoSample() {
682 HANDLE profiled_thread = platform_data()->profiled_thread();
683 if (profiled_thread ==
nullptr)
return;
685 const DWORD kSuspendFailed =
static_cast<DWORD
>(-1);
686 if (SuspendThread(profiled_thread) == kSuspendFailed)
return;
690 memset(&context, 0,
sizeof(context));
691 context.ContextFlags = CONTEXT_FULL;
692 if (GetThreadContext(profiled_thread, &context) != 0) {
695 state.pc =
reinterpret_cast<void*
>(context.Rip);
696 state.sp =
reinterpret_cast<void*
>(context.Rsp);
697 state.fp =
reinterpret_cast<void*
>(context.Rbp);
698 #elif V8_HOST_ARCH_ARM64 699 state.pc =
reinterpret_cast<void*
>(context.Pc);
700 state.sp =
reinterpret_cast<void*
>(context.Sp);
701 state.fp =
reinterpret_cast<void*
>(context.Fp);
703 state.pc =
reinterpret_cast<void*
>(context.Eip);
704 state.sp =
reinterpret_cast<void*
>(context.Esp);
705 state.fp =
reinterpret_cast<void*
>(context.Ebp);
709 ResumeThread(profiled_thread);
714 void Sampler::DoSample() {
715 zx_handle_t profiled_thread = platform_data()->profiled_thread();
716 if (profiled_thread == ZX_HANDLE_INVALID)
return;
718 zx_handle_t suspend_token = ZX_HANDLE_INVALID;
719 if (zx_task_suspend_token(profiled_thread, &suspend_token) != ZX_OK)
return;
726 zx_signals_t signals = 0;
727 zx_status_t suspended = zx_object_wait_one(
728 profiled_thread, ZX_THREAD_SUSPENDED | ZX_THREAD_TERMINATED,
729 zx_deadline_after(ZX_MSEC(100)), &signals);
730 if (suspended != ZX_OK || (signals & ZX_THREAD_SUSPENDED) == 0) {
731 zx_handle_close(suspend_token);
736 zx_thread_state_general_regs_t thread_state = {};
737 if (zx_thread_read_state(profiled_thread, ZX_THREAD_STATE_GENERAL_REGS,
738 &thread_state,
sizeof(thread_state)) == ZX_OK) {
741 state.pc =
reinterpret_cast<void*
>(thread_state.rip);
742 state.sp =
reinterpret_cast<void*
>(thread_state.rsp);
743 state.fp =
reinterpret_cast<void*
>(thread_state.rbp);
744 #elif V8_HOST_ARCH_ARM64 745 state.pc =
reinterpret_cast<void*
>(thread_state.pc);
746 state.sp =
reinterpret_cast<void*
>(thread_state.sp);
747 state.fp =
reinterpret_cast<void*
>(thread_state.r[29]);
752 zx_handle_close(suspend_token);
756 #if defined(ZX_THREAD_STATE_REGSET0) 757 #undef ZX_THREAD_STATE_GENERAL_REGS 760 #endif // USE_SIGNALS