// Mac-only: returns the calling thread's combined user+system CPU time in
// microseconds, queried via the Mach thread_info() API.
// NOTE(review): extraction has fused the include block with this function's
// interior, and the function signature (presumably
// `V8_INLINE int64_t ComputeThreadTicks()`) plus several argument lines of the
// thread_info() call were elided — confirm against upstream time.cc.
5 #include "src/base/platform/time.h" 13 #include <mach/mach.h> 14 #include <mach/mach_time.h> 22 #include "src/base/atomicops.h" 23 #include "src/base/lazy-instance.h" 24 #include "src/base/win32-headers.h" 26 #include "src/base/cpu.h" 27 #include "src/base/logging.h" 28 #include "src/base/platform/platform.h" 34 mach_msg_type_number_t thread_info_count = THREAD_BASIC_INFO_COUNT;
35 thread_basic_info_data_t thread_info_data;
// Query basic accounting info for the current thread (converted from the
// pthread handle to a Mach port).
36 kern_return_t kr = thread_info(
37 pthread_mach_thread_np(pthread_self()),
39 reinterpret_cast<thread_info_t>(&thread_info_data),
// Abort if the kernel call failed; the result below would be garbage.
41 CHECK_EQ(kr, KERN_SUCCESS);
// Sum user + system seconds first (in a checked integer, per ValueOrDie()
// below), then scale to microseconds and add the sub-second components.
44 thread_info_data.user_time.seconds +
45 thread_info_data.system_time.seconds);
46 absolute_micros *= v8::base::Time::kMicrosecondsPerSecond;
47 absolute_micros += (thread_info_data.user_time.microseconds +
48 thread_info_data.system_time.microseconds);
// ValueOrDie() traps on overflow of the checked arithmetic above.
49 return absolute_micros.ValueOrDie();
// Reads the given POSIX clock and returns its value in microseconds.
// On AIX, CLOCK_THREAD_CPUTIME_ID is serviced via thread_cputime() instead of
// clock_gettime(). NOTE(review): several interior lines (error-return bodies,
// #else branches, closing braces) were elided in this extraction.
56 V8_INLINE
int64_t ClockNow(clockid_t clk_id) {
57 #if (defined(_POSIX_MONOTONIC_CLOCK) && _POSIX_MONOTONIC_CLOCK >= 0) || \ 58 defined(V8_OS_BSD) || defined(V8_OS_ANDROID) 61 #if defined(V8_OS_AIX) 63 if (clk_id == CLOCK_THREAD_CPUTIME_ID) {
// AIX path: thread_cputime(-1, ...) queries the calling thread.
64 if (thread_cputime(-1, &tc) != 0) {
70 if (clock_gettime(clk_id, &ts) != 0) {
// Seconds scaled into a checked integer, then the nanosecond remainder is
// folded in below (tc.stime on the AIX thread-CPU path, ts.tv_nsec otherwise).
74 result *= v8::base::Time::kMicrosecondsPerSecond;
75 #if defined(V8_OS_AIX) 76 if (clk_id == CLOCK_THREAD_CPUTIME_ID) {
77 result += (tc.stime / v8::base::Time::kNanosecondsPerMicrosecond);
79 result += (ts.tv_nsec / v8::base::Time::kNanosecondsPerMicrosecond);
82 result += (ts.tv_nsec / v8::base::Time::kNanosecondsPerMicrosecond);
// Checked arithmetic: traps on overflow.
84 return result.ValueOrDie();
// Probes whether clk_id ticks at (sub-)microsecond granularity: repeatedly
// samples the clock and measures the smallest observable non-zero delta.
// If deltas of <= 1 microsecond keep occurring before the ~100ms deadline
// passes, the timer is considered high resolution.
85 #else // Monotonic clock not supported. 90 V8_INLINE
bool IsHighResolutionTimer(clockid_t clk_id) {
// Deadline: stop probing ~100ms from now.
95 ClockNow(clk_id) + 100 * v8::base::Time::kMicrosecondsPerMillisecond;
98 start = ClockNow(clk_id);
// Inner loop: spin until the clock visibly advances (delta becomes non-zero).
103 delta = ClockNow(clk_id) - start;
104 }
while (delta == 0);
// Outer loop: keep sampling while deltas are coarse (> 1us) and the deadline
// has not been reached. NOTE(review): the final return statement was elided in
// this extraction.
105 }
while (delta > 1 && start < end);
// Windows: returns true when QueryPerformanceCounter is known-unreliable —
// family-15 "AuthenticAMD" parts (same check as IsBuggyAthlon below).
// NOTE(review): the local `cpu` declaration line (presumably
// `v8::base::CPU cpu;`) was elided in this extraction.
110 V8_INLINE
bool IsQPCReliable() {
113 return strcmp(cpu.vendor(),
"AuthenticAMD") == 0 && cpu.family() == 15;
// Windows: raw QueryPerformanceCounter reading, unconverted ticks.
// NOTE(review): lines between the call and the return (presumably a
// DCHECK/USE of `result`) were elided in this extraction.
117 V8_INLINE uint64_t QPCNowRaw() {
118 LARGE_INTEGER perf_counter_now = {};
122 BOOL result = ::QueryPerformanceCounter(&perf_counter_now);
125 return perf_counter_now.QuadPart;
// Whole days in this delta, truncating toward zero.
127 #endif // V8_OS_MACOSX 135 int TimeDelta::InDays()
const {
// NOTE(review): the guard around this early return (presumably an is-max
// check that preserves max to avoid overflow) was elided in this extraction;
// it is not an unconditional return.
138 return std::numeric_limits<int>::max();
140 return static_cast<int>(delta_ / Time::kMicrosecondsPerDay);
// Whole hours in this delta, truncating toward zero.
143 int TimeDelta::InHours()
const {
// NOTE(review): elided guard — early return applies only when the delta is
// the max sentinel (prevents overflow); confirm against upstream.
146 return std::numeric_limits<int>::max();
148 return static_cast<int>(delta_ / Time::kMicrosecondsPerHour);
// Whole minutes in this delta, truncating toward zero.
151 int TimeDelta::InMinutes()
const {
// NOTE(review): elided guard — max-sentinel early return, not unconditional.
154 return std::numeric_limits<int>::max();
156 return static_cast<int>(delta_ / Time::kMicrosecondsPerMinute);
// Seconds in this delta as a double (fractional part preserved).
159 double TimeDelta::InSecondsF()
const {
// NOTE(review): elided guard — the max sentinel maps to +infinity.
162 return std::numeric_limits<double>::infinity();
164 return static_cast<double>(delta_) / Time::kMicrosecondsPerSecond;
// Whole seconds in this delta, truncating toward zero.
167 int64_t TimeDelta::InSeconds()
const {
// NOTE(review): elided guard — max-sentinel early return.
170 return std::numeric_limits<int64_t>::max();
172 return delta_ / Time::kMicrosecondsPerSecond;
// Milliseconds in this delta as a double (fractional part preserved).
175 double TimeDelta::InMillisecondsF()
const {
// NOTE(review): elided guard — the max sentinel maps to +infinity.
178 return std::numeric_limits<double>::infinity();
180 return static_cast<double>(delta_) / Time::kMicrosecondsPerMillisecond;
// Whole milliseconds in this delta, truncating toward zero.
183 int64_t TimeDelta::InMilliseconds()
const {
// NOTE(review): elided guard — max-sentinel early return.
186 return std::numeric_limits<int64_t>::max();
188 return delta_ / Time::kMicrosecondsPerMillisecond;
// Milliseconds in this delta, rounding any fractional millisecond UP
// (ceiling division via the add-(divisor-1) idiom).
191 int64_t TimeDelta::InMillisecondsRoundedUp()
const {
// NOTE(review): elided guard — max-sentinel early return.
194 return std::numeric_limits<int64_t>::max();
196 return (delta_ + Time::kMicrosecondsPerMillisecond - 1) /
197 Time::kMicrosecondsPerMillisecond;
// Microseconds in this delta (the native unit — the plain `return delta_;`
// line was elided in this extraction, along with the max-sentinel guard).
200 int64_t TimeDelta::InMicroseconds()
const {
203 return std::numeric_limits<int64_t>::max();
// Nanoseconds in this delta. Note the multiply can overflow for very large
// deltas below the max sentinel.
208 int64_t TimeDelta::InNanoseconds()
const {
// NOTE(review): elided guard — max-sentinel early return.
211 return std::numeric_limits<int64_t>::max();
213 return delta_ * Time::kNanosecondsPerMicrosecond;
// Mac-only: builds a TimeDelta from a Mach timespec, truncating sub-
// microsecond precision. DCHECKs that tv_nsec is a normalized value in
// [0, one second).
219 TimeDelta TimeDelta::FromMachTimespec(
struct mach_timespec ts) {
220 DCHECK_GE(ts.tv_nsec, 0);
221 DCHECK_LT(ts.tv_nsec,
222 static_cast<long>(Time::kNanosecondsPerSecond));
223 return TimeDelta(ts.tv_sec * Time::kMicrosecondsPerSecond +
224 ts.tv_nsec / Time::kNanosecondsPerMicrosecond);
// Mac-only: converts this (non-negative) delta to a Mach timespec.
// mach_timespec uses unsigned seconds, hence the DCHECK and the cast.
228 struct mach_timespec TimeDelta::ToMachTimespec() const {
229 struct mach_timespec ts;
230 DCHECK_GE(delta_, 0);
231 ts.tv_sec =
static_cast<unsigned>(delta_ / Time::kMicrosecondsPerSecond);
// Remaining sub-second microseconds, scaled up to nanoseconds.
232 ts.tv_nsec = (delta_ % Time::kMicrosecondsPerSecond) *
233 Time::kNanosecondsPerMicrosecond;
// POSIX: builds a TimeDelta from a struct timespec, truncating sub-
// microsecond precision. tv_nsec must be normalized to [0, one second).
237 #endif // V8_OS_MACOSX 242 TimeDelta TimeDelta::FromTimespec(
struct timespec ts) {
243 DCHECK_GE(ts.tv_nsec, 0);
244 DCHECK_LT(ts.tv_nsec,
245 static_cast<long>(Time::kNanosecondsPerSecond));
246 return TimeDelta(ts.tv_sec * Time::kMicrosecondsPerSecond +
247 ts.tv_nsec / Time::kNanosecondsPerMicrosecond);
// POSIX: converts this delta to a struct timespec (seconds + nanoseconds).
// NOTE(review): the local `struct timespec ts;` declaration and the return
// were elided in this extraction.
251 struct timespec TimeDelta::ToTimespec() const {
253 ts.tv_sec =
static_cast<time_t
>(delta_ / Time::kMicrosecondsPerSecond);
// Sub-second remainder expressed in nanoseconds.
254 ts.tv_nsec = (delta_ % Time::kMicrosecondsPerSecond) *
255 Time::kNanosecondsPerMicrosecond;
// Windows hybrid clock: pairs a monotonic tick reading (initial_ticks_) with
// a wall-clock anchor (initial_time_) so Now() is monotonic between resyncs.
259 #endif // V8_OS_POSIX 269 Clock() : initial_ticks_(GetSystemTicks()), initial_time_(GetSystemTime()) {}
// Resync threshold: after one minute of elapsed ticks, re-anchor to the
// system clock so long-term drift stays bounded.
273 const TimeDelta kMaxElapsedTime = TimeDelta::FromMinutes(1);
// Guards initial_ticks_/initial_time_ against concurrent readers.
275 MutexGuard lock_guard(&mutex_);
278 TimeTicks ticks = GetSystemTicks();
279 Time time = GetSystemTime();
283 TimeDelta elapsed = ticks - initial_ticks_;
// Re-anchor if the wall clock moved backwards or too much time elapsed.
284 if (time < initial_time_ || elapsed > kMaxElapsedTime) {
285 initial_ticks_ = ticks;
286 initial_time_ = time;
// Result = wall-clock anchor + monotonic ticks since the anchor.
290 return initial_time_ + elapsed;
// Forces a resync to the system wall clock and returns the fresh anchor.
293 Time NowFromSystemTime() {
294 MutexGuard lock_guard(&mutex_);
295 initial_ticks_ = GetSystemTicks();
296 initial_time_ = GetSystemTime();
297 return initial_time_;
// Monotonic tick source for the hybrid clock.
301 static TimeTicks GetSystemTicks() {
302 return TimeTicks::Now();
// Wall-clock source: Win32 FILETIME converted to a Time.
// NOTE(review): the local FILETIME declaration was elided in this extraction.
305 static Time GetSystemTime() {
307 ::GetSystemTimeAsFileTime(&ft);
308 return Time::FromFiletime(ft);
// Anchor state (initial_time_ and mutex_ member lines were elided here).
311 TimeTicks initial_ticks_;
// Lazily-created, thread-safe singleton hybrid Clock shared by Time::Now()
// and Time::NowFromSystemTime() on Windows.
317 static LazyStaticInstance<Clock, DefaultConstructTrait<Clock>,
318 ThreadSafeInitOnceTrait>::type clock =
319 LAZY_STATIC_INSTANCE_INITIALIZER;
// NOTE(review): the `Time Time::Now()` signature line was elided.
323 return clock.Pointer()->Now();
327 Time Time::NowFromSystemTime() {
328 return clock.Pointer()->NowFromSystemTime();
// Offset between the Windows FILETIME epoch (1601-01-01) and the Unix epoch
// (1970-01-01), in microseconds.
333 static const int64_t kTimeToEpochInMicroseconds =
int64_t{11644473600000000};
// Converts a Win32 FILETIME to Time. All-zero maps to the null Time and
// all-ones maps to the max sentinel (the elided lines between the two `if`s
// presumably return those sentinels).
335 Time Time::FromFiletime(FILETIME ft) {
336 if (ft.dwLowDateTime == 0 && ft.dwHighDateTime == 0) {
339 if (ft.dwLowDateTime == std::numeric_limits<DWORD>::max() &&
340 ft.dwHighDateTime == std::numeric_limits<DWORD>::max()) {
// FILETIME counts 100ns units; /10 yields microseconds, then shift the
// epoch from 1601 to 1970.
343 int64_t us = (
static_cast<uint64_t
>(ft.dwLowDateTime) +
344 (static_cast<uint64_t>(ft.dwHighDateTime) << 32)) / 10;
345 return Time(us - kTimeToEpochInMicroseconds);
// Converts this Time to a Win32 FILETIME; null maps to all-zero and the max
// sentinel to all-ones, mirroring FromFiletime(). NOTE(review): the guard
// conditions, local FILETIME declaration and returns around these branches
// were elided in this extraction.
349 FILETIME Time::ToFiletime()
const {
353 ft.dwLowDateTime = 0;
354 ft.dwHighDateTime = 0;
358 ft.dwLowDateTime = std::numeric_limits<DWORD>::max();
359 ft.dwHighDateTime = std::numeric_limits<DWORD>::max();
// Shift the epoch back to 1601 and scale microseconds to 100ns units.
362 uint64_t us =
static_cast<uint64_t
>(us_ + kTimeToEpochInMicroseconds) * 10;
363 ft.dwLowDateTime =
static_cast<DWORD
>(us);
364 ft.dwHighDateTime =
static_cast<DWORD
>(us >> 32);
// POSIX Time::Now(): wall clock via gettimeofday().
// NOTE(review): the `Time Time::Now()` signature, the local timeval
// declaration, and USE(result) were elided in this extraction.
372 int result = gettimeofday(&tv,
nullptr);
373 DCHECK_EQ(0, result);
375 return FromTimeval(tv);
// On POSIX there is no separate system-time source, so this simply
// delegates (the `return Now();` body was elided here).
379 Time Time::NowFromSystemTime() {
// POSIX: builds a Time from a normalized struct timespec. Zero maps to the
// null Time; the (max seconds, max nanoseconds) pair maps to the max
// sentinel (the elided lines inside the `if`s presumably return those).
384 Time Time::FromTimespec(
struct timespec ts) {
385 DCHECK_GE(ts.tv_nsec, 0);
386 DCHECK_LT(ts.tv_nsec, kNanosecondsPerSecond);
387 if (ts.tv_nsec == 0 && ts.tv_sec == 0) {
390 if (ts.tv_nsec == static_cast<long>(kNanosecondsPerSecond - 1) &&
391 ts.tv_sec == std::numeric_limits<time_t>::max()) {
// Truncates sub-microsecond precision.
394 return Time(ts.tv_sec * kMicrosecondsPerSecond +
395 ts.tv_nsec / kNanosecondsPerMicrosecond);
// POSIX: converts this Time to a struct timespec, inverse of FromTimespec().
// NOTE(review): the local declaration, the null-Time branch and the guard
// conditions/returns around the max-sentinel branch were elided in this
// extraction.
399 struct timespec Time::ToTimespec() const {
// Max sentinel encodes as (max seconds, max nanoseconds).
407 ts.tv_sec = std::numeric_limits<time_t>::max();
408 ts.tv_nsec =
static_cast<long>(kNanosecondsPerSecond - 1);
411 ts.tv_sec =
static_cast<time_t
>(us_ / kMicrosecondsPerSecond);
412 ts.tv_nsec = (us_ % kMicrosecondsPerSecond) * kNanosecondsPerMicrosecond;
// POSIX: builds a Time from a normalized struct timeval (microsecond
// resolution, so no truncation). Zero maps to the null Time; the
// (max seconds, max microseconds) pair maps to the max sentinel (the elided
// lines inside the `if`s presumably return those).
417 Time Time::FromTimeval(
struct timeval tv) {
418 DCHECK_GE(tv.tv_usec, 0);
419 DCHECK(tv.tv_usec < static_cast<suseconds_t>(kMicrosecondsPerSecond));
420 if (tv.tv_usec == 0 && tv.tv_sec == 0) {
423 if (tv.tv_usec == static_cast<suseconds_t>(kMicrosecondsPerSecond - 1) &&
424 tv.tv_sec == std::numeric_limits<time_t>::max()) {
427 return Time(tv.tv_sec * kMicrosecondsPerSecond + tv.tv_usec);
// POSIX: converts this Time to a struct timeval, inverse of FromTimeval().
// NOTE(review): the local declaration, the null-Time branch, and the guard
// conditions/returns were elided in this extraction.
431 struct timeval Time::ToTimeval() const {
// Max sentinel encodes as (max seconds, max microseconds).
439 tv.tv_sec = std::numeric_limits<time_t>::max();
440 tv.tv_usec =
static_cast<suseconds_t
>(kMicrosecondsPerSecond - 1);
443 tv.tv_sec =
static_cast<time_t
>(us_ / kMicrosecondsPerSecond);
444 tv.tv_usec = us_ % kMicrosecondsPerSecond;
// Legacy alias kept for callers: Now() is already the highest-resolution
// source on this platform.
451 TimeTicks TimeTicks::HighResolutionNow() {
457 return TimeTicks::Now();
// Builds a Time from a JavaScript timestamp (fractional milliseconds since
// the Unix epoch). double max maps to the max sentinel (the elided lines in
// that branch presumably return it); otherwise scale ms -> us and truncate.
460 Time Time::FromJsTime(
double ms_since_epoch) {
463 if (ms_since_epoch == std::numeric_limits<double>::max()) {
467 static_cast<int64_t>(ms_since_epoch * kMicrosecondsPerMillisecond));
// Converts this Time to a JavaScript timestamp (fractional milliseconds
// since the Unix epoch). NOTE(review): the guard around the early return
// (presumably the max-sentinel check) was elided in this extraction.
471 double Time::ToJsTime()
const {
478 return std::numeric_limits<double>::max();
480 return static_cast<double>(us_) / kMicrosecondsPerMillisecond;
484 std::ostream& operator<<(std::ostream& os,
const Time& time) {
485 return os << time.ToJsTime();
496 DWORD timeGetTimeWrapper() {
return timeGetTime(); }
498 DWORD (*g_tick_function)(void) = &timeGetTimeWrapper;
// Packs the last-observed tick byte and a rollover count into a single
// 32-bit word so RolloverProtectedNow() can update both atomically with one
// compare-and-swap. NOTE(review): the `as_values` struct members (last_8,
// rollovers) were elided in this extraction.
502 union LastTimeAndRolloversState {
504 base::Atomic32 as_opaque_32;
// Shared state, updated lock-free by RolloverProtectedNow().
520 base::Atomic32 g_last_time_and_rollovers = 0;
// Compile-time proof the union fits in the atomic word it is stored through.
521 static_assert(
sizeof(LastTimeAndRolloversState) <=
522 sizeof(g_last_time_and_rollovers),
523 "LastTimeAndRolloversState does not fit in a single atomic word");
// Low-resolution Windows ticks: timeGetTime() is a 32-bit millisecond counter
// that wraps every ~49.7 days. This routine extends it to 64 bits by counting
// rollovers, detected when the top byte of the reading decreases. The loop
// retries until the shared state word is updated (or found unchanged) via CAS.
530 TimeTicks RolloverProtectedNow() {
531 LastTimeAndRolloversState state;
// NOTE(review): the `DWORD now;` declaration and the enclosing `while (true)`
// were elided in this extraction.
539 int32_t original = base::Acquire_Load(&g_last_time_and_rollovers);
540 state.as_opaque_32 = original;
541 now = g_tick_function();
// Only the top 8 bits of the 32-bit tick are tracked; a decrease there means
// the counter wrapped since the last observation.
542 uint8_t now_8 =
static_cast<uint8_t
>(now >> 24);
543 if (now_8 < state.as_values.last_8) ++state.as_values.rollovers;
544 state.as_values.last_8 = now_8;
// Fast path: nothing changed, no store needed.
547 if (state.as_opaque_32 == original)
break;
// Publish the updated state; retry the whole read if another thread won.
551 int32_t check = base::Release_CompareAndSwap(&g_last_time_and_rollovers,
552 original, state.as_opaque_32);
553 if (check == original)
break;
// Final value: current 32-bit reading plus 2^32 ms per recorded rollover.
559 TimeDelta::FromMilliseconds(
560 now + (static_cast<uint64_t>(state.as_values.rollovers) << 32));
// Forward declaration: the bootstrap implementation installed before the
// real tick source has been chosen.
598 TimeTicks InitialTimeTicksNowFunction();
// Function-pointer indirection for TimeTicks::Now(); starts at the
// bootstrap function and is later repointed to QPCNow or
// RolloverProtectedNow by InitializeTimeTicksNowFunctionPointer().
602 using TimeTicksNowFunction = decltype(&TimeTicks::Now);
603 TimeTicksNowFunction g_time_ticks_now_function = &InitialTimeTicksNowFunction;
// QPC frequency, published (with a release fence) before the pointer swap.
604 int64_t g_qpc_ticks_per_second = 0;
// Converts a raw QueryPerformanceCounter reading into a TimeDelta using the
// previously-published g_qpc_ticks_per_second. The acquire fence pairs with
// the release fence in InitializeTimeTicksNowFunctionPointer() so the
// frequency is visible before use.
609 #define ATOMIC_THREAD_FENCE(memory_order) _ReadWriteBarrier(); 611 TimeDelta QPCValueToTimeDelta(LONGLONG qpc_value) {
614 ATOMIC_THREAD_FENCE(memory_order_acquire);
615 DCHECK_GT(g_qpc_ticks_per_second, 0);
616 DCHECK_GT(g_qpc_ticks_per_second, 0);
// Fast path: small enough that ticks * 1e6 cannot overflow int64.
620 if (qpc_value < TimeTicks::kQPCOverflowThreshold) {
621 return TimeDelta::FromMicroseconds(
622 qpc_value * TimeTicks::kMicrosecondsPerSecond / g_qpc_ticks_per_second);
// Slow path for large values: split into whole seconds and leftover ticks so
// neither multiplication overflows.
626 int64_t whole_seconds = qpc_value / g_qpc_ticks_per_second;
627 int64_t leftover_ticks = qpc_value - (whole_seconds * g_qpc_ticks_per_second);
628 return TimeDelta::FromMicroseconds(
629 (whole_seconds * TimeTicks::kMicrosecondsPerSecond) +
630 ((leftover_ticks * TimeTicks::kMicrosecondsPerSecond) /
631 g_qpc_ticks_per_second));
634 TimeTicks QPCNow() {
return TimeTicks() + QPCValueToTimeDelta(QPCNowRaw()); }
// True for family-15 "AuthenticAMD" CPUs, which the initializer below treats
// as having an unusable time-stamp counter (same check as IsQPCReliable()).
636 bool IsBuggyAthlon(
const CPU& cpu) {
638 return strcmp(cpu.vendor(),
"AuthenticAMD") == 0 && cpu.family() == 15;
// Chooses the TimeTicks::Now() implementation once: QPCNow when a QPC
// frequency is available, the TSC is invariant, and the CPU is not a buggy
// Athlon; otherwise the rollover-protected low-resolution path.
641 void InitializeTimeTicksNowFunctionPointer() {
642 LARGE_INTEGER ticks_per_sec = {};
// A failed frequency query leaves 0, forcing the low-resolution fallback.
643 if (!QueryPerformanceFrequency(&ticks_per_sec)) ticks_per_sec.QuadPart = 0;
656 TimeTicksNowFunction now_function;
// NOTE(review): the local `CPU cpu;` declaration was elided in this
// extraction.
658 if (ticks_per_sec.QuadPart <= 0 || !cpu.has_non_stop_time_stamp_counter() ||
659 IsBuggyAthlon(cpu)) {
660 now_function = &RolloverProtectedNow;
662 now_function = &QPCNow;
// Publish the frequency with a release fence BEFORE swapping the function
// pointer, pairing with the acquire fence in QPCValueToTimeDelta().
674 g_qpc_ticks_per_second = ticks_per_sec.QuadPart;
675 ATOMIC_THREAD_FENCE(memory_order_release);
676 g_time_ticks_now_function = now_function;
// Bootstrap implementation: on first call, select the real tick source and
// delegate to whichever function was installed.
679 TimeTicks InitialTimeTicksNowFunction() {
680 InitializeTimeTicksNowFunctionPointer();
681 return g_time_ticks_now_function();
// Windows TimeTicks::Now(): dispatch through the chosen implementation.
// The DCHECK relies on both implementations never returning the null tick
// value (both add to a nonzero base). NOTE(review): the return statement was
// elided in this extraction.
684 #undef ATOMIC_THREAD_FENCE 689 TimeTicks TimeTicks::Now() {
691 TimeTicks ticks(g_time_ticks_now_function());
692 DCHECK(!ticks.IsNull());
// Windows: high resolution iff the QPC path was selected. Forces the
// one-time initialization first so the answer is stable.
697 bool TimeTicks::IsHighResolution() {
698 if (g_time_ticks_now_function == &InitialTimeTicksNowFunction)
699 InitializeTimeTicksNowFunctionPointer();
700 return g_time_ticks_now_function == &QPCNow;
// Non-Windows TimeTicks::Now(): Mach absolute time on macOS, gethrtime() on
// Solaris, CLOCK_MONOTONIC elsewhere; result in microseconds.
705 TimeTicks TimeTicks::Now() {
// mach_timebase_info is queried once and cached; denom == 0 marks "not yet
// initialized". NOTE(review): this lazy init is not thread-safe by itself —
// presumably benign because the struct is written with identical values.
708 static struct mach_timebase_info info;
709 if (info.denom == 0) {
710 kern_return_t result = mach_timebase_info(&info);
711 DCHECK_EQ(KERN_SUCCESS, result);
// Dividing before applying numer/denom keeps the intermediate small.
714 ticks = (mach_absolute_time() / Time::kNanosecondsPerMicrosecond *
715 info.numer / info.denom);
717 ticks = (gethrtime() / Time::kNanosecondsPerMicrosecond);
719 ticks = ClockNow(CLOCK_MONOTONIC);
// "+ 1" guarantees the result is never the null (zero) TimeTicks value.
721 #error platform does not implement TimeTicks::HighResolutionNow. 722 #endif // V8_OS_MACOSX 724 return TimeTicks(ticks + 1);
// Non-Windows: probe the monotonic clock's granularity once and cache the
// answer (function-local static init is thread-safe).
728 bool TimeTicks::IsHighResolution() {
732 static bool is_high_resolution = IsHighResolutionTimer(CLOCK_MONOTONIC);
733 return is_high_resolution;
// Per-thread CPU time is available on POSIX systems with thread CPU clocks,
// macOS, Android, Solaris, and (conditionally) Windows. NOTE(review): the
// `return true;` / `return false;` branch bodies were elided in this
// extraction.
742 bool ThreadTicks::IsSupported() {
743 #if (defined(_POSIX_THREAD_CPUTIME) && (_POSIX_THREAD_CPUTIME >= 0)) || \ 744 defined(V8_OS_MACOSX) || defined(V8_OS_ANDROID) || defined(V8_OS_SOLARIS) 746 #elif defined(V8_OS_WIN) 747 return IsSupportedWin();
// Current thread's CPU time, per platform: Mach thread_info on macOS,
// CLOCK_THREAD_CPUTIME_ID on POSIX/Android, gethrvtime() on Solaris, and
// QueryThreadCycleTime-based ticks on Windows.
754 ThreadTicks ThreadTicks::Now() {
756 return ThreadTicks(ComputeThreadTicks());
757 #elif(defined(_POSIX_THREAD_CPUTIME) && (_POSIX_THREAD_CPUTIME >= 0)) || \ 758 defined(V8_OS_ANDROID) 759 return ThreadTicks(ClockNow(CLOCK_THREAD_CPUTIME_ID));
761 return ThreadTicks(gethrvtime() / Time::kNanosecondsPerMicrosecond);
763 return ThreadTicks::GetForThread(::GetCurrentThread());
// Windows: converts a thread's cycle count (QueryThreadCycleTime) into CPU
// time using the calibrated TSC frequency.
771 ThreadTicks ThreadTicks::GetForThread(
const HANDLE& thread_handle) {
772 DCHECK(IsSupported());
// Raw cycle count consumed by the thread, in TSC ticks.
775 ULONG64 thread_cycle_time = 0;
776 ::QueryThreadCycleTime(thread_handle, &thread_cycle_time);
// Calibration may not have finished yet (TSCTicksPerSecond() returns 0 for
// ~50ms after first use); report a null value until it has.
779 double tsc_ticks_per_second = TSCTicksPerSecond();
780 if (tsc_ticks_per_second == 0)
781 return ThreadTicks();
// NOTE(review): the `return ThreadTicks(...)` wrapper line around this cast
// was elided in this extraction.
784 double thread_time_seconds = thread_cycle_time / tsc_ticks_per_second;
786 static_cast<int64_t>(thread_time_seconds * Time::kMicrosecondsPerSecond));
// Windows support requires an invariant TSC (so cycle counts map linearly to
// time). NOTE(review): the remainder of the condition and the return were
// elided in this extraction.
790 bool ThreadTicks::IsSupportedWin() {
791 static bool is_supported = base::CPU().has_non_stop_time_stamp_counter() &&
// Blocks until TSC frequency calibration has produced a nonzero value.
// NOTE(review): the loop body (presumably a short ::Sleep) was elided in
// this extraction.
797 void ThreadTicks::WaitUntilInitializedWin() {
798 while (TSCTicksPerSecond() == 0)
// Calibrates the CPU cycle counter (TSC on x86, PMCCNTR_EL0 on ARM64)
// against QPC: records an initial (cycle, QPC) pair on first call, then on
// later calls measures both again and divides elapsed cycles by elapsed QPC
// seconds. Returns 0 until at least ~50ms have elapsed between the samples.
802 #ifdef V8_HOST_ARCH_ARM64 803 #define ReadCycleCounter() _ReadStatusReg(ARM64_PMCCNTR_EL0) 805 #define ReadCycleCounter() __rdtsc() 808 double ThreadTicks::TSCTicksPerSecond() {
809 DCHECK(IsSupported());
// Cached result; computed once, then returned directly.
817 static double tsc_ticks_per_second = 0;
818 if (tsc_ticks_per_second != 0)
819 return tsc_ticks_per_second;
// Raise priority while sampling so scheduling delay between the cycle-counter
// read and the QPC read stays small.
823 int previous_priority = ::GetThreadPriority(::GetCurrentThread());
824 ::SetThreadPriority(::GetCurrentThread(), THREAD_PRIORITY_HIGHEST);
// Baseline pair, captured exactly once (function-local statics).
828 static const uint64_t tsc_initial = ReadCycleCounter();
829 static const uint64_t perf_counter_initial = QPCNowRaw();
// Current pair for this call.
833 uint64_t tsc_now = ReadCycleCounter();
834 uint64_t perf_counter_now = QPCNowRaw();
837 ::SetThreadPriority(::GetCurrentThread(), previous_priority);
// Elapsed wall time per QPC.
848 LARGE_INTEGER perf_counter_frequency = {};
849 ::QueryPerformanceFrequency(&perf_counter_frequency);
850 DCHECK_GE(perf_counter_now, perf_counter_initial);
851 uint64_t perf_counter_ticks = perf_counter_now - perf_counter_initial;
852 double elapsed_time_seconds =
853 perf_counter_ticks /
static_cast<double>(perf_counter_frequency.QuadPart);
// Too little time between samples gives a noisy ratio: report "not ready"
// (the elided line presumably returns 0 here).
855 const double kMinimumEvaluationPeriodSeconds = 0.05;
856 if (elapsed_time_seconds < kMinimumEvaluationPeriodSeconds)
// Elapsed cycles over the same interval yield the frequency.
860 DCHECK_GE(tsc_now, tsc_initial);
861 uint64_t tsc_ticks = tsc_now - tsc_initial;
862 tsc_ticks_per_second = tsc_ticks / elapsed_time_seconds;
864 return tsc_ticks_per_second;
866 #undef ReadCycleCounter