21#ifndef BENCHMARK_CYCLECLOCK_H_
22#define BENCHMARK_CYCLECLOCK_H_
26#include "benchmark/benchmark.h"
27#include "internal_macros.h"
29#if defined(BENCHMARK_OS_MACOSX)
30#include <mach/mach_time.h>
39#if defined(COMPILER_MSVC) && !defined(_M_IX86) && !defined(_M_ARM64)
40extern "C" uint64_t __rdtsc();
41#pragma intrinsic(__rdtsc)
44#if !defined(BENCHMARK_OS_WINDOWS) || defined(BENCHMARK_OS_MINGW)
49#ifdef BENCHMARK_OS_EMSCRIPTEN
50#include <emscripten.h>
// Now(): read a fast, monotonically increasing per-architecture tick/cycle
// counter; the implementation is selected at preprocessing time by OS/CPU
// macros. Units differ per branch (raw cycles, timebase ticks, ns, or us) --
// callers presumably convert via a frequency elsewhere; TODO confirm.
//
// NOTE(review): this text appears to be a GARBLED EXTRACTION of the real
// header. The file's original line numbers are fused onto the code
// (e.g. "61inline", "72 return"), statements are split mid-token
// (e.g. "static_cast<int64_t" / ">(...)"), and line-number gaps show that
// many interior lines were dropped: the declarations of ret, low/high, tb,
// tick, itc, tv, tsc, cycles, pmuseren/pmcntenset/pmccntr and pcycle, the
// MSVC x86 and generic-MSVC branch bodies, and the asm statements that the
// bare "97 :" and "204 :" operand lists belong to are all missing. It cannot
// compile as-is; restore from the canonical upstream header before editing.
61inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
// macOS: Mach absolute-time units (timebase ticks, not necessarily ns).
62#if defined(BENCHMARK_OS_MACOSX)
72 return mach_absolute_time();
// Emscripten: emscripten_get_now() is in ms; * 1e+6 scales ms -> ns.
73#elif defined(BENCHMARK_OS_EMSCRIPTEN)
76 return static_cast<int64_t
>(emscripten_get_now() * 1e+6);
// 32-bit x86: RDTSC; "=A" places the 64-bit result in EDX:EAX.
// NOTE(review): the declaration of `ret` (and its return) is missing here.
77#elif defined(__i386__)
79 __asm__
volatile(
"rdtsc" :
"=A"(ret));
// x86-64: RDTSC splits the TSC across EAX (low) / EDX (high).
// NOTE(review): declarations of `low`/`high` are missing from this view;
// `high` must be a 64-bit type for the << 32 below to be well-defined.
81#elif defined(__x86_64__) || defined(__amd64__)
83 __asm__
volatile(
"rdtsc" :
"=a"(low),
"=d"(high));
84 return (high << 32) | low;
85#elif defined(__powerpc__) || defined(__ppc__)
// 64-bit PowerPC: SPR 268 is the time base; one read suffices.
// NOTE(review): declaration of `tb` and its return are missing here.
87#if defined(__powerpc64__) || defined(__ppc64__)
89 asm volatile(
"mfspr %0, 268" :
"=r"(tb));
// 32-bit PowerPC: read upper/lower/upper; the asm that fills these
// operands is missing from this view (only its operand list survives).
92 uint32_t tbl, tbu0, tbu1;
97 :
"=r"(tbu0),
"=r"(tbl),
"=r"(tbu1));
// Mask tbl to zero if the upper word rolled over between reads
// (tbu0 != tbu1), so the combined value is never torn.
98 tbl &= -
static_cast<int32_t
>(tbu0 == tbu1);
100 return (
static_cast<uint64_t
>(tbu1) << 32) | tbl;
// SPARC: raw opcode bytes read the tick register into %g1, then copy out.
// NOTE(review): declaration of `tick` and its return are missing here.
102#elif defined(__sparc__)
104 asm(
".byte 0x83, 0x41, 0x00, 0x00");
105 asm(
"mov %%g1, %0" :
"=r"(tick));
// IA-64: ar.itc is the interval time counter.
// NOTE(review): declaration of `itc` and its return are missing here.
107#elif defined(__ia64__)
109 asm(
"mov %0 = ar.itc" :
"=r"(itc));
// MSVC/x86 branch: body missing from this view (original lines 112-116).
111#elif defined(COMPILER_MSVC) && defined(_M_IX86)
// MSVC/ARM64: CNTVCT is the virtual counter-timer system register.
117#elif defined(COMPILER_MSVC) && defined(_M_ARM64)
120 int64_t virtual_timer_value;
121 virtual_timer_value = _ReadStatusReg(ARM64_CNTVCT);
122 return virtual_timer_value;
// Generic MSVC branch (presumably __rdtsc(), per the decl at the top of
// the file): body missing from this view.
123#elif defined(COMPILER_MSVC)
// NaCl: no direct counter access; fall back to CLOCK_MONOTONIC in ns.
125#elif defined(BENCHMARK_OS_NACL)
138 struct timespec ts = {0, 0};
139 clock_gettime(CLOCK_MONOTONIC, &ts);
140 return static_cast<int64_t
>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
// AArch64: cntvct_el0 is the EL0-readable virtual counter.
141#elif defined(__aarch64__)
146 int64_t virtual_timer_value;
147 asm volatile(
"mrs %0, cntvct_el0" :
"=r"(virtual_timer_value));
148 return virtual_timer_value;
// 32-bit ARM: try the PMU cycle counter (cp15 c9 registers); the c9,c14,0
// read is PMUSERENR (user-enable) and the guard on PMCNTENSET bit 31
// checks the cycle counter is enabled before reading PMCCNTR.
// NOTE(review): declarations of pmuseren/pmcntenset/pmccntr, the check of
// pmuseren, the closing brace of this if, and the `struct timeval tv;`
// used by the gettimeofday fallback are all missing from this view.
149#elif defined(__ARM_ARCH)
157 asm volatile(
"mrc p15, 0, %0, c9, c14, 0" :
"=r"(pmuseren));
159 asm volatile(
"mrc p15, 0, %0, c9, c12, 1" :
"=r"(pmcntenset));
160 if (pmcntenset & 0x80000000ul) {
161 asm volatile(
"mrc p15, 0, %0, c9, c13, 0" :
"=r"(pmccntr));
// PMCCNTR presumably configured to tick once per 64 cycles -- hence * 64;
// TODO confirm against the divider setting.
163 return static_cast<int64_t
>(pmccntr) * 64;
// Fallback: wall-clock microseconds via gettimeofday.
168 gettimeofday(&tv,
nullptr);
169 return static_cast<int64_t
>(tv.tv_sec) * 1000000 + tv.tv_usec;
// MIPS / m68k: no portable counter used here; microsecond wall clock.
// NOTE(review): `struct timeval tv;` declarations are missing in these
// gettimeofday branches too.
170#elif defined(__mips__) || defined(__m68k__)
174 gettimeofday(&tv,
nullptr);
175 return static_cast<int64_t
>(tv.tv_sec) * 1000000 + tv.tv_usec;
// LoongArch / C-SKY: same microsecond wall-clock fallback.
176#elif defined(__loongarch__) || defined(__csky__)
178 gettimeofday(&tv,
nullptr);
179 return static_cast<int64_t
>(tv.tv_sec) * 1000000 + tv.tv_usec;
// s390: STCK stores the TOD clock; z/OS XL asm syntax needs the leading
// blank and an "=m" operand, other compilers use "=Q".
// NOTE(review): declaration of `tsc` and its return are missing here.
180#elif defined(__s390__)
183#if defined(BENCHMARK_OS_ZOS) && defined(COMPILER_IBMXL)
185 asm(
" stck %0" :
"=m"(tsc) : :
"cc");
187 asm(
"stck %0" :
"=Q"(tsc) : :
"cc");
190#elif defined(__riscv)
// RV32: cycle CSR is 32 bits, so read hi/lo/hi and retry-combine; the asm
// that fills these operands is missing from this view (only its operand
// list at "204 :" survives).
192#if __riscv_xlen == 32
193 uint32_t cycles_lo, cycles_hi0, cycles_hi1;
204 :
"=r"(cycles_hi0),
"=r"(cycles_lo),
"=r"(cycles_hi1));
205 return (
static_cast<uint64_t
>(cycles_hi1) << 32) | cycles_lo;
// RV64: single rdcycle read.
// NOTE(review): declaration of `cycles` and its return are missing here.
208 asm volatile(
"rdcycle %0" :
"=r"(cycles));
// Elbrus (e2k): microsecond wall-clock fallback; `tv` decl missing here.
211#elif defined(__e2k__) || defined(__elbrus__)
213 gettimeofday(&tv,
nullptr);
214 return static_cast<int64_t
>(tv.tv_sec) * 1000000 + tv.tv_usec;
// Hexagon: C15:14 is the 64-bit PCYCLE counter pair.
// NOTE(review): `pcycle` decl missing; also the cast to double in an
// int64_t-returning function looks suspicious -- verify upstream intent.
215#elif defined(__hexagon__)
217 asm volatile(
"%0 = C15:14" :
"=r"(pcycle));
218 return static_cast<double>(pcycle);
// No branch matched: force a compile-time error rather than silently
// returning garbage on an unsupported platform.
223#error You need to define CycleTimer for your OS and CPU