我几个星期前运行我的项目时,它会向我显示一个窗口,其中显示时间、处理速度和内存占用。但现在我在相同的工具上运行相同的代码,它却不再显示那个窗口了。我在 Visual Studio 2010 和 2015 两个版本上都尝试了相同的代码,但仍然没有得到那个窗口。
这是代码
#ifdef __GNUC__
#include <time.h>
/* Return a monotonic timestamp in seconds, or -1.0f if the clock
 * query fails.
 * NOTE(review): a float keeps only ~24 bits of mantissa, so for a
 * large tv_sec the sub-second part loses precision — callers should
 * only rely on coarse timing from this helper. */
float getticks()
{
    struct timespec now;

    if (clock_gettime(CLOCK_MONOTONIC, &now) < 0) {
        return -1.0f;
    }

    return now.tv_sec + 1e-9f * now.tv_nsec;
}
#else
#include <windows.h>
/* Return a QueryPerformanceCounter timestamp in seconds, or -1.0f
 * if either the frequency or the counter query fails.
 * The counter frequency is queried once and cached for later calls. */
float getticks()
{
    static double counts_per_sec = -1.0; /* < 0 means "not queried yet" */
    LARGE_INTEGER qpc;

    if (counts_per_sec < 0.0) {
        if (!QueryPerformanceFrequency(&qpc)) {
            return -1.0f;
        }
        counts_per_sec = qpc.QuadPart;
    }

    if (!QueryPerformanceCounter(&qpc)) {
        return -1.0f;
    }

    /* Divide in double, then narrow to float for the return type. */
    return (float)(qpc.QuadPart / counts_per_sec);
}
#endif
谢谢
答案 0(得分:0)
不要像这样把结果转换为 `float`。`float` 只有 4 个字节,无法完整表示 `QuadPart` 的取值范围。应改用 `unsigned long long` 或 `long long`:
// Windows-only example: measure an elapsed interval with the
// high-resolution performance counter and print it in several units.
// NOTE(review): uses unqualified `cout`/`endl`, so it presumably sits
// under a `using namespace std;` not shown here — confirm in context.
LARGE_INTEGER freq;
QueryPerformanceFrequency(&freq);   // counts per second (constant at runtime)
LARGE_INTEGER t1;
QueryPerformanceCounter(&t1);       // start timestamp, in counter ticks
//sleep for about 1 second
Sleep(1000);
LARGE_INTEGER t2;
QueryPerformanceCounter(&t2);       // end timestamp
// Elapsed ticks; kept in long long so no precision is lost (the point
// of this answer — do not narrow QuadPart arithmetic to float).
long long diff = t2.QuadPart - t1.QuadPart;
//print the result with varying accuracy:
// Integer division truncates; multiplying first scales the quotient
// to the desired unit before the truncation happens.
cout << "seconds: " << 1 * diff / freq.QuadPart << endl;
cout << "milliseconds: " << 1000 * diff / freq.QuadPart << endl;
cout << "microseconds: " << 1000000 * diff / freq.QuadPart << endl;
cout << "nanoseconds: " << 1000000000 * diff / freq.QuadPart << endl;
此外还有 `GetTickCount64`(它是一个精度约为 10–16 毫秒的滴答计数器)、`GetSystemTime` 和 `GetSystemTimeAsFileTime`……具体用哪个取决于你要做的事情。