Fix overflow condition with QueryPerformanceCounter
The previous code for OS_Windows::get_ticks_usec() multiplied the tick count by 1000000 before dividing by ticks_per_second. The tick count is stored in a 64-bit integer, and the multiplication is susceptible to overflow once a machine with a high-frequency timer has been running for a long period of time (days). This PR splits the calculation into one for whole seconds and one for the sub-second remainder, removing the possibility of overflow caused by the multiplication.
parent 5f1107aa30
commit db9fa88160
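To make the overflow window concrete: QueryPerformanceCounter returns ticks in a 64-bit integer, so the old formula wraps once ticks exceeds UINT64_MAX / 1000000. The sketch below works that out for an assumed 10 MHz counter frequency (an illustrative value, not taken from this commit) and contrasts the old and split calculations one tick past the threshold:

// Minimal standalone sketch of the overflow, assuming a 10 MHz
// QueryPerformanceFrequency (an illustrative value, not from this commit).
#include <cstdint>
#include <cstdio>

int main() {
	const uint64_t ticks_per_second = 10000000; // assumed 10 MHz

	// The old formula wraps once ticks * 1000000 exceeds UINT64_MAX.
	const uint64_t overflow_ticks = UINT64_MAX / 1000000L;
	printf("old formula overflows after %.1f days of uptime\n",
			(double)overflow_ticks / ticks_per_second / 86400.0);

	// One tick past the threshold: the old formula wraps, the split stays correct.
	uint64_t ticks = overflow_ticks + 1;
	uint64_t naive = ticks * 1000000L / ticks_per_second; // wrapped, wrong
	uint64_t split = (ticks / ticks_per_second) * 1000000L +
			(ticks % ticks_per_second) * 1000000L / ticks_per_second;
	printf("old: %llu us, split: %llu us\n",
			(unsigned long long)naive, (unsigned long long)split);
	return 0;
}

At the assumed 10 MHz frequency the wraparound lands at roughly 21 days of uptime, which matches the "days since the last reboot" failure mode described in the commit message.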
@@ -575,11 +575,28 @@ void OS_UWP::delay_usec(uint32_t p_usec) const {
 
 uint64_t OS_UWP::get_ticks_usec() const {
 
 	uint64_t ticks;
-	uint64_t time;
 
 	// This is the number of clock ticks since start
 	QueryPerformanceCounter((LARGE_INTEGER *)&ticks);
 
 	// Divide by frequency to get the time in seconds
-	time = ticks * 1000000L / ticks_per_second;
+	// original calculation shown below is subject to overflow
+	// with high ticks_per_second and a number of days since the last reboot.
+	// time = ticks * 1000000L / ticks_per_second;
+
+	// we can prevent this by either using 128 bit math
+	// or separating into a calculation for seconds, and the fraction
+	uint64_t seconds = ticks / ticks_per_second;
+
+	// compiler will optimize these two into one divide
+	uint64_t leftover = ticks % ticks_per_second;
+
+	// remainder
+	uint64_t time = (leftover * 1000000L) / ticks_per_second;
+
+	// seconds
+	time += seconds * 1000000L;
+
 	// Subtract the time at game start to get
 	// the time since the game started
 	time -= ticks_start;
@@ -418,12 +418,29 @@ void OS_Windows::delay_usec(uint32_t p_usec) const {
 
 uint64_t OS_Windows::get_ticks_usec() const {
 
 	uint64_t ticks;
-	uint64_t time;
 
 	// This is the number of clock ticks since start
 	if (!QueryPerformanceCounter((LARGE_INTEGER *)&ticks))
 		ticks = (UINT64)timeGetTime();
 
 	// Divide by frequency to get the time in seconds
-	time = ticks * 1000000L / ticks_per_second;
+	// original calculation shown below is subject to overflow
+	// with high ticks_per_second and a number of days since the last reboot.
+	// time = ticks * 1000000L / ticks_per_second;
+
+	// we can prevent this by either using 128 bit math
+	// or separating into a calculation for seconds, and the fraction
+	uint64_t seconds = ticks / ticks_per_second;
+
+	// compiler will optimize these two into one divide
+	uint64_t leftover = ticks % ticks_per_second;
+
+	// remainder
+	uint64_t time = (leftover * 1000000L) / ticks_per_second;
+
+	// seconds
+	time += seconds * 1000000L;
+
 	// Subtract the time at game start to get
 	// the time since the game started
 	time -= ticks_start;
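For reviewers who want to check the rewritten arithmetic beyond the boundary case, a quick test comparing the split calculation against a 128-bit reference may help. This sketch assumes a GCC/Clang toolchain for unsigned __int128 (MSVC would need a different 128-bit path), and the frequencies listed are illustrative examples, not values from the commit:

// Sanity check: the split seconds/remainder calculation should agree with
// full 128-bit math for any tick count. Assumes GCC/Clang for
// unsigned __int128; the frequencies below are illustrative examples.
#include <cassert>
#include <cstdint>

static uint64_t ticks_to_usec(uint64_t ticks, uint64_t ticks_per_second) {
	uint64_t seconds = ticks / ticks_per_second;
	uint64_t leftover = ticks % ticks_per_second;
	return seconds * 1000000L + (leftover * 1000000L) / ticks_per_second;
}

int main() {
	const uint64_t freqs[] = { 3579545, 10000000, 24000000 };
	const uint64_t samples[] = {
		0, 1, 123456789,
		UINT64_MAX / 1000000L,     // last tick the old formula survives
		UINT64_MAX / 1000000L + 1, // first tick where it wraps
	};
	for (uint64_t f : freqs) {
		for (uint64_t t : samples) {
			// 128-bit reference result, immune to the 64-bit wraparound
			uint64_t ref = (uint64_t)((unsigned __int128)t * 1000000 / f);
			assert(ticks_to_usec(t, f) == ref);
		}
	}
	return 0;
}

The two agree exactly: writing ticks = seconds * ticks_per_second + leftover with leftover < ticks_per_second, the reference floor(ticks * 1000000 / ticks_per_second) expands to seconds * 1000000 + floor(leftover * 1000000 / ticks_per_second), which is precisely what the split computes, and leftover * 1000000 cannot overflow for any realistic counter frequency.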