Switched the macOS code to use timerprivInt128ToUint64(). Also rearranged the arguments of timerprivInt128ToInt64() and added proper timerDurationMin/Max constants in place of the timerTimeMin/Max constants that the Windows code was incorrectly using for that function.
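
For context, the reordering pairs each bound with its saturation value: the signature goes from (n, min, max, minCap, maxCap) to (n, min, minCap, max, maxCap). A minimal sketch of the presumed clamp-and-saturate behavior, using a plain int64_t in place of the 128-bit intermediate (the name clampToInt64 and the body are illustrative, not the library's exact code):

#include <stdint.h>
#include <stdio.h>

/* presumed semantics: each bound is now immediately followed by the
   value to return when that bound is exceeded */
static int64_t clampToInt64(int64_t v,
	int64_t min, int64_t minCap,
	int64_t max, int64_t maxCap)
{
	if (v < min)
		return minCap;		/* underflow: saturate low */
	if (v > max)
		return maxCap;		/* overflow: saturate high */
	return v;
}

int main(void)
{
	/* arbitrary [-100, 100] range for illustration, saturating to the same bounds */
	printf("%lld\n", (long long) clampToInt64(-500, -100, -100, 100, 100));	/* -100 */
	printf("%lld\n", (long long) clampToInt64(42, -100, -100, 100, 100));	/* 42 */
	return 0;
}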

Pietro Gagliardi 2019-05-05 11:16:17 -04:00
parent ff803bf792
commit d49f8ae7e6
6 changed files with 21 additions and 17 deletions

View File

@@ -53,6 +53,12 @@ struct testParams {
 	timerSysError err;
 };
+/*
+TODO if I remove the uiQuit() from this test on Windows, I will occasionally get
+=== RUN TestQueueMain_DifferentThread
+../test/initmain.c:161: uiMain() timed out (5s)
+--- FAIL: TestQueueMain_DifferentThread (4.9989539s)
+*/
 static void queued(void *data)
 {
 	struct testParams *p = (struct testParams *) data;
@@ -147,12 +153,6 @@ static void queueThread(void *data)
 	uiQueueMain(queued, p);
 }
-/*
-TODO if I remove the uiQuit() from this test on Windows, I will occasionally get
-=== RUN TestQueueMain_DifferentThread
-../test/initmain.c:161: uiMain() timed out (5s)
---- FAIL: TestQueueMain_DifferentThread (4.9989539s)
-*/
 testingTest(QueueMain_DifferentThread)
 {
 	threadThread *thread;

View File

@@ -306,7 +306,7 @@ void timerprivMulDivUint64(uint64_t x, uint64_t y, uint64_t z, timerprivInt128 *
 	int128MulDiv64(&a, &b, &c, quot);
 }
-int64_t timerprivInt128ToInt64(const timerprivInt128 *n, int64_t min, int64_t max, int64_t minCap, int64_t maxCap)
+int64_t timerprivInt128ToInt64(const timerprivInt128 *n, int64_t min, int64_t minCap, int64_t max, int64_t maxCap)
 {
 	if (n->neg) {
 		int64_t ret;
@@ -347,6 +347,9 @@ uint64_t timerprivInt128ToUint64(const timerprivInt128 *n, uint64_t max, uint64_
 {
 	if (n->neg)
 		return 0;
-	// TODO
-	return 0;
+	if (n->high != 0)
+		return maxCap;
+	if (n->low > maxCap)
+		return maxCap;
+	return n->low;
 }
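
The new body above saturates instead of truncating: negative values clamp to 0, and anything that does not fit in 64 bits (or exceeds maxCap) clamps to maxCap. A self-contained sketch of the same logic, with the struct layout assumed from the fields the code touches (the real function also takes a max argument, but only maxCap appears in the shown body, so the sketch uses just that):

#include <stdint.h>
#include <stdio.h>

/* assumed layout of timerprivInt128, inferred from the accesses above */
typedef struct {
	int neg;		/* nonzero if the value is negative */
	uint64_t high;		/* upper 64 bits of the magnitude */
	uint64_t low;		/* lower 64 bits of the magnitude */
} int128;

/* mirror of the saturating conversion the hunk above implements */
static uint64_t toUint64(const int128 *n, uint64_t maxCap)
{
	if (n->neg)
		return 0;		/* negative: clamp to zero */
	if (n->high != 0)
		return maxCap;		/* magnitude does not fit in 64 bits */
	if (n->low > maxCap)
		return maxCap;		/* fits in 64 bits but exceeds the cap */
	return n->low;
}

int main(void)
{
	int128 big = { 0, 1, 0 };	/* 2^64: saturates */
	int128 small = { 0, 0, 42 };	/* 42: passes through */

	printf("%llu\n", (unsigned long long) toUint64(&big, UINT64_MAX));	/* 18446744073709551615 */
	printf("%llu\n", (unsigned long long) toUint64(&small, UINT64_MAX));	/* 42 */
	return 0;
}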

View File

@@ -5,8 +5,9 @@
 typedef int64_t timerDuration;
 typedef int64_t timerTime;
 #define timerTimeMin ((timerTime) INT64_MIN)
 #define timerTimeMax ((timerTime) INT64_MAX)
+#define timerDurationMin ((timerDuration) INT64_MIN)
+#define timerDurationMax ((timerDuration) INT64_MAX)
 #define timerNanosecond ((timerDuration) 1)
 #define timerMicrosecond ((timerDuration) 1000)

View File

@@ -55,10 +55,9 @@ timerTime timerMonotonicNow(void)
 	t = mach_absolute_time() - base;
 	timerprivMulDivUint64(t, mt.numer, mt.denom, &quot);
 	// on overflow, return the maximum possible timerTime; this is inspired by what Go does
-	if (quot.high == 0)
-		if (quot.low <= ((uint64_t) timerTimeMax))
-			return (timerTime) (quot.low);
-	return timerTimeMax;
+	// the limit check will ensure we can safely cast the return value to a timerTime
+	return (timerTime) timerprivInt128ToUint64(&quot,
+		(uint64_t) timerTimeMax, (uint64_t) timerTimeMax);
 }
#else

View File

@@ -186,9 +186,10 @@ timerDuration timerTimeSub(timerTime end, timerTime start)
 	QueryPerformanceFrequency(&qpf);
 	timerprivMulDivInt64(end - start, timerSecond, qpf.QuadPart, &quot);
 	// on underflow/overflow, return the minimum/maximum possible timerDuration (respectively); this is based on what Go does
 	return timerprivInt128ToInt64(&quot,
-		INT64_MIN, INT64_MAX,
-		timerTimeMin, timerTimeMax);
+		INT64_MIN, timerDurationMin,
+		INT64_MAX, timerDurationMax);
 }
 // note: the idea for the SetThreadContext() nuttery is from https://www.codeproject.com/Articles/71529/Exception-Injection-Throwing-an-Exception-in-Other
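
For scale: the call above computes (end - start) * timerSecond / qpf.QuadPart through a 128-bit intermediate before clamping. A rough illustration of why 64-bit arithmetic is not enough, assuming timerSecond is 10^9 (nanoseconds per second, consistent with the timerNanosecond/timerMicrosecond constants above) and a hypothetical 10 MHz QueryPerformanceFrequency:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int64_t timerSecond = INT64_C(1000000000);	/* assumed: nanoseconds per second */
	int64_t qpf = INT64_C(10000000);		/* hypothetical 10 MHz QPC frequency */
	int64_t ticks = qpf * 1000;			/* 1000 seconds' worth of ticks */

	/* ticks * timerSecond would need ~10^19, past INT64_MAX (~9.2 * 10^18),
	   so the naive 64-bit product overflows after roughly 15 minutes of
	   elapsed ticks; hence timerprivMulDivInt64() and the 128-bit quotient */
	printf("would overflow: %s\n",
		ticks > INT64_MAX / timerSecond ? "yes" : "no");	/* yes */
	return 0;
}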

View File

@@ -10,5 +10,5 @@ struct timerprivInt128 {
 extern void timerprivMulDivInt64(int64_t x, int64_t y, int64_t z, timerprivInt128 *quot);
 extern void timerprivMulDivUint64(uint64_t x, uint64_t y, uint64_t z, timerprivInt128 *quot);
-extern int64_t timerprivInt128ToInt64(const timerprivInt128 *n, int64_t min, int64_t max, int64_t minCap, int64_t maxCap);
+extern int64_t timerprivInt128ToInt64(const timerprivInt128 *n, int64_t min, int64_t minCap, int64_t max, int64_t maxCap);
 extern uint64_t timerprivInt128ToUint64(const timerprivInt128 *n, uint64_t max, uint64_t maxCap);