platform_system_core/libutils/Timers.cpp
Brett Chabot 1af6acc4d3 Use Android systemTime implementation for host linux.
The Android framework, notably android.view.Choreographer, assumes
that System.nanoTime and SystemClock.uptimeMillis return consistent
values. This was true on device, but not on host.

This commit makes those values consistent on host linux. The necessary
support should be in place in kernels 2.6.39 and newer, which have been
available since 2012.

Test: m -j libutils
Change-Id: I833a89a810ae9fb3e8c01f6095ee2aca893c284f
2019-09-19 15:32:33 -07:00
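
For context, the property the framework relies on: after this change, systemTime(SYSTEM_TIME_MONOTONIC) on host Linux reads CLOCK_MONOTONIC directly, which is also what System.nanoTime reads on Linux hosts. A minimal sketch of that check, illustrative only (monotonicClocksAgree is a hypothetical name, not part of the change):

#include <time.h>
#include <utils/Timers.h>

// Illustrative: the two back-to-back reads below should differ only by
// scheduling noise once both are backed by CLOCK_MONOTONIC.
bool monotonicClocksAgree() {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    nsecs_t raw = nsecs_t(ts.tv_sec) * 1000000000LL + ts.tv_nsec;
    nsecs_t viaSystemTime = systemTime(SYSTEM_TIME_MONOTONIC);
    return viaSystemTime - raw < 10000000LL;  // within a loose 10 ms bound
}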


/*
 * Copyright (C) 2005 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//
// Timer functions.
//

#include <utils/Timers.h>

#include <limits.h>
#include <time.h>

// host linux support requires Linux 2.6.39+
#if defined(__linux__)
nsecs_t systemTime(int clock)
{
    // Indexed by the SYSTEM_TIME_* constants declared in utils/Timers.h.
    static const clockid_t clocks[] = {
            CLOCK_REALTIME,
            CLOCK_MONOTONIC,
            CLOCK_PROCESS_CPUTIME_ID,
            CLOCK_THREAD_CPUTIME_ID,
            CLOCK_BOOTTIME
    };
    struct timespec t;
    t.tv_sec = t.tv_nsec = 0;
    clock_gettime(clocks[clock], &t);
    return nsecs_t(t.tv_sec)*1000000000LL + t.tv_nsec;
}
#else
nsecs_t systemTime(int /*clock*/)
{
    // Clock support varies widely across hosts. Mac OS doesn't support
    // CLOCK_BOOTTIME, and Windows is windows.
    struct timeval t;
    t.tv_sec = t.tv_usec = 0;
    gettimeofday(&t, nullptr);
    return nsecs_t(t.tv_sec)*1000000000LL + nsecs_t(t.tv_usec)*1000LL;
}
#endif
int toMillisecondTimeoutDelay(nsecs_t referenceTime, nsecs_t timeoutTime)
{
    nsecs_t timeoutDelayMillis;
    if (timeoutTime > referenceTime) {
        uint64_t timeoutDelay = uint64_t(timeoutTime - referenceTime);
        if (timeoutDelay > uint64_t((INT_MAX - 1) * 1000000LL)) {
            // Delay is too large to fit in an int; poll(2)-style callers
            // treat -1 as "wait indefinitely".
            timeoutDelayMillis = -1;
        } else {
            // Round up so we never wake before the timeout expires.
            timeoutDelayMillis = (timeoutDelay + 999999LL) / 1000000LL;
        }
    } else {
        timeoutDelayMillis = 0;
    }
    return (int)timeoutDelayMillis;
}
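
A usage note on toMillisecondTimeoutDelay: it rounds the remaining time up to whole milliseconds (the +999999 before the divide, so e.g. 1,500,000 ns becomes 2 ms), returns 0 for a deadline already in the past, and returns -1 when the delay would not fit in an int. A hedged sketch of a poll(2)-style caller; pollTimeoutForDeadline is a hypothetical helper, not part of libutils:

#include <utils/Timers.h>

int pollTimeoutForDeadline(nsecs_t deadline) {
    // Measure against the same monotonic clock the deadline was taken from.
    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    // 0 when the deadline has passed; otherwise the delay rounded up to ms.
    return toMillisecondTimeoutDelay(now, deadline);
}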