path: root/kernel/time.c
Diffstat (limited to 'kernel/time.c')
-rw-r--r--  kernel/time.c  63
1 file changed, 29 insertions, 34 deletions
diff --git a/kernel/time.c b/kernel/time.c
index 35d373a98782..6a08660b4fac 100644
--- a/kernel/time.c
+++ b/kernel/time.c
@@ -35,6 +35,8 @@
#include <linux/syscalls.h>
#include <linux/security.h>
#include <linux/fs.h>
+#include <linux/slab.h>
+#include <linux/math64.h>
#include <asm/uaccess.h>
#include <asm/unistd.h>
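
The new <linux/math64.h> include pulls in the div_u64()/div_s64() family that the rest of the patch converts to. As a reference, here is a small userspace model of the two remainder-returning helpers (a sketch of their semantics only, not the kernel implementation; check the header in this tree for the exact signatures):

/* Userspace sketch of the div_u64_rem()/div_s64_rem() semantics. */
#include <stdint.h>
#include <stdio.h>

static uint64_t model_div_u64_rem(uint64_t dividend, uint32_t divisor, uint32_t *rem)
{
        *rem = (uint32_t)(dividend % divisor);  /* remainder via out-parameter */
        return dividend / divisor;              /* 64-bit quotient */
}

static int64_t model_div_s64_rem(int64_t dividend, int32_t divisor, int32_t *rem)
{
        *rem = (int32_t)(dividend % divisor);   /* C99 '%' truncates toward zero */
        return dividend / divisor;
}

int main(void)
{
        uint32_t r;
        int32_t sr;

        printf("%llu sec, %u ns\n",
               (unsigned long long)model_div_u64_rem(2500000000ULL, 1000000000U, &r), r);
        printf("%lld sec, %d ns\n",
               (long long)model_div_s64_rem(-2500000000LL, 1000000000, &sr), sr);
        return 0;
}

Note that the signed variant returns a non-positive remainder for negative input, which is why ns_to_timespec() below needs a fix-up step.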
@@ -244,7 +246,7 @@ unsigned int inline jiffies_to_msecs(const unsigned long j)
return (j + (HZ / MSEC_PER_SEC) - 1)/(HZ / MSEC_PER_SEC);
#else
# if BITS_PER_LONG == 32
- return ((u64)HZ_TO_MSEC_MUL32 * j) >> HZ_TO_MSEC_SHR32;
+ return (HZ_TO_MSEC_MUL32 * j) >> HZ_TO_MSEC_SHR32;
# else
return (j * HZ_TO_MSEC_NUM) / HZ_TO_MSEC_DEN;
# endif
@@ -260,7 +262,7 @@ unsigned int inline jiffies_to_usecs(const unsigned long j)
return (j + (HZ / USEC_PER_SEC) - 1)/(HZ / USEC_PER_SEC);
#else
# if BITS_PER_LONG == 32
- return ((u64)HZ_TO_USEC_MUL32 * j) >> HZ_TO_USEC_SHR32;
+ return (HZ_TO_USEC_MUL32 * j) >> HZ_TO_USEC_SHR32;
# else
return (j * HZ_TO_USEC_NUM) / HZ_TO_USEC_DEN;
# endif
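
The two hunks above drop the explicit (u64) cast on the multiply, presumably because the HZ_TO_MSEC_MUL32/HZ_TO_USEC_MUL32 constants generated into kernel/timeconst.h are already 64-bit, so the product is computed in 64 bits either way (an assumption worth double-checking against timeconst.h in this tree). The underlying trick is an ordinary scaled reciprocal: multiply by a precomputed 2^shift * MSEC_PER_SEC / HZ and shift back down. A standalone sketch with illustrative constants (not the generated ones):

/* Scaled-reciprocal sketch behind the HZ_TO_MSEC_MUL32/SHR32 pair.
 * The constants below are computed inline for illustration; the kernel's
 * values come from the timeconst generator and may round differently. */
#include <stdint.h>
#include <stdio.h>

#define HZ           300u        /* example tick rate (assumption) */
#define MSEC_PER_SEC 1000u
#define SHIFT        29          /* chosen so the reciprocal fits in 32 bits */

int main(void)
{
        /* mul / 2^SHIFT approximates MSEC_PER_SEC / HZ, rounded up */
        uint64_t mul = (((uint64_t)MSEC_PER_SEC << SHIFT) + HZ - 1) / HZ;

        for (uint32_t j = 1; j < 1000000; j *= 7) {
                uint32_t approx = (uint32_t)((mul * j) >> SHIFT);
                uint32_t exact  = (uint32_t)((uint64_t)j * MSEC_PER_SEC / HZ);
                printf("j=%-7u  mul/shift=%-7u  exact=%u\n", j, approx, exact);
        }
        return 0;
}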
@@ -390,13 +392,17 @@ EXPORT_SYMBOL(set_normalized_timespec);
struct timespec ns_to_timespec(const s64 nsec)
{
struct timespec ts;
+ s32 rem;
if (!nsec)
return (struct timespec) {0, 0};
- ts.tv_sec = div_long_long_rem_signed(nsec, NSEC_PER_SEC, &ts.tv_nsec);
- if (unlikely(nsec < 0))
- set_normalized_timespec(&ts, ts.tv_sec, ts.tv_nsec);
+ ts.tv_sec = div_s64_rem(nsec, NSEC_PER_SEC, &rem);
+ if (unlikely(rem < 0)) {
+ ts.tv_sec--;
+ rem += NSEC_PER_SEC;
+ }
+ ts.tv_nsec = rem;
return ts;
}
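
div_s64_rem() truncates toward zero, so a negative nsec comes back with a non-positive remainder; the added fix-up steps tv_sec down by one and folds NSEC_PER_SEC back into the remainder, which is what the old set_normalized_timespec() call achieved. A minimal sketch of that normalization, using plain C division in place of div_s64_rem():

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000LL

int main(void)
{
        int64_t nsec = -1500000000LL;                    /* -1.5 s */
        int64_t sec  = nsec / NSEC_PER_SEC;              /* -1 (truncated toward zero) */
        int32_t rem  = (int32_t)(nsec % NSEC_PER_SEC);   /* -500000000 */

        if (rem < 0) {          /* normalize so 0 <= tv_nsec < NSEC_PER_SEC */
                sec--;
                rem += NSEC_PER_SEC;
        }
        /* prints tv_sec=-2 tv_nsec=500000000, i.e. still -1.5 s */
        printf("tv_sec=%lld tv_nsec=%d\n", (long long)sec, rem);
        return 0;
}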
@@ -470,7 +476,7 @@ unsigned long msecs_to_jiffies(const unsigned int m)
if (HZ > MSEC_PER_SEC && m > jiffies_to_msecs(MAX_JIFFY_OFFSET))
return MAX_JIFFY_OFFSET;
- return ((u64)MSEC_TO_HZ_MUL32 * m + MSEC_TO_HZ_ADJ32)
+ return (MSEC_TO_HZ_MUL32 * m + MSEC_TO_HZ_ADJ32)
>> MSEC_TO_HZ_SHR32;
#endif
}
@@ -485,7 +491,7 @@ unsigned long usecs_to_jiffies(const unsigned int u)
#elif HZ > USEC_PER_SEC && !(HZ % USEC_PER_SEC)
return u * (HZ / USEC_PER_SEC);
#else
- return ((u64)USEC_TO_HZ_MUL32 * u + USEC_TO_HZ_ADJ32)
+ return (USEC_TO_HZ_MUL32 * u + USEC_TO_HZ_ADJ32)
>> USEC_TO_HZ_SHR32;
#endif
}
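
In the other direction, msecs_to_jiffies()/usecs_to_jiffies() are expected to round up so a requested delay is never shortened, and the *_ADJ32 term added before the shift appears to serve that purpose (again an assumption about the generated constants). One simple way to fold rounding-up into a mul/shift conversion, checked against a plain round-up division:

#include <stdint.h>
#include <stdio.h>

#define HZ           250u        /* example values (assumptions) */
#define MSEC_PER_SEC 1000u
#define SHIFT        32

int main(void)
{
        uint64_t mul = ((uint64_t)HZ << SHIFT) / MSEC_PER_SEC;  /* HZ/1000 in 32.32 fixed point */
        uint64_t adj = ((uint64_t)1 << SHIFT) - 1;              /* forces the result to round up */

        for (uint32_t m = 1; m <= 10; m++) {
                uint32_t jif   = (uint32_t)((mul * m + adj) >> SHIFT);
                uint32_t exact = (m * HZ + MSEC_PER_SEC - 1) / MSEC_PER_SEC;  /* DIV_ROUND_UP */
                printf("m=%2u ms  mul/shift=%u  round-up=%u jiffies\n", m, jif, exact);
        }
        return 0;
}

The generated MSEC_TO_HZ_ADJ32 is derived together with its own rounded MUL32 value, so it need not equal the blunt 2^SHIFT - 1 used here.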
@@ -526,8 +532,10 @@ jiffies_to_timespec(const unsigned long jiffies, struct timespec *value)
* Convert jiffies to nanoseconds and separate with
* one divide.
*/
- u64 nsec = (u64)jiffies * TICK_NSEC;
- value->tv_sec = div_long_long_rem(nsec, NSEC_PER_SEC, &value->tv_nsec);
+ u32 rem;
+ value->tv_sec = div_u64_rem((u64)jiffies * TICK_NSEC,
+ NSEC_PER_SEC, &rem);
+ value->tv_nsec = rem;
}
EXPORT_SYMBOL(jiffies_to_timespec);
@@ -565,12 +573,11 @@ void jiffies_to_timeval(const unsigned long jiffies, struct timeval *value)
* Convert jiffies to nanoseconds and separate with
* one divide.
*/
- u64 nsec = (u64)jiffies * TICK_NSEC;
- long tv_usec;
+ u32 rem;
- value->tv_sec = div_long_long_rem(nsec, NSEC_PER_SEC, &tv_usec);
- tv_usec /= NSEC_PER_USEC;
- value->tv_usec = tv_usec;
+ value->tv_sec = div_u64_rem((u64)jiffies * TICK_NSEC,
+ NSEC_PER_SEC, &rem);
+ value->tv_usec = rem / NSEC_PER_USEC;
}
EXPORT_SYMBOL(jiffies_to_timeval);
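
Both jiffies_to_timespec() and jiffies_to_timeval() keep the "one divide" approach described in their comments: a single 64/32 division of jiffies * TICK_NSEC yields the seconds, and the 32-bit remainder provides tv_nsec directly, or tv_usec after one further cheap 32-bit divide. A plain-C sketch of that split (the TICK_NSEC value is an assumption, corresponding to HZ=100):

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC  1000000000u
#define NSEC_PER_USEC 1000u
#define TICK_NSEC     10000000u     /* 10 ms tick, i.e. HZ=100 (assumption) */

int main(void)
{
        unsigned long jiffies = 12345;
        uint64_t nsec = (uint64_t)jiffies * TICK_NSEC;

        uint64_t tv_sec  = nsec / NSEC_PER_SEC;             /* quotient: whole seconds */
        uint32_t rem     = (uint32_t)(nsec % NSEC_PER_SEC); /* remainder: tv_nsec */
        uint32_t tv_usec = rem / NSEC_PER_USEC;             /* for the timeval case */

        printf("%lu jiffies = %llu s + %u ns (= %u us)\n",
               jiffies, (unsigned long long)tv_sec, rem, tv_usec);
        return 0;
}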
@@ -586,9 +593,7 @@ clock_t jiffies_to_clock_t(long x)
return x / (HZ / USER_HZ);
# endif
#else
- u64 tmp = (u64)x * TICK_NSEC;
- do_div(tmp, (NSEC_PER_SEC / USER_HZ));
- return (long)tmp;
+ return div_u64((u64)x * TICK_NSEC, NSEC_PER_SEC / USER_HZ);
#endif
}
EXPORT_SYMBOL(jiffies_to_clock_t);
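
The jiffies_to_clock_t() change also shows the calling-convention difference that motivates the cleanup: do_div() divides its first argument in place and evaluates to the remainder, which forced the temporary, while div_u64() simply returns the quotient. A userspace model of the two styles (the do_div() stand-in mimics the macro's interface, not the kernel implementation):

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's do_div(): modifies n in place,
 * evaluates to the remainder (GNU C statement expression, as in the kernel). */
#define do_div(n, base) ({                              \
        uint32_t __rem = (uint32_t)((n) % (base));      \
        (n) = (n) / (base);                             \
        __rem;                                          \
})

static uint64_t div_u64(uint64_t dividend, uint32_t divisor)
{
        return dividend / divisor;      /* quotient only, no in-place update */
}

int main(void)
{
        uint64_t tmp = 1000000123ULL;
        uint32_t rem = do_div(tmp, 1000000U);   /* tmp becomes 1000, rem = 123 */

        printf("do_div:  quotient=%llu remainder=%u\n", (unsigned long long)tmp, rem);
        printf("div_u64: quotient=%llu\n",
               (unsigned long long)div_u64(1000000123ULL, 1000000U));
        return 0;
}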
@@ -600,16 +605,12 @@ unsigned long clock_t_to_jiffies(unsigned long x)
return ~0UL;
return x * (HZ / USER_HZ);
#else
- u64 jif;
-
/* Don't worry about loss of precision here .. */
if (x >= ~0UL / HZ * USER_HZ)
return ~0UL;
/* .. but do try to contain it here */
- jif = x * (u64) HZ;
- do_div(jif, USER_HZ);
- return jif;
+ return div_u64((u64)x * HZ, USER_HZ);
#endif
}
EXPORT_SYMBOL(clock_t_to_jiffies);
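
clock_t_to_jiffies() keeps its saturation guard: if x * HZ / USER_HZ would not fit in an unsigned long, it returns ~0UL rather than letting the scaled result be truncated. A sketch of that clamp-before-scale pattern, modeled on the generic (#else) branch with example HZ/USER_HZ values (assumptions):

#include <stdint.h>
#include <stdio.h>
#include <limits.h>

#define HZ      250ul            /* example values (assumptions); HZ % USER_HZ != 0 */
#define USER_HZ 100ul

static unsigned long clock_t_to_jiffies_model(unsigned long x)
{
        if (x >= ULONG_MAX / HZ * USER_HZ)      /* result would not fit: saturate */
                return ULONG_MAX;               /* ~0UL in the kernel source */
        return (unsigned long)(((uint64_t)x * HZ) / USER_HZ);
}

int main(void)
{
        printf("%lu\n", clock_t_to_jiffies_model(12345));     /* 30862 */
        printf("%lu\n", clock_t_to_jiffies_model(ULONG_MAX)); /* saturates to ULONG_MAX */
        return 0;
}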
@@ -618,10 +619,9 @@ u64 jiffies_64_to_clock_t(u64 x)
{
#if (TICK_NSEC % (NSEC_PER_SEC / USER_HZ)) == 0
# if HZ < USER_HZ
- x *= USER_HZ;
- do_div(x, HZ);
+ x = div_u64(x * USER_HZ, HZ);
# elif HZ > USER_HZ
- do_div(x, HZ / USER_HZ);
+ x = div_u64(x, HZ / USER_HZ);
# else
/* Nothing to do */
# endif
@@ -631,8 +631,7 @@ u64 jiffies_64_to_clock_t(u64 x)
* but even this doesn't overflow in hundreds of years
* in 64 bits, so..
*/
- x *= TICK_NSEC;
- do_div(x, (NSEC_PER_SEC / USER_HZ));
+ x = div_u64(x * TICK_NSEC, (NSEC_PER_SEC / USER_HZ));
#endif
return x;
}
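
The "doesn't overflow in hundreds of years" remark is easy to sanity-check: x * TICK_NSEC is just uptime expressed in nanoseconds, and a u64 holds roughly 584 years of nanoseconds before wrapping, independent of HZ.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        double ns_per_year = 365.25 * 24 * 3600 * 1e9;
        /* 2^64 ns / ns-per-year: about 584.5 years until a u64 nanosecond count wraps */
        printf("u64 wraps after ~%.1f years of nanoseconds\n",
               (double)UINT64_MAX / ns_per_year);
        return 0;
}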
@@ -641,21 +640,17 @@ EXPORT_SYMBOL(jiffies_64_to_clock_t);
u64 nsec_to_clock_t(u64 x)
{
#if (NSEC_PER_SEC % USER_HZ) == 0
- do_div(x, (NSEC_PER_SEC / USER_HZ));
+ return div_u64(x, NSEC_PER_SEC / USER_HZ);
#elif (USER_HZ % 512) == 0
- x *= USER_HZ/512;
- do_div(x, (NSEC_PER_SEC / 512));
+ return div_u64(x * USER_HZ / 512, NSEC_PER_SEC / 512);
#else
/*
* max relative error 5.7e-8 (1.8s per year) for USER_HZ <= 1024,
* overflow after 64.99 years.
* exact for HZ=60, 72, 90, 120, 144, 180, 300, 600, 900, ...
*/
- x *= 9;
- do_div(x, (unsigned long)((9ull * NSEC_PER_SEC + (USER_HZ/2)) /
- USER_HZ));
+ return div_u64(x * 9, (9ull * NSEC_PER_SEC + (USER_HZ / 2)) / USER_HZ);
#endif
- return x;
}
#if (BITS_PER_LONG < 64)
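
The last nsec_to_clock_t() branch scales both the numerator and the rounded divisor by 9 so the divisor stays an integer close to 9 * NSEC_PER_SEC / USER_HZ; the comment above quantifies the worst-case error. A quick spot-check of the scaled form against exact arithmetic for one example USER_HZ (an assumption):

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ull
#define USER_HZ      100u          /* example value (assumption) */

int main(void)
{
        uint64_t div = (9ull * NSEC_PER_SEC + (USER_HZ / 2)) / USER_HZ;

        for (uint64_t x = 1; x < (1ull << 40); x *= 1000) {
                uint64_t approx = (x * 9) / div;                /* the scaled form */
                uint64_t exact  = x * USER_HZ / NSEC_PER_SEC;   /* reference value */
                printf("x=%-14llu ns  approx=%-8llu exact=%llu ticks\n",
                       (unsigned long long)x, (unsigned long long)approx,
                       (unsigned long long)exact);
        }
        return 0;
}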