/*
* delay.h - delay functions
*
* Copyright (c) 2004-2007 Analog Devices Inc.
*
* Licensed under the GPL-2 or later.
*/

#ifndef __ASM_DELAY_H__
#define __ASM_DELAY_H__

#include <asm/mach/anomaly.h>

static inline void __delay(unsigned long loops)
{
	if (ANOMALY_05000312) {
		/* Interrupted loads to loop registers -> bad */
		unsigned long tmp;
		__asm__ __volatile__(
			"[--SP] = LC0;"
			"[--SP] = LT0;"
			"[--SP] = LB0;"
			"LSETUP (1f,1f) LC0 = %1;"
			"1: NOP;"
			/* We take advantage of the fact that LC0 is 0 at
			 * the end of the loop.  Otherwise we'd need some
			 * NOPs after the CLI here.
			 */
			"CLI %0;"
			"LB0 = [SP++];"
			"LT0 = [SP++];"
			"LC0 = [SP++];"
			"STI %0;"
			: "=d" (tmp)
			: "a" (loops)
		);
	} else
		__asm__ __volatile__ (
			"LSETUP(1f, 1f) LC0 = %0;"
			"1: NOP;"
			:
			: "a" (loops)
			: "LT0", "LB0", "LC0"
		);
}

#include <linux/param.h>	/* needed for HZ */

/*
 * Use only for very small delays (< 1 msec).  Should probably use a
 * lookup table, really, as the multiplications take much too long with
 * short delays.  This is a "reasonable" implementation, though (and the
 * first constant multiplication gets optimized away if the delay is
 * a constant).
*/
static inline void udelay(unsigned long usecs)
{
	extern unsigned long loops_per_jiffy;
	__delay(usecs * loops_per_jiffy / (1000000 / HZ));
}
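
/*
 * Usage sketch (illustrative only, not part of the interface defined in
 * this header): callers busy-wait for a small, ideally compile-time
 * constant, number of microseconds, e.g.
 *
 *	example_toggle_pin();		hypothetical helper, not defined here
 *	udelay(50);			well under the 1 msec guideline above
 *
 * As the comment above notes, a constant delay lets the compiler fold
 * the first multiplication away.
 */
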
#endif