author Simon Horman <horms@verge.net.au> 2008-09-10 09:14:52 +1000
committer Simon Horman <horms@verge.net.au> 2008-09-10 09:14:52 +1000
commit c051a0a2c9e283c1123ed3ce65e66e41d2ce5e24 (patch)
tree 1202d018129ca5538cd98b1e4542b239045c1a2d /arch/blackfin/include/asm/delay.h
parent e9c0ce232e7a36daae1ca08282609d7f0c57c567 (diff)
parent 28faa979746b2352cd78a376bf9f52db953bda46 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/davem/net-next-2.6 into lvs-next-2.6
Diffstat (limited to 'arch/blackfin/include/asm/delay.h')
-rw-r--r-- arch/blackfin/include/asm/delay.h | 62
1 files changed, 62 insertions, 0 deletions
diff --git a/arch/blackfin/include/asm/delay.h b/arch/blackfin/include/asm/delay.h
new file mode 100644
index 00000000000..0889c3abb59
--- /dev/null
+++ b/arch/blackfin/include/asm/delay.h
@@ -0,0 +1,62 @@
+/*
+ * delay.h - delay functions
+ *
+ * Copyright (c) 2004-2007 Analog Devices Inc.
+ *
+ * Licensed under the GPL-2 or later.
+ */
+
+#ifndef __ASM_DELAY_H__
+#define __ASM_DELAY_H__
+
+#include <mach/anomaly.h>
+
+static inline void __delay(unsigned long loops)
+{
+ if (ANOMALY_05000312) {
+ /* Interrupted loads to loop registers -> bad */
+ unsigned long tmp;
+ __asm__ __volatile__(
+ "[--SP] = LC0;"
+ "[--SP] = LT0;"
+ "[--SP] = LB0;"
+ "LSETUP (1f,1f) LC0 = %1;"
+ "1: NOP;"
+ /* We take advantage of the fact that LC0 is 0 at
+ * the end of the loop. Otherwise we'd need some
+ * NOPs after the CLI here.
+ */
+ "CLI %0;"
+ "LB0 = [SP++];"
+ "LT0 = [SP++];"
+ "LC0 = [SP++];"
+ "STI %0;"
+ : "=d" (tmp)
+ : "a" (loops)
+ );
+ } else
+ __asm__ __volatile__ (
+ "LSETUP(1f, 1f) LC0 = %0;"
+ "1: NOP;"
+ :
+ : "a" (loops)
+ : "LT0", "LB0", "LC0"
+ );
+}
+
+#include <linux/param.h> /* needed for HZ */
+
+/*
+ * Use only for very small delays ( < 1 msec). Should probably use a
+ * lookup table, really, as the multiplications take much too long with
+ * short delays. This is a "reasonable" implementation, though (and the
+ * first constant multiplication gets optimized away if the delay is
+ * a constant)
+ */
+static inline void udelay(unsigned long usecs)
+{
+ extern unsigned long loops_per_jiffy;
+ __delay(usecs * loops_per_jiffy / (1000000 / HZ));
+}
+
+#endif
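
The udelay() added above converts a microsecond count into __delay() loop
iterations using the boot-time loops_per_jiffy calibration: one jiffy is
1000000/HZ microseconds, so usecs microseconds correspond to
usecs * loops_per_jiffy / (1000000 / HZ) loops. The multiplication is also why
the comment restricts it to very small delays: large usecs values can overflow
the 32-bit intermediate product. Below is a minimal userspace sketch of that
arithmetic, not part of the commit; the HZ and loops_per_jiffy values are
illustrative assumptions, not Blackfin defaults.

	/* Sketch only: mirrors the udelay() arithmetic with made-up numbers. */
	#include <stdio.h>

	int main(void)
	{
		unsigned long hz = 250;                 /* assumed timer tick rate */
		unsigned long loops_per_jiffy = 100000; /* assumed calibration result */
		unsigned long usecs = 50;               /* requested delay in microseconds */

		/* Same formula as udelay(): usecs * loops_per_jiffy / (1000000 / HZ) */
		unsigned long loops = usecs * loops_per_jiffy / (1000000 / hz);

		/* 1000000 / 250 = 4000 us per jiffy, so 50 us -> 50 * 100000 / 4000 = 1250 loops */
		printf("udelay(%lu) would spin for %lu loops\n", usecs, loops);
		return 0;
	}

With these example numbers the busy-wait runs 1250 iterations of the single-NOP
hardware loop set up in __delay().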