author    Glauber de Oliveira Costa <gcosta@redhat.com>  2008-01-30 13:31:08 +0100
committer Ingo Molnar <mingo@elte.hu>                    2008-01-30 13:31:08 +0100
commit    a6b4655258efd39b590e519815ed43bb74cd7188 (patch)
tree      3da933deec46772bd4078a7f1e4fc30de1192c79 /include
parent    d89542229b657bdcce6a6f76168f9098ee3e9344 (diff)
x86: unify load_segment macro
This patch unifies the load_segment() macro, making it identical on
x86_64 and i386. The common version goes to system.h, and the old
per-architecture versions are deleted.

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86/system.h    | 21 +++++++++++++++++++++
-rw-r--r--  include/asm-x86/system_32.h | 22 ----------------------
-rw-r--r--  include/asm-x86/system_64.h | 20 --------------------
3 files changed, 21 insertions(+), 42 deletions(-)
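For context, loadsegment() is how kernel code reloads a data segment
register without risking an unhandled fault: if the selector turns out
to be invalid, the exception-table fixup silently loads the null
selector instead. A minimal usage sketch, assuming the unified macro
added below; the helper name and the way the selector is obtained are
hypothetical, for illustration only:

    /* Hypothetical helper, illustration only: try to install 'sel'
     * into %fs; an invalid selector is caught through __ex_table and
     * replaced by the null selector, so no fault reaches the caller.
     */
    static inline void example_set_fs(unsigned short sel)
    {
            loadsegment(fs, sel);   /* falls back to 0 on fault */
    }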
diff --git a/include/asm-x86/system.h b/include/asm-x86/system.h
index d0803f8c70c..3740bada097 100644
--- a/include/asm-x86/system.h
+++ b/include/asm-x86/system.h
@@ -39,6 +39,27 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
#define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
/*
+ * Load a segment. Fall back on loading the zero
+ * segment if something goes wrong..
+ */
+#define loadsegment(seg, value) \
+ asm volatile("\n" \
+ "1:\t" \
+ "movl %k0,%%" #seg "\n" \
+ "2:\n" \
+ ".section .fixup,\"ax\"\n" \
+ "3:\t" \
+ "movl %k1, %%" #seg "\n\t" \
+ "jmp 2b\n" \
+ ".previous\n" \
+ ".section __ex_table,\"a\"\n\t" \
+ _ASM_ALIGN "\n\t" \
+ _ASM_PTR " 1b,3b\n" \
+ ".previous" \
+ : :"r" (value), "r" (0))
+
+
+/*
* Save a segment register away
*/
#define savesegment(seg, value) \
diff --git a/include/asm-x86/system_32.h b/include/asm-x86/system_32.h
index fb457642ac5..8db478984ed 100644
--- a/include/asm-x86/system_32.h
+++ b/include/asm-x86/system_32.h
@@ -34,28 +34,6 @@ extern struct task_struct * FASTCALL(__switch_to(struct task_struct *prev, struc
"2" (prev), "d" (next)); \
} while (0)
-/*
- * Load a segment. Fall back on loading the zero
- * segment if something goes wrong..
- */
-#define loadsegment(seg,value) \
- asm volatile("\n" \
- "1:\t" \
- "mov %0,%%" #seg "\n" \
- "2:\n" \
- ".section .fixup,\"ax\"\n" \
- "3:\t" \
- "pushl $0\n\t" \
- "popl %%" #seg "\n\t" \
- "jmp 2b\n" \
- ".previous\n" \
- ".section __ex_table,\"a\"\n\t" \
- ".align 4\n\t" \
- ".long 1b,3b\n" \
- ".previous" \
- : :"rm" (value))
-
-
static inline void native_clts(void)
{
asm volatile ("clts");
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index cc5b2666a04..0885caace5d 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -50,26 +50,6 @@
extern void load_gs_index(unsigned);
/*
- * Load a segment. Fall back on loading the zero
- * segment if something goes wrong..
- */
-#define loadsegment(seg,value) \
- asm volatile("\n" \
- "1:\t" \
- "movl %k0,%%" #seg "\n" \
- "2:\n" \
- ".section .fixup,\"ax\"\n" \
- "3:\t" \
- "movl %1,%%" #seg "\n\t" \
- "jmp 2b\n" \
- ".previous\n" \
- ".section __ex_table,\"a\"\n\t" \
- ".align 8\n\t" \
- ".quad 1b,3b\n" \
- ".previous" \
- : :"r" (value), "r" (0))
-
-/*
* Clear and set 'TS' bit respectively
*/
#define clts() __asm__ __volatile__ ("clts")
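One definition can serve both architectures because it keeps the
x86_64 strategy of passing the zero selector as a second register
operand ("r" (0)) rather than the old i386 pushl $0/popl sequence, and
because _ASM_ALIGN/_ASM_PTR expand to .align 4/.long on i386 and
.align 8/.quad on x86_64, matching the two per-arch exception-table
entries deleted above. Roughly, for seg = fs on x86_64, the unified
macro emits the following pattern (the registers are compiler-chosen;
%eax/%ecx here are illustrative only):

    1:      movl %eax,%fs           # try the requested selector
    2:                              # normal path continues here
            .section .fixup,"ax"
    3:      movl %ecx,%fs           # fixup: %ecx holds 0 (null selector)
            jmp 2b
            .previous
            .section __ex_table,"a"
            .align 8                # _ASM_ALIGN on x86_64
            .quad 1b,3b             # _ASM_PTR: fault at 1b resumes at 3b
            .previous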