author    | Glauber de Oliveira Costa <gcosta@redhat.com> | 2008-01-30 13:31:08 +0100
committer | Ingo Molnar <mingo@elte.hu>                   | 2008-01-30 13:31:08 +0100
commit    | a6b4655258efd39b590e519815ed43bb74cd7188
tree      | 3da933deec46772bd4078a7f1e4fc30de1192c79 /include/asm-x86/system.h
parent    | d89542229b657bdcce6a6f76168f9098ee3e9344
x86: unify load_segment macro
This patch unifies the load_segment() macro, making it identical on both
x86_64 and i386. The common version goes to system.h, and the old
per-architecture copies are deleted.
Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include/asm-x86/system.h')
-rw-r--r-- | include/asm-x86/system.h | 21
1 file changed, 21 insertions, 0 deletions
diff --git a/include/asm-x86/system.h b/include/asm-x86/system.h
index d0803f8c70c4..3740bada097c 100644
--- a/include/asm-x86/system.h
+++ b/include/asm-x86/system.h
@@ -39,6 +39,27 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
 
 /*
+ * Load a segment. Fall back on loading the zero
+ * segment if something goes wrong..
+ */
+#define loadsegment(seg, value)                 \
+        asm volatile("\n"                       \
+                "1:\t"                          \
+                "movl %k0,%%" #seg "\n"         \
+                "2:\n"                          \
+                ".section .fixup,\"ax\"\n"      \
+                "3:\t"                          \
+                "movl %k1, %%" #seg "\n\t"      \
+                "jmp 2b\n"                      \
+                ".previous\n"                   \
+                ".section __ex_table,\"a\"\n\t" \
+                _ASM_ALIGN "\n\t"               \
+                _ASM_PTR " 1b,3b\n"             \
+                ".previous"                     \
+                : :"r" (value), "r" (0))
+
+
+/*
  * Save a segment register away
  */
 #define savesegment(seg, value)  \
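
Note on the pattern: the movl at label 1 loads the requested selector into the named segment register. If that instruction faults (for example, because the selector no longer refers to a valid GDT/LDT entry), the __ex_table entry maps the faulting address 1b to the fixup stub at 3b, which loads the null selector (the second operand, 0) and jumps back to 2b, so execution continues with an empty segment instead of an oops. Below is a rough usage sketch of the unified macro; the function name and the struct fields are hypothetical stand-ins, not code from this patch.

/*
 * Illustrative sketch only (not part of this patch): restoring a task's
 * data segment selectors with loadsegment() during a context switch.
 * "next", its fields, and the function name are hypothetical stand-ins
 * for the real thread-state structures.
 */
static inline void restore_user_segments(struct thread_struct *next)
{
        /*
         * If next->es or next->ds holds a stale selector, the movl inside
         * loadsegment() faults; the .fixup path then loads the null
         * selector and the task simply continues with an empty register.
         */
        loadsegment(es, next->es);
        loadsegment(ds, next->ds);

        /* Loading the null selector itself never faults. */
        loadsegment(fs, 0);
}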