| author | H. Peter Anvin <hpa@zytor.com> | 2008-09-04 09:04:45 -0700 |
|---|---|---|
| committer | H. Peter Anvin <hpa@zytor.com> | 2008-09-04 09:04:45 -0700 |
| commit | fe47784ba5cbb6b713c013e046859946789b45e4 (patch) | |
| tree | 6384958d55e29be0d2eb8ae78fa437c10636d8d6 | /arch/x86/power |
| parent | 83b8e28b14d63db928cb39e5c5ed2a548246bd71 (diff) | |
| parent | af2e1f276ff08f17192411ea3b71c13a758dfe12 (diff) | |
Merge branch 'x86/cpu' into x86/xsave
Conflicts:
arch/x86/kernel/cpu/feature_names.c
include/asm-x86/cpufeature.h
Diffstat (limited to 'arch/x86/power')
| -rw-r--r-- | arch/x86/power/cpu_32.c | 6 |
| -rw-r--r-- | arch/x86/power/hibernate_asm_32.S | 26 |
2 files changed, 19 insertions, 13 deletions
```diff
diff --git a/arch/x86/power/cpu_32.c b/arch/x86/power/cpu_32.c
index 02f36f53558c..274d06082f48 100644
--- a/arch/x86/power/cpu_32.c
+++ b/arch/x86/power/cpu_32.c
@@ -46,7 +46,7 @@ static void __save_processor_state(struct saved_context *ctxt)
 	ctxt->cr0 = read_cr0();
 	ctxt->cr2 = read_cr2();
 	ctxt->cr3 = read_cr3();
-	ctxt->cr4 = read_cr4();
+	ctxt->cr4 = read_cr4_safe();
 }
 
 /* Needed by apm.c */
@@ -99,7 +99,9 @@ static void __restore_processor_state(struct saved_context *ctxt)
 	/*
 	 * control registers
 	 */
-	write_cr4(ctxt->cr4);
+	/* cr4 was introduced in the Pentium CPU */
+	if (ctxt->cr4)
+		write_cr4(ctxt->cr4);
 	write_cr3(ctxt->cr3);
 	write_cr2(ctxt->cr2);
 	write_cr0(ctxt->cr0);
diff --git a/arch/x86/power/hibernate_asm_32.S b/arch/x86/power/hibernate_asm_32.S
index b95aa6cfe3cb..4fc7e872c85e 100644
--- a/arch/x86/power/hibernate_asm_32.S
+++ b/arch/x86/power/hibernate_asm_32.S
@@ -28,9 +28,9 @@ ENTRY(swsusp_arch_suspend)
 	ret
 
 ENTRY(restore_image)
-	movl	resume_pg_dir, %ecx
-	subl	$__PAGE_OFFSET, %ecx
-	movl	%ecx, %cr3
+	movl	resume_pg_dir, %eax
+	subl	$__PAGE_OFFSET, %eax
+	movl	%eax, %cr3
 
 	movl	restore_pblist, %edx
 	.p2align 4,,7
@@ -52,17 +52,21 @@ copy_loop:
 
 done:
 	/* go back to the original page tables */
-	movl	$swapper_pg_dir, %ecx
-	subl	$__PAGE_OFFSET, %ecx
-	movl	%ecx, %cr3
+	movl	$swapper_pg_dir, %eax
+	subl	$__PAGE_OFFSET, %eax
+	movl	%eax, %cr3
 	/* Flush TLB, including "global" things (vmalloc) */
-	movl	mmu_cr4_features, %eax
-	movl	%eax, %edx
+	movl	mmu_cr4_features, %ecx
+	jecxz	1f	# cr4 Pentium and higher, skip if zero
+	movl	%ecx, %edx
 	andl	$~(1<<7), %edx;	# PGE
 	movl	%edx, %cr4;	# turn off PGE
-	movl	%cr3, %ecx;	# flush TLB
-	movl	%ecx, %cr3
-	movl	%eax, %cr4;	# turn PGE back on
+1:
+	movl	%cr3, %eax;	# flush TLB
+	movl	%eax, %cr3
+	jecxz	1f	# cr4 Pentium and higher, skip if zero
+	movl	%ecx, %cr4;	# turn PGE back on
+1:
 	movl	saved_context_esp, %esp
 	movl	saved_context_ebp, %ebp
```
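Both hunks address the same issue: i386/i486 CPUs have no %cr4 register, so the saved value may legitimately be 0 and must not be written back blindly. On the C side, read_cr4_safe() reads %cr4 under an exception-table fixup and yields 0 when the register does not exist; on the assembly side, mmu_cr4_features is 0 on such CPUs, so the jecxz branches skip the %cr4 writes entirely. The following is a rough, hypothetical C rendering of the patched flush sequence in restore_image, with the privileged register accessors stubbed out so the sketch stands alone; it illustrates the control flow only and is not the kernel's actual code.

```c
#include <stdio.h>

/* Hypothetical stubs standing in for the kernel's privileged register
 * accessors; real code executes mov to/from %cr3 and %cr4 instructions. */
static unsigned long fake_cr3, fake_cr4;
static unsigned long read_cr3(void)    { return fake_cr3; }
static void write_cr3(unsigned long v) { fake_cr3 = v; }
static void write_cr4(unsigned long v) { fake_cr4 = v; }

#define X86_CR4_PGE (1UL << 7)  /* page global enable bit in %cr4 */

/* Mirrors the patched restore_image flush sequence: mmu_cr4_features is
 * 0 on CPUs without %cr4, so both writes are skipped (the jecxz branches
 * in the assembly), while the CR3 reload still flushes non-global TLBs. */
static void flush_tlb_all_sketch(unsigned long mmu_cr4_features)
{
	if (mmu_cr4_features)
		write_cr4(mmu_cr4_features & ~X86_CR4_PGE); /* turn off PGE */

	write_cr3(read_cr3());                              /* flush TLB */

	if (mmu_cr4_features)
		write_cr4(mmu_cr4_features);                /* turn PGE back on */
}

int main(void)
{
	flush_tlb_all_sketch(0);            /* pre-Pentium path: cr4 never touched */
	flush_tlb_all_sketch(X86_CR4_PGE);  /* Pentium+ path: PGE toggled off and on */
	printf("cr4 ends up as %#lx\n", fake_cr4);
	return 0;
}
```

Toggling PGE off and back on is what flushes global TLB entries (vmalloc mappings) on CPUs that support them; the CR3 reload alone only drops non-global entries, which is why it is still performed unconditionally between the two guarded writes.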