path: root/include/asm-i386/xor.h
author		Zachary Amsden <zach@vmware.com>	2005-09-03 18:56:36 -0400
committer	Linus Torvalds <torvalds@evo.osdl.org>	2005-09-05 03:06:11 -0400
commit		4bb0d3ec3e5b1e9e2399cdc641b3b6521ac9cdaa (patch)
tree		5e8d7646f5c6a2cec990b6d591f230d496b20664 /include/asm-i386/xor.h
parent		2a0694d15d55d0deed928786a6393d5e45e37d76 (diff)
[PATCH] i386: inline asm cleanup
i386 inline asm cleanup. Use the cr/dr accessor functions.

Also, a potential bugfix. Some CR accessors really should be volatile, so that reads from CR0 (numeric state may change in an exception handler), writes to CR4 (flipping CR4.TSD) and reads from CR2 (page fault) are not re-ordered by the compiler.

I did not add a memory clobber to the CR3 / CR4 / CR0 updates, as it was not there to begin with, and in no case should kernel memory be clobbered, except when doing a TLB flush, which already has a memory clobber.

I noticed that page invalidation does not have a memory clobber. I can't find a bug as a result, but there is definitely a potential for one here:

#define __flush_tlb_single(addr) \
	__asm__ __volatile__("invlpg %0": :"m" (*(char *) addr))

Signed-off-by: Zachary Amsden <zach@vmware.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
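The cr/dr accessor functions mentioned above are the small inline helpers (read_cr0(), write_cr0(), and friends) that the patch substitutes for open-coded control-register asm. As a rough sketch only, written for the i386 conventions of this era rather than quoted verbatim from the kernel headers, a volatile CR0 accessor pair looks roughly like this:

/* Illustrative sketch, not the exact kernel definitions. The volatile
 * qualifier keeps the compiler from re-ordering or dropping the
 * control-register access. */
static inline unsigned long read_cr0(void)
{
	unsigned long val;
	__asm__ __volatile__("movl %%cr0,%0" : "=r" (val));
	return val;
}

static inline void write_cr0(unsigned long val)
{
	__asm__ __volatile__("movl %0,%%cr0" : : "r" (val));
}

With helpers of this shape, the XMMS_SAVE/XMMS_RESTORE macros in the diff below can do the CR0 handling in C and drop it from their inline asm blocks, which is exactly what the two hunks change.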
Diffstat (limited to 'include/asm-i386/xor.h')
-rw-r--r--	include/asm-i386/xor.h	|  26
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/include/asm-i386/xor.h b/include/asm-i386/xor.h
index f80e2dbe1b56..23c86cef3b25 100644
--- a/include/asm-i386/xor.h
+++ b/include/asm-i386/xor.h
@@ -535,14 +535,14 @@ static struct xor_block_template xor_block_p5_mmx = {
 
 #define XMMS_SAVE do { \
 	preempt_disable(); \
+	cr0 = read_cr0(); \
+	clts(); \
 	__asm__ __volatile__ ( \
-		"movl %%cr0,%0		;\n\t" \
-		"clts			;\n\t" \
-		"movups %%xmm0,(%1)	;\n\t" \
-		"movups %%xmm1,0x10(%1)	;\n\t" \
-		"movups %%xmm2,0x20(%1)	;\n\t" \
-		"movups %%xmm3,0x30(%1)	;\n\t" \
-		: "=&r" (cr0) \
+		"movups %%xmm0,(%0)	;\n\t" \
+		"movups %%xmm1,0x10(%0)	;\n\t" \
+		"movups %%xmm2,0x20(%0)	;\n\t" \
+		"movups %%xmm3,0x30(%0)	;\n\t" \
+		: \
 		: "r" (xmm_save) \
 		: "memory"); \
 } while(0)
@@ -550,14 +550,14 @@ static struct xor_block_template xor_block_p5_mmx = {
 #define XMMS_RESTORE do { \
 	__asm__ __volatile__ ( \
 		"sfence			;\n\t" \
-		"movups (%1),%%xmm0	;\n\t" \
-		"movups 0x10(%1),%%xmm1	;\n\t" \
-		"movups 0x20(%1),%%xmm2	;\n\t" \
-		"movups 0x30(%1),%%xmm3	;\n\t" \
-		"movl %0,%%cr0		;\n\t" \
+		"movups (%0),%%xmm0	;\n\t" \
+		"movups 0x10(%0),%%xmm1	;\n\t" \
+		"movups 0x20(%0),%%xmm2	;\n\t" \
+		"movups 0x30(%0),%%xmm3	;\n\t" \
 		: \
-		: "r" (cr0), "r" (xmm_save) \
+		: "r" (xmm_save) \
 		: "memory"); \
+	write_cr0(cr0); \
 	preempt_enable(); \
 } while(0)
 
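For context, these macros bracket the SSE XOR routines elsewhere in this file, which provide the local cr0 and xmm_save variables the macros reference. A condensed, hypothetical caller (loop body elided, not the verbatim xor.h code) follows this pattern:

/* Sketch of the calling pattern assumed by the macros above:
 * cr0 and xmm_save are locals that XMMS_SAVE/XMMS_RESTORE use. */
static void
xor_sse_example(unsigned long bytes, unsigned long *p1, unsigned long *p2)
{
	char xmm_save[16*4];	/* spill area for %xmm0-%xmm3 */
	int cr0;

	XMMS_SAVE;		/* read_cr0(), clts(), save XMM registers */

	/* ... unrolled movaps/xorps loop over p1/p2 elided ... */
	(void)bytes; (void)p1; (void)p2;

	XMMS_RESTORE;		/* sfence, reload XMM registers, write_cr0() */
}

With the patch applied, the CR0 read and write happen in C through the accessors, so the asm statements no longer need the "=&r" (cr0) output or the extra "r" (cr0) input operand.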