author     Matthew Wilcox <matthew@wil.cx>    2009-05-22 16:49:49 -0400
committer  Tony Luck <tony.luck@intel.com>    2009-06-17 12:33:49 -0400
commit     e088a4ad7fa53c3dc3c29f930025f41ccf01953e (patch)
tree       07b012952bbbaccfe4ef3bb44b1ea0a3a3bb3868 /arch/ia64/include/asm/gcc_intrin.h
parent     e56e2dcd381d9ec35379328f332221581eda4787 (diff)
[IA64] Convert ia64 to use int-ll64.h
It is generally agreed that it would be beneficial for u64 to be an
unsigned long long on all architectures. ia64 (in common with several
other 64-bit architectures) currently uses unsigned long. Migrating
piecemeal is too painful; this giant patch fixes all of the compilation
warnings and errors that result from switching to int-ll64.h.
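Much of what breaks in such a switch is format-string checking: code that
prints a u64 had to match whichever typedef its architecture used. A
stand-alone sketch of the warning class (hypothetical code, not from this
patch; build with -DUSE_LL64 to model the new convention):

#include <stdio.h>

#ifdef USE_LL64
typedef unsigned long long u64;	/* int-ll64.h: one typedef everywhere */
#else
typedef unsigned long u64;	/* int-l64.h: LP64 targets only */
#endif

int main(void)
{
	u64 val = 42;

	/* Under the int-l64.h typedef this line draws a -Wformat warning
	 * (unsigned long passed for %llu); under int-ll64.h it is clean,
	 * and the very same format string also works on 32-bit targets. */
	printf("val = %llu\n", val);
	return 0;
}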
Note that userspace will still see __u64 defined as unsigned long. This
is important as it affects C++ name mangling.
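The two types stay distinct in the ABI even though both are 64 bits wide
on ia64: the Itanium C++ ABI mangles unsigned long as 'm' and unsigned
long long as 'y', so redefining __u64 in the exported headers would
silently rename the symbols of C++ userspace built against them. A small
demonstration of the type distinction (hypothetical C11 code, not part of
the patch):

#include <stdio.h>

typedef unsigned long      old_u64;	/* what userspace __u64 remains */
typedef unsigned long long new_u64;	/* what kernel-internal u64 becomes */

#define type_name(x) _Generic((x),		\
	unsigned long:      "unsigned long",	\
	unsigned long long: "unsigned long long")

int main(void)
{
	old_u64 a = 0;
	new_u64 b = 0;

	/* Same size on LP64, yet different types: an ABI-visible fact. */
	printf("__u64: %s, %zu bytes\n", type_name(a), sizeof a);
	printf("u64:   %s, %zu bytes\n", type_name(b), sizeof b);
	return 0;
}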
[Updated by Tony Luck to change efi.h:efi_freemem_callback_t to use
u64 for start/end rather than unsigned long]
Signed-off-by: Matthew Wilcox <willy@linux.intel.com>
Signed-off-by: Tony Luck <tony.luck@intel.com>
Diffstat (limited to 'arch/ia64/include/asm/gcc_intrin.h')
-rw-r--r--  arch/ia64/include/asm/gcc_intrin.h | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/arch/ia64/include/asm/gcc_intrin.h b/arch/ia64/include/asm/gcc_intrin.h
index c2c5fd8fcac4..21ddee54adae 100644
--- a/arch/ia64/include/asm/gcc_intrin.h
+++ b/arch/ia64/include/asm/gcc_intrin.h
@@ -388,7 +388,7 @@ register unsigned long ia64_r13 asm ("r13") __used;
 
 #define ia64_native_thash(addr)					\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \
 	ia64_intri_res;							\
 })
@@ -419,7 +419,7 @@ register unsigned long ia64_r13 asm ("r13") __used;
 
 #define ia64_tpa(addr)							\
 ({									\
-	__u64 ia64_pa;							\
+	unsigned long ia64_pa;						\
 	asm volatile ("tpa %0 = %1" : "=r"(ia64_pa) : "r"(addr) : "memory"); \
 	ia64_pa;							\
 })
@@ -444,35 +444,35 @@ register unsigned long ia64_r13 asm ("r13") __used;
 
 #define ia64_native_get_cpuid(index)					\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index)); \
 	ia64_intri_res;							\
 })
 
 #define __ia64_get_dbr(index)						\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=dbr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
 	ia64_intri_res;							\
 })
 
 #define ia64_get_ibr(index)						\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=ibr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
 	ia64_intri_res;							\
 })
 
 #define ia64_get_pkr(index)						\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=pkr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
 	ia64_intri_res;							\
 })
 
 #define ia64_get_pmc(index)						\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=pmc[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
 	ia64_intri_res;							\
 })
@@ -480,14 +480,14 @@ register unsigned long ia64_r13 asm ("r13") __used;
 
 #define ia64_native_get_pmd(index)					\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
 	ia64_intri_res;							\
 })
 
 #define ia64_native_get_rr(index)					\
 ({									\
-	__u64 ia64_intri_res;						\
+	unsigned long ia64_intri_res;					\
 	asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index)); \
 	ia64_intri_res;							\
 })
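Every macro changed above follows one idiom: a GCC statement expression
({ ... }) declares a result variable, runs a single inline-asm
instruction into it, and yields it as the macro's value. Spelling that
variable unsigned long rather than __u64 keeps each macro's result type
exactly what it was before, now that kernel-side __u64 becomes unsigned
long long under int-ll64.h. A minimal stand-alone sketch of the pattern
(hypothetical code, written against x86-64's rdtsc rather than an ia64
register so it compiles on commodity hardware):

#include <stdio.h>

/* Same shape as ia64_native_get_rr() and friends: declare, asm, yield. */
#define read_tsc()						\
({								\
	unsigned long lo, hi;					\
	asm volatile ("rdtsc" : "=a"(lo), "=d"(hi));		\
	(hi << 32) | lo;					\
})

int main(void)
{
	printf("tsc = %lu\n", read_tsc());
	return 0;
}

Because the whole block is an expression, callers can write
r = read_tsc(); just as they would call a function, while the compiler
still inlines the single instruction.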