author | Linus Torvalds <torvalds@linux-foundation.org> | 2014-06-06 13:43:28 -0400
---|---|---
committer | Linus Torvalds <torvalds@linux-foundation.org> | 2014-06-06 13:43:28 -0400
commit | cc07aabc53978ae09a1d539237189f7c9841060a (patch) |
tree | 6f47580d19ab5ad85f319bdb260615e991a93399 /arch/arm64/kvm |
parent | 9e47aaef0bd3a50a43626fa6b19e1f964ac173d6 (diff) |
parent | 9358d755bd5cba8965ea79f2a446e689323409f9 (diff) |
Merge tag 'arm64-upstream' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux into next
Pull arm64 updates from Catalin Marinas:
- Optimised assembly string/memory routines (based on the AArch64
  Cortex Strings library contributed to glibc but re-licensed under
  GPLv2)
- Optimised crypto algorithms making use of the ARMv8 crypto extensions
  (together with kernel API for using FPSIMD instructions in interrupt
  context)
- Ftrace support
- CPU topology parsing from DT
- ESR_EL1 (Exception Syndrome Register) exposed to user space signal
  handlers for SIGSEGV/SIGBUS (useful to emulation tools like Qemu;
  a parsing sketch follows this message)
- 1GB section linear mapping if applicable
- Barriers usage clean-up
- Default pgprot clean-up
Conflicts resolved as per Catalin.
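For the ESR_EL1 item above: the syndrome value is delivered to user space as an extra record in the sigcontext reserved area of the signal frame. The sketch below shows one way a handler could pick it out. ESR_MAGIC, struct _aarch64_ctx and struct esr_context come from the arm64 uapi <asm/sigcontext.h>; the handler itself, its output, and the faulting access in main() are illustrative assumptions, not code from this merge.

```c
/* Hedged sketch: reading ESR_EL1 from a SIGSEGV handler on arm64. */
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <ucontext.h>
#include <asm/sigcontext.h>	/* ESR_MAGIC, struct _aarch64_ctx, struct esr_context */

static void segv_handler(int sig, siginfo_t *info, void *ucontext)
{
	ucontext_t *uc = ucontext;
	struct _aarch64_ctx *ctx = (struct _aarch64_ctx *)uc->uc_mcontext.__reserved;

	/* The reserved area is a chain of sized records terminated by magic == 0. */
	while (ctx->magic != 0) {
		if (ctx->magic == ESR_MAGIC) {
			unsigned long long esr = ((struct esr_context *)ctx)->esr;
			/* An emulator would decode the syndrome here (abort class, WnR bit, ...).
			 * fprintf() is not async-signal-safe; used only for illustration. */
			fprintf(stderr, "fault at %p, ESR_EL1 = %#llx\n", info->si_addr, esr);
			break;
		}
		ctx = (struct _aarch64_ctx *)((char *)ctx + ctx->size);
	}
	_Exit(1);
}

int main(void)
{
	struct sigaction sa = { .sa_sigaction = segv_handler, .sa_flags = SA_SIGINFO };

	sigaction(SIGSEGV, &sa, NULL);
	*(volatile int *)8 = 0;		/* deliberately fault to trigger the handler */
	return 0;
}
```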
* tag 'arm64-upstream' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux: (57 commits)
arm64: kernel: initialize broadcast hrtimer based clock event device
arm64: ftrace: Add system call tracepoint
arm64: ftrace: Add CALLER_ADDRx macros
arm64: ftrace: Add dynamic ftrace support
arm64: Add ftrace support
ftrace: Add arm64 support to recordmcount
arm64: Add 'notrace' attribute to unwind_frame() for ftrace
arm64: add __ASSEMBLY__ in asm/insn.h
arm64: Fix linker script entry point
arm64: lib: Implement optimized string length routines
arm64: lib: Implement optimized string compare routines
arm64: lib: Implement optimized memcmp routine
arm64: lib: Implement optimized memset routine
arm64: lib: Implement optimized memmove routine
arm64: lib: Implement optimized memcpy routine
arm64: defconfig: enable a few more common/useful options in defconfig
ftrace: Make CALLER_ADDRx macros more generic
arm64: Fix deadlock scenario with smp_send_stop()
arm64: Fix machine_shutdown() definition
arm64: Support arch_irq_work_raise() via self IPIs
...
Diffstat (limited to 'arch/arm64/kvm')
-rw-r--r-- | arch/arm64/kvm/hyp.S      | 12 |
-rw-r--r-- | arch/arm64/kvm/sys_regs.c |  4 |
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/arch/arm64/kvm/hyp.S b/arch/arm64/kvm/hyp.S
index 2c56012cb2d2..b0d1512acf08 100644
--- a/arch/arm64/kvm/hyp.S
+++ b/arch/arm64/kvm/hyp.S
@@ -630,9 +630,15 @@ ENTRY(__kvm_tlb_flush_vmid_ipa)
 	 * whole of Stage-1. Weep...
 	 */
 	tlbi	ipas2e1is, x1
-	dsb	sy
+	/*
+	 * We have to ensure completion of the invalidation at Stage-2,
+	 * since a table walk on another CPU could refill a TLB with a
+	 * complete (S1 + S2) walk based on the old Stage-2 mapping if
+	 * the Stage-1 invalidation happened first.
+	 */
+	dsb	ish
 	tlbi	vmalle1is
-	dsb	sy
+	dsb	ish
 	isb
 
 	msr	vttbr_el2, xzr
@@ -643,7 +649,7 @@ ENTRY(__kvm_flush_vm_context)
 	dsb	ishst
 	tlbi	alle1is
 	ic	ialluis
-	dsb	sy
+	dsb	ish
 	ret
 ENDPROC(__kvm_flush_vm_context)
 
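Two things happen in the hunk above: the barriers are narrowed from dsb sy (wait for the whole system) to dsb ish (wait only for the inner-shareable domain, which already covers every CPU that can walk these page tables), and a comment records why a barrier must sit between the Stage-2 and Stage-1 invalidations. A C inline-assembly sketch of the same sequence follows; the helper name and the ipa >> 12 operand encoding are assumptions for illustration, and the VTTBR_EL2 setup that the real handler performs around this code is omitted.

```c
/*
 * Hypothetical sketch mirroring the __kvm_tlb_flush_vmid_ipa sequence above;
 * not the kernel's implementation. Assumes VTTBR_EL2 already points at the
 * guest's Stage-2 tables.
 */
static inline void stage2_flush_ipa_sketch(unsigned long ipa)
{
	asm volatile(
	"	tlbi	ipas2e1is, %0\n"	/* invalidate the Stage-2 entry for this IPA */
	"	dsb	ish\n"			/* Stage-2 invalidation must complete first: otherwise */
	"	tlbi	vmalle1is\n"		/* a concurrent walk on another CPU could refill a     */
	"	dsb	ish\n"			/* combined S1+S2 entry from the stale Stage-2 mapping */
	"	isb\n"
	: : "r" (ipa >> 12) : "memory");
}
```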
diff --git a/arch/arm64/kvm/sys_regs.c b/arch/arm64/kvm/sys_regs.c
index 03244582bc55..c59a1bdab5eb 100644
--- a/arch/arm64/kvm/sys_regs.c
+++ b/arch/arm64/kvm/sys_regs.c
@@ -71,13 +71,13 @@ static u32 get_ccsidr(u32 csselr)
 static void do_dc_cisw(u32 val)
 {
 	asm volatile("dc cisw, %x0" : : "r" (val));
-	dsb();
+	dsb(ish);
 }
 
 static void do_dc_csw(u32 val)
 {
 	asm volatile("dc csw, %x0" : : "r" (val));
-	dsb();
+	dsb(ish);
 }
 
 /* See note at ARM ARM B1.14.4 */
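The sys_regs.c change depends on the barriers clean-up called out in the pull summary: the arm64 dsb() macro now takes an explicit option rather than always emitting dsb sy. A minimal sketch of that shape is below; the macro and function names carry a _sketch suffix because they are stand-ins, not the definitions from arch/arm64/include/asm/barrier.h or the file above.

```c
/* Sketch of an option-taking DSB macro, as assumed by the dsb(ish) calls above. */
#define dsb_sketch(opt)	asm volatile("dsb " #opt ::: "memory")

/* Usage mirroring do_dc_csw(): clean a cache line by set/way, then wait for
 * completion across the inner-shareable domain only. */
static void do_dc_csw_sketch(unsigned int val)
{
	asm volatile("dc csw, %x0" : : "r" (val));
	dsb_sketch(ish);
}
```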