about | summary | refs | log | tree | commit | diff | stats
path: root/arch/arm64
diff options
context:
space:
mode:
Diffstat (limited to 'arch/arm64')
-rw-r--r--  arch/arm64/include/asm/barrier.h     |  2 +-
-rw-r--r--  arch/arm64/include/asm/cacheflush.h  |  4 ++--
-rw-r--r--  arch/arm64/include/asm/pgtable.h     |  4 ++--
-rw-r--r--  arch/arm64/include/asm/tlbflush.h    | 14 +++++++-------
-rw-r--r--  arch/arm64/kernel/process.c          |  2 +-
-rw-r--r--  arch/arm64/kvm/sys_regs.c            |  4 ++--
6 files changed, 15 insertions, 15 deletions
diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 66eb7648043b..5d69eddbe39e 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -28,7 +28,7 @@
 #define dmb(opt)	asm volatile("dmb sy" : : : "memory")
 #define dsb(opt)	asm volatile("dsb sy" : : : "memory")
 
-#define mb()		dsb()
+#define mb()		dsb(sy)
 #define rmb()		asm volatile("dsb ld" : : : "memory")
 #define wmb()		asm volatile("dsb st" : : : "memory")
 
diff --git a/arch/arm64/include/asm/cacheflush.h b/arch/arm64/include/asm/cacheflush.h
index 4c60e64a801c..a5176cf32dad 100644
--- a/arch/arm64/include/asm/cacheflush.h
+++ b/arch/arm64/include/asm/cacheflush.h
@@ -123,7 +123,7 @@ extern void flush_dcache_page(struct page *);
 static inline void __flush_icache_all(void)
 {
 	asm("ic	ialluis");
-	dsb();
+	dsb(ish);
 }
 
 #define flush_dcache_mmap_lock(mapping)	\
@@ -150,7 +150,7 @@ static inline void flush_cache_vmap(unsigned long start, unsigned long end)
 	 * set_pte_at() called from vmap_pte_range() does not
 	 * have a DSB after cleaning the cache line.
 	 */
-	dsb();
+	dsb(ish);
 }
 
 static inline void flush_cache_vunmap(unsigned long start, unsigned long end)
diff --git a/arch/arm64/include/asm/pgtable.h b/arch/arm64/include/asm/pgtable.h
index 3de4ef8bfd82..e4c60d6e18b8 100644
--- a/arch/arm64/include/asm/pgtable.h
+++ b/arch/arm64/include/asm/pgtable.h
@@ -303,7 +303,7 @@ extern pgprot_t phys_mem_access_prot(struct file *file, unsigned long pfn,
 static inline void set_pmd(pmd_t *pmdp, pmd_t pmd)
 {
 	*pmdp = pmd;
-	dsb();
+	dsb(ishst);
 }
 
 static inline void pmd_clear(pmd_t *pmdp)
@@ -333,7 +333,7 @@ static inline pte_t *pmd_page_vaddr(pmd_t pmd)
 static inline void set_pud(pud_t *pudp, pud_t pud)
 {
 	*pudp = pud;
-	dsb();
+	dsb(ishst);
 }
 
 static inline void pud_clear(pud_t *pudp)
diff --git a/arch/arm64/include/asm/tlbflush.h b/arch/arm64/include/asm/tlbflush.h
index 7881d7dbb9ba..b9349c4513ea 100644
--- a/arch/arm64/include/asm/tlbflush.h
+++ b/arch/arm64/include/asm/tlbflush.h
@@ -72,9 +72,9 @@ extern struct cpu_tlb_fns cpu_tlb;
  */
 static inline void flush_tlb_all(void)
 {
-	dsb();
+	dsb(ishst);
 	asm("tlbi	vmalle1is");
-	dsb();
+	dsb(ish);
 	isb();
 }
 
@@ -82,9 +82,9 @@ static inline void flush_tlb_mm(struct mm_struct *mm)
 {
 	unsigned long asid = (unsigned long)ASID(mm) << 48;
 
-	dsb();
+	dsb(ishst);
 	asm("tlbi	aside1is, %0" : : "r" (asid));
-	dsb();
+	dsb(ish);
 }
 
 static inline void flush_tlb_page(struct vm_area_struct *vma,
@@ -93,9 +93,9 @@ static inline void flush_tlb_page(struct vm_area_struct *vma,
 	unsigned long addr = uaddr >> 12 |
 		((unsigned long)ASID(vma->vm_mm) << 48);
 
-	dsb();
+	dsb(ishst);
 	asm("tlbi	vae1is, %0" : : "r" (addr));
-	dsb();
+	dsb(ish);
 }
 
 static inline void flush_tlb_range(struct vm_area_struct *vma,
@@ -134,7 +134,7 @@ static inline void update_mmu_cache(struct vm_area_struct *vma,
 	 * set_pte() does not have a DSB, so make sure that the page table
 	 * write is visible.
 	 */
-	dsb();
+	dsb(ishst);
 }
 
 #define update_mmu_cache_pmd(vma, address, pmd) do { } while (0)
diff --git a/arch/arm64/kernel/process.c b/arch/arm64/kernel/process.c
index 6391485f342d..f7c446a5e97b 100644
--- a/arch/arm64/kernel/process.c
+++ b/arch/arm64/kernel/process.c
@@ -300,7 +300,7 @@ struct task_struct *__switch_to(struct task_struct *prev,
 	 * Complete any pending TLB or cache maintenance on this CPU in case
 	 * the thread migrates to a different CPU.
 	 */
-	dsb();
+	dsb(ish);
 
 	/* the actual thread switch */
 	last = cpu_switch_to(prev, next);
diff --git a/arch/arm64/kvm/sys_regs.c b/arch/arm64/kvm/sys_regs.c
index 03244582bc55..c59a1bdab5eb 100644
--- a/arch/arm64/kvm/sys_regs.c
+++ b/arch/arm64/kvm/sys_regs.c
@@ -71,13 +71,13 @@ static u32 get_ccsidr(u32 csselr)
 static void do_dc_cisw(u32 val)
 {
 	asm volatile("dc cisw, %x0" : : "r" (val));
-	dsb();
+	dsb(ish);
 }
 
 static void do_dc_csw(u32 val)
 {
 	asm volatile("dc csw, %x0" : : "r" (val));
-	dsb();
+	dsb(ish);
 }
 
 /* See note at ARM ARM B1.14.4 */