Diffstat (limited to 'arch/powerpc')
 arch/powerpc/kernel/asm-offsets.c       |  4
 arch/powerpc/kernel/vdso.c              | 11
 arch/powerpc/kernel/vdso32/cacheflush.S | 41
 arch/powerpc/kernel/vdso64/cacheflush.S | 41
 4 files changed, 73 insertions(+), 24 deletions(-)
diff --git a/arch/powerpc/kernel/asm-offsets.c b/arch/powerpc/kernel/asm-offsets.c
index 2c8e756d19a3..d67bcd84f329 100644
--- a/arch/powerpc/kernel/asm-offsets.c
+++ b/arch/powerpc/kernel/asm-offsets.c
@@ -284,6 +284,10 @@ int main(void)
 	DEFINE(CFG_SYSCALL_MAP32, offsetof(struct vdso_data, syscall_map_32));
 	DEFINE(WTOM_CLOCK_SEC, offsetof(struct vdso_data, wtom_clock_sec));
 	DEFINE(WTOM_CLOCK_NSEC, offsetof(struct vdso_data, wtom_clock_nsec));
+	DEFINE(CFG_ICACHE_BLOCKSZ, offsetof(struct vdso_data, icache_block_size));
+	DEFINE(CFG_DCACHE_BLOCKSZ, offsetof(struct vdso_data, dcache_block_size));
+	DEFINE(CFG_ICACHE_LOGBLOCKSZ, offsetof(struct vdso_data, icache_log_block_size));
+	DEFINE(CFG_DCACHE_LOGBLOCKSZ, offsetof(struct vdso_data, dcache_log_block_size));
 #ifdef CONFIG_PPC64
 	DEFINE(CFG_SYSCALL_MAP64, offsetof(struct vdso_data, syscall_map_64));
 	DEFINE(TVAL64_TV_SEC, offsetof(struct timeval, tv_sec));
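The four new CFG_*CACHE_* constants follow the usual asm-offsets pattern: each DEFINE() emits the byte offset of a struct vdso_data field as an assembler-visible constant, so the vDSO assembly can address that field relative to the datapage pointer. Below is a minimal stand-alone C sketch of the mechanism; the trimmed struct layout is an illustrative stand-in, not the kernel's actual struct vdso_data.

#include <stddef.h>
#include <stdio.h>

/* Hypothetical, trimmed stand-in for struct vdso_data. */
struct vdso_data_example {
	unsigned int icache_block_size;
	unsigned int dcache_block_size;
	unsigned int icache_log_block_size;
	unsigned int dcache_log_block_size;
};

int main(void)
{
	/* asm-offsets.c turns offsetof() expressions like these into
	 * assembler constants, which is what lets cacheflush.S write
	 * e.g. lwz r7,CFG_DCACHE_BLOCKSZ(r10). */
	printf("CFG_DCACHE_BLOCKSZ    = %zu\n",
	       offsetof(struct vdso_data_example, dcache_block_size));
	printf("CFG_DCACHE_LOGBLOCKSZ = %zu\n",
	       offsetof(struct vdso_data_example, dcache_log_block_size));
	return 0;
}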
diff --git a/arch/powerpc/kernel/vdso.c b/arch/powerpc/kernel/vdso.c
index 2322ba5cce4c..3702df7dc567 100644
--- a/arch/powerpc/kernel/vdso.c
+++ b/arch/powerpc/kernel/vdso.c
@@ -699,11 +699,22 @@ static int __init vdso_init(void)
 	vdso_data->icache_size = ppc64_caches.isize;
 	vdso_data->icache_line_size = ppc64_caches.iline_size;
 
+	/* XXXOJN: Blocks should be added to ppc64_caches and used instead */
+	vdso_data->dcache_block_size = ppc64_caches.dline_size;
+	vdso_data->icache_block_size = ppc64_caches.iline_size;
+	vdso_data->dcache_log_block_size = ppc64_caches.log_dline_size;
+	vdso_data->icache_log_block_size = ppc64_caches.log_iline_size;
+
 	/*
 	 * Calculate the size of the 64 bits vDSO
 	 */
 	vdso64_pages = (&vdso64_end - &vdso64_start) >> PAGE_SHIFT;
 	DBG("vdso64_kbase: %p, 0x%x pages\n", vdso64_kbase, vdso64_pages);
+#else
+	vdso_data->dcache_block_size = L1_CACHE_BYTES;
+	vdso_data->dcache_log_block_size = L1_CACHE_SHIFT;
+	vdso_data->icache_block_size = L1_CACHE_BYTES;
+	vdso_data->icache_log_block_size = L1_CACHE_SHIFT;
 #endif /* CONFIG_PPC64 */
 
 
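The vdso_init() change above publishes both a block size and its base-2 logarithm for each cache (taken from ppc64_caches on 64-bit, from L1_CACHE_BYTES/L1_CACHE_SHIFT otherwise). A small sketch of the invariant the flush code relies on; the struct and helper names below are illustrative only, not kernel API:

#include <assert.h>

/* Hypothetical stand-in for one of the field pairs added to vdso_data. */
struct cache_geom {
	unsigned int block_size;	/* e.g. 128 */
	unsigned int log_block_size;	/* e.g. 7   */
};

static void check_cache_geom(const struct cache_geom *g)
{
	/* The vDSO flush loop masks with (block_size - 1) and converts a
	 * byte count into a line count with a right shift by
	 * log_block_size, so both fields must describe the same
	 * power-of-two block size. */
	assert(g->block_size == (1u << g->log_block_size));
}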
diff --git a/arch/powerpc/kernel/vdso32/cacheflush.S b/arch/powerpc/kernel/vdso32/cacheflush.S
index 9cb319992c38..1ba6feb71b31 100644
--- a/arch/powerpc/kernel/vdso32/cacheflush.S
+++ b/arch/powerpc/kernel/vdso32/cacheflush.S
@@ -23,29 +23,46 @@
  *
  * Flushes the data cache & invalidate the instruction cache for the
  * provided range [start, end[
- *
- * Note: all CPUs supported by this kernel have a 128 bytes cache
- * line size so we don't have to peek that info from the datapage
  */
 V_FUNCTION_BEGIN(__kernel_sync_dicache)
   .cfi_startproc
-	li	r5,127
-	andc	r6,r3,r5		/* round low to line bdy */
+	mflr	r12
+  .cfi_register lr,r12
+	mr	r11,r3
+	bl	__get_datapage@local
+	mtlr	r12
+	mr	r10,r3
+
+	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
+	addi	r5,r7,-1
+	andc	r6,r11,r5		/* round low to line bdy */
 	subf	r8,r6,r4		/* compute length */
 	add	r8,r8,r5		/* ensure we get enough */
-	srwi.	r8,r8,7			/* compute line count */
+	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
+	srw.	r8,r8,r9		/* compute line count */
 	crclr	cr0*4+so
 	beqlr				/* nothing to do? */
 	mtctr	r8
-	mr	r3,r6
-1:	dcbst	0,r3
-	addi	r3,r3,128
+1:	dcbst	0,r6
+	add	r6,r6,r7
 	bdnz	1b
 	sync
+
+	/* Now invalidate the instruction cache */
+
+	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
+	addi	r5,r7,-1
+	andc	r6,r11,r5		/* round low to line bdy */
+	subf	r8,r6,r4		/* compute length */
+	add	r8,r8,r5
+	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
+	srw.	r8,r8,r9		/* compute line count */
+	crclr	cr0*4+so
+	beqlr				/* nothing to do? */
 	mtctr	r8
-1:	icbi	0,r6
-	addi	r6,r6,128
-	bdnz	1b
+2:	icbi	0,r6
+	add	r6,r6,r7
+	bdnz	2b
 	isync
 	li	r3,0
 	blr
diff --git a/arch/powerpc/kernel/vdso64/cacheflush.S b/arch/powerpc/kernel/vdso64/cacheflush.S
index 66a36d3cc6ad..69c5af2b3c96 100644
--- a/arch/powerpc/kernel/vdso64/cacheflush.S
+++ b/arch/powerpc/kernel/vdso64/cacheflush.S
@@ -23,29 +23,46 @@
  *
  * Flushes the data cache & invalidate the instruction cache for the
  * provided range [start, end[
- *
- * Note: all CPUs supported by this kernel have a 128 bytes cache
- * line size so we don't have to peek that info from the datapage
  */
 V_FUNCTION_BEGIN(__kernel_sync_dicache)
   .cfi_startproc
-	li	r5,127
-	andc	r6,r3,r5		/* round low to line bdy */
+	mflr	r12
+  .cfi_register lr,r12
+	mr	r11,r3
+	bl	V_LOCAL_FUNC(__get_datapage)
+	mtlr	r12
+	mr	r10,r3
+
+	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
+	addi	r5,r7,-1
+	andc	r6,r11,r5		/* round low to line bdy */
 	subf	r8,r6,r4		/* compute length */
 	add	r8,r8,r5		/* ensure we get enough */
-	srwi.	r8,r8,7			/* compute line count */
+	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
+	srw.	r8,r8,r9		/* compute line count */
 	crclr	cr0*4+so
 	beqlr				/* nothing to do? */
 	mtctr	r8
-	mr	r3,r6
-1:	dcbst	0,r3
-	addi	r3,r3,128
+1:	dcbst	0,r6
+	add	r6,r6,r7
 	bdnz	1b
 	sync
+
+	/* Now invalidate the instruction cache */
+
+	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
+	addi	r5,r7,-1
+	andc	r6,r11,r5		/* round low to line bdy */
+	subf	r8,r6,r4		/* compute length */
+	add	r8,r8,r5
+	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
+	srw.	r8,r8,r9		/* compute line count */
+	crclr	cr0*4+so
+	beqlr				/* nothing to do? */
 	mtctr	r8
-1:	icbi	0,r6
-	addi	r6,r6,128
-	bdnz	1b
+2:	icbi	0,r6
+	add	r6,r6,r7
+	bdnz	2b
 	isync
 	li	r3,0
 	blr
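The 32-bit and 64-bit variants above are identical except for how they reach the datapage helper (bl __get_datapage@local versus bl V_LOCAL_FUNC(__get_datapage)). As a rough guide to what the reworked assembly does, here is a C sketch of the same flush-then-invalidate sequence over the half-open range [start, end[, assuming a simplified datapage struct; the struct, the function name, and the dcbst/icbi/sync/isync wrappers are illustrative helpers built on the real PowerPC instructions, not kernel or vDSO API.

/* Rough C equivalent of the reworked __kernel_sync_dicache (PowerPC only). */
struct vdso_cache_info {		/* simplified stand-in for vdso_data */
	unsigned int dcache_block_size;
	unsigned int icache_block_size;
};

static inline void dcbst(unsigned long p)
{
	__asm__ volatile("dcbst 0,%0" : : "r"(p) : "memory");
}
static inline void icbi(unsigned long p)
{
	__asm__ volatile("icbi 0,%0" : : "r"(p) : "memory");
}
static inline void ppc_sync(void)  { __asm__ volatile("sync"  : : : "memory"); }
static inline void ppc_isync(void) { __asm__ volatile("isync" : : : "memory"); }

static void sync_dicache_sketch(unsigned long start, unsigned long end,
				const struct vdso_cache_info *d)
{
	unsigned long p;

	/* Flush every data-cache block covering [start, end[ to memory,
	 * stepping by the block size read from the datapage... */
	for (p = start & ~(unsigned long)(d->dcache_block_size - 1);
	     p < end; p += d->dcache_block_size)
		dcbst(p);
	ppc_sync();

	/* ...then invalidate the matching instruction-cache blocks. */
	for (p = start & ~(unsigned long)(d->icache_block_size - 1);
	     p < end; p += d->icache_block_size)
		icbi(p);
	ppc_isync();
}

The point of the change is visible in both loops: instead of the old hard-coded 128-byte stride, each pass uses the block size published in the vDSO datapage, so the same vDSO binary works on CPUs with different cache-block geometries.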