-rw-r--r--  arch/arm/include/asm/cache.h                    2
-rw-r--r--  arch/avr32/include/asm/cache.h                  2
-rw-r--r--  arch/blackfin/include/asm/cache.h               2
-rw-r--r--  arch/frv/include/asm/mem-layout.h               2
-rw-r--r--  arch/m68k/include/asm/cache.h                   2
-rw-r--r--  arch/microblaze/include/asm/page.h              2
-rw-r--r--  arch/mips/include/asm/mach-generic/kmalloc.h    2
-rw-r--r--  arch/mips/include/asm/mach-ip27/kmalloc.h       2
-rw-r--r--  arch/mips/include/asm/mach-ip32/kmalloc.h       4
-rw-r--r--  arch/mn10300/include/asm/cache.h                2
-rw-r--r--  arch/powerpc/include/asm/page_32.h              2
-rw-r--r--  arch/sh/include/asm/page.h                      2
-rw-r--r--  arch/xtensa/include/asm/cache.h                 2
-rw-r--r--  include/linux/slab_def.h                        4
-rw-r--r--  include/linux/slob_def.h                        4
-rw-r--r--  include/linux/slub_def.h                        8
16 files changed, 25 insertions, 19 deletions
diff --git a/arch/arm/include/asm/cache.h b/arch/arm/include/asm/cache.h
index 66c160b8547f..9d6122096fbe 100644
--- a/arch/arm/include/asm/cache.h
+++ b/arch/arm/include/asm/cache.h
@@ -14,7 +14,7 @@
  * cache before the transfer is done, causing old data to be seen by
  * the CPU.
  */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 /*
  * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
diff --git a/arch/avr32/include/asm/cache.h b/arch/avr32/include/asm/cache.h
index d3cf35ab11ab..c3a58a189a91 100644
--- a/arch/avr32/include/asm/cache.h
+++ b/arch/avr32/include/asm/cache.h
@@ -11,7 +11,7 @@
  * cache before the transfer is done, causing old data to be seen by
  * the CPU.
  */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #ifndef __ASSEMBLER__
 struct cache_info {
diff --git a/arch/blackfin/include/asm/cache.h b/arch/blackfin/include/asm/cache.h
index 93f6c634fdf4..bd0641a267f1 100644
--- a/arch/blackfin/include/asm/cache.h
+++ b/arch/blackfin/include/asm/cache.h
@@ -15,7 +15,7 @@
 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
 #define SMP_CACHE_BYTES L1_CACHE_BYTES
 
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #ifdef CONFIG_SMP
 #define __cacheline_aligned
diff --git a/arch/frv/include/asm/mem-layout.h b/arch/frv/include/asm/mem-layout.h
index ccae981876fa..e9a0ec85a402 100644
--- a/arch/frv/include/asm/mem-layout.h
+++ b/arch/frv/include/asm/mem-layout.h
@@ -35,7 +35,7 @@
  * the slab must be aligned such that load- and store-double instructions don't
  * fault if used
  */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
 
 /*****************************************************************************/
diff --git a/arch/m68k/include/asm/cache.h b/arch/m68k/include/asm/cache.h
index ecafbe1718c3..0395c51e46a6 100644
--- a/arch/m68k/include/asm/cache.h
+++ b/arch/m68k/include/asm/cache.h
@@ -8,6 +8,6 @@
 #define L1_CACHE_SHIFT 4
 #define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT)
 
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #endif
diff --git a/arch/microblaze/include/asm/page.h b/arch/microblaze/include/asm/page.h
index 4f268faa0126..cf377d91da71 100644
--- a/arch/microblaze/include/asm/page.h
+++ b/arch/microblaze/include/asm/page.h
@@ -40,7 +40,7 @@
 #ifndef __ASSEMBLY__
 
 /* MS be sure that SLAB allocates aligned objects */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
 
diff --git a/arch/mips/include/asm/mach-generic/kmalloc.h b/arch/mips/include/asm/mach-generic/kmalloc.h
index b8e6deba352f..a5d669086ed9 100644
--- a/arch/mips/include/asm/mach-generic/kmalloc.h
+++ b/arch/mips/include/asm/mach-generic/kmalloc.h
@@ -7,7 +7,7 @@
  * Total overkill for most systems but need as a safe default.
  * Set this one if any device in the system might do non-coherent DMA.
  */
-#define ARCH_KMALLOC_MINALIGN 128
+#define ARCH_DMA_MINALIGN 128
 #endif
 
 #endif /* __ASM_MACH_GENERIC_KMALLOC_H */
diff --git a/arch/mips/include/asm/mach-ip27/kmalloc.h b/arch/mips/include/asm/mach-ip27/kmalloc.h
index 426bd049b2d7..82c23ce2afa7 100644
--- a/arch/mips/include/asm/mach-ip27/kmalloc.h
+++ b/arch/mips/include/asm/mach-ip27/kmalloc.h
@@ -2,7 +2,7 @@
 #define __ASM_MACH_IP27_KMALLOC_H
 
 /*
- * All happy, no need to define ARCH_KMALLOC_MINALIGN
+ * All happy, no need to define ARCH_DMA_MINALIGN
  */
 
 #endif /* __ASM_MACH_IP27_KMALLOC_H */
diff --git a/arch/mips/include/asm/mach-ip32/kmalloc.h b/arch/mips/include/asm/mach-ip32/kmalloc.h
index b1e0be60f720..042ca926c48f 100644
--- a/arch/mips/include/asm/mach-ip32/kmalloc.h
+++ b/arch/mips/include/asm/mach-ip32/kmalloc.h
@@ -3,9 +3,9 @@
 
 
 #if defined(CONFIG_CPU_R5000) || defined(CONFIG_CPU_RM7000)
-#define ARCH_KMALLOC_MINALIGN 32
+#define ARCH_DMA_MINALIGN 32
 #else
-#define ARCH_KMALLOC_MINALIGN 128
+#define ARCH_DMA_MINALIGN 128
 #endif
 
 #endif /* __ASM_MACH_IP32_KMALLOC_H */
diff --git a/arch/mn10300/include/asm/cache.h b/arch/mn10300/include/asm/cache.h
index 6e2fe28dde4e..781bf613366d 100644
--- a/arch/mn10300/include/asm/cache.h
+++ b/arch/mn10300/include/asm/cache.h
@@ -21,7 +21,7 @@
 #define L1_CACHE_DISPARITY L1_CACHE_NENTRIES * L1_CACHE_BYTES
 #endif
 
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 /* data cache purge registers
  * - read from the register to unconditionally purge that cache line
diff --git a/arch/powerpc/include/asm/page_32.h b/arch/powerpc/include/asm/page_32.h
index bd0849dbcaaa..68d73b2a7bfc 100644
--- a/arch/powerpc/include/asm/page_32.h
+++ b/arch/powerpc/include/asm/page_32.h
@@ -10,7 +10,7 @@
 #define VM_DATA_DEFAULT_FLAGS VM_DATA_DEFAULT_FLAGS32
 
 #ifdef CONFIG_NOT_COHERENT_CACHE
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 #endif
 
 #ifdef CONFIG_PTE_64BIT
diff --git a/arch/sh/include/asm/page.h b/arch/sh/include/asm/page.h
index fb703d120d09..c4e0b3d472b9 100644
--- a/arch/sh/include/asm/page.h
+++ b/arch/sh/include/asm/page.h
@@ -180,7 +180,7 @@ typedef struct page *pgtable_t;
  * Some drivers need to perform DMA into kmalloc'ed buffers
  * and so we have to increase the kmalloc minalign for this.
  */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #ifdef CONFIG_SUPERH64
 /*
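
The comment in this hunk states the core motivation for the whole rename: drivers DMA into kmalloc'ed buffers, so on non-coherent hardware those buffers must not share a cache line with anything else. A minimal, hypothetical driver-side sketch of that situation (the function name and error handling are illustrative, not taken from this patch):

#include <linux/dma-mapping.h>
#include <linux/slab.h>

/*
 * Illustrative only: dma_map_single() writes back / invalidates whole
 * cache lines covering the buffer.  If the buffer shared a line with
 * unrelated data, that data could be corrupted; ARCH_DMA_MINALIGN keeps
 * every kmalloc() object on its own cache line(s).
 */
static void *rx_buf_alloc(struct device *dev, size_t size, dma_addr_t *dma)
{
	void *buf = kmalloc(size, GFP_KERNEL);	/* aligned to ARCH_DMA_MINALIGN */

	if (!buf)
		return NULL;

	*dma = dma_map_single(dev, buf, size, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, *dma)) {
		kfree(buf);
		return NULL;
	}
	return buf;
}
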
diff --git a/arch/xtensa/include/asm/cache.h b/arch/xtensa/include/asm/cache.h
index ed8cd3cbd499..d2fd932fdb4d 100644
--- a/arch/xtensa/include/asm/cache.h
+++ b/arch/xtensa/include/asm/cache.h
@@ -29,6 +29,6 @@
 # define CACHE_WAY_SIZE ICACHE_WAY_SIZE
 #endif
 
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
 
 #endif /* _XTENSA_CACHE_H */
diff --git a/include/linux/slab_def.h b/include/linux/slab_def.h
index 1acfa73ce2ac..791a502f6906 100644
--- a/include/linux/slab_def.h
+++ b/include/linux/slab_def.h
@@ -17,7 +17,6 @@
 
 #include <trace/events/kmem.h>
 
-#ifndef ARCH_KMALLOC_MINALIGN
 /*
  * Enforce a minimum alignment for the kmalloc caches.
  * Usually, the kmalloc caches are cache_line_size() aligned, except when
@@ -27,6 +26,9 @@
  * ARCH_KMALLOC_MINALIGN allows that.
  * Note that increasing this value may disable some debug features.
  */
+#ifdef ARCH_DMA_MINALIGN
+#define ARCH_KMALLOC_MINALIGN ARCH_DMA_MINALIGN
+#else
 #define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
 #endif
 
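
Taken together with the per-arch hunks above, the resolution order in the slab headers is now: an architecture that may do non-coherent DMA defines only ARCH_DMA_MINALIGN and gets its kmalloc alignment from it; an architecture that defines neither falls back to the natural alignment of unsigned long long. A sketch with a made-up example value:

/* Hypothetical arch header on a non-coherent machine: */
#define ARCH_DMA_MINALIGN 32	/* example: 32-byte cache lines */

/* What the hunk above turns that into: */
#ifdef ARCH_DMA_MINALIGN
#define ARCH_KMALLOC_MINALIGN ARCH_DMA_MINALIGN			/* 32 here */
#else
#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)	/* usually 8 */
#endif
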
diff --git a/include/linux/slob_def.h b/include/linux/slob_def.h
index 62667f72c2ef..4382db09df4f 100644
--- a/include/linux/slob_def.h
+++ b/include/linux/slob_def.h
@@ -1,7 +1,9 @@
 #ifndef __LINUX_SLOB_DEF_H
 #define __LINUX_SLOB_DEF_H
 
-#ifndef ARCH_KMALLOC_MINALIGN
+#ifdef ARCH_DMA_MINALIGN
+#define ARCH_KMALLOC_MINALIGN ARCH_DMA_MINALIGN
+#else
 #define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long)
 #endif
 
diff --git a/include/linux/slub_def.h b/include/linux/slub_def.h
index 6447a723ecb1..6d14409c4d9a 100644
--- a/include/linux/slub_def.h
+++ b/include/linux/slub_def.h
@@ -106,15 +106,17 @@ struct kmem_cache {
 /*
  * Kmalloc subsystem.
  */
-#if defined(ARCH_KMALLOC_MINALIGN) && ARCH_KMALLOC_MINALIGN > 8
-#define KMALLOC_MIN_SIZE ARCH_KMALLOC_MINALIGN
+#if defined(ARCH_DMA_MINALIGN) && ARCH_DMA_MINALIGN > 8
+#define KMALLOC_MIN_SIZE ARCH_DMA_MINALIGN
 #else
 #define KMALLOC_MIN_SIZE 8
 #endif
 
 #define KMALLOC_SHIFT_LOW ilog2(KMALLOC_MIN_SIZE)
 
-#ifndef ARCH_KMALLOC_MINALIGN
+#ifdef ARCH_DMA_MINALIGN
+#define ARCH_KMALLOC_MINALIGN ARCH_DMA_MINALIGN
+#else
 #define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
 #endif
 
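
A quick worked example of the SLUB arithmetic above, assuming ARCH_DMA_MINALIGN is 128 (the generic MIPS default earlier in this patch):

/* Assumed: ARCH_DMA_MINALIGN == 128, so the "> 8" branch is taken. */
#define KMALLOC_MIN_SIZE  128	/* ARCH_DMA_MINALIGN */
#define KMALLOC_SHIFT_LOW 7	/* ilog2(128) == 7   */
/*
 * The smallest SLUB kmalloc cache is therefore kmalloc-128, and every
 * kmalloc() return value is at least 128-byte aligned, which is what
 * non-coherent DMA needs.
 */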