path: root/include/linux/cache.h
author    Christoph Lameter <christoph@lameter.com>    2005-07-07 20:56:59 -0400
committer Linus Torvalds <torvalds@g5.osdl.org>    2005-07-07 21:23:46 -0400
commit    6c036527a630720063b67d9a65455e8caca2c8fa (patch)
tree      316e947f5f4efcda0205e48044ed1d12665eaed1 /include/linux/cache.h
parent    0db925af1db5f3dfe1691c35b39496e2baaff9c9 (diff)
[PATCH] mostly_read data section
Add a new section called ".data.read_mostly" for data items that are read frequently and rarely written to, such as cpumaps. If these items are placed in the regular .data section, these frequently read items may end up in cachelines together with data that is frequently updated. In that case all processors in an SMP system must needlessly reload the cachelines containing those frequently read variables again and again. Keeping these cachelines shareable allows each cpu in an SMP system to keep a local copy of them, thereby optimizing performance.

Signed-off-by: Alok N Kataria <alokk@calsoftinc.com>
Signed-off-by: Shobhit Dayal <shobhit@calsoftinc.com>
Signed-off-by: Christoph Lameter <christoph@scalex86.org>
Signed-off-by: Shai Fultheim <shai@scalex86.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
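A variable only ends up in the new section if the architecture's linker script also collects ".data.read_mostly" into the output image; that side of the change is not visible in this hunk. A minimal sketch of what the GNU ld fragment could look like (the alignment value and its placement here are assumptions, not taken from this patch):

    /* Illustrative only: group read-mostly data and align it to a
     * cacheline boundary so it does not share a line with hot data. */
    . = ALIGN(32);
    .data.read_mostly : { *(.data.read_mostly) }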
Diffstat (limited to 'include/linux/cache.h')
-rw-r--r--  include/linux/cache.h | 6 ++++++
1 file changed, 6 insertions(+), 0 deletions(-)
diff --git a/include/linux/cache.h b/include/linux/cache.h
index 4d767b93738a..2b66a36d85f0 100644
--- a/include/linux/cache.h
+++ b/include/linux/cache.h
@@ -13,6 +13,12 @@
 #define SMP_CACHE_BYTES L1_CACHE_BYTES
 #endif
 
+#ifdef CONFIG_X86
+#define __read_mostly __attribute__((__section__(".data.read_mostly")))
+#else
+#define __read_mostly
+#endif
+
 #ifndef ____cacheline_aligned
 #define ____cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
 #endif
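As a usage illustration (a hypothetical variable, not part of this patch): a value that is written once during initialization and read on every subsequent fast-path access can be annotated with the new macro. On x86 it then lands in .data.read_mostly, while on other architectures __read_mostly expands to nothing and the variable stays in .data.

    #include <linux/cache.h>

    /* Hypothetical example: set once at init time, read on every
     * fast-path call afterwards, so it should not share a cacheline
     * with frequently written data. */
    static unsigned long poll_interval __read_mostly = 100;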