Diffstat (limited to 'include/asm-i386/desc.h')
-rw-r--r--  include/asm-i386/desc.h  |  95
1 file changed, 63 insertions(+), 32 deletions(-)
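For orientation: this patch replaces the pointer-based per-CPU GDT descriptor (cpu_gdt_descr) with a page-aligned per-CPU struct gdt_page, and pulls the raw descriptor-table asm out of the macros into native_*() inline functions that the CONFIG_PARAVIRT layer can override. The sketch below is only a self-contained, user-space approximation of the new access pattern; the per-CPU storage, page size, and GDT_ENTRIES value are simplified stand-ins, not the kernel's definitions.

/* Stand-alone sketch of the gdt_page access pattern (illustrative only;
 * the per-CPU storage and constants here are simplified stand-ins). */
#include <stdio.h>

#define GDT_ENTRIES	32		/* placeholder, not the kernel value */
#define NR_CPUS		2

struct desc_struct {
	unsigned int a, b;		/* low/high dword of one descriptor */
};

struct gdt_page {
	struct desc_struct gdt[GDT_ENTRIES];
} __attribute__((aligned(4096)));	/* page-aligned, as in the patch */

/* Stand-in for DECLARE_PER_CPU(struct gdt_page, gdt_page). */
static struct gdt_page gdt_pages[NR_CPUS];

static inline struct desc_struct *get_cpu_gdt_table(unsigned int cpu)
{
	return gdt_pages[cpu].gdt;	/* mirrors per_cpu(gdt_page, cpu).gdt */
}

/* Same shape as the patched write_dt_entry(): two 32-bit stores. */
static inline void write_dt_entry(struct desc_struct *dt,
				  int entry, unsigned int lo, unsigned int hi)
{
	dt[entry].a = lo;
	dt[entry].b = hi;
}

int main(void)
{
	struct desc_struct *gdt = get_cpu_gdt_table(0);

	write_dt_entry(gdt, 3, 0x0000ffff, 0x00cf9a00);
	printf("entry 3: a=%08x b=%08x\n", gdt[3].a, gdt[3].b);
	return 0;
}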
diff --git a/include/asm-i386/desc.h b/include/asm-i386/desc.h
index 050831f34f71..c547403f341d 100644
--- a/include/asm-i386/desc.h
+++ b/include/asm-i386/desc.h
@@ -12,23 +12,24 @@
 
 #include <asm/mmu.h>
 
-extern struct desc_struct cpu_gdt_table[GDT_ENTRIES];
-
 struct Xgt_desc_struct {
 	unsigned short size;
 	unsigned long address __attribute__((packed));
 	unsigned short pad;
 } __attribute__ ((packed));
 
-extern struct Xgt_desc_struct idt_descr;
-DECLARE_PER_CPU(struct Xgt_desc_struct, cpu_gdt_descr);
-extern struct Xgt_desc_struct early_gdt_descr;
+struct gdt_page
+{
+	struct desc_struct gdt[GDT_ENTRIES];
+} __attribute__((aligned(PAGE_SIZE)));
+DECLARE_PER_CPU(struct gdt_page, gdt_page);
 
 static inline struct desc_struct *get_cpu_gdt_table(unsigned int cpu)
 {
-	return (struct desc_struct *)per_cpu(cpu_gdt_descr, cpu).address;
+	return per_cpu(gdt_page, cpu).gdt;
 }
 
+extern struct Xgt_desc_struct idt_descr;
 extern struct desc_struct idt_table[];
 extern void set_intr_gate(unsigned int irq, void * addr);
 
@@ -58,45 +59,33 @@ static inline void pack_gate(__u32 *a, __u32 *b,
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
-#define load_TR_desc() __asm__ __volatile__("ltr %w0"::"q" (GDT_ENTRY_TSS*8))
-
-#define load_gdt(dtr) __asm__ __volatile("lgdt %0"::"m" (*dtr))
-#define load_idt(dtr) __asm__ __volatile("lidt %0"::"m" (*dtr))
+#define load_TR_desc() native_load_tr_desc()
+#define load_gdt(dtr) native_load_gdt(dtr)
+#define load_idt(dtr) native_load_idt(dtr)
 #define load_tr(tr) __asm__ __volatile("ltr %0"::"m" (tr))
 #define load_ldt(ldt) __asm__ __volatile("lldt %0"::"m" (ldt))
 
-#define store_gdt(dtr) __asm__ ("sgdt %0":"=m" (*dtr))
-#define store_idt(dtr) __asm__ ("sidt %0":"=m" (*dtr))
-#define store_tr(tr) __asm__ ("str %0":"=m" (tr))
+#define store_gdt(dtr) native_store_gdt(dtr)
+#define store_idt(dtr) native_store_idt(dtr)
+#define store_tr(tr) (tr = native_store_tr())
 #define store_ldt(ldt) __asm__ ("sldt %0":"=m" (ldt))
 
-#if TLS_SIZE != 24
-# error update this code.
-#endif
-
-static inline void load_TLS(struct thread_struct *t, unsigned int cpu)
-{
-#define C(i) get_cpu_gdt_table(cpu)[GDT_ENTRY_TLS_MIN + i] = t->tls_array[i]
-	C(0); C(1); C(2);
-#undef C
-}
+#define load_TLS(t, cpu) native_load_tls(t, cpu)
+#define set_ldt native_set_ldt
 
 #define write_ldt_entry(dt, entry, a, b) write_dt_entry(dt, entry, a, b)
 #define write_gdt_entry(dt, entry, a, b) write_dt_entry(dt, entry, a, b)
 #define write_idt_entry(dt, entry, a, b) write_dt_entry(dt, entry, a, b)
+#endif
 
-static inline void write_dt_entry(void *dt, int entry, __u32 entry_a, __u32 entry_b)
+static inline void write_dt_entry(struct desc_struct *dt,
+				  int entry, u32 entry_low, u32 entry_high)
 {
-	__u32 *lp = (__u32 *)((char *)dt + entry*8);
-	*lp = entry_a;
-	*(lp+1) = entry_b;
+	dt[entry].a = entry_low;
+	dt[entry].b = entry_high;
 }
 
-#define set_ldt native_set_ldt
-#endif /* CONFIG_PARAVIRT */
-
-static inline fastcall void native_set_ldt(const void *addr,
-				unsigned int entries)
+static inline void native_set_ldt(const void *addr, unsigned int entries)
 {
 	if (likely(entries == 0))
 		__asm__ __volatile__("lldt %w0"::"q" (0));
@@ -112,6 +101,48 @@ static inline fastcall void native_set_ldt(const void *addr,
 	}
 }
 
+
+static inline void native_load_tr_desc(void)
+{
+	asm volatile("ltr %w0"::"q" (GDT_ENTRY_TSS*8));
+}
+
+static inline void native_load_gdt(const struct Xgt_desc_struct *dtr)
+{
+	asm volatile("lgdt %0"::"m" (*dtr));
+}
+
+static inline void native_load_idt(const struct Xgt_desc_struct *dtr)
+{
+	asm volatile("lidt %0"::"m" (*dtr));
+}
+
+static inline void native_store_gdt(struct Xgt_desc_struct *dtr)
+{
+	asm ("sgdt %0":"=m" (*dtr));
+}
+
+static inline void native_store_idt(struct Xgt_desc_struct *dtr)
+{
+	asm ("sidt %0":"=m" (*dtr));
+}
+
+static inline unsigned long native_store_tr(void)
+{
+	unsigned long tr;
+	asm ("str %0":"=r" (tr));
+	return tr;
+}
+
+static inline void native_load_tls(struct thread_struct *t, unsigned int cpu)
+{
+	unsigned int i;
+	struct desc_struct *gdt = get_cpu_gdt_table(cpu);
+
+	for (i = 0; i < GDT_ENTRY_TLS_ENTRIES; i++)
+		gdt[GDT_ENTRY_TLS_MIN + i] = t->tls_array[i];
+}
+
 static inline void _set_gate(int gate, unsigned int type, void *addr, unsigned short seg)
 {
 	__u32 a, b;