Diffstat (limited to 'drivers/misc/sgi-gru/grutables.h')
 drivers/misc/sgi-gru/grutables.h | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 52 insertions(+), 8 deletions(-)
diff --git a/drivers/misc/sgi-gru/grutables.h b/drivers/misc/sgi-gru/grutables.h
index bf1eeb7553ed..34ab3d453919 100644
--- a/drivers/misc/sgi-gru/grutables.h
+++ b/drivers/misc/sgi-gru/grutables.h
@@ -148,11 +148,13 @@
 #include <linux/wait.h>
 #include <linux/mmu_notifier.h>
 #include "gru.h"
+#include "grulib.h"
 #include "gruhandles.h"
 
 extern struct gru_stats_s gru_stats;
 extern struct gru_blade_state *gru_base[];
 extern unsigned long gru_start_paddr, gru_end_paddr;
+extern void *gru_start_vaddr;
 extern unsigned int gru_max_gids;
 
 #define GRU_MAX_BLADES		MAX_NUMNODES
@@ -174,9 +176,12 @@ struct gru_stats_s {
 	atomic_long_t assign_context;
 	atomic_long_t assign_context_failed;
 	atomic_long_t free_context;
-	atomic_long_t load_context;
-	atomic_long_t unload_context;
-	atomic_long_t steal_context;
+	atomic_long_t load_user_context;
+	atomic_long_t load_kernel_context;
+	atomic_long_t lock_kernel_context;
+	atomic_long_t unlock_kernel_context;
+	atomic_long_t steal_user_context;
+	atomic_long_t steal_kernel_context;
 	atomic_long_t steal_context_failed;
 	atomic_long_t nopfn;
 	atomic_long_t break_cow;
@@ -193,7 +198,7 @@ struct gru_stats_s {
 	atomic_long_t user_flush_tlb;
 	atomic_long_t user_unload_context;
 	atomic_long_t user_exception;
-	atomic_long_t set_task_slice;
+	atomic_long_t set_context_option;
 	atomic_long_t migrate_check;
 	atomic_long_t migrated_retarget;
 	atomic_long_t migrated_unload;
@@ -207,6 +212,9 @@ struct gru_stats_s {
 	atomic_long_t tlb_dropin_fail_range_active;
 	atomic_long_t tlb_dropin_fail_idle;
 	atomic_long_t tlb_dropin_fail_fmm;
+	atomic_long_t tlb_dropin_fail_no_exception;
+	atomic_long_t tlb_dropin_fail_no_exception_war;
+	atomic_long_t tfh_stale_on_fault;
 	atomic_long_t mmu_invalidate_range;
 	atomic_long_t mmu_invalidate_page;
 	atomic_long_t mmu_clear_flush_young;
@@ -253,7 +261,6 @@ extern struct mcs_op_statistic mcs_op_statistics[mcsop_last];
 
 #define OPT_DPRINT	1
 #define OPT_STATS	2
-#define GRU_QUICKLOOK	4
 
 
 #define IRQ_GRU			110	/* Starting IRQ number for interrupts */
@@ -373,6 +380,7 @@ struct gru_thread_state {
 					   required for contest */
 	unsigned char	ts_cbr_au_count;/* Number of CBR resources
 					   required for contest */
+	char		ts_cch_req_slice;/* CCH packet slice */
 	char		ts_blade;	/* If >= 0, migrate context if
 					   ref from diferent blade */
 	char		ts_force_cch_reload;
@@ -380,6 +388,9 @@ struct gru_thread_state {
 					   after migration */
 	char		ts_cbr_idx[GRU_CBR_AU];/* CBR numbers of each
 					   allocated CB */
+	int		ts_data_valid;	/* Indicates if ts_gdata has
+					   valid data */
+	struct gts_statistics ustats;	/* User statistics */
 	unsigned long	ts_gdata[0];	/* save area for GRU data (CB,
 					   DS, CBE)  */
 };
@@ -452,6 +463,14 @@ struct gru_blade_state {
 					   reserved cb */
 	void		*kernel_dsr;	/* First kernel
 					   reserved DSR */
+	struct rw_semaphore	bs_kgts_sema;	/* lock for kgts */
+	struct gru_thread_state *bs_kgts;	/* GTS for kernel use */
+
+	/* ---- the following are used for managing kernel async GRU CBRs --- */
+	int		bs_async_dsr_bytes;	/* DSRs for async */
+	int		bs_async_cbrs;		/* CBRs AU for async */
+	struct completion *bs_async_wq;
+
 	/* ---- the following are protected by the bs_lock spinlock ---- */
 	spinlock_t	bs_lock;	/* lock used for
 					   stealing contexts */
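
Note on the new blade-state fields: bs_kgts is a per-blade GRU thread state reserved for kernel use, and bs_kgts_sema is the rw_semaphore that guards it. A minimal sketch of how a caller might check it under the semaphore; the helper name and the read-side locking policy are assumptions for illustration, not part of this patch.

/* Illustrative only: examine the per-blade kernel GTS under bs_kgts_sema.
 * Assumes <linux/rwsem.h> plus the structures declared in this header.
 */
static int blade_has_kgts(struct gru_blade_state *bs)
{
	int ret;

	down_read(&bs->bs_kgts_sema);	/* shared hold while bs_kgts is read */
	ret = bs->bs_kgts != NULL;	/* kgts is allocated lazily elsewhere */
	up_read(&bs->bs_kgts_sema);
	return ret;
}
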
@@ -552,6 +571,12 @@ struct gru_blade_state {
 
 /* Lock hierarchy checking enabled only in emulator */
 
+/* 0 = lock failed, 1 = locked */
+static inline int __trylock_handle(void *h)
+{
+	return !test_and_set_bit(1, h);
+}
+
 static inline void __lock_handle(void *h)
 {
 	while (test_and_set_bit(1, h))
@@ -563,6 +588,11 @@ static inline void __unlock_handle(void *h)
 	clear_bit(1, h);
 }
 
+static inline int trylock_cch_handle(struct gru_context_configuration_handle *cch)
+{
+	return __trylock_handle(cch);
+}
+
 static inline void lock_cch_handle(struct gru_context_configuration_handle *cch)
 {
 	__lock_handle(cch);
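
Note on the trylock variant: unlike lock_cch_handle(), trylock_cch_handle() returns immediately with 0 when bit 1 of the handle is already set. A hedged usage sketch, assuming the unlock_cch_handle() counterpart already present in this header and a hypothetical caller:

/* Illustrative only: opportunistically lock a CCH and back off if contended. */
static int try_touch_cch(struct gru_context_configuration_handle *cch)
{
	if (!trylock_cch_handle(cch))	/* 0 = handle already held elsewhere */
		return -EBUSY;		/* caller retries or falls back */
	/* ... operate on the locked CCH here ... */
	unlock_cch_handle(cch);
	return 0;
}
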
@@ -584,6 +614,11 @@ static inline void unlock_tgh_handle(struct gru_tlb_global_handle *tgh)
 	__unlock_handle(tgh);
 }
 
+static inline int is_kernel_context(struct gru_thread_state *gts)
+{
+	return !gts->ts_mm;
+}
+
 /*-----------------------------------------------------------------------------
  * Function prototypes & externs
  */
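
Note on is_kernel_context(): kernel-owned contexts are recognized by having no mm attached (ts_mm is NULL), which lets callers steer, for example, the split steal counters added above. A minimal sketch using only declarations visible in this diff; the helper itself is hypothetical.

/* Illustrative only: account a context steal against the matching counter. */
static void count_context_steal(struct gru_thread_state *gts)
{
	if (is_kernel_context(gts))		/* gts->ts_mm == NULL */
		atomic_long_inc(&gru_stats.steal_kernel_context);
	else
		atomic_long_inc(&gru_stats.steal_user_context);
}
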
@@ -598,24 +633,32 @@ extern struct gru_thread_state *gru_find_thread_state(struct vm_area_struct
 			*vma, int tsid);
 extern struct gru_thread_state *gru_alloc_thread_state(struct vm_area_struct
 			*vma, int tsid);
+extern struct gru_state *gru_assign_gru_context(struct gru_thread_state *gts,
+		int blade);
+extern void gru_load_context(struct gru_thread_state *gts);
+extern void gru_steal_context(struct gru_thread_state *gts, int blade_id);
 extern void gru_unload_context(struct gru_thread_state *gts, int savestate);
 extern int gru_update_cch(struct gru_thread_state *gts, int force_unload);
 extern void gts_drop(struct gru_thread_state *gts);
 extern void gru_tgh_flush_init(struct gru_state *gru);
-extern int gru_kservices_init(struct gru_state *gru);
-extern void gru_kservices_exit(struct gru_state *gru);
+extern int gru_kservices_init(void);
+extern void gru_kservices_exit(void);
+extern int gru_dump_chiplet_request(unsigned long arg);
+extern long gru_get_gseg_statistics(unsigned long arg);
 extern irqreturn_t gru_intr(int irq, void *dev_id);
 extern int gru_handle_user_call_os(unsigned long address);
 extern int gru_user_flush_tlb(unsigned long arg);
 extern int gru_user_unload_context(unsigned long arg);
 extern int gru_get_exception_detail(unsigned long arg);
-extern int gru_set_task_slice(long address);
+extern int gru_set_context_option(unsigned long address);
 extern int gru_cpu_fault_map_id(void);
 extern struct vm_area_struct *gru_find_vma(unsigned long vaddr);
 extern void gru_flush_all_tlb(struct gru_state *gru);
 extern int gru_proc_init(void);
 extern void gru_proc_exit(void);
 
+extern struct gru_thread_state *gru_alloc_gts(struct vm_area_struct *vma,
+		int cbr_au_count, int dsr_au_count, int options, int tsid);
 extern unsigned long gru_reserve_cb_resources(struct gru_state *gru,
 		int cbr_au_count, char *cbmap);
 extern unsigned long gru_reserve_ds_resources(struct gru_state *gru,
@@ -624,6 +667,7 @@ extern int gru_fault(struct vm_area_struct *, struct vm_fault *vmf);
 extern struct gru_mm_struct *gru_register_mmu_notifier(void);
 extern void gru_drop_mmu_notifier(struct gru_mm_struct *gms);
 
+extern int gru_ktest(unsigned long arg);
 extern void gru_flush_tlb_range(struct gru_mm_struct *gms, unsigned long start,
 		unsigned long len);
 