Diffstat (limited to 'include/asm-xtensa/xtensa/cacheattrasm.h')
 -rw-r--r--  include/asm-xtensa/xtensa/cacheattrasm.h |  432
 1 file changed, 0 insertions, 432 deletions
diff --git a/include/asm-xtensa/xtensa/cacheattrasm.h b/include/asm-xtensa/xtensa/cacheattrasm.h
deleted file mode 100644
index 1c3e117b359..00000000000
--- a/include/asm-xtensa/xtensa/cacheattrasm.h
+++ /dev/null
@@ -1,432 +0,0 @@
#ifndef XTENSA_CACHEATTRASM_H
#define XTENSA_CACHEATTRASM_H

/*
 * THIS FILE IS GENERATED -- DO NOT MODIFY BY HAND
 *
 * include/asm-xtensa/xtensa/cacheattrasm.h -- assembler-specific
 * CACHEATTR register related definitions that depend on CORE
 * configuration.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2002 Tensilica Inc.
 */


#include <xtensa/coreasm.h>


/*
 * This header file defines assembler macros of the form:
 *     <x>cacheattr_<func>
 * where:
 *     <x> is 'i', 'd' or absent for instruction, data
 *         or both caches; and
 *     <func> indicates the function of the macro.
 *
 * The following functions are defined:
 *
 *   icacheattr_get
 *     Reads I-cache CACHEATTR into a2 (clobbers a3-a5).
 *
 *   dcacheattr_get
 *     Reads D-cache CACHEATTR into a2 (clobbers a3-a5).
 *     (Note: for configs with a real CACHEATTR register, the
 *     above two macros are identical.)
 *
 *   cacheattr_set
 *     Writes both I-cache and D-cache CACHEATTRs from a2 (a3-a8 clobbered).
 *     Works even when changing one's own code's attributes.
 *
 *   icacheattr_is_enabled  label
 *     Branches to \label if I-cache appears to have been enabled
 *     (eg. if CACHEATTR contains a cache-enabled attribute).
 *     (clobbers a2-a5, SAR)
 *
 *   dcacheattr_is_enabled  label
 *     Branches to \label if D-cache appears to have been enabled
 *     (eg. if CACHEATTR contains a cache-enabled attribute).
 *     (clobbers a2-a5, SAR)
 *
 *   cacheattr_is_enabled  label
 *     Branches to \label if either I-cache or D-cache appears to have
 *     been enabled (eg. if CACHEATTR contains a cache-enabled attribute).
 *     (clobbers a2-a5, SAR)
 *
 * The following macros are only defined under certain conditions:
 *
 *   icacheattr_set  (if XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR)
 *     Writes I-cache CACHEATTR from a2 (a3-a8 clobbered).
 *
 *   dcacheattr_set  (if XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR)
 *     Writes D-cache CACHEATTR from a2 (a3-a8 clobbered).
 */
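/*
 * Illustrative usage sketch (not part of the generated header itself): the
 * only CA value relied on here is XCHAL_CACHEATTR_ALL_BYPASS, defined
 * further below as the powerup/reset (all caches bypassed) value, and the
 * label name is purely an example.
 *
 *     movi    a2, XCHAL_CACHEATTR_ALL_BYPASS  // attribute value to apply
 *     cacheattr_set                           // update both caches safely (a3-a8 clobbered)
 *     cacheattr_is_enabled    .Lstill_on      // should now fall through (a2-a5, SAR clobbered)
 *     // ... caches are disabled/bypassed on this path ...
 * .Lstill_on:
 */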


/***************************   GENERIC -- ALL CACHES   ***************************/

/*
 * _cacheattr_get
 *
 * (Internal macro.)
 * Returns value of CACHEATTR register (or closest equivalent) in a2.
 *
 * Entry:
 *     (none)
 * Exit:
 *     a2      value read from CACHEATTR
 *     a3-a5   clobbered (temporaries)
 */
        .macro  _cacheattr_get  tlb
#if XCHAL_HAVE_CACHEATTR
        rsr     a2, CACHEATTR
#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        // We have a config that "mimics" CACHEATTR using a simplified
        // "MMU" composed of a single statically-mapped way.
        // DTLB and ITLB are independent, so there's no single
        // cache attribute that can describe both.  So for now
        // just return the DTLB state.
        movi    a5, 0xE0000000
        movi    a2, 0
        movi    a3, 0
1:      add     a3, a3, a5      // next segment
        r&tlb&1 a4, a3          // get PPN+CA of segment at 0xE0000000, 0xC0000000, ..., 0
        dsync                   // interlock???
        slli    a2, a2, 4
        extui   a4, a4, 0, 4    // extract CA
        or      a2, a2, a4
        bnez    a3, 1b
#else
        // This macro isn't applicable to arbitrary MMU configurations.
        // Just return zero.
        movi    a2, 0
#endif
        .endm

        .macro  icacheattr_get
        _cacheattr_get  itlb
        .endm

        .macro  dcacheattr_get
        _cacheattr_get  dtlb
        .endm
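// Note (illustrative, not part of the generated header): both *_get macros
// leave one 4-bit CA per 512MB region in a2 -- region 0 (address 0) in
// bits 3..0 up through region 7 (0xE0000000) in bits 31..28, the same
// per-region layout consumed by the *_set macros further below.  For example:
//
//      dcacheattr_get                  // a2 = current D-cache attributes (a3-a5 clobbered)
//      extui   a4, a2, 0, 4            // a4 = CA of the 512MB region at address 0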


#define XCHAL_CACHEATTR_ALL_BYPASS  0x22222222  /* default (powerup/reset) value of CACHEATTR,
                                                   all BYPASS mode (ie. disabled/bypassed caches) */

#if XCHAL_HAVE_CACHEATTR || XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR

#define XCHAL_FCA_ENAMASK   0x001A  /* bitmap of fetch attributes that require enabled icache */
#define XCHAL_LCA_ENAMASK   0x0003  /* bitmap of load attributes that require enabled dcache */
#define XCHAL_SCA_ENAMASK   0x0003  /* bitmap of store attributes that require enabled dcache */
#define XCHAL_LSCA_ENAMASK  (XCHAL_LCA_ENAMASK|XCHAL_SCA_ENAMASK)   /* l/s attrs requiring enabled dcache */
#define XCHAL_ALLCA_ENAMASK (XCHAL_FCA_ENAMASK|XCHAL_LSCA_ENAMASK)  /* all attrs requiring enabled caches */

/*
 * _cacheattr_is_enabled
 *
 * (Internal macro.)
 * Branches to \label if CACHEATTR in a2 indicates an enabled
 * cache, using the mask in a3.
 *
 * Parameters:
 *     label   where to branch to if cache is enabled
 * Entry:
 *     a2      contains CACHEATTR value used to determine whether
 *             caches are enabled
 *     a3      16-bit constant where each bit corresponds to
 *             one of the 16 possible CA values (in a CACHEATTR mask);
 *             CA values that indicate the cache is enabled
 *             have their corresponding bit set in this mask
 *             (eg. use XCHAL_xCA_ENAMASK, above)
 * Exit:
 *     a2, a4, a5  clobbered
 *     SAR         clobbered
 */
        .macro  _cacheattr_is_enabled   label
        movi    a4, 8                   // loop 8 times
.Lcaife\@:
        extui   a5, a2, 0, 4            // get CA nibble
        ssr     a5                      // index into mask according to CA...
        srl     a5, a3                  // ...and get CA's mask bit in a5 bit 0
        bbsi.l  a5, 0, \label           // if CA indicates cache enabled, jump to label
        srli    a2, a2, 4               // next nibble
        addi    a4, a4, -1
        bnez    a4, .Lcaife\@           // loop for each nibble
        .endm
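// Worked example (illustrative, not part of the generated header): with
// a2 = XCHAL_CACHEATTR_ALL_BYPASS (0x22222222) and a3 = XCHAL_LSCA_ENAMASK
// (0x0003), every nibble is CA=2; bit 2 of the mask is clear, so the branch
// is never taken (no region enables the D-cache).  If any nibble were 0 or 1
// (bits that are set in 0x0003), the macro would branch to \label.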

#else /* XCHAL_HAVE_CACHEATTR || XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR */

        .macro  _cacheattr_is_enabled   label
        j       \label          // macro not applicable, assume caches always enabled
        .endm

#endif /* XCHAL_HAVE_CACHEATTR || XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR */


/*
 * icacheattr_is_enabled
 *
 * Branches to \label if I-cache is enabled.
 *
 * Parameters:
 *     label   where to branch to if icache is enabled
 * Entry:
 *     (none)
 * Exit:
 *     a2-a5, SAR  clobbered (temporaries)
 */
        .macro  icacheattr_is_enabled   label
#if XCHAL_HAVE_CACHEATTR || XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        icacheattr_get
        movi    a3, XCHAL_FCA_ENAMASK
#endif
        _cacheattr_is_enabled   \label
        .endm

/*
 * dcacheattr_is_enabled
 *
 * Branches to \label if D-cache is enabled.
 *
 * Parameters:
 *     label   where to branch to if dcache is enabled
 * Entry:
 *     (none)
 * Exit:
 *     a2-a5, SAR  clobbered (temporaries)
 */
        .macro  dcacheattr_is_enabled   label
#if XCHAL_HAVE_CACHEATTR || XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        dcacheattr_get
        movi    a3, XCHAL_LSCA_ENAMASK
#endif
        _cacheattr_is_enabled   \label
        .endm

/*
 * cacheattr_is_enabled
 *
 * Branches to \label if either I-cache or D-cache is enabled.
 *
 * Parameters:
 *     label   where to branch to if a cache is enabled
 * Entry:
 *     (none)
 * Exit:
 *     a2-a5, SAR  clobbered (temporaries)
 */
        .macro  cacheattr_is_enabled    label
#if XCHAL_HAVE_CACHEATTR
        rsr     a2, CACHEATTR
        movi    a3, XCHAL_ALLCA_ENAMASK
#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        icacheattr_get
        movi    a3, XCHAL_FCA_ENAMASK
        _cacheattr_is_enabled   \label
        dcacheattr_get
        movi    a3, XCHAL_LSCA_ENAMASK
#endif
        _cacheattr_is_enabled   \label
        .endm
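// Call-site sketch (illustrative, not part of the generated header): the
// *_is_enabled macros take a branch target rather than returning a flag,
// so a typical use supplies a local forward label, e.g.:
//
//      dcacheattr_is_enabled   2f      // clobbers a2-a5 and SAR
//      // ... D-cache not enabled on this path ...
// 2:   // ... continue; D-cache was already enabled ...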


/*
 * The ISA does not have a defined way to change the
 * instruction cache attributes of the running code,
 * ie. of the memory area that encloses the current PC.
 * However, each micro-architecture (or class of
 * configurations within a micro-architecture)
 * provides a way to deal with this issue.
 *
 * The macros below implement the approach relevant
 * to each such configuration.
 */

#if XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        // We have a config that "mimics" CACHEATTR using a simplified
        // "MMU" composed of a single statically-mapped way.

/*
 * icacheattr_set
 *
 * Entry:
 *     a2      cacheattr value to set
 * Exit:
 *     a2      unchanged
 *     a3-a8   clobbered (temporaries)
 */
        .macro  icacheattr_set

        movi    a5, 0xE0000000  // mask of upper 3 bits
        movi    a6, 3f          // PC where ITLB is set
        movi    a3, 0           // start at region 0 (0 .. 7)
        and     a6, a6, a5      // upper 3 bits of local PC area
        mov     a7, a2          // copy a2 so it doesn't get clobbered
        j       3f

# if XCHAL_HAVE_XLT_CACHEATTR
        // Can do translations, use generic method:
1:      sub     a6, a3, a5      // address of some other segment
        ritlb1  a8, a6          // save its PPN+CA
        dsync                   // interlock??
        witlb   a4, a6          // make it translate to this code area
        movi    a6, 5f          // where to jump into it
        isync
        sub     a6, a6, a5      // adjust jump address within that other segment
        jx      a6

        // Note that in the following code snippet, which runs at a different virtual
        // address than it is assembled for, we avoid using literals (eg. via movi/l32r)
        // just in case literals end up in a different 512 MB segment, and we avoid
        // instructions that rely on the current PC being what is expected.
        //
        .align  4
        _j      6f              // this is at label '5' minus 4 bytes
        .align  4
5:      witlb   a4, a3          // we're in other segment, now can write previous segment's CA
        isync
        add     a6, a6, a5      // back to previous segment
        addi    a6, a6, -4      // next jump label
        jx      a6

6:      sub     a6, a3, a5      // address of some other segment
        witlb   a8, a6          // restore PPN+CA of other segment
        mov     a6, a3          // restore a6
        isync
# else /* XCHAL_HAVE_XLT_CACHEATTR */
        // Use micro-architecture specific method.
        // The following 4-instruction sequence is aligned such that
        // it all fits within a single I-cache line.  Sixteen byte
        // alignment is sufficient for this (using XCHAL_ICACHE_LINESIZE
        // actually causes problems because that can be greater than
        // the alignment of the reset vector, where this macro is often
        // invoked, which would cause the linker to align the reset
        // vector code away from the reset vector!!).
        .align  16 /*XCHAL_ICACHE_LINESIZE*/
1:      _witlb  a4, a3          // write wired PTE (CA, no PPN) of 512MB segment to ITLB
        _isync
        nop
        nop
# endif /* XCHAL_HAVE_XLT_CACHEATTR */
        beq     a3, a5, 4f      // done?

        // Note that in the WITLB loop, we don't do any load/stores
        // (may not be an issue here, but it is important in the DTLB case).
2:      srli    a7, a7, 4       // next CA
        sub     a3, a3, a5      // next segment (add 0x20000000)
3:
# if XCHAL_HAVE_XLT_CACHEATTR   /* if have translation, preserve it */
        ritlb1  a8, a3          // get current PPN+CA of segment
        dsync                   // interlock???
        extui   a4, a7, 0, 4    // extract CA to set
        srli    a8, a8, 4       // clear CA but keep PPN ...
        slli    a8, a8, 4       // ...
        add     a4, a4, a8      // combine new CA with PPN to preserve
# else
        extui   a4, a7, 0, 4    // extract CA
# endif
        beq     a3, a6, 1b      // current PC's region? if so, do it in a safe way
        witlb   a4, a3          // write wired PTE (CA [+PPN]) of 512MB segment to ITLB
        bne     a3, a5, 2b
        isync                   // make sure all ifetch changes take effect
4:
        .endm   // icacheattr_set


/*
 * dcacheattr_set
 *
 * Entry:
 *     a2      cacheattr value to set
 * Exit:
 *     a2      unchanged
 *     a3-a8   clobbered (temporaries)
 */

        .macro  dcacheattr_set

        movi    a5, 0xE0000000  // mask of upper 3 bits
        movi    a3, 0           // start at region 0 (0 .. 7)
        mov     a7, a2          // copy a2 so it doesn't get clobbered
        j       3f
        // Note that in the WDTLB loop, we don't do any load/stores
        // (including implicit l32r via movi) because it isn't safe.
2:      srli    a7, a7, 4       // next CA
        sub     a3, a3, a5      // next segment (add 0x20000000)
3:
# if XCHAL_HAVE_XLT_CACHEATTR   /* if have translation, preserve it */
        rdtlb1  a8, a3          // get current PPN+CA of segment
        dsync                   // interlock???
        extui   a4, a7, 0, 4    // extract CA to set
        srli    a8, a8, 4       // clear CA but keep PPN ...
        slli    a8, a8, 4       // ...
        add     a4, a4, a8      // combine new CA with PPN to preserve
# else
        extui   a4, a7, 0, 4    // extract CA to set
# endif
        wdtlb   a4, a3          // write wired PTE (CA [+PPN]) of 512MB segment to DTLB
        bne     a3, a5, 2b
        dsync                   // make sure all data path changes take effect
        .endm   // dcacheattr_set
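// Usage sketch (illustrative, not part of the generated header): dcacheattr_set
// touches only the DTLB, so on these configs it can bypass the D-cache while
// leaving instruction-fetch attributes alone; a2 is preserved across the call.
//
//      movi    a2, XCHAL_CACHEATTR_ALL_BYPASS  // CA=2 (bypass) for every region
//      dcacheattr_set                          // a2 unchanged, a3-a8 clobbered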

#endif /* XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR */


/*
 * cacheattr_set
 *
 * Macro that sets the current CACHEATTR safely
 * (both i and d) according to the current contents of a2.
 * It works even when changing the cache attributes of
 * the currently running code.
 *
 * Entry:
 *     a2      cacheattr value to set
 * Exit:
 *     a2      unchanged
 *     a3-a8   clobbered (temporaries)
 */
        .macro  cacheattr_set

#if XCHAL_HAVE_CACHEATTR
# if XCHAL_ICACHE_LINESIZE < 4
        // No i-cache, so can always safely write to CACHEATTR:
        wsr     a2, CACHEATTR
# else
        // The Athens micro-architecture, when using the old
        // exception architecture option (ie. with the CACHEATTR register)
        // allows changing the cache attributes of the running code
        // using the following exact sequence aligned to be within
        // an instruction cache line.  (NOTE: using XCHAL_ICACHE_LINESIZE
        // alignment actually causes problems because that can be greater
        // than the alignment of the reset vector, where this macro is often
        // invoked, which would cause the linker to align the reset
        // vector code away from the reset vector!!).
        j       1f
        .align  16 /*XCHAL_ICACHE_LINESIZE*/    // align to within an I-cache line
1:      _wsr    a2, CACHEATTR
        _isync
        nop
        nop
# endif
#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR
        // DTLB and ITLB are independent, but to keep semantics
        // of this macro we simply write to both.
        icacheattr_set
        dcacheattr_set
#else
        // This macro isn't applicable to arbitrary MMU configurations.
        // Do nothing in this case.
#endif
        .endm
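// Round-trip sketch (illustrative, not part of the generated header): read
// the current attributes, adjust them, and apply the result.  How the new
// per-region CA nibbles are computed is configuration-specific and left
// abstract here.
//
//      dcacheattr_get                  // a2 = current attributes (a3-a5 clobbered)
//      // ... derive the desired per-region CA nibbles into a2 ...
//      cacheattr_set                   // apply safely, even to the running code's region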


#endif /*XTENSA_CACHEATTRASM_H*/
