Diffstat (limited to 'include')
-rw-r--r--  include/asm-m68knommu/system.h  54
1 file changed, 48 insertions, 6 deletions
diff --git a/include/asm-m68knommu/system.h b/include/asm-m68knommu/system.h
index c341b66c147b..53cbbad0f130 100644
--- a/include/asm-m68knommu/system.h
+++ b/include/asm-m68knommu/system.h
@@ -57,9 +57,18 @@ asmlinkage void resume(void);
57 : "cc", "%d0", "memory") 57 : "cc", "%d0", "memory")
58#define local_irq_disable() __asm__ __volatile__ ( \ 58#define local_irq_disable() __asm__ __volatile__ ( \
59 "move %/sr,%%d0\n\t" \ 59 "move %/sr,%%d0\n\t" \
60 "ori.l #0x0700,%%d0\n\t" \ 60 "ori.l #0x0700,%%d0\n\t" \
61 "move %%d0,%/sr\n" \ 61 "move %%d0,%/sr\n" \
62 : /* no inputs */ \ 62 : /* no outputs */ \
63 : \
64 : "cc", "%d0", "memory")
65/* For spinlocks etc */
66#define local_irq_save(x) __asm__ __volatile__ ( \
67 "movew %%sr,%0\n\t" \
68 "movew #0x0700,%%d0\n\t" \
69 "or.l %0,%%d0\n\t" \
70 "movew %%d0,%/sr" \
71 : "=d" (x) \
63 : \ 72 : \
64 : "cc", "%d0", "memory") 73 : "cc", "%d0", "memory")
65#else 74#else
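The hunk above gives ColdFire its own single-asm local_irq_save(): the current status register is copied into the caller's flags argument, then the IPL field is raised to 7 by ORing in 0x0700, so further interrupts are held off until the saved value is written back. Below is a minimal kernel-side sketch of the calling pattern this enables; the function and variable names are illustrative, only the local_irq_save()/local_irq_restore() pair comes from this header.

#include <asm/system.h>		/* local_irq_save()/local_irq_restore() */

static int nettel_event_count;	/* illustrative shared state */

/* Illustrative critical section: save the SR (including the old IPL),
 * mask interrupts, touch the shared data, then put the saved SR back. */
static void nettel_count_event(void)
{
	unsigned long flags;

	local_irq_save(flags);		/* movew %sr into flags, raise IPL to 7 */
	nettel_event_count++;
	local_irq_restore(flags);	/* movew flags,%sr */
}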
@@ -75,7 +84,9 @@ asmlinkage void resume(void);
 #define local_irq_restore(x) asm volatile ("movew %0,%%sr": :"d" (x) : "memory")
 
 /* For spinlocks etc */
+#ifndef local_irq_save
 #define local_irq_save(x) do { local_save_flags(x); local_irq_disable(); } while (0)
+#endif
 
 #define irqs_disabled() \
 ({ \
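The #ifndef wrapper added in this hunk is what lets the ColdFire asm definition from the first hunk take effect: when local_irq_save has already been defined earlier in the header, the generic save-then-disable fallback is skipped, and every other configuration still picks it up unchanged. A tiny stand-alone illustration of that override pattern, using made-up macro names:

/* Made-up names; this only illustrates the "define the fast variant
 * first, guard the generic fallback" arrangement used in the hunk. */
#ifdef HAVE_FAST_IRQ_SAVE
#define sample_irq_save(x)	fast_irq_save(x)	/* variant-specific version */
#endif

#ifndef sample_irq_save				/* only if nothing better exists */
#define sample_irq_save(x) \
	do { sample_save_flags(x); sample_irq_disable(); } while (0)
#endif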
@@ -234,9 +245,9 @@ cmpxchg(volatile int *p, int old, int new)
 #ifdef CONFIG_COLDFIRE
 #if defined(CONFIG_M5272) && defined(CONFIG_NETtel)
 /*
  * Need to account for broken early mask of 5272 silicon. So don't
  * jump through the original start address. Jump strait into the
  * known start of the FLASH code.
  */
 #define HARD_RESET_NOW() ({ \
 	asm(" \
@@ -244,7 +255,9 @@ cmpxchg(volatile int *p, int old, int new)
 	jmp 0xf0000400; \
 	"); \
 })
-#elif defined(CONFIG_NETtel) || defined(CONFIG_eLIA) || defined(CONFIG_DISKtel) || defined(CONFIG_SECUREEDGEMP3) || defined(CONFIG_CLEOPATRA)
+#elif defined(CONFIG_NETtel) || defined(CONFIG_eLIA) || \
+      defined(CONFIG_DISKtel) || defined(CONFIG_SECUREEDGEMP3) || \
+      defined(CONFIG_CLEOPATRA)
 #define HARD_RESET_NOW() ({ \
 	asm(" \
 	movew #0x2700, %sr; \
@@ -257,6 +270,26 @@ cmpxchg(volatile int *p, int old, int new)
 	jmp (%a0); \
 	"); \
 })
+#elif defined(CONFIG_M5272)
+/*
+ * Retrieve the boot address in flash using CSBR0 and CSOR0
+ * find the reset vector at flash_address + 4 (e.g. 0x400)
+ * remap it in the flash's current location (e.g. 0xf0000400)
+ * and jump there.
+ */
+#define HARD_RESET_NOW() ({ \
+	asm(" \
+	movew #0x2700, %%sr; \
+	move.l %0+0x40,%%d0; \
+	and.l %0+0x44,%%d0; \
+	andi.l #0xfffff000,%%d0; \
+	mov.l %%d0,%%a0; \
+	or.l 4(%%a0),%%d0; \
+	mov.l %%d0,%%a0; \
+	jmp (%%a0);" \
+	: /* No output */ \
+	: "o" (*(char *)MCF_MBAR) ); \
+})
 #elif defined(CONFIG_M528x)
 /*
  * The MCF528x has a bit (SOFTRST) in memory (Reset Control Register RCR),
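As a plain-C reading of the M5272 reset path added above: the asm fetches CSBR0 and CSOR0 (the words at MCF_MBAR + 0x40 and + 0x44), ANDs them together, masks the result with 0xfffff000 to recover the flash base, ORs in the reset vector stored at base + 4, and jumps to that address. The sketch below mirrors only the arithmetic; the register values are invented examples chosen to reproduce the 0xf0000400 figure from the comment, not real hardware reads.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Invented example values standing in for the chip-select registers;
	 * on real silicon these would be read from MCF_MBAR + 0x40/0x44. */
	uint32_t csbr0  = 0xf0000201;
	uint32_t csor0  = 0xffe00001;
	uint32_t vector = 0x00000400;	/* stands in for the word at base + 4 */

	uint32_t base   = (csbr0 & csor0) & 0xfffff000;	/* flash base: 0xf0000000 */
	uint32_t target = base | vector;		/* jump target: 0xf0000400 */

	printf("flash base 0x%08x, jump target 0x%08x\n", base, target);
	return 0;
}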
@@ -270,6 +303,15 @@ cmpxchg(volatile int *p, int old, int new)
 	while(1) \
 		*reset |= (0x01 << 7); \
 })
+#elif defined(CONFIG_M523x)
+#define HARD_RESET_NOW() ({ \
+	asm(" \
+	movew #0x2700, %sr; \
+	movel #0x01000000, %sp; \
+	moveal #0x40110000, %a0; \
+	moveb #0x80, (%a0); \
+	"); \
+})
 #else
 #define HARD_RESET_NOW() ({ \
 	asm(" \
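Finally, the M523x variant added in this last hunk resets the part by masking interrupts, parking the stack pointer, and then storing 0x80 to the byte at 0x40110000; the neighbouring M528x comment suggests this is the SOFTRST-style reset control register, though the diff itself only gives the raw address. A hedged C rendering of that final store follows; it is only a sketch of what the asm does and would of course fault anywhere but on the real device.

#include <stdint.h>

/* Sketch of the store performed by the new M523x HARD_RESET_NOW():
 * the address (0x40110000) and value (0x80) are taken straight from the
 * asm above; the volatile byte pointer is the usual C spelling of such
 * a device write. */
void m523x_hard_reset_sketch(void)
{
	volatile uint8_t *reset_reg = (volatile uint8_t *)0x40110000uL;

	*reset_reg = 0x80;	/* mirrors "moveb #0x80, (%a0)" */
}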