author     Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-06-11 14:41:00 -0400
committer  Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-06-11 14:41:00 -0400
commit     22353f35c895acb7a8ca27ebdc6397c993b4213e (patch)
tree       b1b52ca357ac2e5bb1428dea948bd4d175e40e07 /include
parent     3e2ce4dae98f6b246eaeb12833914d22fd83e31d (diff)
parent     ff72b7a6188088976bf7d77d3309a9b2f1716071 (diff)
Merge branch 'upstream' of git://ftp.linux-mips.org/pub/scm/upstream-linus
* 'upstream' of git://ftp.linux-mips.org/pub/scm/upstream-linus:
[MIPS] Fix smp barriers in test_and_{change,clear,set}_bit
[MIPS] Fix IP27 build
[MIPS] Fix modpost warnings by making start_secondary __cpuinit
[MIPS] SMTC: Fix build error caused by nonsense code.
[MIPS] SMTC: The MT ASE requires to initialize c0_pagemask and c0_wired.
[MIPS] SMTC: Don't continue in set_vi_srs_handler on detected bad arguments.
[MIPS] SMTC: Fix warning.
[MIPS] Wire up utimensat, signalfd, timerfd, eventfd
[MIPS] Atlas: Fix build.
[MIPS] Always install the DSP exception handler.
[MIPS] SMTC: Don't set and restore irqregs ptr from self_ipi.
[MIPS] Fix KMODE for the R3000
Diffstat (limited to 'include')
-rw-r--r--  include/asm-mips/bitops.h      51
-rw-r--r--  include/asm-mips/stackframe.h  52
-rw-r--r--  include/asm-mips/unistd.h      24
3 files changed, 65 insertions, 62 deletions
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index d995413e11fd..ffe245b4258f 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -238,10 +238,11 @@ static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -254,11 +255,9 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -277,25 +276,22 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a |= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
@@ -310,6 +306,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -327,12 +324,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 #ifdef CONFIG_CPU_MIPSR2
 	} else if (__builtin_constant_p(nr)) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
@@ -346,12 +341,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "ri" (bit), "m" (*m)
 		: "memory");
-
-		return res;
 #endif
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -371,25 +364,22 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a &= ~mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
@@ -404,10 +394,11 @@ static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -420,11 +411,9 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -443,24 +432,22 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
-		unsigned long mask, retval;
+		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a ^= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 #include <asm-generic/bitops/non-atomic.h>
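The structural point of the bitops.h change above is that res is now declared once per function and the single smp_mb() sits after the whole if/else chain, so every path, including the interrupt-masking fallback, issues the barrier before the result is returned. Below is a minimal user-space sketch of that control flow only; have_atomics and the __sync builtins stand in for cpu_has_llsc and the LL/SC loops and are illustrative names, not taken from the patch.

/*
 * Minimal sketch (not the kernel code): capture the old bit into one
 * variable on every branch, then issue a single barrier that covers
 * all paths before returning.
 */
#include <stdio.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))

static int have_atomics = 1;	/* stand-in for cpu_has_llsc */

static int test_and_set_bit_sketch(unsigned long nr, unsigned long *addr)
{
	unsigned long *p = addr + nr / BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long res;

	if (have_atomics) {
		/* atomic path, analogous to the LL/SC branches */
		res = __sync_fetch_and_or(p, mask) & mask;
	} else {
		/* non-atomic fallback, analogous to the irq-masking branch */
		res = *p & mask;
		*p |= mask;
	}

	__sync_synchronize();	/* one barrier now covers every branch */

	return res != 0;
}

int main(void)
{
	unsigned long word = 0;

	printf("%d\n", test_and_set_bit_sketch(3, &word));	/* 0: bit was clear */
	printf("%d\n", test_and_set_bit_sketch(3, &word));	/* 1: bit was already set */
	return 0;
}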
diff --git a/include/asm-mips/stackframe.h b/include/asm-mips/stackframe.h
index 7afa1fdf70ca..ed33366b85b8 100644
--- a/include/asm-mips/stackframe.h
+++ b/include/asm-mips/stackframe.h
@@ -17,6 +17,18 @@
 #include <asm/mipsregs.h>
 #include <asm/asm-offsets.h>
 
+/*
+ * For SMTC kernel, global IE should be left set, and interrupts
+ * controlled exclusively via IXMT.
+ */
+#ifdef CONFIG_MIPS_MT_SMTC
+#define STATMASK 0x1e
+#elif defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
+#define STATMASK 0x3f
+#else
+#define STATMASK 0x1f
+#endif
+
 #ifdef CONFIG_MIPS_MT_SMTC
 #include <asm/mipsmtregs.h>
 #endif /* CONFIG_MIPS_MT_SMTC */
@@ -236,10 +248,10 @@
 		.set	reorder
 		.set	noat
 		mfc0	a0, CP0_STATUS
-		ori	a0, 0x1f
-		xori	a0, 0x1f
-		mtc0	a0, CP0_STATUS
 		li	v1, 0xff00
+		ori	a0, STATMASK
+		xori	a0, STATMASK
+		mtc0	a0, CP0_STATUS
 		and	a0, v1
 		LONG_L	v0, PT_STATUS(sp)
 		nor	v1, $0, v1
@@ -249,10 +261,6 @@
 		LONG_L	$31, PT_R31(sp)
 		LONG_L	$28, PT_R28(sp)
 		LONG_L	$25, PT_R25(sp)
-#ifdef CONFIG_64BIT
-		LONG_L	$8, PT_R8(sp)
-		LONG_L	$9, PT_R9(sp)
-#endif
 		LONG_L	$7, PT_R7(sp)
 		LONG_L	$6, PT_R6(sp)
 		LONG_L	$5, PT_R5(sp)
@@ -273,16 +281,6 @@
 		.endm
 
 #else
-/*
- * For SMTC kernel, global IE should be left set, and interrupts
- * controlled exclusively via IXMT.
- */
-
-#ifdef CONFIG_MIPS_MT_SMTC
-#define STATMASK 0x1e
-#else
-#define STATMASK 0x1f
-#endif
 		.macro	RESTORE_SOME
 		.set	push
 		.set	reorder
@@ -385,9 +383,9 @@
 		.macro	CLI
 #if !defined(CONFIG_MIPS_MT_SMTC)
 		mfc0	t0, CP0_STATUS
-		li	t1, ST0_CU0 | 0x1f
+		li	t1, ST0_CU0 | STATMASK
 		or	t0, t1
-		xori	t0, 0x1f
+		xori	t0, STATMASK
 		mtc0	t0, CP0_STATUS
 #else /* CONFIG_MIPS_MT_SMTC */
 	/*
@@ -420,9 +418,9 @@
 		.macro	STI
 #if !defined(CONFIG_MIPS_MT_SMTC)
 		mfc0	t0, CP0_STATUS
-		li	t1, ST0_CU0 | 0x1f
+		li	t1, ST0_CU0 | STATMASK
 		or	t0, t1
-		xori	t0, 0x1e
+		xori	t0, STATMASK & ~1
 		mtc0	t0, CP0_STATUS
 #else /* CONFIG_MIPS_MT_SMTC */
 	/*
@@ -451,7 +449,8 @@
 		.endm
 
 /*
- * Just move to kernel mode and leave interrupts as they are.
+ * Just move to kernel mode and leave interrupts as they are. Note
+ * for the R3000 this means copying the previous enable from IEp.
  * Set cp0 enable bit as sign that we're running on the kernel stack
  */
 		.macro	KMODE
@@ -482,9 +481,14 @@
 		move	ra, t0
 #endif /* CONFIG_MIPS_MT_SMTC */
 		mfc0	t0, CP0_STATUS
-		li	t1, ST0_CU0 | 0x1e
+		li	t1, ST0_CU0 | (STATMASK & ~1)
+#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
+		andi	t2, t0, ST0_IEP
+		srl	t2, 2
+		or	t0, t2
+#endif
 		or	t0, t1
-		xori	t0, STATMASK & ~1
+		xori	t0, STATMASK & ~1
 		mtc0	t0, CP0_STATUS
 #ifdef CONFIG_MIPS_MT_SMTC
 		_ehb
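The stackframe.h hunks replace the hard-coded 0x1f/0x1e Status masks with STATMASK, and the new R3000/TX39 branch in KMODE copies the previous interrupt enable (IEp) down into the current one before the mask bits are cleared. The following is a small C sketch of that bit arithmetic; the bit values assume the conventional R3000 Status layout (IEc = bit 0 through KUo = bit 5, CU0 = bit 28) and are stated here for illustration, not copied from this patch.

/*
 * Sketch of the new KMODE sequence for the R3000, in C.  Register and
 * bit names mirror the assembly; values are assumed, not from the patch.
 */
#include <stdio.h>

#define STATMASK_R3000	0x3f		/* KUo IEo KUp IEp KUc IEc */
#define ST0_IEP		0x00000004	/* previous interrupt enable */
#define ST0_CU0		0x10000000	/* coprocessor 0 usable */

static unsigned int kmode_r3000(unsigned int status)
{
	unsigned int t1 = ST0_CU0 | (STATMASK_R3000 & ~1);
	unsigned int t2 = (status & ST0_IEP) >> 2;	/* andi t2, t0, ST0_IEP; srl t2, 2 */

	status |= t2;				/* or   t0, t2  (IEp -> IEc) */
	status |= t1;				/* or   t0, t1 */
	status ^= STATMASK_R3000 & ~1;		/* xori t0, STATMASK & ~1 */
	return status;				/* kernel mode, IEc follows old IEp */
}

int main(void)
{
	printf("%#x\n", kmode_r3000(0x4));	/* IEp was set   -> 0x10000001 */
	printf("%#x\n", kmode_r3000(0x0));	/* IEp was clear -> 0x10000000 */
	return 0;
}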
diff --git a/include/asm-mips/unistd.h b/include/asm-mips/unistd.h
index 91c306fcfb72..59d6fec8fbff 100644
--- a/include/asm-mips/unistd.h
+++ b/include/asm-mips/unistd.h
@@ -336,16 +336,20 @@
 #define __NR_epoll_pwait		(__NR_Linux + 313)
 #define __NR_ioprio_set			(__NR_Linux + 314)
 #define __NR_ioprio_get			(__NR_Linux + 315)
+#define __NR_utimensat			(__NR_Linux + 316)
+#define __NR_signalfd			(__NR_Linux + 317)
+#define __NR_timerfd			(__NR_Linux + 318)
+#define __NR_eventfd			(__NR_Linux + 319)
 
 /*
  * Offset of the last Linux o32 flavoured syscall
  */
-#define __NR_Linux_syscalls		315
+#define __NR_Linux_syscalls		319
 
 #endif /* _MIPS_SIM == _MIPS_SIM_ABI32 */
 
 #define __NR_O32_Linux			4000
-#define __NR_O32_Linux_syscalls		315
+#define __NR_O32_Linux_syscalls		319
 
 #if _MIPS_SIM == _MIPS_SIM_ABI64
 
@@ -628,16 +632,20 @@
 #define __NR_epoll_pwait		(__NR_Linux + 272)
 #define __NR_ioprio_set			(__NR_Linux + 273)
 #define __NR_ioprio_get			(__NR_Linux + 274)
+#define __NR_utimensat			(__NR_Linux + 275)
+#define __NR_signalfd			(__NR_Linux + 276)
+#define __NR_timerfd			(__NR_Linux + 277)
+#define __NR_eventfd			(__NR_Linux + 278)
 
 /*
  * Offset of the last Linux 64-bit flavoured syscall
  */
-#define __NR_Linux_syscalls		274
+#define __NR_Linux_syscalls		278
 
 #endif /* _MIPS_SIM == _MIPS_SIM_ABI64 */
 
 #define __NR_64_Linux			5000
-#define __NR_64_Linux_syscalls		274
+#define __NR_64_Linux_syscalls		278
 
 #if _MIPS_SIM == _MIPS_SIM_NABI32
 
@@ -924,16 +932,20 @@
 #define __NR_epoll_pwait		(__NR_Linux + 276)
 #define __NR_ioprio_set			(__NR_Linux + 277)
 #define __NR_ioprio_get			(__NR_Linux + 278)
+#define __NR_utimensat			(__NR_Linux + 279)
+#define __NR_signalfd			(__NR_Linux + 280)
+#define __NR_timerfd			(__NR_Linux + 281)
+#define __NR_eventfd			(__NR_Linux + 282)
 
 /*
  * Offset of the last N32 flavoured syscall
  */
-#define __NR_Linux_syscalls		278
+#define __NR_Linux_syscalls		282
 
 #endif /* _MIPS_SIM == _MIPS_SIM_NABI32 */
 
 #define __NR_N32_Linux			6000
-#define __NR_N32_Linux_syscalls		282
+#define __NR_N32_Linux_syscalls		282
 
 #ifdef __KERNEL__
 
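For reference, the absolute numbers these macros resolve to per ABI: the newly wired calls land at 4316-4319 for o32 (base 4000), 5275-5278 for n64 (base 5000) and 6279-6282 for n32 (base 6000). A trivial check for the o32 case, using the base value from the __NR_O32_Linux definition above; this snippet is not part of the patch.

/* Resolve the new o32 numbers from the definitions in the hunk above. */
#include <stdio.h>

#define __NR_Linux	4000			/* o32 base, per __NR_O32_Linux */
#define __NR_utimensat	(__NR_Linux + 316)
#define __NR_signalfd	(__NR_Linux + 317)
#define __NR_timerfd	(__NR_Linux + 318)
#define __NR_eventfd	(__NR_Linux + 319)

int main(void)
{
	printf("utimensat=%d signalfd=%d timerfd=%d eventfd=%d\n",
	       __NR_utimensat, __NR_signalfd, __NR_timerfd, __NR_eventfd);
	return 0;	/* prints 4316 4317 4318 4319 */
}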