Add EF_MIPS_ARCH_32R2 and EF_MIPS_ARCH_64R2 for tagging of R2 binaries.
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index 6663efd..36f34d8 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -17,6 +17,7 @@
 
 #include <asm/addrspace.h>
 #include <asm/cpu-features.h>
+#include <asm/dsp.h>
 #include <asm/ptrace.h>
 #include <asm/war.h>
 #include <asm/interrupt.h>
@@ -154,9 +155,13 @@ extern asmlinkage void *resume(void *last, void *next, void *next_ti);
 
 struct task_struct;
 
-#define switch_to(prev,next,last) \
-do { \
-       (last) = resume(prev, next, next->thread_info); \
+#define switch_to(prev,next,last)                                      \
+do {                                                                   \
+       if (cpu_has_dsp)                                                \
+               __save_dsp(prev);                                       \
+       (last) = resume(prev, next, next->thread_info);                 \
+       if (cpu_has_dsp)                                                \
+               __restore_dsp(current);                                 \
 } while(0)
 
 #define ROT_IN_PIECES                                                  \
@@ -171,14 +176,18 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                unsigned long dummy;
 
                __asm__ __volatile__(
+               "       .set    mips3                                   \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
+               "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
+               "       .set    mips3                                   \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                ROT_IN_PIECES
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
+               "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
@@ -186,13 +195,17 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                unsigned long dummy;
 
                __asm__ __volatile__(
+               "       .set    mips3                                   \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
+               "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
+               "       .set    mips3                                   \n"
                "       sc      %2, %1                                  \n"
                "       beqz    %2, 1b                                  \n"
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
+               "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
@@ -217,6 +230,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                unsigned long dummy;
 
                __asm__ __volatile__(
+               "       .set    mips3                                   \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
@@ -225,6 +239,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
+               "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
@@ -232,6 +247,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                unsigned long dummy;
 
                __asm__ __volatile__(
+               "       .set    mips3                                   \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
@@ -239,6 +255,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
+               "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
@@ -286,7 +303,9 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
+               "       .set    push                                    \n"
                "       .set    noat                                    \n"
+               "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
@@ -297,13 +316,15 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
                "       sync                                            \n"
 #endif
                "2:                                                     \n"
-               "       .set    at                                      \n"
+               "       .set    pop                                     \n"
                : "=&r" (retval), "=m" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
+               "       .set    push                                    \n"
                "       .set    noat                                    \n"
+               "       .set    mips3                                   \n"
                "1:     ll      %0, %2                  # __cmpxchg_u32 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
@@ -313,7 +334,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
                "       sync                                            \n"
 #endif
                "2:                                                     \n"
-               "       .set    at                                      \n"
+               "       .set    pop                                     \n"
                : "=&r" (retval), "=m" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
@@ -338,7 +359,9 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 
        if (cpu_has_llsc) {
                __asm__ __volatile__(
+               "       .set    push                                    \n"
                "       .set    noat                                    \n"
+               "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
@@ -349,13 +372,15 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
                "       sync                                            \n"
 #endif
                "2:                                                     \n"
-               "       .set    at                                      \n"
+               "       .set    pop                                     \n"
                : "=&r" (retval), "=m" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
+               "       .set    push                                    \n"
                "       .set    noat                                    \n"
+               "       .set    mips3                                   \n"
                "1:     lld     %0, %2                  # __cmpxchg_u64 \n"
                "       bne     %0, %z3, 2f                             \n"
                "       move    $1, %z4                                 \n"
@@ -365,7 +390,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
                "       sync                                            \n"
 #endif
                "2:                                                     \n"
-               "       .set    at                                      \n"
+               "       .set    pop                                     \n"
                : "=&r" (retval), "=m" (*m)
                : "R" (*m), "Jr" (old), "Jr" (new)
                : "memory");
@@ -410,7 +435,7 @@ extern void *set_except_vector(int n, void *addr);
 extern void per_cpu_trap_init(void);
 
 extern NORET_TYPE void __die(const char *, struct pt_regs *, const char *file,
-       const char *func, unsigned long line);
+       const char *func, unsigned long line) ATTRIB_NORET;
 extern void __die_if_kernel(const char *, struct pt_regs *, const char *file,
        const char *func, unsigned long line);