author    | Rich Felker <dalias@libc.org> | 2016-04-25 21:58:03 +0000
committer | Rich Felker <dalias@libc.org> | 2016-08-05 03:29:36 +0000
commit    | 00b73d8d1b7131da03aec73011a7286f566fe87f (patch)
tree      | 5a557b49eeaa1717bba015c730c5a56e8d084985 /arch/sh
parent    | 2b47d54ed41c33baf5825185168b493317c5572f (diff)
sh: add working futex atomic ops on userspace addresses for smp
The version of futex.h in asm-generic should really be adapted to do
the same thing so that this hideous code does not have to be
duplicated per-arch.
Signed-off-by: Rich Felker <dalias@libc.org>
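
For readers skimming the diff below: the new SMP path drops the old IRQ-disabling read-modify-write helpers and instead builds every futex op in futex_atomic_op_inuser() on a single compare-and-exchange primitive plus a retry loop. A minimal userspace C analogue of that loop is sketched here; it uses GCC's __atomic builtins in place of the kernel's get_user()/atomic_futex_op_cmpxchg_inatomic(), and the helper and enum names are invented for illustration only.

```c
#include <stdint.h>

/* Illustrative op codes standing in for FUTEX_OP_SET/ADD/OR/ANDN/XOR. */
enum futex_op { OP_SET, OP_ADD, OP_OR, OP_ANDN, OP_XOR };

/*
 * Sketch of the cmpxchg-based retry loop: load the old value, compute the
 * new value for the requested op, and retry until the word is swapped in
 * atomically (i.e. nothing changed it underneath us).
 */
static int futex_op_loop(uint32_t *addr, enum futex_op op, uint32_t oparg,
                         uint32_t *oldval)
{
        uint32_t old, new;

        do {
                old = __atomic_load_n(addr, __ATOMIC_RELAXED);
                switch (op) {
                case OP_SET:  new = oparg;        break;
                case OP_ADD:  new = old + oparg;  break;
                case OP_OR:   new = old | oparg;  break;
                case OP_ANDN: new = old & ~oparg; break;
                case OP_XOR:  new = old ^ oparg;  break;
                default:      return -1;          /* kernel returns -ENOSYS */
                }
        } while (!__atomic_compare_exchange_n(addr, &old, new, 0,
                                              __ATOMIC_SEQ_CST,
                                              __ATOMIC_SEQ_CST));

        *oldval = old;
        return 0;
}
```

Unlike this sketch, the kernel version must tolerate a fault on the user address at any point, which is why futex-cas.h and futex-llsc.h carry .fixup/__ex_table assembly and why futex_atomic_cmpxchg_inatomic() checks access_ok() first. Compared with the removed futex-irq.h helpers, which only disabled local interrupts around get_user()/put_user() and were therefore UP-only, the retry loop stays correct when another CPU races on the same word.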
Diffstat (limited to 'arch/sh')
-rw-r--r-- | arch/sh/include/asm/futex-cas.h  | 34
-rw-r--r-- | arch/sh/include/asm/futex-irq.h  | 86
-rw-r--r-- | arch/sh/include/asm/futex-llsc.h | 41
-rw-r--r-- | arch/sh/include/asm/futex.h      | 97
4 files changed, 134 insertions, 124 deletions
diff --git a/arch/sh/include/asm/futex-cas.h b/arch/sh/include/asm/futex-cas.h
new file mode 100644
index 000000000000..267cb7a5f101
--- /dev/null
+++ b/arch/sh/include/asm/futex-cas.h
@@ -0,0 +1,34 @@
+#ifndef __ASM_SH_FUTEX_CAS_H
+#define __ASM_SH_FUTEX_CAS_H
+
+static inline int atomic_futex_op_cmpxchg_inatomic(u32 *uval,
+						    u32 __user *uaddr,
+						    u32 oldval, u32 newval)
+{
+	int err = 0;
+	__asm__ __volatile__(
+		"1:\n\t"
+		"cas.l %2, %1, @r0\n"
+		"2:\n\t"
+#ifdef CONFIG_MMU
+		".section .fixup,\"ax\"\n"
+		"3:\n\t"
+		"mov.l 4f, %0\n\t"
+		"jmp @%0\n\t"
+		" mov %3, %0\n\t"
+		".balign 4\n"
+		"4: .long 2b\n\t"
+		".previous\n"
+		".section __ex_table,\"a\"\n\t"
+		".long 1b, 3b\n\t"
+		".previous"
+#endif
+		:"+r" (err), "+r" (newval)
+		:"r" (oldval), "i" (-EFAULT), "z" (uaddr)
+		:"t", "memory");
+	if (err) return err;
+	*uval = newval;
+	return 0;
+}
+
+#endif /* __ASM_SH_FUTEX_CAS_H */
diff --git a/arch/sh/include/asm/futex-irq.h b/arch/sh/include/asm/futex-irq.h
index 63d33129ea23..ab01dbee0a82 100644
--- a/arch/sh/include/asm/futex-irq.h
+++ b/arch/sh/include/asm/futex-irq.h
@@ -1,92 +1,6 @@
 #ifndef __ASM_SH_FUTEX_IRQ_H
 #define __ASM_SH_FUTEX_IRQ_H
 
-
-static inline int atomic_futex_op_xchg_set(int oparg, u32 __user *uaddr,
-					   int *oldval)
-{
-	unsigned long flags;
-	int ret;
-
-	local_irq_save(flags);
-
-	ret = get_user(*oldval, uaddr);
-	if (!ret)
-		ret = put_user(oparg, uaddr);
-
-	local_irq_restore(flags);
-
-	return ret;
-}
-
-static inline int atomic_futex_op_xchg_add(int oparg, u32 __user *uaddr,
-					   int *oldval)
-{
-	unsigned long flags;
-	int ret;
-
-	local_irq_save(flags);
-
-	ret = get_user(*oldval, uaddr);
-	if (!ret)
-		ret = put_user(*oldval + oparg, uaddr);
-
-	local_irq_restore(flags);
-
-	return ret;
-}
-
-static inline int atomic_futex_op_xchg_or(int oparg, u32 __user *uaddr,
-					  int *oldval)
-{
-	unsigned long flags;
-	int ret;
-
-	local_irq_save(flags);
-
-	ret = get_user(*oldval, uaddr);
-	if (!ret)
-		ret = put_user(*oldval | oparg, uaddr);
-
-	local_irq_restore(flags);
-
-	return ret;
-}
-
-static inline int atomic_futex_op_xchg_and(int oparg, u32 __user *uaddr,
-					   int *oldval)
-{
-	unsigned long flags;
-	int ret;
-
-	local_irq_save(flags);
-
-	ret = get_user(*oldval, uaddr);
-	if (!ret)
-		ret = put_user(*oldval & oparg, uaddr);
-
-	local_irq_restore(flags);
-
-	return ret;
-}
-
-static inline int atomic_futex_op_xchg_xor(int oparg, u32 __user *uaddr,
-					   int *oldval)
-{
-	unsigned long flags;
-	int ret;
-
-	local_irq_save(flags);
-
-	ret = get_user(*oldval, uaddr);
-	if (!ret)
-		ret = put_user(*oldval ^ oparg, uaddr);
-
-	local_irq_restore(flags);
-
-	return ret;
-}
-
 static inline int atomic_futex_op_cmpxchg_inatomic(u32 *uval,
 						    u32 __user *uaddr,
 						    u32 oldval, u32 newval)
diff --git a/arch/sh/include/asm/futex-llsc.h b/arch/sh/include/asm/futex-llsc.h
new file mode 100644
index 000000000000..23591703bec0
--- /dev/null
+++ b/arch/sh/include/asm/futex-llsc.h
@@ -0,0 +1,41 @@
+#ifndef __ASM_SH_FUTEX_LLSC_H
+#define __ASM_SH_FUTEX_LLSC_H
+
+static inline int atomic_futex_op_cmpxchg_inatomic(u32 *uval,
+						    u32 __user *uaddr,
+						    u32 oldval, u32 newval)
+{
+	int err = 0;
+	__asm__ __volatile__(
+		"synco\n"
+		"1:\n\t"
+		"movli.l @%2, r0\n\t"
+		"mov r0, %1\n\t"
+		"cmp/eq %1, %4\n\t"
+		"bf 2f\n\t"
+		"mov %5, r0\n\t"
+		"movco.l r0, @%2\n\t"
+		"bf 1b\n"
+		"2:\n\t"
+		"synco\n\t"
+#ifdef CONFIG_MMU
+		".section .fixup,\"ax\"\n"
+		"3:\n\t"
+		"mov.l 4f, %0\n\t"
+		"jmp @%0\n\t"
+		" mov %3, %0\n\t"
+		".balign 4\n"
+		"4: .long 2b\n\t"
+		".previous\n"
+		".section __ex_table,\"a\"\n\t"
+		".long 1b, 3b\n\t"
+		".previous"
+#endif
+		:"+r" (err), "=&r" (*uval)
+		:"r" (uaddr), "i" (-EFAULT), "r" (oldval), "r" (newval)
+		:"t", "memory", "r0");
+	if (err) return err;
+	return 0;
+}
+
+#endif /* __ASM_SH_FUTEX_LLSC_H */
diff --git a/arch/sh/include/asm/futex.h b/arch/sh/include/asm/futex.h
index 7be39a646fbd..d0078747d308 100644
--- a/arch/sh/include/asm/futex.h
+++ b/arch/sh/include/asm/futex.h
@@ -7,16 +7,34 @@
 #include <linux/uaccess.h>
 #include <asm/errno.h>
 
-/* XXX: UP variants, fix for SH-4A and SMP.. */
+#if !defined(CONFIG_SMP)
 #include <asm/futex-irq.h>
+#elif defined(CONFIG_CPU_J2)
+#include <asm/futex-cas.h>
+#elif defined(CONFIG_CPU_SH4A)
+#include <asm/futex-llsc.h>
+#else
+#error SMP not supported on this configuration.
+#endif
+
+static inline int
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+			      u32 oldval, u32 newval)
+{
+	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+		return -EFAULT;
+
+	return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
+}
 
 static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 {
 	int op = (encoded_op >> 28) & 7;
 	int cmp = (encoded_op >> 24) & 15;
-	int oparg = (encoded_op << 8) >> 20;
-	int cmparg = (encoded_op << 20) >> 20;
-	int oldval = 0, ret;
+	u32 oparg = (encoded_op << 8) >> 20;
+	u32 cmparg = (encoded_op << 20) >> 20;
+	u32 oldval, newval, prev;
+	int ret;
 
 	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
 		oparg = 1 << oparg;
@@ -26,26 +44,39 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 
 	pagefault_disable();
 
-	switch (op) {
-	case FUTEX_OP_SET:
-		ret = atomic_futex_op_xchg_set(oparg, uaddr, &oldval);
-		break;
-	case FUTEX_OP_ADD:
-		ret = atomic_futex_op_xchg_add(oparg, uaddr, &oldval);
-		break;
-	case FUTEX_OP_OR:
-		ret = atomic_futex_op_xchg_or(oparg, uaddr, &oldval);
-		break;
-	case FUTEX_OP_ANDN:
-		ret = atomic_futex_op_xchg_and(~oparg, uaddr, &oldval);
-		break;
-	case FUTEX_OP_XOR:
-		ret = atomic_futex_op_xchg_xor(oparg, uaddr, &oldval);
-		break;
-	default:
-		ret = -ENOSYS;
-		break;
-	}
+	do {
+		if (op == FUTEX_OP_SET)
+			ret = oldval = 0;
+		else
+			ret = get_user(oldval, uaddr);
+
+		if (ret) break;
+
+		switch (op) {
+		case FUTEX_OP_SET:
+			newval = oparg;
+			break;
+		case FUTEX_OP_ADD:
+			newval = oldval + oparg;
+			break;
+		case FUTEX_OP_OR:
+			newval = oldval | oparg;
+			break;
+		case FUTEX_OP_ANDN:
+			newval = oldval & ~oparg;
+			break;
+		case FUTEX_OP_XOR:
+			newval = oldval ^ oparg;
+			break;
+		default:
+			ret = -ENOSYS;
+			break;
+		}
+
+		if (ret) break;
+
+		ret = futex_atomic_cmpxchg_inatomic(&prev, uaddr, oldval, newval);
+	} while (!ret && prev != oldval);
 
 	pagefault_enable();
 
@@ -53,10 +84,10 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 	switch (cmp) {
 	case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
 	case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
-	case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
-	case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
-	case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
-	case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+	case FUTEX_OP_CMP_LT: ret = ((int)oldval < (int)cmparg); break;
+	case FUTEX_OP_CMP_GE: ret = ((int)oldval >= (int)cmparg); break;
+	case FUTEX_OP_CMP_LE: ret = ((int)oldval <= (int)cmparg); break;
+	case FUTEX_OP_CMP_GT: ret = ((int)oldval > (int)cmparg); break;
 	default: ret = -ENOSYS;
 	}
 	}
@@ -64,15 +95,5 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 	return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
-			      u32 oldval, u32 newval)
-{
-	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
-		return -EFAULT;
-
-	return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
-}
-
 #endif /* __KERNEL__ */
 #endif /* __ASM_SH_FUTEX_H */