mirror of git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
synced 2025-09-04 20:19:47 +08:00

a08971e948
Move access_ok() in and pagefault_enable()/pagefault_disable() out.

Mechanical conversion only - some instances don't really need a
separate access_ok() at all (e.g. the ones only using
get_user()/put_user(), or architectures where access_ok() is always
true); we'll deal with that in followups.

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
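For context, here is a minimal sketch of a call site under the convention described above. The function name example_futex_op is hypothetical and this is not the kernel's actual generic futex code; it only illustrates that the caller now brackets the arch helper with pagefault_disable()/pagefault_enable(), while access_ok() is performed inside arch_futex_atomic_op_inuser() itself (as in the OpenRISC header below).

/*
 * Illustrative sketch only - a hypothetical caller, not kernel/futex.c.
 * Assumes the usual kernel context (<linux/futex.h>, <linux/uaccess.h>).
 */
static int example_futex_op(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int ret;

	pagefault_disable();	/* now the caller's job, per this commit */
	ret = arch_futex_atomic_op_inuser(op, oparg, oval, uaddr);
	pagefault_enable();

	return ret;	/* -EFAULT on a bad uaddr, -ENOSYS for an unknown op */
}

As the message notes, implementations built purely on get_user()/put_user() could drop the explicit access_ok() in a later cleanup, since those helpers already validate the user pointer themselves.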
		
			
				
	
	
		
105 lines | 2.2 KiB | C
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_OPENRISC_FUTEX_H
#define __ASM_OPENRISC_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
({								\
	__asm__ __volatile__ (					\
		"1:	l.lwa	%0, %2			\n"	\
			insn				"\n"	\
		"2:	l.swa	%2, %1			\n"	\
		"	l.bnf	1b			\n"	\
		"	 l.ori	%1, r0, 0		\n"	\
		"3:					\n"	\
		".section .fixup,\"ax\"			\n"	\
		"4:	l.j	3b			\n"	\
		"	 l.addi	%1, r0, %3		\n"	\
		".previous				\n"	\
		".section __ex_table,\"a\"		\n"	\
		".word	1b,4b,2b,4b			\n"	\
		".previous				\n"	\
		: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr)	\
		: "i" (-EFAULT), "r" (oparg)			\
		: "cc", "memory"				\
		);						\
})

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("l.or %1,%4,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("l.add %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("l.or %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("l.and %1,%0,%4", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("l.xor %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 prev;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (				\
		"1:	l.lwa	%1, %2		\n"	\
		"	l.sfeq	%1, %3		\n"	\
		"	l.bnf	3f		\n"	\
		"	 l.nop			\n"	\
		"2:	l.swa	%2, %4		\n"	\
		"	l.bnf	1b		\n"	\
		"	 l.nop			\n"	\
		"3:				\n"	\
		".section .fixup,\"ax\"		\n"	\
		"4:	l.j	3b		\n"	\
		"	 l.addi	%0, r0, %5	\n"	\
		".previous			\n"	\
		".section __ex_table,\"a\"	\n"	\
		".word	1b,4b,2b,4b		\n"	\
		".previous			\n"	\
		: "+r" (ret), "=&r" (prev), "+m" (*uaddr) \
		: "r" (oldval), "r" (newval), "i" (-EFAULT) \
		: "cc", "memory"			\
		);

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */

#endif /* __ASM_OPENRISC_FUTEX_H */