mirror of
				https://git.kernel.org/pub/scm/linux/kernel/git/chenhuacai/linux-loongson
				synced 2025-10-31 06:18:54 +00:00 
			
		
		
		
	 4c7c997886
			
		
	
	
		4c7c997886
		
	
	
	
	
		
			
			This fixes up a typo in the ll/sc based cmpxchg code which apparently wasn't getting a lot of testing due to the swapped old/new pair. With that fixed up, the ll/sc code also starts using it and provides its own atomic_add_unless(). Signed-off-by: Aoi Shinkai <shinkoi2005@gmail.com> Signed-off-by: Paul Mundt <lethal@linux-sh.org>
		
			
				
	
	
		
			72 lines
		
	
	
		
			1.4 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
			
		
		
	
	
			72 lines
		
	
	
		
			1.4 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
| #ifndef __ASM_SH_CMPXCHG_LLSC_H
 | |
| #define __ASM_SH_CMPXCHG_LLSC_H
 | |
| 
 | |
/*
 * Atomically exchange the 32-bit word at *m with val, returning the
 * previous value.  Implemented with the SH ll/sc pair: movli.l loads
 * the word and opens a reservation, movco.l stores conditionally and
 * sets the T bit on success; "bf 1b" retries until the store lands.
 */
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"	/* tmp = *m, open reservation */
		"mov		%0, %1			\n\t"	/* retval = old value */
		"mov		%3, %0			\n\t"	/* tmp = new value */
		"movco.l	%0, @%2			\n\t"	/* conditional store; T=1 on success */
		"bf		1b			\n\t"	/* retry if reservation was lost */
		"synco					\n\t"	/* NOTE(review): presumably an ordering
							 * barrier after the store — confirm
							 * against the SH-4A ISA manual */
		: "=&z"(tmp), "=&r" (retval)	/* "z" pins tmp to r0 as movco.l requires */
		: "r" (m), "r" (val)
		: "t", "memory"			/* clobbers T bit; full compiler barrier */
	);

	return retval;
}
 | |
| 
 | |
/*
 * Atomically exchange a byte at *m with (val & 0xff), returning the
 * previous value, using the same word-sized ll/sc retry loop as
 * xchg_u32.
 *
 * NOTE(review): movli.l/movco.l operate on a full 32-bit word, yet m
 * is a u8 pointer.  As written this appears to store (val & 0xff)
 * over the whole word at m (overwriting the three neighbouring bytes,
 * and assuming m is word-aligned), and retval is the whole old word
 * rather than just the byte.  Verify against callers / later kernel
 * fixes before relying on this for adjacent data.
 */
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u8	\n\t"	/* tmp = word at m, open reservation */
		"mov		%0, %1			\n\t"	/* retval = old word */
		"mov		%3, %0			\n\t"	/* tmp = val & 0xff */
		"movco.l	%0, @%2			\n\t"	/* conditional word store */
		"bf		1b			\n\t"	/* retry on lost reservation */
		"synco					\n\t"
		: "=&z"(tmp), "=&r" (retval)	/* "z" = r0, required by movco.l */
		: "r" (m), "r" (val & 0xff)	/* only the low byte of val is used */
		: "t", "memory"
	);

	return retval;
}
 | |
| 
 | |
/*
 * Atomic compare-and-exchange on the 32-bit word at *m: if *m == old,
 * store new; either way return the value *m held before the operation.
 * Callers compare the return value against old to learn whether the
 * swap happened.
 *
 * The ll/sc loop always executes movco.l: when the comparison fails it
 * branches past the "mov new" so the unchanged old word is stored back,
 * which simply re-writes the same value while still validating the
 * reservation.
 */
static inline unsigned long
__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"	/* tmp = *m, open reservation */
		"mov		%0, %1				\n\t"	/* retval = current value */
		"cmp/eq		%1, %3				\n\t"	/* T = (current == old) */
		"bf		2f				\n\t"	/* mismatch: skip loading new */
		"mov		%4, %0				\n\t"	/* match: tmp = new */
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"	/* conditional store; T=1 on success */
		"bf		1b				\n\t"	/* retry if reservation was lost */
		"synco						\n\t"	/* NOTE(review): ordering barrier —
								 * confirm against SH-4A ISA manual */
		: "=&z" (tmp), "=&r" (retval)	/* "z" pins tmp to r0 for movco.l */
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"			/* clobbers T bit; compiler barrier */
	);

	return retval;
}
 | |
| 
 | |
| #endif /* __ASM_SH_CMPXCHG_LLSC_H */
 |