arch/arm/lib/bitops.h — ARM bit-operation helper macros from the Linux kernel.
(Web capture: the leading numbers fused onto each code line are scrape artifacts,
not part of the source.) Two snapshots of the same file follow, labelled by
kernel release.

v3.15
  1#include <asm/unwind.h>
  2
  3#if __LINUX_ARM_ARCH__ >= 6
  4	.macro	bitop, name, instr
  5ENTRY(	\name		)
  6UNWIND(	.fnstart	)
  7	ands	ip, r1, #3
  8	strneb	r1, [ip]		@ assert word-aligned
  9	mov	r2, #1
 10	and	r3, r0, #31		@ Get bit offset
 11	mov	r0, r0, lsr #5
 12	add	r1, r1, r0, lsl #2	@ Get word offset
 13#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 14	.arch_extension	mp
 15	ALT_SMP(W(pldw)	[r1])
 16	ALT_UP(W(nop))
 17#endif
 18	mov	r3, r2, lsl r3
 191:	ldrex	r2, [r1]
 20	\instr	r2, r2, r3
 21	strex	r0, r2, [r1]
 22	cmp	r0, #0
 23	bne	1b
 24	bx	lr
 25UNWIND(	.fnend		)
 26ENDPROC(\name		)
 27	.endm
 28
 29	.macro	testop, name, instr, store
 30ENTRY(	\name		)
 31UNWIND(	.fnstart	)
 32	ands	ip, r1, #3
 33	strneb	r1, [ip]		@ assert word-aligned
 34	mov	r2, #1
 35	and	r3, r0, #31		@ Get bit offset
 36	mov	r0, r0, lsr #5
 37	add	r1, r1, r0, lsl #2	@ Get word offset
 38	mov	r3, r2, lsl r3		@ create mask
 39	smp_dmb
 40#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 41	.arch_extension	mp
 42	ALT_SMP(W(pldw)	[r1])
 43	ALT_UP(W(nop))
 44#endif
 451:	ldrex	r2, [r1]
 46	ands	r0, r2, r3		@ save old value of bit
 47	\instr	r2, r2, r3		@ toggle bit
 48	strex	ip, r2, [r1]
 49	cmp	ip, #0
 50	bne	1b
 51	smp_dmb
 52	cmp	r0, #0
 53	movne	r0, #1
 542:	bx	lr
 55UNWIND(	.fnend		)
 56ENDPROC(\name		)
 57	.endm
 
 
 
 
 
 
 
 
 58#else
 59	.macro	bitop, name, instr
 60ENTRY(	\name		)
 61UNWIND(	.fnstart	)
 62	ands	ip, r1, #3
 63	strneb	r1, [ip]		@ assert word-aligned
 64	and	r2, r0, #31
 65	mov	r0, r0, lsr #5
 66	mov	r3, #1
 67	mov	r3, r3, lsl r2
 68	save_and_disable_irqs ip
 69	ldr	r2, [r1, r0, lsl #2]
 70	\instr	r2, r2, r3
 71	str	r2, [r1, r0, lsl #2]
 72	restore_irqs ip
 73	mov	pc, lr
 74UNWIND(	.fnend		)
 75ENDPROC(\name		)
 76	.endm
 77
 78/**
 79 * testop - implement a test_and_xxx_bit operation.
 80 * @instr: operational instruction
 81 * @store: store instruction
 82 *
 83 * Note: we can trivially conditionalise the store instruction
 84 * to avoid dirtying the data cache.
 85 */
 86	.macro	testop, name, instr, store
 87ENTRY(	\name		)
 88UNWIND(	.fnstart	)
 89	ands	ip, r1, #3
 90	strneb	r1, [ip]		@ assert word-aligned
 91	and	r3, r0, #31
 92	mov	r0, r0, lsr #5
 93	save_and_disable_irqs ip
 94	ldr	r2, [r1, r0, lsl #2]!
 95	mov	r0, #1
 96	tst	r2, r0, lsl r3
 97	\instr	r2, r2, r0, lsl r3
 98	\store	r2, [r1]
 99	moveq	r0, #0
100	restore_irqs ip
101	mov	pc, lr
102UNWIND(	.fnend		)
103ENDPROC(\name		)
104	.endm
105#endif
v6.13.7 — the same file in a later kernel release:
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#include <asm/assembler.h>
  3#include <asm/unwind.h>
  4
  5#if __LINUX_ARM_ARCH__ >= 6
  6	.macro	bitop, name, instr
  7ENTRY(	\name		)
  8UNWIND(	.fnstart	)
  9	ands	ip, r1, #3
 10	strbne	r1, [ip]		@ assert word-aligned
 11	mov	r2, #1
 12	and	r3, r0, #31		@ Get bit offset
 13	mov	r0, r0, lsr #5
 14	add	r1, r1, r0, lsl #2	@ Get word offset
 15#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 16	.arch_extension	mp
 17	ALT_SMP(W(pldw)	[r1])
 18	ALT_UP(W(nop))
 19#endif
 20	mov	r3, r2, lsl r3
 211:	ldrex	r2, [r1]
 22	\instr	r2, r2, r3
 23	strex	r0, r2, [r1]
 24	cmp	r0, #0
 25	bne	1b
 26	bx	lr
 27UNWIND(	.fnend		)
 28ENDPROC(\name		)
 29	.endm
 30
 31	.macro	__testop, name, instr, store, barrier
 32ENTRY(	\name		)
 33UNWIND(	.fnstart	)
 34	ands	ip, r1, #3
 35	strbne	r1, [ip]		@ assert word-aligned
 36	mov	r2, #1
 37	and	r3, r0, #31		@ Get bit offset
 38	mov	r0, r0, lsr #5
 39	add	r1, r1, r0, lsl #2	@ Get word offset
 40	mov	r3, r2, lsl r3		@ create mask
 41	\barrier
 42#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 43	.arch_extension	mp
 44	ALT_SMP(W(pldw)	[r1])
 45	ALT_UP(W(nop))
 46#endif
 471:	ldrex	r2, [r1]
 48	ands	r0, r2, r3		@ save old value of bit
 49	\instr	r2, r2, r3		@ toggle bit
 50	strex	ip, r2, [r1]
 51	cmp	ip, #0
 52	bne	1b
 53	\barrier
 54	cmp	r0, #0
 55	movne	r0, #1
 562:	bx	lr
 57UNWIND(	.fnend		)
 58ENDPROC(\name		)
 59	.endm
 60
 61	.macro	testop, name, instr, store
 62	__testop \name, \instr, \store, smp_dmb
 63	.endm
 64
 65	.macro	sync_testop, name, instr, store
 66	__testop \name, \instr, \store, __smp_dmb
 67	.endm
 68#else
 69	.macro	bitop, name, instr
 70ENTRY(	\name		)
 71UNWIND(	.fnstart	)
 72	ands	ip, r1, #3
 73	strbne	r1, [ip]		@ assert word-aligned
 74	and	r2, r0, #31
 75	mov	r0, r0, lsr #5
 76	mov	r3, #1
 77	mov	r3, r3, lsl r2
 78	save_and_disable_irqs ip
 79	ldr	r2, [r1, r0, lsl #2]
 80	\instr	r2, r2, r3
 81	str	r2, [r1, r0, lsl #2]
 82	restore_irqs ip
 83	ret	lr
 84UNWIND(	.fnend		)
 85ENDPROC(\name		)
 86	.endm
 87
 88/**
 89 * testop - implement a test_and_xxx_bit operation.
 90 * @instr: operational instruction
 91 * @store: store instruction
 92 *
 93 * Note: we can trivially conditionalise the store instruction
 94 * to avoid dirtying the data cache.
 95 */
 96	.macro	testop, name, instr, store
 97ENTRY(	\name		)
 98UNWIND(	.fnstart	)
 99	ands	ip, r1, #3
100	strbne	r1, [ip]		@ assert word-aligned
101	and	r3, r0, #31
102	mov	r0, r0, lsr #5
103	save_and_disable_irqs ip
104	ldr	r2, [r1, r0, lsl #2]!
105	mov	r0, #1
106	tst	r2, r0, lsl r3
107	\instr	r2, r2, r0, lsl r3
108	\store	r2, [r1]
109	moveq	r0, #0
110	restore_irqs ip
111	ret	lr
112UNWIND(	.fnend		)
113ENDPROC(\name		)
114	.endm
115#endif