Linux Audio

Check our new training course

Loading...
v3.1
 
 
 
 
 1#if __LINUX_ARM_ARCH__ >= 6
 2	.macro	bitop, instr
@ bitop — atomically modify one bit of a 32-bit word (ARMv6+ ldrex/strex).
@ In:  r0 = bit number, r1 = word-aligned base pointer to the bitmap.
@ \instr = ALU op combining the loaded word with the mask (e.g. orr/bic/eor).
@ Clobbers r0-r3, ip; returns with bx lr.
 
 
 3	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
 4	strneb	r1, [ip]		@ assert word-aligned
@ NOTE(review): the strneb above stores to address 1..3 on misalignment,
@ deliberately faulting — presumably low memory is unmapped; confirm.
 5	mov	r2, #1
 6	and	r3, r0, #31		@ Get bit offset
 7	mov	r0, r0, lsr #5
 8	add	r1, r1, r0, lsl #2	@ Get word offset
 
 
 
 
 
 9	mov	r3, r2, lsl r3		@ r3 = 1 << bit offset — the bit mask
101:	ldrex	r2, [r1]		@ load word with exclusive monitor armed
11	\instr	r2, r2, r3		@ apply the bit operation
12	strex	r0, r2, [r1]		@ attempt store; r0 = 0 iff it succeeded
13	cmp	r0, #0
14	bne	1b			@ lost the exclusive reservation — retry
15	bx	lr
 
 
16	.endm
17
18	.macro	testop, instr, store
@ testop — atomic test-and-modify of one bit (ARMv6+ ldrex/strex).
@ In:  r0 = bit number, r1 = word-aligned base pointer.
@ Out: r0 = old value of the bit, normalised to 0 or 1.
@ \instr = ALU op applied to the word; \store is NOT used in this
@ variant — NOTE(review): presumably kept so callers share one
@ signature with the pre-ARMv6 path; confirm.
@ Clobbers r0-r3, ip.
 
 
19	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
20	strneb	r1, [ip]		@ assert word-aligned
21	mov	r2, #1
22	and	r3, r0, #31		@ Get bit offset
23	mov	r0, r0, lsr #5
24	add	r1, r1, r0, lsl #2	@ Get word offset
25	mov	r3, r2, lsl r3		@ create mask
26	smp_dmb				@ barrier before the RMW (smp_dmb macro defined elsewhere)
 
 
 
 
 
271:	ldrex	r2, [r1]		@ load word with exclusive monitor armed
28	ands	r0, r2, r3		@ save old value of bit
29	\instr	r2, r2, r3		@ toggle bit
30	strex	ip, r2, [r1]		@ ip = 0 iff the exclusive store succeeded
31	cmp	ip, #0
32	bne	1b			@ retry on contention
33	smp_dmb				@ barrier after the RMW completes
34	cmp	r0, #0
35	movne	r0, #1			@ normalise old bit value to 0/1
362:	bx	lr			@ NOTE(review): label 2 appears unreferenced here
 
 
37	.endm
38#else
39	.macro	bitop, instr
@ bitop — modify one bit of a 32-bit word (pre-ARMv6, no exclusive
@ monitors).  Atomicity comes from disabling IRQs around the
@ load/modify/store — NOTE(review): this only serialises against this
@ CPU; presumably pre-ARMv6 builds are uniprocessor — confirm.
@ In:  r0 = bit number, r1 = word-aligned base pointer.
@ Clobbers r0, r2, r3, ip.
 
 
40	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
41	strneb	r1, [ip]		@ assert word-aligned
42	and	r2, r0, #31		@ r2 = bit offset within the word
43	mov	r0, r0, lsr #5		@ r0 = word index
44	mov	r3, #1
45	mov	r3, r3, lsl r2		@ r3 = bit mask
46	save_and_disable_irqs ip	@ macro defined elsewhere; ip holds saved state
47	ldr	r2, [r1, r0, lsl #2]
48	\instr	r2, r2, r3		@ apply the bit operation
49	str	r2, [r1, r0, lsl #2]
50	restore_irqs ip
51	mov	pc, lr
 
 
52	.endm
53
54/**
55 * testop - implement a test_and_xxx_bit operation.
56 * @instr: operational instruction
57 * @store: store instruction
58 *
59 * Note: we can trivially conditionalise the store instruction
60 * to avoid dirtying the data cache.
61 */
62	.macro	testop, instr, store
@ testop — test-and-modify one bit (pre-ARMv6, IRQ-disable for atomicity).
@ In:  r0 = bit number, r1 = base pointer.
@ \instr = ALU op; \store = store instruction — may be conditional so an
@ unchanged word need not be written back (see header comment above).
@ Out: r0 = old value of the bit (0 or 1).  Clobbers r2, r3, ip.
 
 
63	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
64	strneb	r1, [ip]		@ assert word-aligned
65	and	r3, r0, #31		@ r3 = bit offset
66	mov	r0, r0, lsr #5		@ r0 = word index
67	save_and_disable_irqs ip	@ ip holds the saved IRQ state
68	ldr	r2, [r1, r0, lsl #2]!	@ load word; writeback leaves r1 -> word
69	mov	r0, #1
70	tst	r2, r0, lsl r3		@ Z set iff the old bit was clear
71	\instr	r2, r2, r0, lsl r3	@ NOTE(review): assumes \instr leaves flags intact
72	\store	r2, [r1]
73	moveq	r0, #0			@ old bit was clear -> return 0 (else keep 1)
74	restore_irqs ip
75	mov	pc, lr
 
 
76	.endm
77#endif
v4.17
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#include <asm/assembler.h>
  3#include <asm/unwind.h>
  4
  5#if __LINUX_ARM_ARCH__ >= 6
  6	.macro	bitop, name, instr
@ bitop — emit function \name: atomically modify one bit of a 32-bit
@ word (ARMv6+ ldrex/strex).
@ In:  r0 = bit number, r1 = word-aligned base pointer.
@ \instr = ALU op combining the loaded word with the mask (e.g. orr/bic/eor).
@ Clobbers r0-r3, ip; returns with bx lr.
  7ENTRY(	\name		)
  8UNWIND(	.fnstart	)
  9	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
 10	strneb	r1, [ip]		@ assert word-aligned
 11	mov	r2, #1
 12	and	r3, r0, #31		@ Get bit offset
 13	mov	r0, r0, lsr #5
 14	add	r1, r1, r0, lsl #2	@ Get word offset
 15#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 16	.arch_extension	mp
 17	ALT_SMP(W(pldw)	[r1])	@ preload-for-write hint on SMP; nop on UP
 18	ALT_UP(W(nop))
 19#endif
 20	mov	r3, r2, lsl r3		@ r3 = bit mask
 211:	ldrex	r2, [r1]		@ load word with exclusive monitor armed
 22	\instr	r2, r2, r3		@ apply the bit operation
 23	strex	r0, r2, [r1]		@ attempt store; r0 = 0 iff it succeeded
 24	cmp	r0, #0
 25	bne	1b			@ lost the exclusive reservation — retry
 26	bx	lr
 27UNWIND(	.fnend		)
 28ENDPROC(\name		)
 29	.endm
 30
 31	.macro	testop, name, instr, store
@ testop — emit function \name: atomic test-and-modify of one bit
@ (ARMv6+ ldrex/strex).
@ In:  r0 = bit number, r1 = word-aligned base pointer.
@ Out: r0 = old value of the bit, normalised to 0 or 1.
@ \store is NOT used by this variant — NOTE(review): presumably kept so
@ callers share one signature with the pre-ARMv6 path; confirm.
@ Clobbers r0-r3, ip.
 32ENTRY(	\name		)
 33UNWIND(	.fnstart	)
 34	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
 35	strneb	r1, [ip]		@ assert word-aligned
 36	mov	r2, #1
 37	and	r3, r0, #31		@ Get bit offset
 38	mov	r0, r0, lsr #5
 39	add	r1, r1, r0, lsl #2	@ Get word offset
 40	mov	r3, r2, lsl r3		@ create mask
 41	smp_dmb				@ barrier before the RMW (smp_dmb macro defined elsewhere)
 42#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
 43	.arch_extension	mp
 44	ALT_SMP(W(pldw)	[r1])	@ preload-for-write hint on SMP; nop on UP
 45	ALT_UP(W(nop))
 46#endif
 471:	ldrex	r2, [r1]		@ load word with exclusive monitor armed
 48	ands	r0, r2, r3		@ save old value of bit
 49	\instr	r2, r2, r3		@ toggle bit
 50	strex	ip, r2, [r1]		@ ip = 0 iff the exclusive store succeeded
 51	cmp	ip, #0
 52	bne	1b			@ retry on contention
 53	smp_dmb				@ barrier after the RMW completes
 54	cmp	r0, #0
 55	movne	r0, #1			@ normalise old bit value to 0/1
 562:	bx	lr			@ NOTE(review): label 2 appears unreferenced here
 57UNWIND(	.fnend		)
 58ENDPROC(\name		)
 59	.endm
 60#else
 61	.macro	bitop, name, instr
@ bitop — emit function \name: modify one bit of a 32-bit word
@ (pre-ARMv6, no exclusive monitors).  Atomicity comes from disabling
@ IRQs around the load/modify/store — NOTE(review): this only
@ serialises against this CPU; presumably pre-ARMv6 builds are
@ uniprocessor — confirm.
@ In:  r0 = bit number, r1 = word-aligned base pointer.
@ Clobbers r0, r2, r3, ip.
 62ENTRY(	\name		)
 63UNWIND(	.fnstart	)
 64	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
 65	strneb	r1, [ip]		@ assert word-aligned
 66	and	r2, r0, #31		@ r2 = bit offset within the word
 67	mov	r0, r0, lsr #5		@ r0 = word index
 68	mov	r3, #1
 69	mov	r3, r3, lsl r2		@ r3 = bit mask
 70	save_and_disable_irqs ip	@ macro from <asm/assembler.h>; ip holds saved state
 71	ldr	r2, [r1, r0, lsl #2]
 72	\instr	r2, r2, r3		@ apply the bit operation
 73	str	r2, [r1, r0, lsl #2]
 74	restore_irqs ip
 75	ret	lr
 76UNWIND(	.fnend		)
 77ENDPROC(\name		)
 78	.endm
 79
 80/**
 81 * testop - implement a test_and_xxx_bit operation.
 82 * @instr: operational instruction
 83 * @store: store instruction
 84 *
 85 * Note: we can trivially conditionalise the store instruction
 86 * to avoid dirtying the data cache.
 87 */
 88	.macro	testop, name, instr, store
@ testop — emit function \name: test-and-modify one bit (pre-ARMv6,
@ IRQ-disable for atomicity).
@ In:  r0 = bit number, r1 = base pointer.
@ \instr = ALU op; \store = store instruction — may be conditional so an
@ unchanged word need not be written back (see header comment above).
@ Out: r0 = old value of the bit (0 or 1).  Clobbers r2, r3, ip.
 89ENTRY(	\name		)
 90UNWIND(	.fnstart	)
 91	ands	ip, r1, #3		@ ip = pointer & 3 (nonzero if unaligned)
 92	strneb	r1, [ip]		@ assert word-aligned
 93	and	r3, r0, #31		@ r3 = bit offset
 94	mov	r0, r0, lsr #5		@ r0 = word index
 95	save_and_disable_irqs ip	@ ip holds the saved IRQ state
 96	ldr	r2, [r1, r0, lsl #2]!	@ load word; writeback leaves r1 -> word
 97	mov	r0, #1
 98	tst	r2, r0, lsl r3		@ Z set iff the old bit was clear
 99	\instr	r2, r2, r0, lsl r3	@ NOTE(review): assumes \instr leaves flags intact
100	\store	r2, [r1]
101	moveq	r0, #0			@ old bit was clear -> return 0 (else keep 1)
102	restore_irqs ip
103	ret	lr
104UNWIND(	.fnend		)
105ENDPROC(\name		)
106	.endm
107#endif