Linux Audio

Check our new training course

Loading...
v5.4
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef __ASM_ARM_WORD_AT_A_TIME_H
 3#define __ASM_ARM_WORD_AT_A_TIME_H
 4
 5#ifndef __ARMEB__
 6
 7/*
 8 * Little-endian word-at-a-time zero byte handling.
 9 * Heavily based on the x86 algorithm.
10 */
11#include <linux/kernel.h>
12
/*
 * Per-lookup constants for the zero-byte scan: 0x01 repeated in
 * every byte, and 0x80 repeated in every byte.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Test whether the word 'a' contains any zero byte.
 *
 * Returns (and stores through 'bits') a word with 0x80 set in the
 * high bit of every byte position of 'a' that was zero; non-zero
 * iff at least one byte of 'a' is zero.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long flags;

	/*
	 * Classic trick: (a - 0x01..01) borrows through a byte only
	 * when that byte is zero; masking with ~a and 0x80..80 keeps
	 * exactly the zero-byte flags.
	 */
	flags = (a - c->one_bits) & ~a;
	flags &= c->high_bits;

	*bits = flags;
	return flags;
}
26
/* Little-endian needs no per-word fixup of the has_zero() flags. */
#define prep_zero_mask(a, bits, c) (bits)

/*
 * Convert the has_zero() flag word into a byte mask covering all
 * bytes strictly below the first zero byte (0x00, 0xff, 0xffff, ...).
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	/*
	 * (bits - 1) & ~bits isolates everything below the lowest set
	 * 0x80 flag; shifting right by 7 aligns it into whole bytes.
	 */
	return ((bits - 1) & ~bits) >> 7;
}
34
/*
 * Map a create_zero_mask() result to the byte index (0-3) of the
 * first zero byte: the possible inputs 0, 0xff, 0xffff and 0xffffff
 * correspond to a zero in byte 0, 1, 2 or 3 respectively.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long ret;

#if __LINUX_ARM_ARCH__ >= 5
	/* We have clz available: fls() yields 0/8/16/24 here, so >>3 gives the index. */
	ret = fls(mask) >> 3;
#else
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	ret = (0x0ff0001 + mask) >> 23;
	/* Fix the 1 for 00 case */
	ret &= mask;
#endif

	return ret;
}

/* On little-endian the has_zero() byte mask is already usable as-is. */
#define zero_bytemask(mask) (mask)
53
54#else	/* __ARMEB__ */
55#include <asm-generic/word-at-a-time.h>
56#endif
57
58#ifdef CONFIG_DCACHE_WORD_ACCESS
59
60/*
61 * Load an unaligned word from kernel space.
62 *
63 * In the (very unlikely) case of the word being a page-crosser
64 * and the next page not being mapped, take the exception and
65 * return zeroes in the non-existing part.
66 */
/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	/* Fast path: plain unaligned load; may fault on a page-crosser. */
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	/* Fixup code, reached only if the load above faults. */
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	/* Keep the byte offset, then redo the load word-aligned so it
	 * stays within the mapped page. */
	"3:	and	%1, %2, #0x3\n"
	"	bic	%2, %2, #0x3\n"
	"	ldr	%0, [%2]\n"
	/* Byte offset -> bit shift count. */
	"	lsl	%1, %1, #0x3\n"
#ifndef __ARMEB__
	/* Little-endian: move the valid low bytes down, zero-fill above. */
	"	lsr	%0, %0, %1\n"
#else
	/* Big-endian: move the valid high bytes up, zero-fill below. */
	"	lsl	%0, %0, %1\n"
#endif
	"	b	2b\n"
	"	.popsection\n"
	/* Register 1b in the exception table with 3b as its fixup. */
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}
97
98#endif	/* DCACHE_WORD_ACCESS */
99#endif /* __ASM_ARM_WORD_AT_A_TIME_H */
v6.2
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef __ASM_ARM_WORD_AT_A_TIME_H
 3#define __ASM_ARM_WORD_AT_A_TIME_H
 4
 5#ifndef __ARMEB__
 6
 7/*
 8 * Little-endian word-at-a-time zero byte handling.
 9 * Heavily based on the x86 algorithm.
10 */
11#include <linux/kernel.h>
12
/*
 * Per-lookup constants for the zero-byte scan: 0x01 repeated in
 * every byte, and 0x80 repeated in every byte.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Test whether the word 'a' contains any zero byte.
 *
 * Returns (and stores through 'bits') a word with 0x80 set in the
 * high bit of every byte position of 'a' that was zero; non-zero
 * iff at least one byte of 'a' is zero.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long flags;

	/*
	 * Classic trick: (a - 0x01..01) borrows through a byte only
	 * when that byte is zero; masking with ~a and 0x80..80 keeps
	 * exactly the zero-byte flags.
	 */
	flags = (a - c->one_bits) & ~a;
	flags &= c->high_bits;

	*bits = flags;
	return flags;
}
26
/* Little-endian needs no per-word fixup of the has_zero() flags. */
#define prep_zero_mask(a, bits, c) (bits)

/*
 * Convert the has_zero() flag word into a byte mask covering all
 * bytes strictly below the first zero byte (0x00, 0xff, 0xffff, ...).
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	/*
	 * (bits - 1) & ~bits isolates everything below the lowest set
	 * 0x80 flag; shifting right by 7 aligns it into whole bytes.
	 */
	return ((bits - 1) & ~bits) >> 7;
}
34
/*
 * Map a create_zero_mask() result to the byte index (0-3) of the
 * first zero byte: the possible inputs 0, 0xff, 0xffff and 0xffffff
 * correspond to a zero in byte 0, 1, 2 or 3 respectively.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long ret;

#if __LINUX_ARM_ARCH__ >= 5
	/* We have clz available: fls() yields 0/8/16/24 here, so >>3 gives the index. */
	ret = fls(mask) >> 3;
#else
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	ret = (0x0ff0001 + mask) >> 23;
	/* Fix the 1 for 00 case */
	ret &= mask;
#endif

	return ret;
}

/* On little-endian the has_zero() byte mask is already usable as-is. */
#define zero_bytemask(mask) (mask)
53
54#else	/* __ARMEB__ */
55#include <asm-generic/word-at-a-time.h>
56#endif
57
58#ifdef CONFIG_DCACHE_WORD_ACCESS
59
60/*
61 * Load an unaligned word from kernel space.
62 *
63 * In the (very unlikely) case of the word being a page-crosser
64 * and the next page not being mapped, take the exception and
65 * return zeroes in the non-existing part.
66 */
/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	/* Fast path: plain unaligned load; may fault on a page-crosser. */
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	/* Fixup code, reached only if the load above faults. */
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	/* Keep the byte offset, then redo the load word-aligned so it
	 * stays within the mapped page. */
	"3:	and	%1, %2, #0x3\n"
	"	bic	%2, %2, #0x3\n"
	"	ldr	%0, [%2]\n"
	/* Byte offset -> bit shift count. */
	"	lsl	%1, %1, #0x3\n"
#ifndef __ARMEB__
	/* Little-endian: move the valid low bytes down, zero-fill above. */
	"	lsr	%0, %0, %1\n"
#else
	/* Big-endian: move the valid high bytes up, zero-fill below. */
	"	lsl	%0, %0, %1\n"
#endif
	"	b	2b\n"
	"	.popsection\n"
	/* Register 1b in the exception table with 3b as its fixup. */
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}
97
98#endif	/* DCACHE_WORD_ACCESS */
99#endif /* __ASM_ARM_WORD_AT_A_TIME_H */