Linux Audio

Check our new training course

Loading...
Version v5.4 of arch/arm/include/asm/word-at-a-time.h:
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef __ASM_ARM_WORD_AT_A_TIME_H
 3#define __ASM_ARM_WORD_AT_A_TIME_H
 4
 5#ifndef __ARMEB__
 6
 7/*
 8 * Little-endian word-at-a-time zero byte handling.
 9 * Heavily based on the x86 algorithm.
10 */
11#include <linux/kernel.h>
 
12
/*
 * Per-search constants for the zero-byte scan: one_bits holds 0x01 in
 * every byte, high_bits holds 0x80 in every byte.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Return non-zero if @a contains at least one zero byte, using the
 * classic (x - 0x01..01) & ~x & 0x80..80 trick: it leaves the high bit
 * set in every byte of @a that was zero.  The raw marker word is also
 * stored through @bits for the later prep/create steps.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long found = (a - c->one_bits) & ~a & c->high_bits;

	*bits = found;
	return found;
}

/* The marker word from has_zero() is already in the needed form. */
#define prep_zero_mask(a, bits, c) (bits)
28
/*
 * Convert the has_zero() marker word into a mask covering every byte
 * strictly before the first zero byte: (bits - 1) & ~bits keeps only
 * the bits below the least-significant 0x80 marker, and shifting by 7
 * turns that into 0xff for each byte preceding the zero.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	unsigned long below = (bits - 1) & ~bits;

	return below >> 7;
}
34
/*
 * Translate the create_zero_mask() result into the byte index (0..3)
 * of the first zero byte in the original word.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long byte;

#if __LINUX_ARM_ARCH__ >= 5
	/* clz is available, so fls() is cheap: set-bit count / 8. */
	byte = fls(mask) >> 3;
#else
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	byte = (0x0ff0001 + mask) >> 23;
	/* The arithmetic above maps the 00 case to 1; force it to 0. */
	byte &= mask;
#endif

	return byte;
}

/* The prefix mask doubles as the byte mask for the matched bytes. */
#define zero_bytemask(mask) (mask)
53
54#else	/* __ARMEB__ */
55#include <asm-generic/word-at-a-time.h>
56#endif
57
58#ifdef CONFIG_DCACHE_WORD_ACCESS
59
/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 *
 * How the fixup works: the __ex_table entry maps a fault at label 1
 * to the handler at label 3.  The handler extracts the misalignment
 * (low two address bits) into %1, rounds the pointer down to a word
 * boundary, reloads from the (mapped) aligned address, converts the
 * byte offset to a bit count, and shifts the loaded word so the bytes
 * that exist land in the low end (little-endian, lsr) or high end
 * (big-endian, lsl), with zeroes filling the faulting part.
 *
 * NOTE(review): the fixup path modifies the register backing %2 (an
 * input operand).  That appears tolerated because the fixup branches
 * straight back to label 2 and %2 is not read again — confirm against
 * the kernel's inline-asm conventions before reusing this pattern.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	"3:	and	%1, %2, #0x3\n"
	"	bic	%2, %2, #0x3\n"
	"	ldr	%0, [%2]\n"
	"	lsl	%1, %1, #0x3\n"
#ifndef __ARMEB__
	"	lsr	%0, %0, %1\n"
#else
	"	lsl	%0, %0, %1\n"
#endif
	"	b	2b\n"
	"	.popsection\n"
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}
97
98#endif	/* DCACHE_WORD_ACCESS */
99#endif /* __ASM_ARM_WORD_AT_A_TIME_H */
Version v6.13.7 of arch/arm/include/asm/word-at-a-time.h:
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef __ASM_ARM_WORD_AT_A_TIME_H
  3#define __ASM_ARM_WORD_AT_A_TIME_H
  4
  5#ifndef __ARMEB__
  6
  7/*
  8 * Little-endian word-at-a-time zero byte handling.
  9 * Heavily based on the x86 algorithm.
 10 */
 11#include <linux/bitops.h>
 12#include <linux/wordpart.h>
 13
 14struct word_at_a_time {
 15	const unsigned long one_bits, high_bits;
 16};
 17
 18#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }
 19
 20static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
 21				     const struct word_at_a_time *c)
 22{
 23	unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
 24	*bits = mask;
 25	return mask;
 26}
 27
 28#define prep_zero_mask(a, bits, c) (bits)
 29
 30static inline unsigned long create_zero_mask(unsigned long bits)
 31{
 32	bits = (bits - 1) & ~bits;
 33	return bits >> 7;
 34}
 35
 36static inline unsigned long find_zero(unsigned long mask)
 37{
 38	unsigned long ret;
 39
 40#if __LINUX_ARM_ARCH__ >= 5
 41	/* We have clz available. */
 42	ret = fls(mask) >> 3;
 43#else
 44	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
 45	ret = (0x0ff0001 + mask) >> 23;
 46	/* Fix the 1 for 00 case */
 47	ret &= mask;
 48#endif
 49
 50	return ret;
 51}
 52
 53#define zero_bytemask(mask) (mask)
 54
 55#else	/* __ARMEB__ */
 56#include <asm-generic/word-at-a-time.h>
 57#endif
 58
 59#ifdef CONFIG_DCACHE_WORD_ACCESS
 60
/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 *
 * How the fixup works: the __ex_table entry maps a fault at label 1
 * to the handler at label 3.  The handler extracts the misalignment
 * (low two address bits) into %1, rounds the pointer down to a word
 * boundary, reloads from the (mapped) aligned address, converts the
 * byte offset to a bit count, and shifts the loaded word so the bytes
 * that exist land in the low end (little-endian, lsr) or high end
 * (big-endian, lsl), with zeroes filling the faulting part.
 *
 * NOTE(review): the fixup path modifies the register backing %2 (an
 * input operand).  That appears tolerated because the fixup branches
 * straight back to label 2 and %2 is not read again — confirm against
 * the kernel's inline-asm conventions before reusing this pattern.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	"3:	and	%1, %2, #0x3\n"
	"	bic	%2, %2, #0x3\n"
	"	ldr	%0, [%2]\n"
	"	lsl	%1, %1, #0x3\n"
#ifndef __ARMEB__
	"	lsr	%0, %0, %1\n"
#else
	"	lsl	%0, %0, %1\n"
#endif
	"	b	2b\n"
	"	.popsection\n"
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}
 98
 99#endif	/* DCACHE_WORD_ACCESS */
100#endif /* __ASM_ARM_WORD_AT_A_TIME_H */