v5.14.15 — arch/alpha/lib/csum_partial_copy.c:
// SPDX-License-Identifier: GPL-2.0
/*
 * csum_partial_copy - do IP checksumming and copy
 *
 * (C) Copyright 1996 Linus Torvalds
 * accelerated versions (and 21264 assembly versions) contributed by
 *	Rick Gorton	<rick.gorton@alpha-processor.com>
 *
 * Don't look at this too closely - you'll go mad. The things
 * we do for performance..
 */

#include <linux/types.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <net/checksum.h>


#define ldq_u(x,y) \
__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(const unsigned long *)(y)))

#define stq_u(x,y) \
__asm__ __volatile__("stq_u %1,%0":"=m" (*(unsigned long *)(y)):"r" (x))

#define extql(x,y,z) \
__asm__ __volatile__("extql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define extqh(x,y,z) \
__asm__ __volatile__("extqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskql(x,y,z) \
__asm__ __volatile__("mskql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskqh(x,y,z) \
__asm__ __volatile__("mskqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insql(x,y,z) \
__asm__ __volatile__("insql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insqh(x,y,z) \
__asm__ __volatile__("insqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

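/*
 * Editorial note: the ext*/ins*/msk* byte-manipulation instructions are
 * what let the loops below read and write quadwords at arbitrary byte
 * offsets without taking unaligned-access traps.  As a rough mental model
 * (little-endian shift semantics; illustrative helper, not part of the
 * kernel source), an unaligned 8-byte load built from two aligned ldq_u
 * loads looks like this:
 */
static unsigned long load_unaligned_model(const unsigned char *p)
{
	unsigned long base = (unsigned long)p & ~7UL;
	unsigned long lo = *(const unsigned long *)base;	/* ldq_u p   */
	unsigned long hi = *(const unsigned long *)(base + 8);	/* ldq_u p+8 */
	unsigned long shift = 8 * ((unsigned long)p & 7);

	if (!shift)	/* already aligned: extqh would contribute 0 */
		return lo;
	return (lo >> shift) | (hi << (64 - shift));	/* extql | extqh */
}
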
#define __get_word(insn,x,ptr)				\
({							\
	long __guu_err;					\
	__asm__ __volatile__(				\
	"1:	"#insn" %0,%2\n"			\
	"2:\n"						\
	EXC(1b,2b,%0,%1)				\
		: "=r"(x), "=r"(__guu_err)		\
		: "m"(__m(ptr)), "1"(0));		\
	__guu_err;					\
})
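/*
 * Editorial note: the EXC() entry registers label 1: in the exception
 * table, so a faulting user load is fixed up rather than oopsing --
 * control resumes at 2: with the second output set non-zero.  A
 * user-space analogue (purely illustrative, using a signal handler in
 * place of the kernel's exception table):
 */
#include <setjmp.h>
#include <signal.h>

static sigjmp_buf fault_env;

static void on_fault(int sig)
{
	siglongjmp(fault_env, 1);	/* the "exception table" fixup */
}

/* Returns 0 and stores the word on success, non-zero on a fault,
 * mirroring __get_word's contract. */
static long get_word_model(unsigned long *x, const unsigned long *p)
{
	signal(SIGSEGV, on_fault);
	if (sigsetjmp(fault_env, 1))
		return -1;		/* resumed here after the fault */
	*x = *p;			/* the "1:" load that may fault */
	return 0;
}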
static inline unsigned short from64to16(unsigned long x)
{
	/* Using extract instructions is a bit more efficient
	   than the original shift/bitmask version.  */

	union {
		unsigned long	ul;
		unsigned int	ui[2];
		unsigned short	us[4];
	} in_v, tmp_v, out_v;

	in_v.ul = x;
	tmp_v.ul = (unsigned long) in_v.ui[0] + (unsigned long) in_v.ui[1];

	/* Since the bits of tmp_v.us[3] are going to always be zero,
	   we don't have to bother to add that in.  */
	out_v.ul = (unsigned long) tmp_v.us[0] + (unsigned long) tmp_v.us[1]
			+ (unsigned long) tmp_v.us[2];

	/* Similarly, out_v.us[2] is always zero for the final add.  */
	return out_v.us[0] + out_v.us[1];
}
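
/*
 * Editorial note: for reference, the "original shift/bitmask version"
 * the comment above alludes to folds the 64-bit sum in halves.  A sketch
 * (assumed equivalent; it computes the same 16-bit folded result):
 */
static unsigned short from64to16_ref(unsigned long x)
{
	/* 64 -> 33 bits */
	x = (x & 0xffffffff) + (x >> 32);
	/* 33 -> 17 bits */
	x = (x & 0xffff) + (x >> 16);
	/* 17 -> 16 bits plus a possible carry */
	x = (x & 0xffff) + (x >> 16);
	/* fold the last possible carry */
	x = (x & 0xffff) + (x >> 16);
	return x;
}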


/*
 * Ok. This isn't fun, but this is the EASY case.
 */
static inline unsigned long
csum_partial_cfu_aligned(const unsigned long __user *src, unsigned long *dst,
			 long len)
{
	unsigned long checksum = ~0U;
	unsigned long carry = 0;

	while (len >= 0) {
		unsigned long word;
		if (__get_word(ldq, word, src))
			return 0;
		checksum += carry;
		src++;
		checksum += word;
		len -= 8;
		carry = checksum < word;
		*dst = word;
		dst++;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long word, tmp;
		if (__get_word(ldq, word, src))
			return 0;
		tmp = *dst;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	return checksum;
}
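
/*
 * Editorial note: the "carry = checksum < word" idiom is the portable
 * way to detect unsigned overflow of the preceding add; each iteration
 * folds the previous carry back in, giving the ones'-complement
 * end-around carry the Internet checksum requires.  A minimal user-space
 * model of the accumulation (illustrative helper name):
 */
static unsigned long sum_words_model(const unsigned long *p, long n)
{
	unsigned long sum = 0, carry = 0;

	while (n-- > 0) {
		unsigned long w = *p++;

		sum += carry;	 /* fold carry from the previous add */
		sum += w;
		carry = sum < w; /* unsigned wraparound => carry out */
	}
	return sum + carry;	 /* fold the final carry */
}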

/*
 * This is even less fun, but this is still reasonably
 * easy.
 */
static inline unsigned long
csum_partial_cfu_dest_aligned(const unsigned long __user *src,
			      unsigned long *dst,
			      unsigned long soff,
			      long len)
{
	unsigned long first;
	unsigned long word, carry;
	unsigned long lastsrc = 7+len+(unsigned long)src;
	unsigned long checksum = ~0U;

	if (__get_word(ldq_u, first,src))
		return 0;
	carry = 0;
	while (len >= 0) {
		unsigned long second;

		if (__get_word(ldq_u, second, src+1))
			return 0;
		extql(first, soff, word);
		len -= 8;
		src++;
		extqh(second, soff, first);
		checksum += carry;
		word |= first;
		first = second;
		checksum += word;
		*dst = word;
		dst++;
		carry = checksum < word;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long tmp;
		unsigned long second;
		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		tmp = *dst;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	return checksum;
}

/*
 * This is slightly less fun than the above..
 */
static inline unsigned long
csum_partial_cfu_src_aligned(const unsigned long __user *src,
			     unsigned long *dst,
			     unsigned long doff,
			     long len,
			     unsigned long partial_dest)
{
	unsigned long carry = 0;
	unsigned long word;
	unsigned long second_dest;
	unsigned long checksum = ~0U;

	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		if (__get_word(ldq, word, src))
			return 0;
		len -= 8;
		insql(word, doff, second_dest);
		checksum += carry;
		stq_u(partial_dest | second_dest, dst);
		src++;
		checksum += word;
		insqh(word, doff, partial_dest);
		carry = checksum < word;
		dst++;
	}
	len += 8;
	if (len) {
		checksum += carry;
		if (__get_word(ldq, word, src))
			return 0;
		mskql(word, len, word);
		len -= 8;
		checksum += word;
		insql(word, doff, second_dest);
		len += doff;
		carry = checksum < word;
		partial_dest |= second_dest;
		if (len >= 0) {
			stq_u(partial_dest, dst);
			if (!len) goto out;
			dst++;
			insqh(word, doff, partial_dest);
		}
		doff = len;
	}
	ldq_u(second_dest, dst);
	mskqh(second_dest, doff, second_dest);
	stq_u(partial_dest | second_dest, dst);
out:
	checksum += carry;
	return checksum;
}
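
/*
 * Editorial note: on the store side the roles reverse -- each source word
 * is split by insql/insqh into the part belonging to the current
 * destination quadword and the part that spills into the next one.
 * C models of the two instructions (little-endian byte-insert semantics,
 * illustrative only):
 */
static unsigned long insql_model(unsigned long x, unsigned long addr)
{
	/* bytes of x shifted up to the destination byte offset */
	return x << (8 * (addr & 7));
}

static unsigned long insqh_model(unsigned long x, unsigned long addr)
{
	/* the bytes insql pushed past bit 63, i.e. the spill into the
	 * following quadword (zero when the store is aligned) */
	unsigned long shift = 8 * (addr & 7);

	return shift ? x >> (64 - shift) : 0;
}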

/*
 * This is so totally un-fun that it's frightening. Don't
 * look at this too closely, you'll go blind.
 */
static inline unsigned long
csum_partial_cfu_unaligned(const unsigned long __user * src,
			   unsigned long * dst,
			   unsigned long soff, unsigned long doff,
			   long len, unsigned long partial_dest)
{
	unsigned long carry = 0;
	unsigned long first;
	unsigned long lastsrc;
	unsigned long checksum = ~0U;

	if (__get_word(ldq_u, first, src))
		return 0;
	lastsrc = 7+len+(unsigned long)src;
	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, src+1))
			return 0;
		extql(first, soff, word);
		checksum += carry;
		len -= 8;
		extqh(second, soff, first);
		src++;
		word |= first;
		first = second;
		insql(word, doff, second_dest);
		checksum += word;
		stq_u(partial_dest | second_dest, dst);
		carry = checksum < word;
		insqh(word, doff, partial_dest);
		dst++;
	}
	len += doff;
	checksum += carry;
	if (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		first = second;
		mskql(word, len-doff, word);
		checksum += word;
		insql(word, doff, second_dest);
		carry = checksum < word;
		stq_u(partial_dest | second_dest, dst);
		if (len) {
			ldq_u(second_dest, dst+1);
			insqh(word, doff, partial_dest);
			mskqh(second_dest, len, second_dest);
			stq_u(partial_dest | second_dest, dst+1);
		}
		checksum += carry;
	} else {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		ldq_u(second_dest, dst);
		mskql(word, len-doff, word);
		checksum += word;
		mskqh(second_dest, len, second_dest);
		carry = checksum < word;
		insql(word, doff, word);
		stq_u(partial_dest | word | second_dest, dst);
		checksum += carry;
	}
	return checksum;
}

static __wsum __csum_and_copy(const void __user *src, void *dst, int len)
{
	unsigned long soff = 7 & (unsigned long) src;
	unsigned long doff = 7 & (unsigned long) dst;
	unsigned long checksum;

	if (!doff) {
		if (!soff)
			checksum = csum_partial_cfu_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst, len-8);
		else
			checksum = csum_partial_cfu_dest_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				soff, len-8);
	} else {
		unsigned long partial_dest;
		ldq_u(partial_dest, dst);
		if (!soff)
			checksum = csum_partial_cfu_src_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				doff, len-8, partial_dest);
		else
			checksum = csum_partial_cfu_unaligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				soff, doff, len-8, partial_dest);
	}
	return (__force __wsum)from64to16 (checksum);
}

__wsum
csum_and_copy_from_user(const void __user *src, void *dst, int len)
{
	if (!access_ok(src, len))
		return 0;
	return __csum_and_copy(src, dst, len);
}
EXPORT_SYMBOL(csum_and_copy_from_user);
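
/*
 * Editorial note: a caller sketch (hypothetical names, abbreviated error
 * handling).  In this v5.x-era API a return of 0 signals that the user
 * access faulted; seeding the sum with ~0U ensures a successful folded
 * checksum is never 0.
 */
static int copy_payload_checksummed(const void __user *ubuf,
				    void *kbuf, int len, __wsum *csum)
{
	__wsum partial = csum_and_copy_from_user(ubuf, kbuf, len);

	if (!partial)		/* 0 <=> the user access faulted */
		return -EFAULT;
	*csum = partial;
	return 0;
}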

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
	return __csum_and_copy((__force const void __user *)src,
						dst, len);
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);
v4.6 — the same file, with the older error-pointer API:
 
/*
 * csum_partial_copy - do IP checksumming and copy
 *
 * (C) Copyright 1996 Linus Torvalds
 * accelerated versions (and 21264 assembly versions) contributed by
 *	Rick Gorton	<rick.gorton@alpha-processor.com>
 *
 * Don't look at this too closely - you'll go mad. The things
 * we do for performance..
 */

#include <linux/types.h>
#include <linux/string.h>
#include <asm/uaccess.h>


#define ldq_u(x,y) \
__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(const unsigned long *)(y)))

#define stq_u(x,y) \
__asm__ __volatile__("stq_u %1,%0":"=m" (*(unsigned long *)(y)):"r" (x))

#define extql(x,y,z) \
__asm__ __volatile__("extql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define extqh(x,y,z) \
__asm__ __volatile__("extqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskql(x,y,z) \
__asm__ __volatile__("mskql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskqh(x,y,z) \
__asm__ __volatile__("mskqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insql(x,y,z) \
__asm__ __volatile__("insql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insqh(x,y,z) \
__asm__ __volatile__("insqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))


#define __get_user_u(x,ptr)				\
({							\
	long __guu_err;					\
	__asm__ __volatile__(				\
	"1:	ldq_u %0,%2\n"				\
	"2:\n"						\
	".section __ex_table,\"a\"\n"			\
	"	.long 1b - .\n"				\
	"	lda %0,2b-1b(%1)\n"			\
	".previous"					\
		: "=r"(x), "=r"(__guu_err)		\
		: "m"(__m(ptr)), "1"(0));		\
	__guu_err;					\
})

#define __put_user_u(x,ptr)				\
({							\
	long __puu_err;					\
	__asm__ __volatile__(				\
	"1:	stq_u %2,%1\n"				\
	"2:\n"						\
	".section __ex_table,\"a\"\n"			\
	"	.long 1b - .\n"				\
	"	lda $31,2b-1b(%0)\n"			\
	".previous"					\
		: "=r"(__puu_err)			\
		: "m"(__m(ptr)), "rJ"(x), "0"(0));	\
	__puu_err;					\
})


static inline unsigned short from64to16(unsigned long x)
{
	/* Using extract instructions is a bit more efficient
	   than the original shift/bitmask version.  */

	union {
		unsigned long	ul;
		unsigned int	ui[2];
		unsigned short	us[4];
	} in_v, tmp_v, out_v;

	in_v.ul = x;
	tmp_v.ul = (unsigned long) in_v.ui[0] + (unsigned long) in_v.ui[1];

	/* Since the bits of tmp_v.us[3] are going to always be zero,
	   we don't have to bother to add that in.  */
	out_v.ul = (unsigned long) tmp_v.us[0] + (unsigned long) tmp_v.us[1]
			+ (unsigned long) tmp_v.us[2];

	/* Similarly, out_v.us[2] is always zero for the final add.  */
	return out_v.us[0] + out_v.us[1];
}


/*
 * Ok. This isn't fun, but this is the EASY case.
 */
static inline unsigned long
csum_partial_cfu_aligned(const unsigned long __user *src, unsigned long *dst,
			 long len, unsigned long checksum,
			 int *errp)
{
	unsigned long carry = 0;
	int err = 0;

	while (len >= 0) {
		unsigned long word;
		err |= __get_user(word, src);
		checksum += carry;
		src++;
		checksum += word;
		len -= 8;
		carry = checksum < word;
		*dst = word;
		dst++;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long word, tmp;
		err |= __get_user(word, src);
		tmp = *dst;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	if (err && errp) *errp = err;
	return checksum;
}

/*
 * This is even less fun, but this is still reasonably
 * easy.
 */
static inline unsigned long
csum_partial_cfu_dest_aligned(const unsigned long __user *src,
			      unsigned long *dst,
			      unsigned long soff,
			      long len, unsigned long checksum,
			      int *errp)
{
	unsigned long first;
	unsigned long word, carry;
	unsigned long lastsrc = 7+len+(unsigned long)src;
	int err = 0;

	err |= __get_user_u(first,src);
	carry = 0;
	while (len >= 0) {
		unsigned long second;

		err |= __get_user_u(second, src+1);
		extql(first, soff, word);
		len -= 8;
		src++;
		extqh(second, soff, first);
		checksum += carry;
		word |= first;
		first = second;
		checksum += word;
		*dst = word;
		dst++;
		carry = checksum < word;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long tmp;
		unsigned long second;
		err |= __get_user_u(second, lastsrc);
		tmp = *dst;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	if (err && errp) *errp = err;
	return checksum;
}

/*
 * This is slightly less fun than the above..
 */
static inline unsigned long
csum_partial_cfu_src_aligned(const unsigned long __user *src,
			     unsigned long *dst,
			     unsigned long doff,
			     long len, unsigned long checksum,
			     unsigned long partial_dest,
			     int *errp)
{
	unsigned long carry = 0;
	unsigned long word;
	unsigned long second_dest;
	int err = 0;

	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		err |= __get_user(word, src);
		len -= 8;
		insql(word, doff, second_dest);
		checksum += carry;
		stq_u(partial_dest | second_dest, dst);
		src++;
		checksum += word;
		insqh(word, doff, partial_dest);
		carry = checksum < word;
		dst++;
	}
	len += 8;
	if (len) {
		checksum += carry;
		err |= __get_user(word, src);
		mskql(word, len, word);
		len -= 8;
		checksum += word;
		insql(word, doff, second_dest);
		len += doff;
		carry = checksum < word;
		partial_dest |= second_dest;
		if (len >= 0) {
			stq_u(partial_dest, dst);
			if (!len) goto out;
			dst++;
			insqh(word, doff, partial_dest);
		}
		doff = len;
	}
	ldq_u(second_dest, dst);
	mskqh(second_dest, doff, second_dest);
	stq_u(partial_dest | second_dest, dst);
out:
	checksum += carry;
	if (err && errp) *errp = err;
	return checksum;
}

/*
 * This is so totally un-fun that it's frightening. Don't
 * look at this too closely, you'll go blind.
 */
static inline unsigned long
csum_partial_cfu_unaligned(const unsigned long __user * src,
			   unsigned long * dst,
			   unsigned long soff, unsigned long doff,
			   long len, unsigned long checksum,
			   unsigned long partial_dest,
			   int *errp)
{
	unsigned long carry = 0;
	unsigned long first;
	unsigned long lastsrc;
	int err = 0;

	err |= __get_user_u(first, src);
	lastsrc = 7+len+(unsigned long)src;
	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		err |= __get_user_u(second, src+1);
		extql(first, soff, word);
		checksum += carry;
		len -= 8;
		extqh(second, soff, first);
		src++;
		word |= first;
		first = second;
		insql(word, doff, second_dest);
		checksum += word;
		stq_u(partial_dest | second_dest, dst);
		carry = checksum < word;
		insqh(word, doff, partial_dest);
		dst++;
	}
	len += doff;
	checksum += carry;
	if (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		err |= __get_user_u(second, lastsrc);
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		first = second;
		mskql(word, len-doff, word);
		checksum += word;
		insql(word, doff, second_dest);
		carry = checksum < word;
		stq_u(partial_dest | second_dest, dst);
		if (len) {
			ldq_u(second_dest, dst+1);
			insqh(word, doff, partial_dest);
			mskqh(second_dest, len, second_dest);
			stq_u(partial_dest | second_dest, dst+1);
		}
		checksum += carry;
	} else {
		unsigned long second, word;
		unsigned long second_dest;

		err |= __get_user_u(second, lastsrc);
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		ldq_u(second_dest, dst);
		mskql(word, len-doff, word);
		checksum += word;
		mskqh(second_dest, len, second_dest);
		carry = checksum < word;
		insql(word, doff, word);
		stq_u(partial_dest | word | second_dest, dst);
		checksum += carry;
	}
	if (err && errp) *errp = err;
	return checksum;
}

__wsum
csum_partial_copy_from_user(const void __user *src, void *dst, int len,
			       __wsum sum, int *errp)
{
	unsigned long checksum = (__force u32) sum;
	unsigned long soff = 7 & (unsigned long) src;
	unsigned long doff = 7 & (unsigned long) dst;

	if (len) {
		if (!access_ok(VERIFY_READ, src, len)) {
			if (errp) *errp = -EFAULT;
			memset(dst, 0, len);
			return sum;
		}
		if (!doff) {
			if (!soff)
				checksum = csum_partial_cfu_aligned(
					(const unsigned long __user *) src,
					(unsigned long *) dst,
					len-8, checksum, errp);
			else
				checksum = csum_partial_cfu_dest_aligned(
					(const unsigned long __user *) src,
					(unsigned long *) dst,
					soff, len-8, checksum, errp);
		} else {
			unsigned long partial_dest;
			ldq_u(partial_dest, dst);
			if (!soff)
				checksum = csum_partial_cfu_src_aligned(
					(const unsigned long __user *) src,
					(unsigned long *) dst,
					doff, len-8, checksum,
					partial_dest, errp);
			else
				checksum = csum_partial_cfu_unaligned(
					(const unsigned long __user *) src,
					(unsigned long *) dst,
					soff, doff, len-8, checksum,
					partial_dest, errp);
		}
		checksum = from64to16 (checksum);
	}
	return (__force __wsum)checksum;
}

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
{
	__wsum checksum;
	mm_segment_t oldfs = get_fs();
	set_fs(KERNEL_DS);
	checksum = csum_partial_copy_from_user((__force const void __user *)src,
						dst, len, sum, NULL);
	set_fs(oldfs);
	return checksum;
}