Linux kernel header: arch/c6x/include/asm/uaccess.h — two versions of the file follow.

Version v4.6:
  1/*
  2 *  Copyright (C) 2011 Texas Instruments Incorporated
  3 *  Author: Mark Salter <msalter@redhat.com>
  4 *
  5 *  This program is free software; you can redistribute it and/or modify
  6 *  it under the terms of the GNU General Public License version 2 as
  7 *  published by the Free Software Foundation.
  8 */
  9#ifndef _ASM_C6X_UACCESS_H
 10#define _ASM_C6X_UACCESS_H
 11
 12#include <linux/types.h>
 13#include <linux/compiler.h>
 14#include <linux/string.h>
 15
 16#ifdef CONFIG_ACCESS_CHECK
 17#define __access_ok _access_ok
 18#endif
 19
 20/*
 21 * __copy_from_user/copy_to_user are based on ones in asm-generic/uaccess.h
 22 *
 23 * C6X supports unaligned 32 and 64 bit loads and stores.
 24 */
/*
 * __copy_from_user - copy @n bytes from user space into a kernel buffer.
 * @to:   kernel destination buffer
 * @from: user-space source pointer
 * @n:    number of bytes to copy
 *
 * Always returns 0 (bytes NOT copied); this implementation never reports
 * a partial copy.
 *
 * For compile-time-constant sizes of 1, 4 or 8 bytes, a single
 * non-aligned load/store pair is used: C6X supports unaligned 32- and
 * 64-bit accesses via ldnw/stnw and ldndw/stndw. Everything else falls
 * back to memcpy().
 */
static inline __must_check long __copy_from_user(void *to,
		const void __user *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			/* Single byte: plain dereference, no alignment issue. */
			*(u8 *)to = *(u8 __force *)from;
			return 0;
		case 4:
			/*
			 * Unaligned 32-bit load from user space, then store to
			 * the kernel buffer; "nop 4" fills the load delay slots.
			 *
			 * NOTE(review): the "A" constraint on %1 here differs
			 * from the lowercase "a" used in case 8 below and in
			 * __copy_to_user. Presumably both select an A-side
			 * register (the .d1t1 unit requires one) — confirm
			 * against the C6X gcc constraint definitions.
			 */
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "A"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			/* Unaligned 64-bit load/store pair. */
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	/* Variable or other constant sizes: byte-wise copy. */
	memcpy(to, (const void __force *)from, n);
	return 0;
}
 60
 61static inline __must_check long __copy_to_user(void __user *to,
 62		const void *from, unsigned long n)
 63{
 64	u32 tmp32;
 65	u64 tmp64;
 66
 67	if (__builtin_constant_p(n)) {
 68		switch (n) {
 69		case 1:
 70			*(u8 __force *)to = *(u8 *)from;
 71			return 0;
 72		case 4:
 73			asm volatile ("ldnw .d1t1 *%2,%0\n"
 74				      "nop  4\n"
 75				      "stnw .d1t1 %0,*%1\n"
 76				      : "=&a"(tmp32)
 77				      : "a"(to), "a"(from)
 78				      : "memory");
 79			return 0;
 80		case 8:
 81			asm volatile ("ldndw .d1t1 *%2,%0\n"
 82				      "nop   4\n"
 83				      "stndw .d1t1 %0,*%1\n"
 84				      : "=&a"(tmp64)
 85				      : "a"(to), "a"(from)
 86				      : "memory");
 87			return 0;
 88		default:
 89			break;
 90		}
 91	}
 92
 93	memcpy((void __force *)to, from, n);
 94	return 0;
 95}
 96
 97#define __copy_to_user   __copy_to_user
 98#define __copy_from_user __copy_from_user
 99
100extern int _access_ok(unsigned long addr, unsigned long size);
101#ifdef CONFIG_ACCESS_CHECK
102#define __access_ok _access_ok
103#endif
104
105#include <asm-generic/uaccess.h>
106
107#endif /* _ASM_C6X_UACCESS_H */
Version v5.4:
 1/* SPDX-License-Identifier: GPL-2.0-only */
 2/*
 3 *  Copyright (C) 2011 Texas Instruments Incorporated
 4 *  Author: Mark Salter <msalter@redhat.com>
 
 
 
 
 5 */
 6#ifndef _ASM_C6X_UACCESS_H
 7#define _ASM_C6X_UACCESS_H
 8
 9#include <linux/types.h>
10#include <linux/compiler.h>
11#include <linux/string.h>
12
 
 
 
 
13/*
 
 
14 * C6X supports unaligned 32 and 64 bit loads and stores.
15 */
/*
 * raw_copy_from_user - arch backend for copy_from_user() (v4.12+ API).
 * @to:   kernel destination buffer
 * @from: user-space source pointer
 * @n:    number of bytes to copy
 *
 * Per the raw_copy_* contract the return value is the number of bytes
 * that could NOT be copied; this implementation always returns 0.
 *
 * Compile-time-constant sizes of 1, 4 and 8 bytes use a single
 * unaligned load/store pair (C6X supports unaligned 32- and 64-bit
 * accesses); everything else falls back to memcpy().
 */
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			/* Single byte: plain dereference, no alignment issue. */
			*(u8 *)to = *(u8 __force *)from;
			return 0;
		case 4:
			/*
			 * Unaligned 32-bit load from user space; "nop 4"
			 * fills the load delay slots.
			 *
			 * NOTE(review): the "A" constraint on %1 differs from
			 * the lowercase "a" used in case 8 and in
			 * raw_copy_to_user — presumably both pick an A-side
			 * register (required by the .d1t1 unit); confirm
			 * against the C6X gcc constraint definitions.
			 */
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "A"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			/* Unaligned 64-bit load/store pair. */
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	/* Variable or other constant sizes: byte-wise copy. */
	memcpy(to, (const void __force *)from, n);
	return 0;
}
51
52static inline __must_check unsigned long
53raw_copy_to_user(void __user *to, const void *from, unsigned long n)
54{
55	u32 tmp32;
56	u64 tmp64;
57
58	if (__builtin_constant_p(n)) {
59		switch (n) {
60		case 1:
61			*(u8 __force *)to = *(u8 *)from;
62			return 0;
63		case 4:
64			asm volatile ("ldnw .d1t1 *%2,%0\n"
65				      "nop  4\n"
66				      "stnw .d1t1 %0,*%1\n"
67				      : "=&a"(tmp32)
68				      : "a"(to), "a"(from)
69				      : "memory");
70			return 0;
71		case 8:
72			asm volatile ("ldndw .d1t1 *%2,%0\n"
73				      "nop   4\n"
74				      "stndw .d1t1 %0,*%1\n"
75				      : "=&a"(tmp64)
76				      : "a"(to), "a"(from)
77				      : "memory");
78			return 0;
79		default:
80			break;
81		}
82	}
83
84	memcpy((void __force *)to, from, n);
85	return 0;
86}
87#define INLINE_COPY_FROM_USER
88#define INLINE_COPY_TO_USER
 
89
90extern int _access_ok(unsigned long addr, unsigned long size);
91#ifdef CONFIG_ACCESS_CHECK
92#define __access_ok _access_ok
93#endif
94
95#include <asm-generic/uaccess.h>
96
97#endif /* _ASM_C6X_UACCESS_H */