arch/c6x/include/asm/uaccess.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  Copyright (C) 2011 Texas Instruments Incorporated
 *  Author: Mark Salter <msalter@redhat.com>
 */
#ifndef _ASM_C6X_UACCESS_H
#define _ASM_C6X_UACCESS_H

#include <linux/types.h>
#include <linux/compiler.h>
#include <linux/string.h>

/*
 * C6X supports unaligned 32 and 64 bit loads and stores.
 */
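/*
 * The fixed-size copy cases below rely on this: ldnw/stnw and
 * ldndw/stndw are the non-aligned word and double-word load/store
 * instructions, so neither pointer needs an alignment check.
 */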
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

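	/*
	 * For a compile-time constant size of 1, 4 or 8 bytes, emit a
	 * single unaligned load/store pair (the "nop 4" covers the four
	 * load delay slots) instead of falling back to memcpy().
	 */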
	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(u8 *)to = *(u8 __force *)from;
			return 0;
		case 4:
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	memcpy(to, (const void __force *)from, n);
	return 0;
}

static inline __must_check unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

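	/* Same constant-size fast path as raw_copy_from_user() above,
	 * in the opposite direction. */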
	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(u8 __force *)to = *(u8 *)from;
			return 0;
		case 4:
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	memcpy((void __force *)to, from, n);
	return 0;
}
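
/*
 * Have the generic copy_{from,to}_user() wrappers expand inline here
 * instead of calling the out-of-line library versions.
 */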
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER
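
/*
 * Example (hypothetical caller): a fixed-size copy such as
 *
 *	u32 val;
 *
 *	if (copy_from_user(&val, ubuf, 4))
 *		return -EFAULT;
 *
 * is routed to the case-4 ldnw/stnw branch above rather than to the
 * memcpy() fallback.
 */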
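/*
 * When CONFIG_ACCESS_CHECK is enabled, __access_ok is routed to the
 * out-of-line C6x range check below; otherwise the asm-generic
 * default, which accepts any range, is used.
 */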
extern int _access_ok(unsigned long addr, unsigned long size);
#ifdef CONFIG_ACCESS_CHECK
#define __access_ok _access_ok
#endif

#include <asm-generic/uaccess.h>

#endif /* _ASM_C6X_UACCESS_H */