/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/linkage.h>

#include <asm/asm-uaccess.h>
#include <asm/assembler.h>
#include <asm/cache.h>
/*
 * Copy from user space to a kernel buffer (alignment handled by the hardware)
 *
 * Parameters:
 *	x0 - to
 *	x1 - from
 *	x2 - n
 * Returns:
 *	x0 - bytes not copied
 */
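
/*
 * copy_template.S (included below) provides the generic copy loop and is
 * specialised through the macros that follow. Loads from the user-space
 * source go through uao_user_alternative, which is patched at boot to use
 * either a plain load (ldrb/ldrh/ldr) or its unprivileged ld*tr counterpart
 * on CPUs with the UAO feature, and which records each access in the
 * exception table so that a fault branches to the 9998 fixup label below.
 * Stores to the kernel destination cannot fault on a bad user address, so
 * they use plain post-indexed stores.
 */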
	.macro ldrb1 ptr, regB, val
	uao_user_alternative 9998f, ldrb, ldtrb, \ptr, \regB, \val
	.endm

	.macro strb1 ptr, regB, val
	strb \ptr, [\regB], \val
	.endm

	.macro ldrh1 ptr, regB, val
	uao_user_alternative 9998f, ldrh, ldtrh, \ptr, \regB, \val
	.endm

	.macro strh1 ptr, regB, val
	strh \ptr, [\regB], \val
	.endm

	.macro ldr1 ptr, regB, val
	uao_user_alternative 9998f, ldr, ldtr, \ptr, \regB, \val
	.endm

	.macro str1 ptr, regB, val
	str \ptr, [\regB], \val
	.endm
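
/*
 * There is no unprivileged load-pair instruction, so uao_ldp expands to
 * two ldtr accesses plus an explicit post-increment on UAO-capable CPUs.
 */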
	.macro ldp1 ptr, regB, regC, val
	uao_ldp 9998f, \ptr, \regB, \regC, \val
	.endm

	.macro stp1 ptr, regB, regC, val
	stp \ptr, \regB, [\regC], \val
	.endm
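
/* "end" points one byte past the destination buffer; the fixup needs it */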
end	.req	x5
ENTRY(__arch_copy_from_user)
	add	end, x0, x2
#include "copy_template.S"
	mov	x0, #0				// Nothing to copy
	ret
ENDPROC(__arch_copy_from_user)
EXPORT_SYMBOL(__arch_copy_from_user)
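
/*
 * A faulting user load lands here via the exception table entries emitted
 * by uao_user_alternative/uao_ldp. dst (defined in copy_template.S) holds
 * the next destination byte to be written, so end - dst is the remainder.
 */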
	.section .fixup,"ax"
	.align	2
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous