path: root/arch/avr32/lib/copy_user.S
/*
 * Copy to/from userspace with optional address space checking.
 *
 * Copyright 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/page.h>
#include <asm/thread_info.h>
#include <asm/asm.h>

	/*
	 * __kernel_size_t
	 * __copy_user(void *to, const void *from, __kernel_size_t n)
	 *
	 * Returns the number of bytes not copied. May be off by at
	 * most 3 bytes if a fault is taken in the main word-copy loop.
	 *
	 * The address-space checking functions simply fall through to
	 * the non-checking version.
	 */
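
	/*
	 * Usage sketch (illustrative only, not part of this file):
	 * callers treat a nonzero return as a partial copy; the
	 * buffer names below are hypothetical.
	 *
	 *	if (copy_from_user(kbuf, ubuf, len))
	 *		return -EFAULT;
	 */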
	.text
	.align	1
	.global	copy_from_user
	.type	copy_from_user, @function
copy_from_user:
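	/*
	 * branch_if_kernel and ret_if_privileged come from
	 * <asm/asm.h>: the former skips the range check when the
	 * thread runs with a kernel address limit, the latter returns
	 * n (r10) untouched, i.e. "nothing copied", if the user
	 * buffer reaches into privileged address space.
	 */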
	branch_if_kernel r8, __copy_user
	ret_if_privileged r8, r11, r10, r10
	rjmp	__copy_user
	.size	copy_from_user, . - copy_from_user

	.global	copy_to_user
	.type	copy_to_user, @function
copy_to_user:
	branch_if_kernel r8, __copy_user
	ret_if_privileged r8, r12, r10, r10
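	/* fall through to __copy_user below */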
	.size	copy_to_user, . - copy_to_user

	.global	__copy_user
	.type	__copy_user, @function
__copy_user:
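	/* AVR32 ABI arguments: r12 = to, r11 = from, r10 = n */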
	mov	r9, r11
	andl	r9, 3, COH	/* r9 = from & 3 (COH clears the upper halfword) */
	brne	6f		/* source not word-aligned: align it first */

	/* At this point, from is word-aligned */
1:	sub	r10, 4		/* bias the count; negative means < 4 bytes left */
	brlt	3f

2:
10:	ld.w	r8, r11++	/* may fault: fixup at 19 */
11:	st.w	r12++, r8	/* may fault: fixup at 19 */
	sub	r10, 4
	brge	2b

3:	sub	r10, -4		/* undo the bias: 0..3 bytes remain */
	reteq	0

	/*
	 * Handle unaligned count. Need to be careful with r10 here:
	 * it is only decremented after each byte completes, so the
	 * fixup at 29 returns an exact remaining count on a fault.
	 */
4:
20:	ld.ub	r8, r11++
21:	st.b	r12++, r8
	sub	r10, 1
	reteq	0
22:	ld.ub	r8, r11++
23:	st.b	r12++, r8
	sub	r10, 1
	reteq	0
24:	ld.ub	r8, r11++
25:	st.b	r12++, r8
	retal	0

	/*
	 * Handle unaligned from-pointer: copy 1..3 bytes until the
	 * source is word-aligned, then rejoin the word loop at 1b.
	 */
6:	cp.w	r10, 4
	brlt	4b		/* fewer than 4 bytes: byte-copy everything */
	rsub	r9, r9, 4	/* r9 = 4 - (from & 3) bytes needed to align */

30:	ld.ub	r8, r11++
31:	st.b	r12++, r8
	sub	r10, 1
	sub	r9, 1
	breq	1b
32:	ld.ub	r8, r11++
33:	st.b	r12++, r8
	sub	r10, 1
	sub	r9, 1
	breq	1b
34:	ld.ub	r8, r11++
35:	st.b	r12++, r8
	sub	r10, 1
	rjmp	1b
	.size	__copy_user, . - __copy_user

	.section .fixup,"ax"
	.align	1
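	/*
	 * 19 re-adds the 4 subtracted before the faulting word access
	 * and falls through to 29, which returns the number of bytes
	 * left to copy in r10.
	 */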
19:	sub	r10, -4
29:	retal	r10

	.section __ex_table,"a"
	.align	2
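	/* Each entry: faulting load/store, matching fixup handler */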
	.long	10b, 19b
	.long	11b, 19b
	.long	20b, 29b
	.long	21b, 29b
	.long	22b, 29b
	.long	23b, 29b
	.long	24b, 29b
	.long	25b, 29b
	.long	30b, 29b
	.long	31b, 29b
	.long	32b, 29b
	.long	33b, 29b
	.long	34b, 29b
	.long	35b, 29b