/*
 *  linux/arch/arm/lib/csumpartialcopyuser.S
 *
 *  Copyright (C) 1995-1998 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  27/03/03	Ian Molton	Clean up CONFIG_CPU
 *
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>

		.text

#ifdef CONFIG_CPU_SW_DOMAIN_PAN
		.macro	save_regs
		mrc	p15, 0, ip, c3, c0, 0
		stmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		uaccess_enable ip
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		mcr	p15, 0, ip, c3, c0, 0
		ret	lr
		.endm
#else
		.macro	save_regs
		stmfd	sp!, {r1, r2, r4 - r8, lr}
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, pc}
		.endm
#endif
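
/*
 * Illustrative note (not part of the original file): the approximate stack
 * layout immediately after save_regs, assuming the CONFIG_CPU_SW_DOMAIN_PAN
 * variant above (nine words pushed):
 *
 *	[sp, #0]		saved r1 (dst argument)
 *	[sp, #4]		saved r2 (len argument)
 *	[sp, #8]..[sp, #24]	saved r4 - r8
 *	[sp, #28]		saved ip (DACR value read by the mrc above)
 *	[sp, #32]		saved lr
 *	[sp, #36]		caller's [sp], i.e. the err_ptr argument
 *
 * The non-PAN variant omits the saved ip word, so lr and err_ptr end up one
 * word lower, at [sp, #28] and [sp, #32] respectively.
 */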

		.macro	load1b,	reg1
		ldrusr	\reg1, r0, 1
		.endm

		.macro	load2b, reg1, reg2
		ldrusr	\reg1, r0, 1
		ldrusr	\reg2, r0, 1
		.endm

		.macro	load1l, reg1
		ldrusr	\reg1, r0, 4
		.endm

		.macro	load2l, reg1, reg2
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		.endm

		.macro	load4l, reg1, reg2, reg3, reg4
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		ldrusr	\reg3, r0, 4
		ldrusr	\reg4, r0, 4
		.endm
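
/*
 * Illustrative sketch (not part of the original file): ldrusr is defined in
 * arch/arm/include/asm/assembler.h and performs a user-space load whose
 * faults are redirected, by default, to the local label 9001 used by the
 * fixup code at the end of this file.  Roughly, a single
 * "ldrusr r4, r0, 4" expands to something like the following; the exact
 * load instruction and details vary with the kernel configuration:
 *
 * 9999:	ldr	r4, [r0], #4		@ post-indexed load from user pointer
 *		.pushsection __ex_table, "a"
 *		.align	3
 *		.long	9999b, 9001f		@ fault at 9999 -> jump to 9001
 *		.popsection
 */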

/*
 * unsigned int
 * csum_partial_copy_from_user(const char *src, char *dst, int len, int sum, int *err_ptr)
 *  r0 = src, r1 = dst, r2 = len, r3 = sum, [sp] = *err_ptr
 *  Returns : r0 = checksum, [[sp, #0], #0] = 0 or -EFAULT
 */

#define FN_ENTRY	ENTRY(csum_partial_copy_from_user)
#define FN_EXIT		ENDPROC(csum_partial_copy_from_user)
#define FN_EXPORT	EXPORT_SYMBOL(csum_partial_copy_from_user)

#include "csumpartialcopygeneric.S"
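
/*
 * Illustrative sketch (not part of the original file): a minimal C-side use
 * of this routine, assuming the interface documented above; the variable and
 * helper names are hypothetical.
 *
 *	int err = 0;
 *	unsigned int csum;
 *
 *	csum = csum_partial_copy_from_user(user_src, kernel_dst, len,
 *					   initial_sum, &err);
 *	if (err)	// on a faulting user access, the destination buffer
 *			// is zeroed and *err_ptr is set to -EFAULT
 *		handle_fault();
 */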

/*
 * FIXME: minor buglet here
 * We don't return the checksum for the data present in the buffer.  To do
 * so properly, we would have to add in whatever registers were loaded before
 * the fault, which, with the current asm above is not predictable.
 */
		.pushsection .text.fixup,"ax"
		.align	4
9001:		mov	r4, #-EFAULT
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
		ldr	r5, [sp, #9*4]		@ *err_ptr (nine words pushed by save_regs)
#else
		ldr	r5, [sp, #8*4]		@ *err_ptr (eight words pushed by save_regs)
#endif
		str	r4, [r5]
		ldmia	sp, {r1, r2}		@ retrieve dst, len
		add	r2, r2, r1
		mov	r0, #0			@ zero the buffer
9002:		teq	r2, r1
		strneb	r0, [r1], #1
		bne	9002b
		load_regs
		.popsection