/* Reconstructed from a git-blame rendering; every line below originates
 * from commit 1da177e4 (Linus Torvalds, initial git repository import).
 */
/* bitops.S: Low level assembler bit operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */

#include <linux/config.h>
#include <asm/ptrace.h>
#include <asm/psr.h>

	.text
	.align	4

	.globl __bitops_begin
__bitops_begin:

/* Take bits in %g2 and set them in word at %g1,
 * return whether bits were set in original value
 * in %g2. %g4 holds value to restore into %o7
 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
 * used as temporaries and thus is considered clobbered
 * by all callers.
 */
	.globl ___set_bit
___set_bit:
	rd	%psr, %g3		! %g3 = saved PSR, restored before returning
	nop; nop; nop;			! NOTE(review): settle time around rd/wr %psr -- looks like V8 delay-slot paranoia
	or	%g3, PSR_PIL, %g5	! raise PIL to maximum -- presumably masks local interrupts; confirm against asm/psr.h
	wr	%g5, 0x0, %psr		! enter the critical section
	nop; nop; nop			! wr %psr takes effect only after 3 delay cycles (SPARC V8)
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0	! Did we get it?
	bne	2b		! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (on SMP this ld sits in bne's delay slot; harmless reload while spinning)
	or	%g7, %g2, %g5		! %g5 = old | mask  -- new word value
	and	%g7, %g2, %g2		! %g2 = old & mask  -- return value: bits that were already set
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! publish new word, then ...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ... drop the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original PSR (re-enables interrupts at prior PIL)
	nop; nop; nop			! again honor the wr %psr delay
	jmpl	%o7, %g0		! return to address held in %o7 ...
	 mov	%g4, %o7		! ... restoring caller's %o7 from %g4 in the delay slot
50 | ||
/* Same as above, but clears the bits from %g2 instead.
 * In:  %g1 = word address, %g2 = bit mask, %g4 = caller's %o7.
 * Out: %g2 = old & mask (which of the requested bits were set).
 * Clobbers %g3, %g5, %g7.
 */
	.globl ___clear_bit
___clear_bit:
	rd	%psr, %g3		! %g3 = saved PSR, restored before returning
	nop; nop; nop			! NOTE(review): settle time around rd/wr %psr -- looks like V8 delay-slot paranoia
	or	%g3, PSR_PIL, %g5	! raise PIL to maximum -- presumably masks local interrupts
	wr	%g5, 0x0, %psr		! enter the critical section
	nop; nop; nop			! wr %psr takes effect only after 3 delay cycles (SPARC V8)
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0	! Did we get it?
	bne	2b		! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (on SMP this ld sits in bne's delay slot)
	andn	%g7, %g2, %g5		! %g5 = old & ~mask -- new word value
	and	%g7, %g2, %g2		! %g2 = old & mask  -- return value: bits that were set
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! publish new word, then ...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ... drop the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop			! again honor the wr %psr delay
	jmpl	%o7, %g0		! return to address held in %o7 ...
	 mov	%g4, %o7		! ... restoring caller's %o7 from %g4 in the delay slot
79 | ||
/* Same thing again, but this time toggles the bits from %g2.
 * In:  %g1 = word address, %g2 = bit mask, %g4 = caller's %o7.
 * Out: %g2 = old & mask (state of the requested bits before the toggle).
 * Clobbers %g3, %g5, %g7.
 */
	.globl ___change_bit
___change_bit:
	rd	%psr, %g3		! %g3 = saved PSR, restored before returning
	nop; nop; nop			! NOTE(review): settle time around rd/wr %psr -- looks like V8 delay-slot paranoia
	or	%g3, PSR_PIL, %g5	! raise PIL to maximum -- presumably masks local interrupts
	wr	%g5, 0x0, %psr		! enter the critical section
	nop; nop; nop			! wr %psr takes effect only after 3 delay cycles (SPARC V8)
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0	! Did we get it?
	bne	2b		! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (on SMP this ld sits in bne's delay slot)
	xor	%g7, %g2, %g5		! %g5 = old ^ mask -- new word value (bits toggled)
	and	%g7, %g2, %g2		! %g2 = old & mask -- return value: prior state of bits
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! publish new word, then ...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ... drop the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop			! again honor the wr %psr delay
	jmpl	%o7, %g0		! return to address held in %o7 ...
	 mov	%g4, %o7		! ... restoring caller's %o7 from %g4 in the delay slot

	.globl __bitops_end
__bitops_end: