/*
 * (C) Copyright 2016 - Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 * (C) Copyright 2016 - Boqun Feng <boqun.feng@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define has_fast_acquire_release()	0

#ifdef __PPC64__
#define has_single_copy_load_64()	1
#else /* #ifdef __PPC64__ */
#define has_single_copy_load_64()	0
#endif /* #else #ifdef __PPC64__ */
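
/*
 * Feature tests presumably consumed by the generic rseq code: PowerPC has no
 * single-instruction acquire/release accesses (acquire/release ordering is
 * built from lwsync barriers), and only 64-bit builds can load a 64-bit
 * value with a single copy-atomic instruction.
 */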

/*
 * The __rseq_table section can be used by debuggers to better handle
 * single-stepping through the restartable critical sections.
 */
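
/*
 * Each entry emitted into __rseq_table below describes one critical section
 * as four 64-bit words: the start address (label 1), the post-commit address
 * (label 2), the abort address (label 4), and a zero word.  A sketch of the
 * layout, with illustrative field names (the real structure is declared by
 * the rseq ABI headers, not in this file):
 *
 *	struct rseq_cs_sketch {
 *		uint64_t start_ip;		(label 1)
 *		uint64_t post_commit_ip;	(label 2)
 *		uint64_t abort_ip;		(label 4)
 *		uint64_t padding;		(emitted as 0x0)
 *	};
 */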

#ifdef __PPC64__

#define RSEQ_FINISH_ASM(_target_final, _to_write_final, _start_value, \
		_failure, _spec_store, _spec_input, \
		_final_store, _final_input, _extra_clobber, \
		_setup, _teardown, _scratch) \
	__asm__ __volatile__ goto ( \
		".pushsection __rseq_table, \"aw\"\n\t" \
		".balign 32\n\t" \
		"3:\n\t" \
		".quad 1f, 2f, 4f, 0x0\n\t" \
		".popsection\n\t" \
		"1:\n\t" \
		_setup \
		RSEQ_INJECT_ASM(1) \
		"lis %%r17, (3b)@highest\n\t" \
		"ori %%r17, %%r17, (3b)@higher\n\t" \
		"rldicr %%r17, %%r17, 32, 31\n\t" \
		"oris %%r17, %%r17, (3b)@h\n\t" \
		"ori %%r17, %%r17, (3b)@l\n\t" \
		"std %%r17, 0(%[rseq_cs])\n\t" \
		RSEQ_INJECT_ASM(2) \
		"lwz %%r17, %[current_event_counter]\n\t" \
		"cmpw cr7, %[start_event_counter], %%r17\n\t" \
		"bne- cr7, 4f\n\t" \
		RSEQ_INJECT_ASM(3) \
		_spec_store \
		_final_store \
		"2:\n\t" \
		RSEQ_INJECT_ASM(4) \
		"li %%r17, 0\n\t" \
		"std %%r17, 0(%[rseq_cs])\n\t" \
		_teardown \
		"b 5f\n\t" \
		"4:\n\t" \
		"li %%r17, 0\n\t" \
		"std %%r17, 0(%[rseq_cs])\n\t" \
		_teardown \
		"b %l[failure]\n\t" \
		"5:\n\t" \
		: /* gcc asm goto does not allow outputs */ \
		: [start_event_counter]"r"((_start_value).event_counter), \
		[current_event_counter]"m"((_start_value).rseqp->u.e.event_counter), \
		[rseq_cs]"b"(&(_start_value).rseqp->rseq_cs) \
		_spec_input \
		_final_input \
		RSEQ_INJECT_INPUT \
		: "r17", "memory", "cc" \
		_extra_clobber \
		RSEQ_INJECT_CLOBBER \
		_scratch \
		: _failure \
	)
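
/*
 * The sequence above: the address of descriptor 3 is built in r17 and
 * published to the thread's rseq_cs pointer, then the current event counter
 * is compared against the snapshot taken when the critical section started
 * (the kernel is expected to bump the counter on preemption and signal
 * delivery).  On a mismatch, control branches to label 4, which clears
 * rseq_cs and jumps to the caller-supplied failure label; otherwise the
 * speculative and final stores run, rseq_cs is cleared at label 2, and
 * execution falls through at label 5.  r17 is the only fixed scratch
 * register, and the "b" constraint on rseq_cs keeps the base address out of
 * r0, which would be read as the constant 0 in d-form addressing.
 */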

#define RSEQ_FINISH_FINAL_STORE_ASM() \
		"std %[to_write_final], 0(%[target_final])\n\t"

#define RSEQ_FINISH_FINAL_STORE_RELEASE_ASM() \
		"lwsync\n\t" \
		RSEQ_FINISH_FINAL_STORE_ASM()
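
/*
 * lwsync orders all prior loads and stores before the final store, giving
 * the release variant store-release semantics on PowerPC.
 */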

#define RSEQ_FINISH_FINAL_STORE_INPUT(_target_final, _to_write_final) \
		, [to_write_final]"r"(_to_write_final), \
		[target_final]"b"(_target_final)

#define RSEQ_FINISH_SPECULATIVE_STORE_ASM() \
		"std %[to_write_spec], 0(%[target_spec])\n\t" \
		RSEQ_INJECT_ASM(5)

#define RSEQ_FINISH_SPECULATIVE_STORE_INPUT(_target_spec, _to_write_spec) \
		, [to_write_spec]"r"(_to_write_spec), \
		[target_spec]"b"(_target_spec)
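
/*
 * A "speculative" store is performed inside the critical section before the
 * commit point (label 2): if the section is aborted it is simply redone on
 * retry, whereas the final store is the single commit that makes the update
 * visible.
 */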

/* TODO: implement a faster memcpy. */
#define RSEQ_FINISH_MEMCPY_STORE_ASM() \
		"cmpdi %%r19, 0\n\t" \
		"beq 333f\n\t" \
		"addi %%r20, %%r20, -1\n\t" \
		"addi %%r21, %%r21, -1\n\t" \
		"222:\n\t" \
		"lbzu %%r18, 1(%%r20)\n\t" \
		"stbu %%r18, 1(%%r21)\n\t" \
		"addi %%r19, %%r19, -1\n\t" \
		"cmpdi %%r19, 0\n\t" \
		"bne 222b\n\t" \
		"333:\n\t" \
		RSEQ_INJECT_ASM(5)
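
/*
 * Byte-at-a-time copy: r19 holds the remaining length, r20/r21 the source
 * and destination pointers.  Both pointers are pre-decremented so the
 * update forms lbzu/stbu advance them by one byte per iteration.
 */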

#define RSEQ_FINISH_MEMCPY_STORE_INPUT(_target_memcpy, _to_write_memcpy, _len_memcpy) \
		, [to_write_memcpy]"r"(_to_write_memcpy), \
		[target_memcpy]"r"(_target_memcpy), \
		[len_memcpy]"r"(_len_memcpy)

#define RSEQ_FINISH_MEMCPY_CLOBBER() \
		, "r18", "r19", "r20", "r21"

#define RSEQ_FINISH_MEMCPY_SCRATCH()

/*
 * Copy the inputs into dedicated scratch registers (declared in
 * RSEQ_FINISH_MEMCPY_CLOBBER()), so the input registers themselves do not
 * need to be saved and restored.
 */
#define RSEQ_FINISH_MEMCPY_SETUP() \
		"mr %%r19, %[len_memcpy]\n\t" \
		"mr %%r20, %[to_write_memcpy]\n\t" \
		"mr %%r21, %[target_memcpy]\n\t"

#define RSEQ_FINISH_MEMCPY_TEARDOWN()

#else /* #ifdef __PPC64__ */
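
/*
 * 32-bit variant: same protocol as the 64-bit code above, but descriptor
 * table entries are emitted as big-endian pairs of 32-bit words and the
 * descriptor address is built and stored with word-sized instructions.
 */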

#define RSEQ_FINISH_ASM(_target_final, _to_write_final, _start_value, \
		_failure, _spec_store, _spec_input, \
		_final_store, _final_input, _extra_clobber, \
		_setup, _teardown, _scratch) \
	__asm__ __volatile__ goto ( \
		".pushsection __rseq_table, \"aw\"\n\t" \
		".balign 32\n\t" \
		"3:\n\t" \
		/* 32-bit only supported on BE */ \
		".long 0x0, 1f, 0x0, 2f, 0x0, 4f, 0x0, 0x0\n\t" \
		".popsection\n\t" \
		"1:\n\t" \
		_setup \
		RSEQ_INJECT_ASM(1) \
		"lis %%r17, (3b)@ha\n\t" \
		"addi %%r17, %%r17, (3b)@l\n\t" \
		"stw %%r17, 0(%[rseq_cs])\n\t" \
		RSEQ_INJECT_ASM(2) \
		"lwz %%r17, %[current_event_counter]\n\t" \
		"cmpw cr7, %[start_event_counter], %%r17\n\t" \
		"bne- cr7, 4f\n\t" \
		RSEQ_INJECT_ASM(3) \
		_spec_store \
		_final_store \
		"2:\n\t" \
		RSEQ_INJECT_ASM(4) \
		"li %%r17, 0\n\t" \
		"stw %%r17, 0(%[rseq_cs])\n\t" \
		_teardown \
		"b 5f\n\t" \
		"4:\n\t" \
		"li %%r17, 0\n\t" \
		"stw %%r17, 0(%[rseq_cs])\n\t" \
		_teardown \
		"b %l[failure]\n\t" \
		"5:\n\t" \
		: /* gcc asm goto does not allow outputs */ \
		: [start_event_counter]"r"((_start_value).event_counter), \
		[current_event_counter]"m"((_start_value).rseqp->u.e.event_counter), \
		[rseq_cs]"b"(&(_start_value).rseqp->rseq_cs) \
		_spec_input \
		_final_input \
		RSEQ_INJECT_INPUT \
		: "r17", "memory", "cc" \
		_extra_clobber \
		RSEQ_INJECT_CLOBBER \
		_scratch \
		: _failure \
	)

#define RSEQ_FINISH_FINAL_STORE_ASM() \
		"stw %[to_write_final], 0(%[target_final])\n\t"

#define RSEQ_FINISH_FINAL_STORE_RELEASE_ASM() \
		"lwsync\n\t" \
		RSEQ_FINISH_FINAL_STORE_ASM()

#define RSEQ_FINISH_FINAL_STORE_INPUT(_target_final, _to_write_final) \
		, [to_write_final]"r"(_to_write_final), \
		[target_final]"b"(_target_final)

#define RSEQ_FINISH_SPECULATIVE_STORE_ASM() \
		"stw %[to_write_spec], 0(%[target_spec])\n\t" \
		RSEQ_INJECT_ASM(5)

#define RSEQ_FINISH_SPECULATIVE_STORE_INPUT(_target_spec, _to_write_spec) \
		, [to_write_spec]"r"(_to_write_spec), \
		[target_spec]"b"(_target_spec)

/* TODO: implement a faster memcpy. */
#define RSEQ_FINISH_MEMCPY_STORE_ASM() \
		"cmpwi %%r19, 0\n\t" \
		"beq 333f\n\t" \
		"addi %%r20, %%r20, -1\n\t" \
		"addi %%r21, %%r21, -1\n\t" \
		"222:\n\t" \
		"lbzu %%r18, 1(%%r20)\n\t" \
		"stbu %%r18, 1(%%r21)\n\t" \
		"addi %%r19, %%r19, -1\n\t" \
		"cmpwi %%r19, 0\n\t" \
		"bne 222b\n\t" \
		"333:\n\t" \
		RSEQ_INJECT_ASM(5)

#define RSEQ_FINISH_MEMCPY_STORE_INPUT(_target_memcpy, _to_write_memcpy, _len_memcpy) \
		, [to_write_memcpy]"r"(_to_write_memcpy), \
		[target_memcpy]"r"(_target_memcpy), \
		[len_memcpy]"r"(_len_memcpy)

#define RSEQ_FINISH_MEMCPY_CLOBBER() \
		, "r18", "r19", "r20", "r21"

#define RSEQ_FINISH_MEMCPY_SCRATCH()

/*
 * Copy the inputs into dedicated scratch registers (declared in
 * RSEQ_FINISH_MEMCPY_CLOBBER()), so the input registers themselves do not
 * need to be saved and restored.
 */
#define RSEQ_FINISH_MEMCPY_SETUP() \
		"mr %%r19, %[len_memcpy]\n\t" \
		"mr %%r20, %[to_write_memcpy]\n\t" \
		"mr %%r21, %[target_memcpy]\n\t"

#define RSEQ_FINISH_MEMCPY_TEARDOWN()

#endif /* #else #ifdef __PPC64__ */
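
/*
 * For reference, the generic rseq code (outside this file) is expected to
 * stitch these fragments together roughly as follows; argument names are
 * illustrative only, and note that the asm refers to the abort label by the
 * literal name "failure" (via %l[failure]), so the C label passed in is
 * expected to be spelled that way:
 *
 *	RSEQ_FINISH_ASM(targetptr, newval, start_value, failure,
 *			RSEQ_FINISH_MEMCPY_STORE_ASM(),
 *			RSEQ_FINISH_MEMCPY_STORE_INPUT(destptr, srcptr, len),
 *			RSEQ_FINISH_FINAL_STORE_ASM(),
 *			RSEQ_FINISH_FINAL_STORE_INPUT(targetptr, newval),
 *			RSEQ_FINISH_MEMCPY_CLOBBER(),
 *			RSEQ_FINISH_MEMCPY_SETUP(),
 *			RSEQ_FINISH_MEMCPY_TEARDOWN(),
 *			RSEQ_FINISH_MEMCPY_SCRATCH())
 */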