rseq: output whether configure finds rseq syscall
[lttng-ust.git] / libringbuffer / rseq-x86.h
1 /*
2 * rseq-x86.h
3 *
4 * (C) Copyright 2016 - Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
24
#ifdef __x86_64__

/*
 * x86-64 is TSO (see comment further below): plain loads/stores already
 * carry acquire/release ordering, and a 64-bit load is a single
 * instruction, so both fast paths are available.
 */
#define has_fast_acquire_release() 1
#define has_single_copy_load_64() 1
29
/*
 * The __rseq_table section can be used by debuggers to better handle
 * single-stepping through the restartable critical sections.
 *
 * Layout of the critical section:
 *   3: descriptor record in __rseq_table: start IP (1f), post-commit
 *      IP (2f), abort IP (4f), flags (0x0), emitted as .quad entries.
 *   1: start of the critical section; publish the descriptor address
 *      into rseq_cs, then compare the event counter snapshot taken at
 *      rseq_start against the current per-cpu counter and abort (4f)
 *      on mismatch.
 *   2: post-commit label, reached after _final_store succeeded.
 *   4: abort handler in __rseq_failure; runs _teardown then jumps to
 *      the caller-supplied failure label via asm goto.
 * RSEQ_INJECT_ASM(n) are test-only injection points (no-ops otherwise).
 */
#define RSEQ_FINISH_ASM(_target_final, _to_write_final, _start_value, \
		_failure, _spec_store, _spec_input, \
		_final_store, _final_input, _extra_clobber, \
		_setup, _teardown, _scratch) \
do { \
	_scratch \
	__asm__ __volatile__ goto ( \
		".pushsection __rseq_table, \"aw\"\n\t" \
		".balign 32\n\t" \
		"3:\n\t" \
		".quad 1f, 2f, 4f, 0x0\n\t" /* start, commit, abort, flags */ \
		".popsection\n\t" \
		"1:\n\t" \
		_setup \
		RSEQ_INJECT_ASM(1) \
		"leaq 3b(%%rip), %%rax\n\t" /* RIP-relative descriptor address */ \
		"movq %%rax, %[rseq_cs]\n\t" \
		RSEQ_INJECT_ASM(2) \
		"cmpl %[start_event_counter], %[current_event_counter]\n\t" \
		"jnz 4f\n\t" /* preempted/signalled since rseq_start: abort */ \
		RSEQ_INJECT_ASM(3) \
		_spec_store \
		_final_store \
		"2:\n\t" \
		RSEQ_INJECT_ASM(5) \
		_teardown \
		".pushsection __rseq_failure, \"a\"\n\t" \
		"4:\n\t" \
		_teardown \
		"jmp %l[failure]\n\t" \
		".popsection\n\t" \
		: /* gcc asm goto does not allow outputs */ \
		: [start_event_counter]"r"((_start_value).event_counter), \
		  [current_event_counter]"m"((_start_value).rseqp->u.e.event_counter), \
		  [rseq_cs]"m"((_start_value).rseqp->rseq_cs) \
		  _spec_input \
		  _final_input \
		  RSEQ_INJECT_INPUT \
		: "memory", "cc", "rax" \
		  _extra_clobber \
		  RSEQ_INJECT_CLOBBER \
		: _failure \
		); \
} while (0)
78
/* Final (commit) store: a plain 64-bit store on x86-64. */
#define RSEQ_FINISH_FINAL_STORE_ASM() \
	"movq %[to_write_final], %[target_final]\n\t"
81
/* x86-64 is TSO: stores are release-ordered, so no extra barrier needed. */
#define RSEQ_FINISH_FINAL_STORE_RELEASE_ASM() \
	RSEQ_FINISH_FINAL_STORE_ASM()
85
/*
 * Input operands for the final store (value in a register, target as a
 * memory operand).  The leading comma continues the input operand list
 * inside RSEQ_FINISH_ASM().
 */
#define RSEQ_FINISH_FINAL_STORE_INPUT(_target_final, _to_write_final) \
	, [to_write_final]"r"(_to_write_final), \
	  [target_final]"m"(*(_target_final))
89
/*
 * Speculative store: emitted before the final store within the critical
 * section (see _spec_store ordering in RSEQ_FINISH_ASM).
 */
#define RSEQ_FINISH_SPECULATIVE_STORE_ASM() \
	"movq %[to_write_spec], %[target_spec]\n\t" \
	RSEQ_INJECT_ASM(4)
93
/* Input operands for the speculative store; leading comma continues the list. */
#define RSEQ_FINISH_SPECULATIVE_STORE_INPUT(_target_spec, _to_write_spec) \
	, [to_write_spec]"r"(_to_write_spec), \
	  [target_spec]"m"(*(_target_spec))
97
/* TODO: implement a faster memcpy. */
/*
 * Byte-at-a-time copy run inside the critical section.  The input
 * registers (to_write, target, len) are modified in place; they are
 * saved/restored around the asm by RSEQ_FINISH_MEMCPY_SETUP() /
 * RSEQ_FINISH_MEMCPY_TEARDOWN() through rseq_scratch[].
 */
#define RSEQ_FINISH_MEMCPY_STORE_ASM() \
	"test %[len_memcpy], %[len_memcpy]\n\t" \
	"jz 333f\n\t" /* nothing to do for len == 0 */ \
	"222:\n\t" \
	"movb (%[to_write_memcpy]), %%al\n\t" \
	"movb %%al, (%[target_memcpy])\n\t" \
	"inc %[to_write_memcpy]\n\t" \
	"inc %[target_memcpy]\n\t" \
	"dec %[len_memcpy]\n\t" \
	"jnz 222b\n\t" \
	"333:\n\t" \
	RSEQ_INJECT_ASM(4)
111
/*
 * Input operands for the memcpy store.  Pointers and length live in
 * registers (they are incremented/decremented by the copy loop); the
 * rseq_scratch[] slots are the memory save area used by SETUP/TEARDOWN.
 */
#define RSEQ_FINISH_MEMCPY_STORE_INPUT(_target_memcpy, _to_write_memcpy, _len_memcpy) \
	, [to_write_memcpy]"r"(_to_write_memcpy), \
	  [target_memcpy]"r"(_target_memcpy), \
	  [len_memcpy]"r"(_len_memcpy), \
	  [rseq_scratch0]"m"(rseq_scratch[0]), \
	  [rseq_scratch1]"m"(rseq_scratch[1]), \
	  [rseq_scratch2]"m"(rseq_scratch[2])
119
/*
 * %al is the byte-copy scratch.  NOTE(review): RSEQ_FINISH_ASM already
 * lists "rax" in its base clobber list, so this extra clobber looks
 * redundant -- confirm before removing.
 */
#define RSEQ_FINISH_MEMCPY_CLOBBER() \
	, "rax"
122
/* On-stack save area for the three memcpy input registers (64-bit each). */
#define RSEQ_FINISH_MEMCPY_SCRATCH() \
	uint64_t rseq_scratch[3];
125
/*
 * We need to save and restore those input registers so they can be
 * modified within the assembly.  Register-to-memory moves, done before
 * the copy loop runs.
 */
#define RSEQ_FINISH_MEMCPY_SETUP() \
	"movq %[to_write_memcpy], %[rseq_scratch0]\n\t" \
	"movq %[target_memcpy], %[rseq_scratch1]\n\t" \
	"movq %[len_memcpy], %[rseq_scratch2]\n\t"
134
/*
 * Restore all three registers clobbered by the copy loop (len included,
 * since the x86-64 loop decrements the len register directly).  Runs on
 * both the success and abort paths.
 */
#define RSEQ_FINISH_MEMCPY_TEARDOWN() \
	"movq %[rseq_scratch2], %[len_memcpy]\n\t" \
	"movq %[rseq_scratch1], %[target_memcpy]\n\t" \
	"movq %[rseq_scratch0], %[to_write_memcpy]\n\t"
139
140 #elif __i386__
141
142 #define has_fast_acquire_release() 0
143 #define has_single_copy_load_64() 0
144
/*
 * Use eax as scratch register and take memory operands as input to
 * lessen register pressure. Especially needed when compiling
 * do_rseq_memcpy() in O0.
 *
 * Same critical-section structure as the x86-64 variant above: the
 * __rseq_table entry at 3: uses .long pairs (address, zero high word)
 * to emit the same 64-bit field layout as the x86-64 .quad entries;
 * 1: is the start IP, 2: the post-commit IP, 4: the abort handler.
 * The event-counter comparison goes through %eax because both operands
 * are memory operands here.
 */
#define RSEQ_FINISH_ASM(_target_final, _to_write_final, _start_value, \
		_failure, _spec_store, _spec_input, \
		_final_store, _final_input, _extra_clobber, \
		_setup, _teardown, _scratch) \
do { \
	_scratch \
	__asm__ __volatile__ goto ( \
		".pushsection __rseq_table, \"aw\"\n\t" \
		".balign 32\n\t" \
		"3:\n\t" \
		".long 1f, 0x0, 2f, 0x0, 4f, 0x0, 0x0, 0x0\n\t" \
		".popsection\n\t" \
		"1:\n\t" \
		_setup \
		RSEQ_INJECT_ASM(1) \
		"movl $3b, %[rseq_cs]\n\t" /* absolute descriptor address */ \
		RSEQ_INJECT_ASM(2) \
		"movl %[start_event_counter], %%eax\n\t" \
		"cmpl %%eax, %[current_event_counter]\n\t" \
		"jnz 4f\n\t" /* preempted/signalled since rseq_start: abort */ \
		RSEQ_INJECT_ASM(3) \
		_spec_store \
		_final_store \
		"2:\n\t" \
		RSEQ_INJECT_ASM(5) \
		_teardown \
		".pushsection __rseq_failure, \"a\"\n\t" \
		"4:\n\t" \
		_teardown \
		"jmp %l[failure]\n\t" \
		".popsection\n\t" \
		: /* gcc asm goto does not allow outputs */ \
		: [start_event_counter]"m"((_start_value).event_counter), \
		  [current_event_counter]"m"((_start_value).rseqp->u.e.event_counter), \
		  [rseq_cs]"m"((_start_value).rseqp->rseq_cs) \
		  _spec_input \
		  _final_input \
		  RSEQ_INJECT_INPUT \
		: "memory", "cc", "eax" \
		  _extra_clobber \
		  RSEQ_INJECT_CLOBBER \
		: _failure \
		); \
} while (0)
194
/*
 * Final (commit) store: bounce through %eax because the value is a
 * memory operand and x86 has no memory-to-memory mov.
 */
#define RSEQ_FINISH_FINAL_STORE_ASM() \
	"movl %[to_write_final], %%eax\n\t" \
	"movl %%eax, %[target_final]\n\t"
198
/*
 * Release store: a locked no-op on the top of stack acts as a full
 * memory barrier before the plain commit store.
 */
#define RSEQ_FINISH_FINAL_STORE_RELEASE_ASM() \
	"lock; addl $0,0(%%esp)\n\t" \
	RSEQ_FINISH_FINAL_STORE_ASM()
202
/*
 * Input operands for the final store; both are memory operands here to
 * lessen register pressure (see comment at top of the i386 section).
 */
#define RSEQ_FINISH_FINAL_STORE_INPUT(_target_final, _to_write_final) \
	, [to_write_final]"m"(_to_write_final), \
	  [target_final]"m"(*(_target_final))
206
/* Speculative store, through %eax (memory-to-memory mov does not exist). */
#define RSEQ_FINISH_SPECULATIVE_STORE_ASM() \
	"movl %[to_write_spec], %%eax\n\t" \
	"movl %%eax, %[target_spec]\n\t" \
	RSEQ_INJECT_ASM(4)
211
/* Input operands for the speculative store; memory operands on i386. */
#define RSEQ_FINISH_SPECULATIVE_STORE_INPUT(_target_spec, _to_write_spec) \
	, [to_write_spec]"m"(_to_write_spec), \
	  [target_spec]"m"(*(_target_spec))
215
/* TODO: implement a faster memcpy. */
/*
 * Byte-at-a-time copy (i386).  %eax cannot hold the loop counter since
 * %al is the copy scratch, so the loop counts down in the
 * rseq_scratch[2] memory slot -- RSEQ_FINISH_MEMCPY_SETUP() must have
 * primed it with len_memcpy before this runs.
 */
#define RSEQ_FINISH_MEMCPY_STORE_ASM() \
	"movl %[len_memcpy], %%eax\n\t" \
	"test %%eax, %%eax\n\t" \
	"jz 333f\n\t" /* nothing to do for len == 0 */ \
	"222:\n\t" \
	"movb (%[to_write_memcpy]), %%al\n\t" \
	"movb %%al, (%[target_memcpy])\n\t" \
	"inc %[to_write_memcpy]\n\t" \
	"inc %[target_memcpy]\n\t" \
	"decl %[rseq_scratch2]\n\t" \
	"jnz 222b\n\t" \
	"333:\n\t" \
	RSEQ_INJECT_ASM(4)
230
/*
 * Input operands for the memcpy store.  Pointers stay in registers (the
 * loop increments them); len is a memory operand (the loop decrements
 * the rseq_scratch[2] copy instead, see STORE_ASM above).
 */
#define RSEQ_FINISH_MEMCPY_STORE_INPUT(_target_memcpy, _to_write_memcpy, _len_memcpy) \
	, [to_write_memcpy]"r"(_to_write_memcpy), \
	  [target_memcpy]"r"(_target_memcpy), \
	  [len_memcpy]"m"(_len_memcpy), \
	  [rseq_scratch0]"m"(rseq_scratch[0]), \
	  [rseq_scratch1]"m"(rseq_scratch[1]), \
	  [rseq_scratch2]"m"(rseq_scratch[2])
238
239 #define RSEQ_FINISH_MEMCPY_CLOBBER()
240
/* On-stack save area for the memcpy inputs (32-bit slots on i386). */
#define RSEQ_FINISH_MEMCPY_SCRATCH() \
	uint32_t rseq_scratch[3];
243
/*
 * We need to save and restore those input registers so they can be
 * modified within the assembly.  len is a memory operand, so it bounces
 * through %eax (no memory-to-memory mov on x86); rseq_scratch[2] then
 * doubles as the copy loop counter.
 */
#define RSEQ_FINISH_MEMCPY_SETUP() \
	"movl %[to_write_memcpy], %[rseq_scratch0]\n\t" \
	"movl %[target_memcpy], %[rseq_scratch1]\n\t" \
	"movl %[len_memcpy], %%eax\n\t" \
	"movl %%eax, %[rseq_scratch2]\n\t"
253
/*
 * Restore only the two pointer registers: the len input operand itself
 * is never modified on i386 (the loop decrements rseq_scratch[2]), so
 * no restore is needed for it.  Runs on both success and abort paths.
 */
#define RSEQ_FINISH_MEMCPY_TEARDOWN() \
	"movl %[rseq_scratch1], %[target_memcpy]\n\t" \
	"movl %[rseq_scratch0], %[to_write_memcpy]\n\t"
257
258 #endif
This page took 0.036676 seconds and 5 git commands to generate.