1 /* Vector API for GDB.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Nathan Sidwell <nathan@codesourcery.com>
5
6 This file is part of GDB.
7
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20
21 #if !defined (GDB_VEC_H)
22 #define GDB_VEC_H
23
24 #include <stddef.h>
25 #include "gdb_string.h"
26 #include "gdb_assert.h"
27
28 /* The macros here implement a set of templated vector types and
29 associated interfaces. These templates are implemented with
30 macros, as we're not in C++ land. The interface functions are
31 typesafe and use static inline functions, sometimes backed by
32 out-of-line generic functions.
33
34 Because of the different behavior of structure objects, scalar
35 objects and of pointers, there are three flavors, one for each of
36 these variants. Both the structure object and pointer variants
37 pass pointers to objects around -- in the former case the pointers
38 are dereferenced and the objects are copied into the vector, and
39 in the latter case the pointers themselves are stored. The scalar
40 object variant is suitable for int-like objects, and the vector
41 elements are returned by value.
42
43 There are both 'index' and 'iterate' accessors. The iterator
44 returns a boolean iteration condition and updates the iteration
45 variable passed by reference. Because the iterator will be
46 inlined, the address-of can be optimized away.
47
48 The vectors are implemented using the trailing array idiom, thus
49 they are not resizeable without changing the address of the vector
50 object itself. This means you cannot have variables or fields of
51 vector type -- always use a pointer to a vector. The one exception
52 is the final field of a structure, which could be a vector type.
53 You will have to use the embedded_size & embedded_init calls to
54 create such objects, and they will probably not be resizeable (so
55 don't use the 'safe' allocation variants). The trailing array
56 idiom is used (rather than a pointer to an array of data), because,
57 if we allow NULL to also represent an empty vector, empty vectors
58 occupy minimal space in the structure containing them.
59
60 Each operation that increases the number of active elements is
61 available in 'quick' and 'safe' variants. The former presumes that
62 there is sufficient allocated space for the operation to succeed
63 (it dies if there is not). The latter will reallocate the
64 vector, if needed. Reallocation causes an exponential increase in
65 vector size. If you know you will be adding N elements, it would
66 be more efficient to use the reserve operation before adding the
67 elements with the 'quick' operation. This will ensure there are at
68 least as many spare slots as you ask for; the allocation will grow
69 exponentially if there are too few. If you want to reserve a
70 specific number of slots, but do not want the exponential increase
71 (for instance, you know this is the last allocation), use a
72 negative number for reservation. You can also create a vector of a
73 specific size from the get go.
74
75 You should prefer the push and pop operations, as they append and
76 remove from the end of the vector. If you need to remove several
77 items in one go, use the truncate operation. The insert and remove
78 operations allow you to change elements in the middle of the
79 vector. There are two remove operations, one which preserves the
80 element ordering 'ordered_remove', and one which does not
81 'unordered_remove'. The latter function copies the end element
82 into the removed slot, rather than invoking a memmove operation. The
83 'lower_bound' function will determine where an item should be
84 inserted so that the array's sorted order is maintained.
85
86 If you need to directly manipulate a vector, then the 'address'
87 accessor will return the address of the start of the vector. Also
88 the 'space' predicate will tell you whether there is spare capacity
89 in the vector. You will not normally need to use these two functions.
90
91 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro.
92 Variables of vector type are declared using a VEC(TYPEDEF) macro.
93 The characters O, P and I indicate whether TYPEDEF is a pointer
94 (P), object (O) or integral (I) type. Be careful to pick the
95 correct one, as you'll get an awkward and inefficient API if you
96 use the wrong one. There is a check, which results in a
97 compile-time warning, for the P and I versions, but there is no
98 check for the O versions, as that is not possible in plain C.
99
100 An example of their use would be,
101
102 DEF_VEC_P(tree); // non-managed tree vector.
103
104 struct my_struct {
105 VEC(tree) *v; // A (pointer to) a vector of tree pointers.
106 };
107
108 struct my_struct *s;
109
110 if (VEC_length(tree, s->v)) { we have some contents }
111 VEC_safe_push(tree, s->v, decl); // append some decl onto the end
112 for (ix = 0; VEC_iterate(tree, s->v, ix, elt); ix++)
113 { do something with elt }
114
115 */
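
/* A slightly fuller sketch of the typical lifetime of a pointer
   vector, purely for illustration -- the typedef "foo_p" and the
   helpers "make_a_foo" and "use" are hypothetical:

   typedef struct foo *foo_p;
   DEF_VEC_P(foo_p);                        // vector of pointers to foo

   VEC(foo_p) *v = NULL;                    // NULL is a valid empty vector
   unsigned ix;
   foo_p elt;

   VEC_safe_push (foo_p, v, make_a_foo ()); // may reallocate; updates v
   for (ix = 0; VEC_iterate (foo_p, v, ix, elt); ix++)
     use (elt);                             // visit each element in order
   VEC_free (foo_p, v);                     // release storage; v becomes NULL
*/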
116
117 /* Macros to invoke API calls. A single macro works for both pointer
118 and object vectors, but the argument and return types might well be
119 different. In each macro, T is the typedef of the vector elements.
120 Some of these macros pass the vector, V, by reference (by taking
121 its address); this is noted in the descriptions. */
122
123 /* Length of vector
124 unsigned VEC_T_length(const VEC(T) *v);
125
126 Return the number of active elements in V. V can be NULL, in which
127 case zero is returned. */
128
129 #define VEC_length(T,V) (VEC_OP(T,length)(V))
130
131
132 /* Check if vector is empty
133 int VEC_T_empty(const VEC(T) *v);
134
135 Return nonzero if V is an empty vector (or V is NULL), zero otherwise. */
136
137 #define VEC_empty(T,V) (VEC_length (T,V) == 0)
138
139
140 /* Get the final element of the vector.
141 T VEC_T_last(VEC(T) *v); // Integer
142 T VEC_T_last(VEC(T) *v); // Pointer
143 T *VEC_T_last(VEC(T) *v); // Object
144
145 Return the final element. V must not be empty. */
146
147 #define VEC_last(T,V) (VEC_OP(T,last)(V VEC_ASSERT_INFO))
148
149 /* Index into vector
150 T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
151 T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
152 T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
153
154 Return the IX'th element. IX must be in the domain of V. */
155
156 #define VEC_index(T,V,I) (VEC_OP(T,index)(V,I VEC_ASSERT_INFO))
157
158 /* Iterate over vector
159 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
160 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
161 int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
162
163 Return iteration condition and update PTR to point to the IX'th
164 element. At the end of iteration, sets PTR to NULL. Use this to
165 iterate over the elements of a vector as follows,
166
167 for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
168 continue; */
169
170 #define VEC_iterate(T,V,I,P) (VEC_OP(T,iterate)(V,I,&(P)))
171
172 /* Allocate new vector.
173 VEC(T,A) *VEC_T_alloc(int reserve);
174
175 Allocate a new vector with space for RESERVE objects. If RESERVE
176 is zero, NO vector is created. */
177
178 #define VEC_alloc(T,N) (VEC_OP(T,alloc)(N))
179
180 /* Free a vector.
181 void VEC_T_free(VEC(T,A) *&);
182
183 Free a vector and set it to NULL. */
184
185 #define VEC_free(T,V) (VEC_OP(T,free)(&V))
186
187 /* A cleanup function for a vector.
188 void VEC_T_cleanup(void *);
189
190 Clean up a vector. */
191
192 #define VEC_cleanup(T) (VEC_OP(T,cleanup))
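
/* For instance, a vector built up inside a function can be protected
   against early error returns by registering it with GDB's cleanup
   machinery. A sketch, assuming the usual make_cleanup/do_cleanups
   idiom and the hypothetical "foo_p" vector from the example above:

   VEC(foo_p) *v = NULL;
   struct cleanup *back_to = make_cleanup (VEC_cleanup (foo_p), &v);
   ... code that pushes onto v and may throw an error ...
   do_cleanups (back_to);   // frees v and resets it to NULL
*/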
193
194 /* Use these to determine the required size and initialization of a
195 vector embedded within another structure (as the final member).
196
197 size_t VEC_T_embedded_size(int reserve);
198 void VEC_T_embedded_init(VEC(T) *v, int reserve);
199
200 These allow the caller to perform the memory allocation. */
201
202 #define VEC_embedded_size(T,N) (VEC_OP(T,embedded_size)(N))
203 #define VEC_embedded_init(T,O,N) (VEC_OP(T,embedded_init)(VEC_BASE(O),N))
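
/* A sketch of embedding a vector as the final member of a structure,
   with the caller performing the allocation. The structure, the
   element count of 8 and the "foo_p" typedef are hypothetical, and the
   VEC_embedded_* entry points are assumed to behave as documented above:

   struct container
   {
     int flags;
     VEC(foo_p) vec;                 // must be the final member
   };

   size_t sz = offsetof (struct container, vec)
               + VEC_embedded_size (foo_p, 8);
   struct container *c = xmalloc (sz);
   VEC_embedded_init (foo_p, &c->vec, 8);

   Such an embedded vector cannot be reallocated, so only the 'quick'
   operations (and at most 8 elements) should be used with it. */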
204
205 /* Copy a vector.
206 VEC(T,A) *VEC_T_copy(VEC(T) *);
207
208 Copy the live elements of a vector into a new vector. The new and
209 old vectors need not be allocated by the same mechanism. */
210
211 #define VEC_copy(T,V) (VEC_OP(T,copy)(V))
212
213 /* Determine if a vector has additional capacity.
214
215 int VEC_T_space (VEC(T) *v,int reserve)
216
217 If V has space for RESERVE additional entries, return nonzero. You
218 usually only need to use this if you are doing your own vector
219 reallocation, for instance on an embedded vector. This returns
220 nonzero in exactly the same circumstances that VEC_T_reserve
221 will. */
222
223 #define VEC_space(T,V,R) (VEC_OP(T,space)(V,R VEC_ASSERT_INFO))
224
225 /* Reserve space.
226 int VEC_T_reserve(VEC(T,A) *&v, int reserve);
227
228 Ensure that V has at least abs(RESERVE) slots available. The
229 signedness of RESERVE determines the reallocation behavior. A
230 negative value will not create additional headroom beyond that
231 requested. A positive value will create additional headroom. Note
232 this can cause V to be reallocated. Returns nonzero iff
233 reallocation actually occurred. */
234
235 #define VEC_reserve(T,V,R) (VEC_OP(T,reserve)(&(V),R VEC_ASSERT_INFO))
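
/* For example, when exactly N more elements are about to be pushed and
   no further growth is expected, an exact (negative) reservation avoids
   the exponential headroom. A sketch; "v", "n", "i" and "objs" are
   hypothetical:

   VEC_reserve (foo_p, v, -(int) n);       // at least n free slots, no slack
   for (i = 0; i < n; i++)
     VEC_quick_push (foo_p, v, objs[i]);   // guaranteed not to reallocate
*/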
236
237 /* Push object with no reallocation
238 T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
239 T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
240 T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
241
242 Push a new element onto the end; returns a pointer to the slot
243 filled in. For object vectors, the new value can be NULL, in which
244 case NO initialization is performed. There must
245 be sufficient space in the vector. */
246
247 #define VEC_quick_push(T,V,O) (VEC_OP(T,quick_push)(V,O VEC_ASSERT_INFO))
248
249 /* Push object with reallocation
250 T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Integer
251 T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Pointer
252 T *VEC_T_safe_push (VEC(T,A) *&v, T *obj); // Object
253
254 Push a new element onto the end; returns a pointer to the slot
255 filled in. For object vectors, the new value can be NULL, in which
256 case NO initialization is performed. Reallocates V, if needed. */
257
258 #define VEC_safe_push(T,V,O) (VEC_OP(T,safe_push)(&(V),O VEC_ASSERT_INFO))
259
260 /* Pop element off end
261 T VEC_T_pop (VEC(T) *v); // Integer
262 T VEC_T_pop (VEC(T) *v); // Pointer
263 void VEC_T_pop (VEC(T) *v); // Object
264
265 Pop the last element off the end. Returns the element popped, for
266 pointer vectors. */
267
268 #define VEC_pop(T,V) (VEC_OP(T,pop)(V VEC_ASSERT_INFO))
269
270 /* Truncate to specific length
271 void VEC_T_truncate (VEC(T) *v, unsigned len);
272
273 Set the length as specified. The new length must be less than or
274 equal to the current length. This is an O(1) operation. */
275
276 #define VEC_truncate(T,V,I) \
277 (VEC_OP(T,truncate)(V,I VEC_ASSERT_INFO))
278
279 /* Grow to a specific length.
280 void VEC_T_safe_grow (VEC(T,A) *&v, int len);
281
282 Grow the vector to a specific length. LEN must be at least as
283 large as the current length. The new elements are
284 uninitialized. */
285
286 #define VEC_safe_grow(T,V,I) \
287 (VEC_OP(T,safe_grow)(&(V),I VEC_ASSERT_INFO))
288
289 /* Replace element
290 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
291 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
292 T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object
293
294 Replace the IXth element of V with a new value, VAL. For pointer
295 vectors returns the original value. For object vectors returns a
296 pointer to the new value. For object vectors the new value can be
297 NULL, in which case no overwriting of the slot is actually
298 performed. */
299
300 #define VEC_replace(T,V,I,O) (VEC_OP(T,replace)(V,I,O VEC_ASSERT_INFO))
301
302 /* Insert object with no reallocation
303 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
304 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
305 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
306
307 Insert an element, VAL, at the IXth position of V. Return a pointer
308 to the slot created. For vectors of object, the new value can be
309 NULL, in which case no initialization of the inserted slot takes
310 place. There must be sufficient space. */
311
312 #define VEC_quick_insert(T,V,I,O) \
313 (VEC_OP(T,quick_insert)(V,I,O VEC_ASSERT_INFO))
314
315 /* Insert object with reallocation
316 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
317 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
318 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
319
320 Insert an element, VAL, at the IXth position of V. Return a pointer
321 to the slot created. For vectors of object, the new value can be
322 NULL, in which case no initialization of the inserted slot takes
323 place. Reallocate V, if necessary. */
324
325 #define VEC_safe_insert(T,V,I,O) \
326 (VEC_OP(T,safe_insert)(&(V),I,O VEC_ASSERT_INFO))
327
328 /* Remove element retaining order
329 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
330 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
331 void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
332
333 Remove an element from the IXth position of V. Ordering of
334 remaining elements is preserved. For pointer vectors returns the
335 removed object. This is an O(N) operation due to a memmove. */
336
337 #define VEC_ordered_remove(T,V,I) \
338 (VEC_OP(T,ordered_remove)(V,I VEC_ASSERT_INFO))
339
340 /* Remove element destroying order
341 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
342 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
343 void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
344
345 Remove an element from the IXth position of V. Ordering of
346 remaining elements is destroyed. For pointer vectors returns the
347 removed object. This is an O(1) operation. */
348
349 #define VEC_unordered_remove(T,V,I) \
350 (VEC_OP(T,unordered_remove)(V,I VEC_ASSERT_INFO))
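
/* For example, when the order of the survivors does not matter,
   elements can be filtered out while scanning backwards with
   unordered_remove, avoiding the memmove that ordered_remove would
   perform. A sketch; "matches" is a hypothetical predicate:

   for (ix = VEC_length (foo_p, v); ix-- > 0;)
     if (matches (VEC_index (foo_p, v, ix)))
       VEC_unordered_remove (foo_p, v, ix);
*/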
351
352 /* Remove a block of elements
353 void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
354
355 Remove LEN elements starting at the IXth. Ordering is retained.
356 This is an O(N) operation due to a memmove. */
357
358 #define VEC_block_remove(T,V,I,L) \
359 (VEC_OP(T,block_remove)(V,I,L VEC_ASSERT_INFO))
360
361 /* Get the address of the array of elements
362 T *VEC_T_address (VEC(T) *v)
363
364 If you need to directly manipulate the array (for instance, you
365 want to feed it to qsort), use this accessor. */
366
367 #define VEC_address(T,V) (VEC_OP(T,address)(V))
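
/* For example, the elements can be sorted in place with qsort (a
   sketch; "compare_foo" is a hypothetical qsort-style comparison):

   qsort (VEC_address (foo_p, v), VEC_length (foo_p, v),
          sizeof (foo_p), compare_foo);
*/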
368
369 /* Find the first index in the vector not less than the object.
370 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
371 int (*lessthan) (const T, const T)); // Integer
372 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
373 int (*lessthan) (const T, const T)); // Pointer
374 unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
375 int (*lessthan) (const T*, const T*)); // Object
376
377 Find the first position in which VAL could be inserted without
378 changing the ordering of V. LESSTHAN is a function that returns
379 true if the first argument is strictly less than the second. */
380
381 #define VEC_lower_bound(T,V,O,LT) \
382 (VEC_OP(T,lower_bound)(V,O,LT VEC_ASSERT_INFO))
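
/* For example, a vector can be kept sorted on insertion by combining
   lower_bound with safe_insert. A sketch; "foo_less" is a hypothetical
   comparison returning nonzero when its first argument orders strictly
   before its second:

   unsigned pos = VEC_lower_bound (foo_p, v, elt, foo_less);
   VEC_safe_insert (foo_p, v, pos, elt);
*/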
383
384 /* Reallocate an array of elements with prefix. */
385 extern void *vec_p_reserve (void *, int);
386 extern void *vec_o_reserve (void *, int, size_t, size_t);
387 #define vec_free_(V) xfree (V)
388
389 #define VEC_ASSERT_INFO ,__FILE__,__LINE__
390 #define VEC_ASSERT_DECL ,const char *file_,unsigned line_
391 #define VEC_ASSERT_PASS ,file_,line_
392 #define vec_assert(expr, op) \
393 ((void)((expr) ? 0 : (gdb_assert_fail (op, file_, line_, \
394 ASSERT_FUNCTION), 0)))
395
396 #define VEC(T) VEC_##T
397 #define VEC_OP(T,OP) VEC_##T##_##OP
398
399 #define VEC_T(T) \
400 typedef struct VEC(T) \
401 { \
402 unsigned num; \
403 unsigned alloc; \
404 T vec[1]; \
405 } VEC(T)
406
407 /* Vector of integer-like object. */
408 #define DEF_VEC_I(T) \
409 static inline void VEC_OP (T,must_be_integral_type) (void) \
410 { \
411 (void)~(T)0; \
412 } \
413 \
414 VEC_T(T); \
415 DEF_VEC_FUNC_P(T) \
416 DEF_VEC_ALLOC_FUNC_I(T) \
417 struct vec_swallow_trailing_semi
418
419 /* Vector of pointer to object. */
420 #define DEF_VEC_P(T) \
421 static inline void VEC_OP (T,must_be_pointer_type) (void) \
422 { \
423 (void)((T)1 == (void *)1); \
424 } \
425 \
426 VEC_T(T); \
427 DEF_VEC_FUNC_P(T) \
428 DEF_VEC_ALLOC_FUNC_P(T) \
429 struct vec_swallow_trailing_semi
430
431 /* Vector of object. */
432 #define DEF_VEC_O(T) \
433 VEC_T(T); \
434 DEF_VEC_FUNC_O(T) \
435 DEF_VEC_ALLOC_FUNC_O(T) \
436 struct vec_swallow_trailing_semi
437
438 #define DEF_VEC_ALLOC_FUNC_I(T) \
439 static inline VEC(T) *VEC_OP (T,alloc) \
440 (int alloc_) \
441 { \
442 /* We must request exact size allocation, hence the negation. */ \
443 return (VEC(T) *) vec_o_reserve (NULL, -alloc_, \
444 offsetof (VEC(T),vec), sizeof (T)); \
445 } \
446 \
447 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
448 { \
449 size_t len_ = vec_ ? vec_->num : 0; \
450 VEC (T) *new_vec_ = NULL; \
451 \
452 if (len_) \
453 { \
454 /* We must request exact size allocation, hence the negation. */ \
455 new_vec_ = (VEC (T) *) \
456 vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T)); \
457 \
458 new_vec_->num = len_; \
459 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
460 } \
461 return new_vec_; \
462 } \
463 \
464 static inline void VEC_OP (T,free) \
465 (VEC(T) **vec_) \
466 { \
467 if (*vec_) \
468 vec_free_ (*vec_); \
469 *vec_ = NULL; \
470 } \
471 \
472 static inline void VEC_OP (T,cleanup) \
473 (void *arg_) \
474 { \
475 VEC(T) **vec_ = arg_; \
476 if (*vec_) \
477 vec_free_ (*vec_); \
478 *vec_ = NULL; \
479 } \
480 \
481 static inline int VEC_OP (T,reserve) \
482 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
483 { \
484 int extend = !VEC_OP (T,space) \
485 (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS); \
486 \
487 if (extend) \
488 *vec_ = (VEC(T) *) vec_o_reserve (*vec_, alloc_, \
489 offsetof (VEC(T),vec), sizeof (T)); \
490 \
491 return extend; \
492 } \
493 \
494 static inline void VEC_OP (T,safe_grow) \
495 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
496 { \
497 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
498 "safe_grow"); \
499 VEC_OP (T,reserve) (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ \
500 VEC_ASSERT_PASS); \
501 (*vec_)->num = size_; \
502 } \
503 \
504 static inline T *VEC_OP (T,safe_push) \
505 (VEC(T) **vec_, const T obj_ VEC_ASSERT_DECL) \
506 { \
507 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
508 \
509 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
510 } \
511 \
512 static inline T *VEC_OP (T,safe_insert) \
513 (VEC(T) **vec_, unsigned ix_, const T obj_ VEC_ASSERT_DECL) \
514 { \
515 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
516 \
517 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
518 }
519
520 #define DEF_VEC_FUNC_P(T) \
521 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_) \
522 { \
523 return vec_ ? vec_->num : 0; \
524 } \
525 \
526 static inline T VEC_OP (T,last) \
527 (const VEC(T) *vec_ VEC_ASSERT_DECL) \
528 { \
529 vec_assert (vec_ && vec_->num, "last"); \
530 \
531 return vec_->vec[vec_->num - 1]; \
532 } \
533 \
534 static inline T VEC_OP (T,index) \
535 (const VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
536 { \
537 vec_assert (vec_ && ix_ < vec_->num, "index"); \
538 \
539 return vec_->vec[ix_]; \
540 } \
541 \
542 static inline int VEC_OP (T,iterate) \
543 (const VEC(T) *vec_, unsigned ix_, T *ptr) \
544 { \
545 if (vec_ && ix_ < vec_->num) \
546 { \
547 *ptr = vec_->vec[ix_]; \
548 return 1; \
549 } \
550 else \
551 { \
552 *ptr = 0; \
553 return 0; \
554 } \
555 } \
556 \
557 static inline size_t VEC_OP (T,embedded_size) \
558 (int alloc_) \
559 { \
560 return offsetof (VEC(T),vec) + alloc_ * sizeof(T); \
561 } \
562 \
563 static inline void VEC_OP (T,embedded_init) \
564 (VEC(T) *vec_, int alloc_) \
565 { \
566 vec_->num = 0; \
567 vec_->alloc = alloc_; \
568 } \
569 \
570 static inline int VEC_OP (T,space) \
571 (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL) \
572 { \
573 vec_assert (alloc_ >= 0, "space"); \
574 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
575 } \
576 \
577 static inline T *VEC_OP (T,quick_push) \
578 (VEC(T) *vec_, T obj_ VEC_ASSERT_DECL) \
579 { \
580 T *slot_; \
581 \
582 vec_assert (vec_->num < vec_->alloc, "quick_push"); \
583 slot_ = &vec_->vec[vec_->num++]; \
584 *slot_ = obj_; \
585 \
586 return slot_; \
587 } \
588 \
589 static inline T VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL) \
590 { \
591 T obj_; \
592 \
593 vec_assert (vec_->num, "pop"); \
594 obj_ = vec_->vec[--vec_->num]; \
595 \
596 return obj_; \
597 } \
598 \
599 static inline void VEC_OP (T,truncate) \
600 (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL) \
601 { \
602 vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate"); \
603 if (vec_) \
604 vec_->num = size_; \
605 } \
606 \
607 static inline T VEC_OP (T,replace) \
608 (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
609 { \
610 T old_obj_; \
611 \
612 vec_assert (ix_ < vec_->num, "replace"); \
613 old_obj_ = vec_->vec[ix_]; \
614 vec_->vec[ix_] = obj_; \
615 \
616 return old_obj_; \
617 } \
618 \
619 static inline T *VEC_OP (T,quick_insert) \
620 (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
621 { \
622 T *slot_; \
623 \
624 vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
625 slot_ = &vec_->vec[ix_]; \
626 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
627 *slot_ = obj_; \
628 \
629 return slot_; \
630 } \
631 \
632 static inline T VEC_OP (T,ordered_remove) \
633 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
634 { \
635 T *slot_; \
636 T obj_; \
637 \
638 vec_assert (ix_ < vec_->num, "ordered_remove"); \
639 slot_ = &vec_->vec[ix_]; \
640 obj_ = *slot_; \
641 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
642 \
643 return obj_; \
644 } \
645 \
646 static inline T VEC_OP (T,unordered_remove) \
647 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
648 { \
649 T *slot_; \
650 T obj_; \
651 \
652 vec_assert (ix_ < vec_->num, "unordered_remove"); \
653 slot_ = &vec_->vec[ix_]; \
654 obj_ = *slot_; \
655 *slot_ = vec_->vec[--vec_->num]; \
656 \
657 return obj_; \
658 } \
659 \
660 static inline void VEC_OP (T,block_remove) \
661 (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL) \
662 { \
663 T *slot_; \
664 \
665 vec_assert (ix_ + len_ <= vec_->num, "block_remove"); \
666 slot_ = &vec_->vec[ix_]; \
667 vec_->num -= len_; \
668 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
669 } \
670 \
671 static inline T *VEC_OP (T,address) \
672 (VEC(T) *vec_) \
673 { \
674 return vec_ ? vec_->vec : 0; \
675 } \
676 \
677 static inline unsigned VEC_OP (T,lower_bound) \
678 (VEC(T) *vec_, const T obj_, \
679 int (*lessthan_)(const T, const T) VEC_ASSERT_DECL) \
680 { \
681 unsigned int len_ = VEC_OP (T, length) (vec_); \
682 unsigned int half_, middle_; \
683 unsigned int first_ = 0; \
684 while (len_ > 0) \
685 { \
686 T middle_elem_; \
687 half_ = len_ >> 1; \
688 middle_ = first_; \
689 middle_ += half_; \
690 middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS); \
691 if (lessthan_ (middle_elem_, obj_)) \
692 { \
693 first_ = middle_; \
694 ++first_; \
695 len_ = len_ - half_ - 1; \
696 } \
697 else \
698 len_ = half_; \
699 } \
700 return first_; \
701 }
702
703 #define DEF_VEC_ALLOC_FUNC_P(T) \
704 static inline VEC(T) *VEC_OP (T,alloc) \
705 (int alloc_) \
706 { \
707 /* We must request exact size allocation, hence the negation. */ \
708 return (VEC(T) *) vec_p_reserve (NULL, -alloc_); \
709 } \
710 \
711 static inline void VEC_OP (T,free) \
712 (VEC(T) **vec_) \
713 { \
714 if (*vec_) \
715 vec_free_ (*vec_); \
716 *vec_ = NULL; \
717 } \
718 \
719 static inline void VEC_OP (T,cleanup) \
720 (void *arg_) \
721 { \
722 VEC(T) **vec_ = arg_; \
723 if (*vec_) \
724 vec_free_ (*vec_); \
725 *vec_ = NULL; \
726 } \
727 \
728 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
729 { \
730 size_t len_ = vec_ ? vec_->num : 0; \
731 VEC (T) *new_vec_ = NULL; \
732 \
733 if (len_) \
734 { \
735 /* We must request exact size allocation, hence the negation. */ \
736 new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_)); \
737 \
738 new_vec_->num = len_; \
739 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
740 } \
741 return new_vec_; \
742 } \
743 \
744 static inline int VEC_OP (T,reserve) \
745 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
746 { \
747 int extend = !VEC_OP (T,space) \
748 (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS); \
749 \
750 if (extend) \
751 *vec_ = (VEC(T) *) vec_p_reserve (*vec_, alloc_); \
752 \
753 return extend; \
754 } \
755 \
756 static inline void VEC_OP (T,safe_grow) \
757 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
758 { \
759 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
760 "safe_grow"); \
761 VEC_OP (T,reserve) \
762 (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS); \
763 (*vec_)->num = size_; \
764 } \
765 \
766 static inline T *VEC_OP (T,safe_push) \
767 (VEC(T) **vec_, T obj_ VEC_ASSERT_DECL) \
768 { \
769 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
770 \
771 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
772 } \
773 \
774 static inline T *VEC_OP (T,safe_insert) \
775 (VEC(T) **vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
776 { \
777 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
778 \
779 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
780 }
781
782 #define DEF_VEC_FUNC_O(T) \
783 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_) \
784 { \
785 return vec_ ? vec_->num : 0; \
786 } \
787 \
788 static inline T *VEC_OP (T,last) (VEC(T) *vec_ VEC_ASSERT_DECL) \
789 { \
790 vec_assert (vec_ && vec_->num, "last"); \
791 \
792 return &vec_->vec[vec_->num - 1]; \
793 } \
794 \
795 static inline T *VEC_OP (T,index) \
796 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
797 { \
798 vec_assert (vec_ && ix_ < vec_->num, "index"); \
799 \
800 return &vec_->vec[ix_]; \
801 } \
802 \
803 static inline int VEC_OP (T,iterate) \
804 (VEC(T) *vec_, unsigned ix_, T **ptr) \
805 { \
806 if (vec_ && ix_ < vec_->num) \
807 { \
808 *ptr = &vec_->vec[ix_]; \
809 return 1; \
810 } \
811 else \
812 { \
813 *ptr = 0; \
814 return 0; \
815 } \
816 } \
817 \
818 static inline size_t VEC_OP (T,embedded_size) \
819 (int alloc_) \
820 { \
821 return offsetof (VEC(T),vec) + alloc_ * sizeof(T); \
822 } \
823 \
824 static inline void VEC_OP (T,embedded_init) \
825 (VEC(T) *vec_, int alloc_) \
826 { \
827 vec_->num = 0; \
828 vec_->alloc = alloc_; \
829 } \
830 \
831 static inline int VEC_OP (T,space) \
832 (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL) \
833 { \
834 vec_assert (alloc_ >= 0, "space"); \
835 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
836 } \
837 \
838 static inline T *VEC_OP (T,quick_push) \
839 (VEC(T) *vec_, const T *obj_ VEC_ASSERT_DECL) \
840 { \
841 T *slot_; \
842 \
843 vec_assert (vec_->num < vec_->alloc, "quick_push"); \
844 slot_ = &vec_->vec[vec_->num++]; \
845 if (obj_) \
846 *slot_ = *obj_; \
847 \
848 return slot_; \
849 } \
850 \
851 static inline void VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL) \
852 { \
853 vec_assert (vec_->num, "pop"); \
854 --vec_->num; \
855 } \
856 \
857 static inline void VEC_OP (T,truncate) \
858 (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL) \
859 { \
860 vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate"); \
861 if (vec_) \
862 vec_->num = size_; \
863 } \
864 \
865 static inline T *VEC_OP (T,replace) \
866 (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
867 { \
868 T *slot_; \
869 \
870 vec_assert (ix_ < vec_->num, "replace"); \
871 slot_ = &vec_->vec[ix_]; \
872 if (obj_) \
873 *slot_ = *obj_; \
874 \
875 return slot_; \
876 } \
877 \
878 static inline T *VEC_OP (T,quick_insert) \
879 (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
880 { \
881 T *slot_; \
882 \
883 vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
884 slot_ = &vec_->vec[ix_]; \
885 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
886 if (obj_) \
887 *slot_ = *obj_; \
888 \
889 return slot_; \
890 } \
891 \
892 static inline void VEC_OP (T,ordered_remove) \
893 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
894 { \
895 T *slot_; \
896 \
897 vec_assert (ix_ < vec_->num, "ordered_remove"); \
898 slot_ = &vec_->vec[ix_]; \
899 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
900 } \
901 \
902 static inline void VEC_OP (T,unordered_remove) \
903 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
904 { \
905 vec_assert (ix_ < vec_->num, "unordered_remove"); \
906 vec_->vec[ix_] = vec_->vec[--vec_->num]; \
907 } \
908 \
909 static inline void VEC_OP (T,block_remove) \
910 (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL) \
911 { \
912 T *slot_; \
913 \
914 vec_assert (ix_ + len_ <= vec_->num, "block_remove"); \
915 slot_ = &vec_->vec[ix_]; \
916 vec_->num -= len_; \
917 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
918 } \
919 \
920 static inline T *VEC_OP (T,address) \
921 (VEC(T) *vec_) \
922 { \
923 return vec_ ? vec_->vec : 0; \
924 } \
925 \
926 static inline unsigned VEC_OP (T,lower_bound) \
927 (VEC(T) *vec_, const T *obj_, \
928 int (*lessthan_)(const T *, const T *) VEC_ASSERT_DECL) \
929 { \
930 unsigned int len_ = VEC_OP (T, length) (vec_); \
931 unsigned int half_, middle_; \
932 unsigned int first_ = 0; \
933 while (len_ > 0) \
934 { \
935 T *middle_elem_; \
936 half_ = len_ >> 1; \
937 middle_ = first_; \
938 middle_ += half_; \
939 middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS); \
940 if (lessthan_ (middle_elem_, obj_)) \
941 { \
942 first_ = middle_; \
943 ++first_; \
944 len_ = len_ - half_ - 1; \
945 } \
946 else \
947 len_ = half_; \
948 } \
949 return first_; \
950 }
951
952 #define DEF_VEC_ALLOC_FUNC_O(T) \
953 static inline VEC(T) *VEC_OP (T,alloc) \
954 (int alloc_) \
955 { \
956 /* We must request exact size allocation, hence the negation. */ \
957 return (VEC(T) *) vec_o_reserve (NULL, -alloc_, \
958 offsetof (VEC(T),vec), sizeof (T)); \
959 } \
960 \
961 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
962 { \
963 size_t len_ = vec_ ? vec_->num : 0; \
964 VEC (T) *new_vec_ = NULL; \
965 \
966 if (len_) \
967 { \
968 /* We must request exact size allocation, hence the negation. */ \
969 new_vec_ = (VEC (T) *) \
970 vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T)); \
971 \
972 new_vec_->num = len_; \
973 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
974 } \
975 return new_vec_; \
976 } \
977 \
978 static inline void VEC_OP (T,free) \
979 (VEC(T) **vec_) \
980 { \
981 if (*vec_) \
982 vec_free_ (*vec_); \
983 *vec_ = NULL; \
984 } \
985 \
986 static inline void VEC_OP (T,cleanup) \
987 (void *arg_) \
988 { \
989 VEC(T) **vec_ = arg_; \
990 if (*vec_) \
991 vec_free_ (*vec_); \
992 *vec_ = NULL; \
993 } \
994 \
995 static inline int VEC_OP (T,reserve) \
996 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
997 { \
998 int extend = !VEC_OP (T,space) (*vec_, alloc_ < 0 ? -alloc_ : alloc_ \
999 VEC_ASSERT_PASS); \
1000 \
1001 if (extend) \
1002 *vec_ = (VEC(T) *) \
1003 vec_o_reserve (*vec_, alloc_, offsetof (VEC(T),vec), sizeof (T)); \
1004 \
1005 return extend; \
1006 } \
1007 \
1008 static inline void VEC_OP (T,safe_grow) \
1009 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
1010 { \
1011 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
1012 "safe_grow"); \
1013 VEC_OP (T,reserve) \
1014 (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS); \
1015 (*vec_)->num = size_; \
1016 } \
1017 \
1018 static inline T *VEC_OP (T,safe_push) \
1019 (VEC(T) **vec_, const T *obj_ VEC_ASSERT_DECL) \
1020 { \
1021 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
1022 \
1023 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
1024 } \
1025 \
1026 static inline T *VEC_OP (T,safe_insert) \
1027 (VEC(T) **vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
1028 { \
1029 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
1030 \
1031 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
1032 }
1033
1034 #endif /* GDB_VEC_H */