1 /* Vector API for GDB.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Nathan Sidwell <nathan@codesourcery.com>
5
6 This file is part of GDB.
7
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20
21 #if !defined (GDB_VEC_H)
22 #define GDB_VEC_H
23
24 #include <stddef.h>
25 #include "gdb_string.h"
26 #include "gdb_assert.h"
27
28 /* The macros here implement a set of templated vector types and
29 associated interfaces. These templates are implemented with
30 macros, as we're not in C++ land. The interface functions are
31 typesafe and use static inline functions, sometimes backed by
32 out-of-line generic functions.
33
34 Because of the different behavior of structure objects, scalar
35 objects and of pointers, there are three flavors, one for each of
36 these variants. Both the structure object and pointer variants
37 pass pointers to objects around -- in the former case the pointers
38 are stored into the vector and in the latter case the pointers are
39 dereferenced and the objects copied into the vector. The scalar
40 object variant is suitable for int-like objects, and the vector
41 elements are returned by value.
42
43 There are both 'index' and 'iterate' accessors. The iterator
44 returns a boolean iteration condition and updates the iteration
45 variable passed by reference. Because the iterator will be
46 inlined, the address-of can be optimized away.
47
48 The vectors are implemented using the trailing array idiom, thus
49 they are not resizeable without changing the address of the vector
50 object itself. This means you cannot have variables or fields of
51 vector type -- always use a pointer to a vector. The one exception
52 is the final field of a structure, which could be a vector type.
53 You will have to use the embedded_size & embedded_init calls to
54 create such objects, and they will probably not be resizeable (so
55 don't use the 'safe' allocation variants). The trailing array
56 idiom is used (rather than a pointer to an array of data), because,
57 if we allow NULL to also represent an empty vector, empty vectors
58 occupy minimal space in the structure containing them.
59
60 Each operation that increases the number of active elements is
61 available in 'quick' and 'safe' variants. The former presumes that
62 there is sufficient allocated space for the operation to succeed
63 (it dies if there is not). The latter will reallocate the
64 vector, if needed. Reallocation causes an exponential increase in
65 vector size. If you know you will be adding N elements, it would
66 be more efficient to use the reserve operation before adding the
67 elements with the 'quick' operation. This will ensure there are at
68 least as many spare slots as you ask for; the allocation will grow
69 exponentially if there are too few. If you want to reserve a
70 specific number of slots, but do not want the exponential increase
71 (for instance, you know this is the last allocation), use a
72 negative number for reservation. You can also create a vector of a
73 specific size from the get go.
74
75 You should prefer the push and pop operations, as they append and
76 remove from the end of the vector. If you need to remove several
77 items in one go, use the truncate operation. The insert and remove
78 operations allow you to change elements in the middle of the
79 vector. There are two remove operations, one which preserves the
80 element ordering 'ordered_remove', and one which does not
81 'unordered_remove'. The latter function copies the end element
82 into the removed slot, rather than invoking a memmove operation. The
83 'lower_bound' function will determine where an item should be
84 inserted so as to maintain sorted order.
85
86 If you need to directly manipulate a vector, then the 'address'
87 accessor will return the address of the start of the vector. Also
88 the 'space' predicate will tell you whether there is spare capacity
89 in the vector. You will not normally need to use these two functions.
90
91 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro.
92 Variables of vector type are declared using a VEC(TYPEDEF) macro.
93 The characters O, P and I indicate whether TYPEDEF is a pointer
94 (P), object (O) or integral (I) type. Be careful to pick the
95 correct one, as you'll get an awkward and inefficient API if you
96 use the wrong one. There is a check, which results in a
97 compile-time warning, for the P and I versions, but there is no
98 check for the O versions, as that is not possible in plain C.
99
100 An example of their use would be,
101
102 DEF_VEC_P(tree); // non-managed tree vector.
103
104 struct my_struct {
105 VEC(tree) *v; // A (pointer to) a vector of tree pointers.
106 };
107
108 struct my_struct *s;
109
110 if (VEC_length(tree, s->v)) { we have some contents }
111 VEC_safe_push(tree, s->v, decl); // append some decl onto the end
112 for (ix = 0; VEC_iterate(tree, s->v, ix, elt); ix++)
113 { do something with elt }
114
115 */
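
/* For illustration only, a slightly fuller sketch of the example
   above with the iteration variables declared; the names `ix', `elt'
   and `use' are placeholders rather than part of this API:

     unsigned ix;
     tree elt;

     if (!VEC_empty (tree, s->v))
       {
         VEC_safe_push (tree, s->v, decl);
         for (ix = 0; VEC_iterate (tree, s->v, ix, elt); ix++)
           use (elt);               // `use' is hypothetical
       }
*/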
116
117 /* Macros to invoke API calls. A single macro works for both pointer
118 and object vectors, but the argument and return types might well be
119 different. In each macro, T is the typedef of the vector elements.
120 Some of these macros pass the vector, V, by reference (by taking
121 its address); this is noted in the descriptions. */
122
123 /* Length of vector
124 unsigned VEC_T_length(const VEC(T) *v);
125
126 Return the number of active elements in V. V can be NULL, in which
127 case zero is returned. */
128
129 #define VEC_length(T,V) (VEC_OP(T,length)(V))
130
131
132 /* Check if vector is empty
133 int VEC_T_empty(const VEC(T) *v);
134
135 Return nonzero if V is an empty vector (or V is NULL), zero otherwise. */
136
137 #define VEC_empty(T,V) (VEC_length (T,V) == 0)
138
139
140 /* Get the final element of the vector.
141 T VEC_T_last(VEC(T) *v); // Integer
142 T VEC_T_last(VEC(T) *v); // Pointer
143 T *VEC_T_last(VEC(T) *v); // Object
144
145 Return the final element. V must not be empty. */
146
147 #define VEC_last(T,V) (VEC_OP(T,last)(V VEC_ASSERT_INFO))
148
149 /* Index into vector
150 T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
151 T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
152 T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
153
154 Return the IX'th element. IX must be in the domain of V. */
155
156 #define VEC_index(T,V,I) (VEC_OP(T,index)(V,I VEC_ASSERT_INFO))
157
158 /* Iterate over vector
159 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
160 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
161 int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
162
163 Return iteration condition and update PTR to point to the IX'th
164 element. At the end of iteration, sets PTR to NULL. Use this to
165 iterate over the elements of a vector as follows,
166
167 for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
168 continue; */
169
170 #define VEC_iterate(T,V,I,P) (VEC_OP(T,iterate)(V,I,&(P)))
171
172 /* Allocate new vector.
173 VEC(T,A) *VEC_T_alloc(int reserve);
174
175 Allocate a new vector with space for RESERVE objects. If RESERVE
176 is zero, NO vector is created. */
177
178 #define VEC_alloc(T,N) (VEC_OP(T,alloc)(N))
179
180 /* Free a vector.
181 void VEC_T_free(VEC(T,A) *&);
182
183 Free a vector and set it to NULL. */
184
185 #define VEC_free(T,V) (VEC_OP(T,free)(&V))
186
187 /* A cleanup function for a vector.
188 void VEC_T_cleanup(void *);
189
190 Clean up a vector. */
191
192 #define VEC_cleanup(T) (VEC_OP(T,cleanup))
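
/* A minimal sketch of registering this cleanup with GDB's cleanup
   machinery, assuming the usual make_cleanup/do_cleanups interface
   and a pointer vector of `tree' as in the example above:

     VEC(tree) *v = NULL;
     struct cleanup *back_to = make_cleanup (VEC_cleanup (tree), &v);
     ... fill and use v ...
     do_cleanups (back_to);   // frees V and resets it to NULL
*/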
193
194 /* Use these to determine the required size and initialization of a
195 vector embedded within another structure (as the final member).
196
197 size_t VEC_T_embedded_size(int reserve);
198 void VEC_T_embedded_init(VEC(T) *v, int reserve);
199
200 These allow the caller to perform the memory allocation. */
201
202 #define VEC_embedded_size(T,N) (VEC_OP(T,embedded_size)(N))
203 #define VEC_embedded_init(T,O,N) (VEC_OP(T,embedded_init)(VEC_BASE(O),N))
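
/* A hedged sketch of embedding a vector as the final member of a
   structure; the structure name and element count are hypothetical:

     struct holder
     {
       int flags;
       VEC(tree) v;    // must be the final member
     };

     struct holder *h
       = xmalloc (offsetof (struct holder, v) + VEC_embedded_size (tree, 8));
     h->flags = 0;
     VEC_embedded_init (tree, &h->v, 8);
*/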
204
205 /* Copy a vector.
206 VEC(T,A) *VEC_T_copy(VEC(T) *);
207
208 Copy the live elements of a vector into a new vector. The new and
209 old vectors need not be allocated by the same mechanism. */
210
211 #define VEC_copy(T,V) (VEC_OP(T,copy)(V))
212
213 /* Determine if a vector has additional capacity.
214
215 int VEC_T_space (VEC(T) *v,int reserve)
216
217 If V has space for RESERVE additional entries, return nonzero. You
218 usually only need to use this if you are doing your own vector
219 reallocation, for instance on an embedded vector. This returns
220 nonzero in exactly those circumstances in which VEC_T_reserve
221 would not need to reallocate. */
222
223 #define VEC_space(T,V,R) (VEC_OP(T,space)(V,R VEC_ASSERT_INFO))
224
225 /* Reserve space.
226 int VEC_T_reserve(VEC(T,A) *&v, int reserve);
227
228 Ensure that V has at least abs(RESERVE) slots available. The
229 signedness of RESERVE determines the reallocation behavior. A
230 negative value will not create additional headroom beyond that
231 requested. A positive value will create additional headroom. Note
232 this can cause V to be reallocated. Returns nonzero iff
233 reallocation actually occurred. */
234
235 #define VEC_reserve(T,V,R) (VEC_OP(T,reserve)(&(V),R VEC_ASSERT_INFO))
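
/* A small sketch of the reserve-then-quick_push pattern recommended
   in the overview above; N and ELTS are hypothetical:

     VEC_reserve (tree, v, (int) n);   // at most one reallocation
     for (i = 0; i < n; i++)
       VEC_quick_push (tree, v, elts[i]);
*/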
236
237 /* Push object with no reallocation
238 T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
239 T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
240 T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
241
242 Push a new element onto the end and return a pointer to the slot
243 filled in. For object vectors, the new value can be NULL, in which
244 case NO initialization is performed. There must
245 be sufficient space in the vector. */
246
247 #define VEC_quick_push(T,V,O) (VEC_OP(T,quick_push)(V,O VEC_ASSERT_INFO))
248
249 /* Push object with reallocation
250 T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Integer
251 T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Pointer
252 T *VEC_T_safe_push (VEC(T,A) *&v, T *obj); // Object
253
254 Push a new element onto the end and return a pointer to the slot
255 filled in. For object vectors, the new value can be NULL, in which
256 case NO initialization is performed. Reallocates V, if needed. */
257
258 #define VEC_safe_push(T,V,O) (VEC_OP(T,safe_push)(&(V),O VEC_ASSERT_INFO))
259
260 /* Pop element off end
261 T VEC_T_pop (VEC(T) *v); // Integer
262 T VEC_T_pop (VEC(T) *v); // Pointer
263 void VEC_T_pop (VEC(T) *v); // Object
264
265 Pop the last element off the end. Returns the element popped, for
266 pointer and integer vectors.
267
268 #define VEC_pop(T,V) (VEC_OP(T,pop)(V VEC_ASSERT_INFO))
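
/* A minimal sketch of draining a pointer vector from the end;
   the `consume' helper is hypothetical:

     while (!VEC_empty (tree, v))
       consume (VEC_pop (tree, v));
*/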
269
270 /* Truncate to specific length
271 void VEC_T_truncate (VEC(T) *v, unsigned len);
272
273 Set the length as specified. The new length must be less than or
274 equal to the current length. This is an O(1) operation. */
275
276 #define VEC_truncate(T,V,I) \
277 (VEC_OP(T,truncate)(V,I VEC_ASSERT_INFO))
278
279 /* Grow to a specific length.
280 void VEC_T_safe_grow (VEC(T,A) *&v, int len);
281
282 Grow the vector to a specific length. LEN must be greater than
283 or equal to the current length. The new elements are
284 uninitialized. */
285
286 #define VEC_safe_grow(T,V,I) \
287 (VEC_OP(T,safe_grow)(&(V),I VEC_ASSERT_INFO))
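
/* A hedged sketch of growing a vector and zero-filling the new
   slots, assuming the element type may safely be zeroed:

     unsigned old_len = VEC_length (tree, v);
     VEC_safe_grow (tree, v, (int) (old_len + n));
     memset (VEC_address (tree, v) + old_len, 0, n * sizeof (tree));
*/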
288
289 /* Replace element
290 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
291 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
292 T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object
293
294 Replace the IXth element of V with a new value, VAL. For pointer and
295 integer vectors returns the original value. For object vectors returns a
296 pointer to the new value. For object vectors the new value can be
297 NULL, in which case no overwriting of the slot is actually
298 performed. */
299
300 #define VEC_replace(T,V,I,O) (VEC_OP(T,replace)(V,I,O VEC_ASSERT_INFO))
301
302 /* Insert object with no reallocation
303 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
304 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
305 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
306
307 Insert an element, VAL, at the IXth position of V. Return a pointer
308 to the slot created. For object vectors, the new value can be
309 NULL, in which case no initialization of the inserted slot takes
310 place. There must be sufficient space. */
311
312 #define VEC_quick_insert(T,V,I,O) \
313 (VEC_OP(T,quick_insert)(V,I,O VEC_ASSERT_INFO))
314
315 /* Insert object with reallocation
316 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
317 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
318 T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
319
320 Insert an element, VAL, at the IXth position of V. Return a pointer
321 to the slot created. For object vectors, the new value can be
322 NULL, in which case no initialization of the inserted slot takes
323 place. Reallocate V, if necessary. */
324
325 #define VEC_safe_insert(T,V,I,O) \
326 (VEC_OP(T,safe_insert)(&(V),I,O VEC_ASSERT_INFO))
327
328 /* Remove element retaining order
329 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
330 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
331 void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
332
333 Remove an element from the IXth position of V. Ordering of
334 remaining elements is preserved. For pointer and integer vectors
335 returns the removed object. This is an O(N) operation due to a memmove.
336
337 #define VEC_ordered_remove(T,V,I) \
338 (VEC_OP(T,ordered_remove)(V,I VEC_ASSERT_INFO))
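
/* A hedged sketch of filtering a pointer vector in place while
   preserving order; `discard_p' is a hypothetical predicate.  After
   a removal the next element shifts into slot IX, so IX only
   advances when nothing was removed:

     ix = 0;
     while (VEC_iterate (tree, v, ix, elt))
       {
         if (discard_p (elt))
           VEC_ordered_remove (tree, v, ix);
         else
           ix++;
       }
*/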
339
340 /* Remove element destroying order
341 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
342 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
343 void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
344
345 Remove an element from the IXth position of V. Ordering of
346 remaining elements is destroyed. For pointer and integer vectors
347 returns the removed object. This is an O(1) operation.
348
349 #define VEC_unordered_remove(T,V,I) \
350 (VEC_OP(T,unordered_remove)(V,I VEC_ASSERT_INFO))
351
352 /* Remove a block of elements
353 void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
354
355 Remove LEN elements starting at the IXth. Ordering is retained.
356 This is an O(N) operation due to a memmove. */
357
358 #define VEC_block_remove(T,V,I,L) \
359 (VEC_OP(T,block_remove)(V,I,L VEC_ASSERT_INFO))
360
361 /* Get the address of the array of elements
362 T *VEC_T_address (VEC(T) v)
363
364 If you need to directly manipulate the array (for instance, you
365 want to feed it to qsort), use this accessor. */
366
367 #define VEC_address(T,V) (VEC_OP(T,address)(V))
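
/* A small sketch of handing the underlying array to qsort; the
   comparison callback and its ordering are purely illustrative:

     static int
     compare_trees (const void *lhs_, const void *rhs_)
     {
       tree lhs = *(const tree *) lhs_;
       tree rhs = *(const tree *) rhs_;
       return lhs < rhs ? -1 : lhs > rhs;   // hypothetical ordering
     }

     qsort (VEC_address (tree, v), VEC_length (tree, v),
            sizeof (tree), compare_trees);
*/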
368
369 /* Find the first index in the vector not less than the object.
370 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
371 int (*lessthan) (const T, const T)); // Integer
372 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
373 int (*lessthan) (const T, const T)); // Pointer
374 unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
375 int (*lessthan) (const T*, const T*)); // Object
376
377 Find the first position in which VAL could be inserted without
378 changing the ordering of V. LESSTHAN is a function that returns
379 true if the first argument is strictly less than the second. */
380
381 #define VEC_lower_bound(T,V,O,LT) \
382 (VEC_OP(T,lower_bound)(V,O,LT VEC_ASSERT_INFO))
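
/* A minimal sketch of keeping a vector sorted on insertion, assuming
   a strict less-than callback `tree_lessthan' matching the Pointer
   prototype above:

     unsigned pos = VEC_lower_bound (tree, v, obj, tree_lessthan);
     VEC_safe_insert (tree, v, pos, obj);
*/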
383
384 /* Reallocate an array of elements with prefix. */
385 extern void *vec_p_reserve (void *, int);
386 extern void *vec_o_reserve (void *, int, size_t, size_t);
387 #define vec_free_(V) xfree (V)
388
389 #define VEC_ASSERT_INFO ,__FILE__,__LINE__
390 #define VEC_ASSERT_DECL ,const char *file_,unsigned line_
391 #define VEC_ASSERT_PASS ,file_,line_
392 #define vec_assert(expr, op) \
393 ((void)((expr) ? 0 : (gdb_assert_fail (op, file_, line_, ASSERT_FUNCTION), 0)))
394
395 #define VEC(T) VEC_##T
396 #define VEC_OP(T,OP) VEC_##T##_##OP
397
398 #define VEC_T(T) \
399 typedef struct VEC(T) \
400 { \
401 unsigned num; \
402 unsigned alloc; \
403 T vec[1]; \
404 } VEC(T)
405
406 /* Vector of integer-like object. */
407 #define DEF_VEC_I(T) \
408 static inline void VEC_OP (T,must_be_integral_type) (void) \
409 { \
410 (void)~(T)0; \
411 } \
412 \
413 VEC_T(T); \
414 DEF_VEC_FUNC_P(T) \
415 DEF_VEC_ALLOC_FUNC_I(T) \
416 struct vec_swallow_trailing_semi
417
418 /* Vector of pointer to object. */
419 #define DEF_VEC_P(T) \
420 static inline void VEC_OP (T,must_be_pointer_type) (void) \
421 { \
422 (void)((T)1 == (void *)1); \
423 } \
424 \
425 VEC_T(T); \
426 DEF_VEC_FUNC_P(T) \
427 DEF_VEC_ALLOC_FUNC_P(T) \
428 struct vec_swallow_trailing_semi
429
430 /* Vector of object. */
431 #define DEF_VEC_O(T) \
432 VEC_T(T); \
433 DEF_VEC_FUNC_O(T) \
434 DEF_VEC_ALLOC_FUNC_O(T) \
435 struct vec_swallow_trailing_semi
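
/* For illustration, hypothetical instantiations of the three
   flavors; each DEF_VEC_* use is terminated by a semicolon, which
   the trailing struct declaration swallows:

     typedef struct my_rec { int a, b; } my_rec_s;

     DEF_VEC_I (int);        // integral elements, returned by value
     DEF_VEC_P (tree);       // pointer elements
     DEF_VEC_O (my_rec_s);   // object elements, accessed via pointers
*/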
436
437 #define DEF_VEC_ALLOC_FUNC_I(T) \
438 static inline VEC(T) *VEC_OP (T,alloc) \
439 (int alloc_) \
440 { \
441 /* We must request exact size allocation, hence the negation. */ \
442 return (VEC(T) *) vec_o_reserve (NULL, -alloc_, \
443 offsetof (VEC(T),vec), sizeof (T)); \
444 } \
445 \
446 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
447 { \
448 size_t len_ = vec_ ? vec_->num : 0; \
449 VEC (T) *new_vec_ = NULL; \
450 \
451 if (len_) \
452 { \
453 /* We must request exact size allocation, hence the negation. */ \
454 new_vec_ = (VEC (T) *) \
455 vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T)); \
456 \
457 new_vec_->num = len_; \
458 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
459 } \
460 return new_vec_; \
461 } \
462 \
463 static inline void VEC_OP (T,free) \
464 (VEC(T) **vec_) \
465 { \
466 if (*vec_) \
467 vec_free_ (*vec_); \
468 *vec_ = NULL; \
469 } \
470 \
471 static inline void VEC_OP (T,cleanup) \
472 (void *arg_) \
473 { \
474 VEC(T) **vec_ = arg_; \
475 if (*vec_) \
476 vec_free_ (*vec_); \
477 *vec_ = NULL; \
478 } \
479 \
480 static inline int VEC_OP (T,reserve) \
481 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
482 { \
483 int extend = !VEC_OP (T,space) \
484 (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS); \
485 \
486 if (extend) \
487 *vec_ = (VEC(T) *) vec_o_reserve (*vec_, alloc_, \
488 offsetof (VEC(T),vec), sizeof (T)); \
489 \
490 return extend; \
491 } \
492 \
493 static inline void VEC_OP (T,safe_grow) \
494 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
495 { \
496 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
497 "safe_grow"); \
498 VEC_OP (T,reserve) (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ \
499 VEC_ASSERT_PASS); \
500 (*vec_)->num = size_; \
501 } \
502 \
503 static inline T *VEC_OP (T,safe_push) \
504 (VEC(T) **vec_, const T obj_ VEC_ASSERT_DECL) \
505 { \
506 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
507 \
508 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
509 } \
510 \
511 static inline T *VEC_OP (T,safe_insert) \
512 (VEC(T) **vec_, unsigned ix_, const T obj_ VEC_ASSERT_DECL) \
513 { \
514 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
515 \
516 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
517 }
518
519 #define DEF_VEC_FUNC_P(T) \
520 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_) \
521 { \
522 return vec_ ? vec_->num : 0; \
523 } \
524 \
525 static inline T VEC_OP (T,last) \
526 (const VEC(T) *vec_ VEC_ASSERT_DECL) \
527 { \
528 vec_assert (vec_ && vec_->num, "last"); \
529 \
530 return vec_->vec[vec_->num - 1]; \
531 } \
532 \
533 static inline T VEC_OP (T,index) \
534 (const VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
535 { \
536 vec_assert (vec_ && ix_ < vec_->num, "index"); \
537 \
538 return vec_->vec[ix_]; \
539 } \
540 \
541 static inline int VEC_OP (T,iterate) \
542 (const VEC(T) *vec_, unsigned ix_, T *ptr) \
543 { \
544 if (vec_ && ix_ < vec_->num) \
545 { \
546 *ptr = vec_->vec[ix_]; \
547 return 1; \
548 } \
549 else \
550 { \
551 *ptr = 0; \
552 return 0; \
553 } \
554 } \
555 \
556 static inline size_t VEC_OP (T,embedded_size) \
557 (int alloc_) \
558 { \
559 return offsetof (VEC(T),vec) + alloc_ * sizeof(T); \
560 } \
561 \
562 static inline void VEC_OP (T,embedded_init) \
563 (VEC(T) *vec_, int alloc_) \
564 { \
565 vec_->num = 0; \
566 vec_->alloc = alloc_; \
567 } \
568 \
569 static inline int VEC_OP (T,space) \
570 (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL) \
571 { \
572 vec_assert (alloc_ >= 0, "space"); \
573 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
574 } \
575 \
576 static inline T *VEC_OP (T,quick_push) \
577 (VEC(T) *vec_, T obj_ VEC_ASSERT_DECL) \
578 { \
579 T *slot_; \
580 \
581 vec_assert (vec_->num < vec_->alloc, "quick_push"); \
582 slot_ = &vec_->vec[vec_->num++]; \
583 *slot_ = obj_; \
584 \
585 return slot_; \
586 } \
587 \
588 static inline T VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL) \
589 { \
590 T obj_; \
591 \
592 vec_assert (vec_->num, "pop"); \
593 obj_ = vec_->vec[--vec_->num]; \
594 \
595 return obj_; \
596 } \
597 \
598 static inline void VEC_OP (T,truncate) \
599 (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL) \
600 { \
601 vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate"); \
602 if (vec_) \
603 vec_->num = size_; \
604 } \
605 \
606 static inline T VEC_OP (T,replace) \
607 (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
608 { \
609 T old_obj_; \
610 \
611 vec_assert (ix_ < vec_->num, "replace"); \
612 old_obj_ = vec_->vec[ix_]; \
613 vec_->vec[ix_] = obj_; \
614 \
615 return old_obj_; \
616 } \
617 \
618 static inline T *VEC_OP (T,quick_insert) \
619 (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
620 { \
621 T *slot_; \
622 \
623 vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
624 slot_ = &vec_->vec[ix_]; \
625 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
626 *slot_ = obj_; \
627 \
628 return slot_; \
629 } \
630 \
631 static inline T VEC_OP (T,ordered_remove) \
632 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
633 { \
634 T *slot_; \
635 T obj_; \
636 \
637 vec_assert (ix_ < vec_->num, "ordered_remove"); \
638 slot_ = &vec_->vec[ix_]; \
639 obj_ = *slot_; \
640 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
641 \
642 return obj_; \
643 } \
644 \
645 static inline T VEC_OP (T,unordered_remove) \
646 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
647 { \
648 T *slot_; \
649 T obj_; \
650 \
651 vec_assert (ix_ < vec_->num, "unordered_remove"); \
652 slot_ = &vec_->vec[ix_]; \
653 obj_ = *slot_; \
654 *slot_ = vec_->vec[--vec_->num]; \
655 \
656 return obj_; \
657 } \
658 \
659 static inline void VEC_OP (T,block_remove) \
660 (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL) \
661 { \
662 T *slot_; \
663 \
664 vec_assert (ix_ + len_ <= vec_->num, "block_remove"); \
665 slot_ = &vec_->vec[ix_]; \
666 vec_->num -= len_; \
667 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
668 } \
669 \
670 static inline T *VEC_OP (T,address) \
671 (VEC(T) *vec_) \
672 { \
673 return vec_ ? vec_->vec : 0; \
674 } \
675 \
676 static inline unsigned VEC_OP (T,lower_bound) \
677 (VEC(T) *vec_, const T obj_, \
678 int (*lessthan_)(const T, const T) VEC_ASSERT_DECL) \
679 { \
680 unsigned int len_ = VEC_OP (T, length) (vec_); \
681 unsigned int half_, middle_; \
682 unsigned int first_ = 0; \
683 while (len_ > 0) \
684 { \
685 T middle_elem_; \
686 half_ = len_ >> 1; \
687 middle_ = first_; \
688 middle_ += half_; \
689 middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS); \
690 if (lessthan_ (middle_elem_, obj_)) \
691 { \
692 first_ = middle_; \
693 ++first_; \
694 len_ = len_ - half_ - 1; \
695 } \
696 else \
697 len_ = half_; \
698 } \
699 return first_; \
700 }
701
702 #define DEF_VEC_ALLOC_FUNC_P(T) \
703 static inline VEC(T) *VEC_OP (T,alloc) \
704 (int alloc_) \
705 { \
706 /* We must request exact size allocation, hence the negation. */ \
707 return (VEC(T) *) vec_p_reserve (NULL, -alloc_); \
708 } \
709 \
710 static inline void VEC_OP (T,free) \
711 (VEC(T) **vec_) \
712 { \
713 if (*vec_) \
714 vec_free_ (*vec_); \
715 *vec_ = NULL; \
716 } \
717 \
718 static inline void VEC_OP (T,cleanup) \
719 (void *arg_) \
720 { \
721 VEC(T) **vec_ = arg_; \
722 if (*vec_) \
723 vec_free_ (*vec_); \
724 *vec_ = NULL; \
725 } \
726 \
727 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
728 { \
729 size_t len_ = vec_ ? vec_->num : 0; \
730 VEC (T) *new_vec_ = NULL; \
731 \
732 if (len_) \
733 { \
734 /* We must request exact size allocation, hence the negation. */ \
735 new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_)); \
736 \
737 new_vec_->num = len_; \
738 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
739 } \
740 return new_vec_; \
741 } \
742 \
743 static inline int VEC_OP (T,reserve) \
744 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
745 { \
746 int extend = !VEC_OP (T,space) \
747 (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS); \
748 \
749 if (extend) \
750 *vec_ = (VEC(T) *) vec_p_reserve (*vec_, alloc_); \
751 \
752 return extend; \
753 } \
754 \
755 static inline void VEC_OP (T,safe_grow) \
756 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
757 { \
758 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
759 "safe_grow"); \
760 VEC_OP (T,reserve) \
761 (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS); \
762 (*vec_)->num = size_; \
763 } \
764 \
765 static inline T *VEC_OP (T,safe_push) \
766 (VEC(T) **vec_, T obj_ VEC_ASSERT_DECL) \
767 { \
768 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
769 \
770 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
771 } \
772 \
773 static inline T *VEC_OP (T,safe_insert) \
774 (VEC(T) **vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL) \
775 { \
776 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
777 \
778 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
779 }
780
781 #define DEF_VEC_FUNC_O(T) \
782 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_) \
783 { \
784 return vec_ ? vec_->num : 0; \
785 } \
786 \
787 static inline T *VEC_OP (T,last) (VEC(T) *vec_ VEC_ASSERT_DECL) \
788 { \
789 vec_assert (vec_ && vec_->num, "last"); \
790 \
791 return &vec_->vec[vec_->num - 1]; \
792 } \
793 \
794 static inline T *VEC_OP (T,index) \
795 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
796 { \
797 vec_assert (vec_ && ix_ < vec_->num, "index"); \
798 \
799 return &vec_->vec[ix_]; \
800 } \
801 \
802 static inline int VEC_OP (T,iterate) \
803 (VEC(T) *vec_, unsigned ix_, T **ptr) \
804 { \
805 if (vec_ && ix_ < vec_->num) \
806 { \
807 *ptr = &vec_->vec[ix_]; \
808 return 1; \
809 } \
810 else \
811 { \
812 *ptr = 0; \
813 return 0; \
814 } \
815 } \
816 \
817 static inline size_t VEC_OP (T,embedded_size) \
818 (int alloc_) \
819 { \
820 return offsetof (VEC(T),vec) + alloc_ * sizeof(T); \
821 } \
822 \
823 static inline void VEC_OP (T,embedded_init) \
824 (VEC(T) *vec_, int alloc_) \
825 { \
826 vec_->num = 0; \
827 vec_->alloc = alloc_; \
828 } \
829 \
830 static inline int VEC_OP (T,space) \
831 (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL) \
832 { \
833 vec_assert (alloc_ >= 0, "space"); \
834 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
835 } \
836 \
837 static inline T *VEC_OP (T,quick_push) \
838 (VEC(T) *vec_, const T *obj_ VEC_ASSERT_DECL) \
839 { \
840 T *slot_; \
841 \
842 vec_assert (vec_->num < vec_->alloc, "quick_push"); \
843 slot_ = &vec_->vec[vec_->num++]; \
844 if (obj_) \
845 *slot_ = *obj_; \
846 \
847 return slot_; \
848 } \
849 \
850 static inline void VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL) \
851 { \
852 vec_assert (vec_->num, "pop"); \
853 --vec_->num; \
854 } \
855 \
856 static inline void VEC_OP (T,truncate) \
857 (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL) \
858 { \
859 vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate"); \
860 if (vec_) \
861 vec_->num = size_; \
862 } \
863 \
864 static inline T *VEC_OP (T,replace) \
865 (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
866 { \
867 T *slot_; \
868 \
869 vec_assert (ix_ < vec_->num, "replace"); \
870 slot_ = &vec_->vec[ix_]; \
871 if (obj_) \
872 *slot_ = *obj_; \
873 \
874 return slot_; \
875 } \
876 \
877 static inline T *VEC_OP (T,quick_insert) \
878 (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
879 { \
880 T *slot_; \
881 \
882 vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
883 slot_ = &vec_->vec[ix_]; \
884 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
885 if (obj_) \
886 *slot_ = *obj_; \
887 \
888 return slot_; \
889 } \
890 \
891 static inline void VEC_OP (T,ordered_remove) \
892 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
893 { \
894 T *slot_; \
895 \
896 vec_assert (ix_ < vec_->num, "ordered_remove"); \
897 slot_ = &vec_->vec[ix_]; \
898 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
899 } \
900 \
901 static inline void VEC_OP (T,unordered_remove) \
902 (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL) \
903 { \
904 vec_assert (ix_ < vec_->num, "unordered_remove"); \
905 vec_->vec[ix_] = vec_->vec[--vec_->num]; \
906 } \
907 \
908 static inline void VEC_OP (T,block_remove) \
909 (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL) \
910 { \
911 T *slot_; \
912 \
913 vec_assert (ix_ + len_ <= vec_->num, "block_remove"); \
914 slot_ = &vec_->vec[ix_]; \
915 vec_->num -= len_; \
916 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
917 } \
918 \
919 static inline T *VEC_OP (T,address) \
920 (VEC(T) *vec_) \
921 { \
922 return vec_ ? vec_->vec : 0; \
923 } \
924 \
925 static inline unsigned VEC_OP (T,lower_bound) \
926 (VEC(T) *vec_, const T *obj_, \
927 int (*lessthan_)(const T *, const T *) VEC_ASSERT_DECL) \
928 { \
929 unsigned int len_ = VEC_OP (T, length) (vec_); \
930 unsigned int half_, middle_; \
931 unsigned int first_ = 0; \
932 while (len_ > 0) \
933 { \
934 T *middle_elem_; \
935 half_ = len_ >> 1; \
936 middle_ = first_; \
937 middle_ += half_; \
938 middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS); \
939 if (lessthan_ (middle_elem_, obj_)) \
940 { \
941 first_ = middle_; \
942 ++first_; \
943 len_ = len_ - half_ - 1; \
944 } \
945 else \
946 len_ = half_; \
947 } \
948 return first_; \
949 }
950
951 #define DEF_VEC_ALLOC_FUNC_O(T) \
952 static inline VEC(T) *VEC_OP (T,alloc) \
953 (int alloc_) \
954 { \
955 /* We must request exact size allocation, hence the negation. */ \
956 return (VEC(T) *) vec_o_reserve (NULL, -alloc_, \
957 offsetof (VEC(T),vec), sizeof (T)); \
958 } \
959 \
960 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_) \
961 { \
962 size_t len_ = vec_ ? vec_->num : 0; \
963 VEC (T) *new_vec_ = NULL; \
964 \
965 if (len_) \
966 { \
967 /* We must request exact size allocation, hence the negation. */ \
968 new_vec_ = (VEC (T) *) \
969 vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T)); \
970 \
971 new_vec_->num = len_; \
972 memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_); \
973 } \
974 return new_vec_; \
975 } \
976 \
977 static inline void VEC_OP (T,free) \
978 (VEC(T) **vec_) \
979 { \
980 if (*vec_) \
981 vec_free_ (*vec_); \
982 *vec_ = NULL; \
983 } \
984 \
985 static inline void VEC_OP (T,cleanup) \
986 (void *arg_) \
987 { \
988 VEC(T) **vec_ = arg_; \
989 if (*vec_) \
990 vec_free_ (*vec_); \
991 *vec_ = NULL; \
992 } \
993 \
994 static inline int VEC_OP (T,reserve) \
995 (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL) \
996 { \
997 int extend = !VEC_OP (T,space) (*vec_, alloc_ < 0 ? -alloc_ : alloc_ \
998 VEC_ASSERT_PASS); \
999 \
1000 if (extend) \
1001 *vec_ = (VEC(T) *) \
1002 vec_o_reserve (*vec_, alloc_, offsetof (VEC(T),vec), sizeof (T)); \
1003 \
1004 return extend; \
1005 } \
1006 \
1007 static inline void VEC_OP (T,safe_grow) \
1008 (VEC(T) **vec_, int size_ VEC_ASSERT_DECL) \
1009 { \
1010 vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_, \
1011 "safe_grow"); \
1012 VEC_OP (T,reserve) \
1013 (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS); \
1014 (*vec_)->num = size_; \
1015 } \
1016 \
1017 static inline T *VEC_OP (T,safe_push) \
1018 (VEC(T) **vec_, const T *obj_ VEC_ASSERT_DECL) \
1019 { \
1020 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
1021 \
1022 return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS); \
1023 } \
1024 \
1025 static inline T *VEC_OP (T,safe_insert) \
1026 (VEC(T) **vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL) \
1027 { \
1028 VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS); \
1029 \
1030 return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS); \
1031 }
1032
1033 #endif /* GDB_VEC_H */