1 /* Vector API for GNU compiler.
2    Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Nathan Sidwell <nathan@codesourcery.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21
22 #ifndef GCC_VEC_H
23 #define GCC_VEC_H
24
25 #include "statistics.h"         /* For MEM_STAT_DECL.  */
26
27 /* The macros here implement a set of templated vector types and
28    associated interfaces.  These templates are implemented with
29    macros, as we're not in C++ land.  The interface functions are
30    typesafe and use static inline functions, sometimes backed by
31    out-of-line generic functions.  The vectors are designed to
32    interoperate with the GTY machinery.
33
34    Because of the different behavior of structure objects, scalar
35    objects and of pointers, there are three flavors, one for each of
36    these variants.  Both the structure object and pointer variants
37    pass pointers to objects around -- in the former case the pointers
38    are stored into the vector and in the latter case the pointers are
39    dereferenced and the objects copied into the vector.  The scalar
40    object variant is suitable for int-like objects, and the vector
41    elements are returned by value.
42
43    There are both 'index' and 'iterate' accessors.  The iterator
44    returns a boolean iteration condition and updates the iteration
45    variable passed by reference.  Because the iterator will be
46    inlined, the address-of can be optimized away.
47
48    The vectors are implemented using the trailing array idiom, thus
49    they are not resizeable without changing the address of the vector
50    object itself.  This means you cannot have variables or fields of
51    vector type -- always use a pointer to a vector.  The one exception
52    is the final field of a structure, which could be a vector type.
53    You will have to use the embedded_size & embedded_init calls to
54    create such objects, and they will probably not be resizeable (so
55    don't use the 'safe' allocation variants).  The trailing array
56    idiom is used (rather than a pointer to an array of data), because,
57    if we allow NULL to also represent an empty vector, empty vectors
58    occupy minimal space in the structure containing them.
59
60    Each operation that increases the number of active elements is
61    available in 'quick' and 'safe' variants.  The former presumes that
62    there is sufficient allocated space for the operation to succeed
63    (it dies if there is not).  The latter will reallocate the
64    vector, if needed.  Reallocation causes an exponential increase in
65    vector size.  If you know you will be adding N elements, it would
66    be more efficient to use the reserve operation before adding the
67    elements with the 'quick' operation.  This will ensure there are at
68    least as many spare slots as you ask for; the allocation will grow
69    exponentially if there are too few.  If you want to reserve a
70    specific number of slots, but do not want the exponential increase
71    (for instance, you know this is the last allocation), use the
72    reserve_exact operation.  You can also create a vector of a
73    specific size from the get go.
74
75    You should prefer the push and pop operations, as they append and
76    remove from the end of the vector. If you need to remove several
77    items in one go, use the truncate operation.  The insert and remove
78    operations allow you to change elements in the middle of the
79    vector.  There are two remove operations, one which preserves the
80    element ordering 'ordered_remove', and one which does not
81    'unordered_remove'.  The latter function copies the end element
82    into the removed slot, rather than invoking a memmove operation.  The
83    'lower_bound' function will determine where an item should be
84    inserted so that the vector remains in sorted order.
85
86    When a vector type is defined, first a non-memory managed version
87    is created.  You can then define either or both garbage collected
88    and heap allocated versions.  The allocation mechanism is specified
89    when the type is defined, and is therefore part of the type.  If
90    you need both gc'd and heap allocated versions, you still must have
91    *exactly* one definition of the common non-memory managed base vector.
92
93    If you need to directly manipulate a vector, then the 'address'
94    accessor will return the address of the start of the vector.  Also
95    the 'space' predicate will tell you whether there is spare capacity
96    in the vector.  You will not normally need to use these two functions.
97
98    Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
99    get the non-memory-managed version, and then a
100    DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
101    vectors.  Variables of vector type are declared using a
102    VEC(TYPEDEF,ALLOC) macro.  The ALLOC argument specifies the
103    allocation strategy, and can be either 'gc' or 'heap' for garbage
104    collected and heap allocated respectively.  It can be 'none' to get
105    a vector that must be explicitly allocated (for instance as a
106    trailing array of another structure).  The characters O, P and I
107    indicate whether TYPEDEF is a pointer (P), object (O) or integral
108    (I) type.  Be careful to pick the correct one, as you'll get an
109    awkward and inefficient API if you use the wrong one.  There is a
110    check, which results in a compile-time warning, for the P and I
111    versions, but there is no check for the O versions, as that is not
112    possible in plain C.  Due to the way GTY works, you must annotate
113    any structures you wish to insert or reference from a vector with a
114    GTY(()) tag.  You need to do this even if you never declare the GC
115    allocated variants.
116
117    An example of their use would be,
118
119    DEF_VEC_P(tree);   // non-managed tree vector.
120    DEF_VEC_ALLOC_P(tree,gc);    // gc'd vector of tree pointers.  This must
121                                 // appear at file scope.
122
123    struct my_struct {
124      VEC(tree,gc) *v;      // A (pointer to) a vector of tree pointers.
125    };
126
127    struct my_struct *s;
128
129    if (VEC_length(tree,s->v)) { we have some contents }
130    VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
131    for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
132      { do something with elt }
133
134 */
135
136 /* Macros to invoke API calls.  A single macro works for both pointer
137    and object vectors, but the argument and return types might well be
138    different.  In each macro, T is the typedef of the vector elements,
139    and A is the allocation strategy.  The allocation strategy is only
140    present when it is required.  Some of these macros pass the vector,
141    V, by reference (by taking its address); this is noted in the
142    descriptions.  */
143
144 /* Length of vector
145    unsigned VEC_T_length(const VEC(T) *v);
146
147    Return the number of active elements in V.  V can be NULL, in which
148    case zero is returned.  */
149
150 #define VEC_length(T,V) (VEC_OP(T,base,length)(VEC_BASE(V)))
151
152
153 /* Check if vector is empty
154    int VEC_T_empty(const VEC(T) *v);
155
156    Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */
157
158 #define VEC_empty(T,V)  (VEC_length (T,V) == 0)
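
/* Illustrative sketch (not part of the original header): both macros accept
   a NULL vector, so no separate NULL test is needed.  The 'tree' gc vector
   follows the introductory example; the variable name is an assumption.

     VEC(tree,gc) *v = NULL;

     gcc_assert (VEC_empty (tree, v));         // a NULL vector is empty
     gcc_assert (VEC_length (tree, v) == 0);   // ... and has length zero
*/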
159
160
161 /* Get the final element of the vector.
162    T VEC_T_last(VEC(T) *v); // Integer
163    T VEC_T_last(VEC(T) *v); // Pointer
164    T *VEC_T_last(VEC(T) *v); // Object
165
166    Return the final element.  V must not be empty.  */
167
168 #define VEC_last(T,V)   (VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))
169
170 /* Index into vector
171    T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
172    T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
173    T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
174
175    Return the IX'th element.  IX must be in the domain of V.  */
176
177 #define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))
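
/* Illustrative sketch (assumptions as in the introductory example): for a
   pointer vector, VEC_index and VEC_last return the element by value, and
   the index must already be known to be valid.

     if (!VEC_empty (tree, v))
       {
         tree first = VEC_index (tree, v, 0);   // first element
         tree final = VEC_last (tree, v);       // last element
       }
*/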
178
179 /* Iterate over vector
180    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
181    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
182    int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
183
184    Return iteration condition and update PTR to point to the IX'th
185    element.  At the end of iteration, sets PTR to NULL.  Use this to
186    iterate over the elements of a vector as follows,
187
188      for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
189        continue;  */
190
191 #define VEC_iterate(T,V,I,P)    (VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))
192
193 /* Convenience macro for forward iteration.  */
194
195 #define FOR_EACH_VEC_ELT(T, V, I, P)            \
196   for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))
197
198 /* Convenience macro for reverse iteration.  */
199
200 #define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P) \
201   for (I = VEC_length (T, (V)) - 1;           \
202        VEC_iterate (T, (V), (I), (P));    \
203        (I)--)
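
/* Illustrative sketch: the convenience macros expand to the VEC_iterate
   loop shown above.  ELT receives each element by value for integer and
   pointer vectors; process_decl is a hypothetical per-element routine.

     unsigned ix;
     tree elt;

     FOR_EACH_VEC_ELT (tree, v, ix, elt)
       process_decl (elt);                  // front to back

     FOR_EACH_VEC_ELT_REVERSE (tree, v, ix, elt)
       process_decl (elt);                  // back to front
*/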
204
205 /* Allocate new vector.
206    VEC(T,A) *VEC_T_A_alloc(int reserve);
207
208    Allocate a new vector with space for RESERVE objects.  If RESERVE
209    is zero, NO vector is created.  */
210
211 #define VEC_alloc(T,A,N)        (VEC_OP(T,A,alloc)(N MEM_STAT_INFO))
212
213 /* Free a vector.
214    void VEC_T_A_free(VEC(T,A) *&);
215
216    Free a vector and set it to NULL.  */
217
218 #define VEC_free(T,A,V) (VEC_OP(T,A,free)(&V))
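
/* Illustrative sketch of an allocate/use/free lifecycle for a heap vector.
   It assumes DEF_VEC_ALLOC_P(tree,heap) is in effect; 'scratch' and 'decl'
   are illustrative names.

     VEC(tree,heap) *scratch = VEC_alloc (tree, heap, 16);  // room for 16

     VEC_safe_push (tree, heap, scratch, decl);
     // ... use the vector ...
     VEC_free (tree, heap, scratch);   // releases storage, sets scratch to NULL
*/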
219
220 /* Use these to determine the required size and initialization of a
221    vector embedded within another structure (as the final member).
222
223    size_t VEC_T_embedded_size(int reserve);
224    void VEC_T_embedded_init(VEC(T) *v, int reserve);
225
226    These allow the caller to perform the memory allocation.  */
227
228 #define VEC_embedded_size(T,N)   (VEC_OP(T,base,embedded_size)(N))
229 #define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))
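
/* Illustrative sketch of an embedded 'none' vector as the trailing member of
   a structure, as described in the introduction.  The structure, its name,
   and the capacity of 8 are hypothetical.

     struct fixup_list {
       int kind;
       VEC(tree,none) vals;                    // must be the final member
     };

     size_t sz = offsetof (struct fixup_list, vals)
                 + VEC_embedded_size (tree, 8);
     struct fixup_list *p = (struct fixup_list *) xmalloc (sz);
     VEC_embedded_init (tree, &p->vals, 8);    // capacity 8, length 0
*/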
230
231 /* Copy a vector.
232    VEC(T,A) *VEC_T_A_copy(VEC(T) *);
233
234    Copy the live elements of a vector into a new vector.  The new and
235    old vectors need not be allocated by the same mechanism.  */
236
237 #define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))
238
239 /* Determine if a vector has additional capacity.
240
241    int VEC_T_space (VEC(T) *v,int reserve)
242
243    If V has space for RESERVE additional entries, return nonzero.  You
244    usually only need to use this if you are doing your own vector
245    reallocation, for instance on an embedded vector.  This returns
246    nonzero exactly when a subsequent VEC_T_reserve with the same count
247    would not need to reallocate.  */
248
249 #define VEC_space(T,V,R) \
250         (VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))
251
252 /* Reserve space.
253    int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);
254
255    Ensure that V has at least RESERVE slots available.  This will
256    create additional headroom.  Note this can cause V to be
257    reallocated.  Returns nonzero iff reallocation actually
258    occurred.  */
259
260 #define VEC_reserve(T,A,V,R)    \
261         (VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
262
263 /* Reserve space exactly.
264    int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);
265
266    Ensure that V has at least RESERVE slots available.  This will not
267    create additional headroom.  Note this can cause V to be
268    reallocated.  Returns nonzero iff reallocation actually
269    occurred.  */
270
271 #define VEC_reserve_exact(T,A,V,R)      \
272         (VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
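
/* Illustrative sketch of the reserve-then-quick_push pattern recommended in
   the introduction, for when the number of additions is known up front.
   'n' and 'elts' are assumptions.

     VEC_reserve_exact (tree, gc, v, n);     // exactly n spare slots
     for (i = 0; i < n; i++)
       VEC_quick_push (tree, v, elts[i]);    // cannot outgrow the reservation
*/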
273
274 /* Copy elements with no reallocation
275    void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Integer
276    void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Pointer
277    void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Object
278
279    Copy the elements in SRC to the end of DST as if by memcpy.  DST and
280    SRC need not be allocated with the same mechanism, although they most
281    often will be.  DST is assumed to have sufficient headroom
282    available.  */
283
284 #define VEC_splice(T,DST,SRC)                   \
285   (VEC_OP(T,base,splice)(VEC_BASE(DST), VEC_BASE(SRC) VEC_CHECK_INFO))
286
287 /* Copy elements with reallocation
288    void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Integer
289    void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Pointer
290    void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Object
291
292    Copy the elements in SRC to the end of DST as if by memcpy.  DST and
293    SRC need not be allocated with the same mechanism, although they most
294    often will be.  DST need not have sufficient headroom and will be
295    reallocated if needed.  */
296
297 #define VEC_safe_splice(T,A,DST,SRC)                                    \
298   (VEC_OP(T,A,safe_splice)(&(DST), VEC_BASE(SRC) VEC_CHECK_INFO MEM_STAT_INFO))
299   
300 /* Push object with no reallocation
301    T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
302    T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
303    T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
304
305    Push a new element onto the end and return a pointer to the slot
306    filled in. For object vectors, the new value can be NULL, in which
307    case NO initialization is performed.  There must
308    be sufficient space in the vector.  */
309
310 #define VEC_quick_push(T,V,O)   \
311         (VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))
312
313 /* Push object with reallocation
314    T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer
315    T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
316    T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object
317
318    Push a new element onto the end and return a pointer to the slot
319    filled in. For object vectors, the new value can be NULL, in which
320    case NO initialization is performed.  Reallocates V, if needed.  */
321
322 #define VEC_safe_push(T,A,V,O)          \
323         (VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
324
325 /* Pop element off end
326    T VEC_T_pop (VEC(T) *v);             // Integer
327    T VEC_T_pop (VEC(T) *v);             // Pointer
328    void VEC_T_pop (VEC(T) *v);          // Object
329
330    Pop the last element off the end.  Returns the element popped, for
331    integer and pointer vectors.  */
332
333 #define VEC_pop(T,V)    (VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))
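
/* Illustrative sketch: push and pop make a vector usable as a worklist
   stack.  'root' and the visit routine are hypothetical.

     VEC(tree,heap) *work = VEC_alloc (tree, heap, 32);

     VEC_safe_push (tree, heap, work, root);
     while (!VEC_empty (tree, work))
       {
         tree t = VEC_pop (tree, work);   // returns the popped pointer
         visit (t);
       }
     VEC_free (tree, heap, work);
*/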
334
335 /* Truncate to specific length
336    void VEC_T_truncate (VEC(T) *v, unsigned len);
337
338    Set the length as specified.  The new length must be less than or
339    equal to the current length.  This is an O(1) operation.  */
340
341 #define VEC_truncate(T,V,I)             \
342         (VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))
343
344 /* Grow to a specific length.
345    void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);
346
347    Grow the vector to a specific length.  LEN must be at least as
348    large as the current length.  The new elements are
349    uninitialized.  */
350
351 #define VEC_safe_grow(T,A,V,I)          \
352         (VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
353
354 /* Grow to a specific length.
355    void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);
356
357    Grow the vector to a specific length.  LEN must be at least as
358    large as the current length.  The new elements are
359    initialized to zero.  */
360
361 #define VEC_safe_grow_cleared(T,A,V,I)          \
362         (VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
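
/* Illustrative sketch: safe_grow_cleared suits index-addressed tables, since
   the new slots come back zeroed.  'table' and 'n' are assumptions.

     VEC_safe_grow_cleared (tree, heap, table, n);
     gcc_assert (VEC_index (tree, table, n - 1) == NULL_TREE);  // new slot zeroed
*/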
363
364 /* Replace element
365    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
366    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
367    T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);  // Object
368
369    Replace the IXth element of V with a new value, VAL.  For integer and
370    pointer vectors returns the original value.  For object vectors returns a
371    pointer to the new value.  For object vectors the new value can be
372    NULL, in which case no overwriting of the slot is actually
373    performed.  */
374
375 #define VEC_replace(T,V,I,O)            \
376         (VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))
377
378 /* Insert object with no reallocation
379    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
380    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
381    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
382
383    Insert an element, VAL, at the IXth position of V. Return a pointer
384    to the slot created.  For object vectors, the new value can be
385    NULL, in which case no initialization of the inserted slot takes
386    place. There must be sufficient space.  */
387
388 #define VEC_quick_insert(T,V,I,O)       \
389         (VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))
390
391 /* Insert object with reallocation
392    T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
393    T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
394    T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
395
396    Insert an element, VAL, at the IXth position of V. Return a pointer
397    to the slot created.  For object vectors, the new value can be
398    NULL, in which case no initialization of the inserted slot takes
399    place. Reallocate V, if necessary.  */
400
401 #define VEC_safe_insert(T,A,V,I,O)      \
402         (VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))
403
404 /* Remove element retaining order
405    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
406    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
407    void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
408
409    Remove an element from the IXth position of V. Ordering of
410    remaining elements is preserved.  For integer and pointer vectors
411    returns the removed element.  This is an O(N) operation due to memmove.  */
412
413 #define VEC_ordered_remove(T,V,I)       \
414         (VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
415
416 /* Remove element destroying order
417    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
418    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
419    void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
420
421    Remove an element from the IXth position of V. Ordering of
422    remaining elements is destroyed.  For integer and pointer vectors
423    returns the removed element.  This is an O(1) operation.  */
424
425 #define VEC_unordered_remove(T,V,I)     \
426         (VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
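
/* Illustrative sketch contrasting the two remove flavors; 'ix' is an
   assumption.  Use ordered_remove when relative order matters and
   unordered_remove when it does not.

     tree gone;

     gone = VEC_ordered_remove (tree, v, ix);    // keeps order, O(N) memmove
     gone = VEC_unordered_remove (tree, v, ix);  // last element fills the hole
*/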
427
428 /* Remove a block of elements
429    void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
430
431    Remove LEN elements starting at the IXth.  Ordering is retained.
432    This is an O(N) operation due to memmove.  */
433
434 #define VEC_block_remove(T,V,I,L)       \
435         (VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))
436
437 /* Get the address of the array of elements
438    T *VEC_T_address (VEC(T) *v)
439
440    If you need to directly manipulate the array (for instance, you
441    want to feed it to qsort), use this accessor.  */
442
443 #define VEC_address(T,V)                (VEC_OP(T,base,address)(VEC_BASE(V)))
444
445 /* Conveniently sort the contents of the vector with qsort.
446    void VEC_qsort (VEC(T) *v, int (*cmp_func)(const void *, const void *))  */
447
448 #define VEC_qsort(T,V,CMP) qsort(VEC_address (T,V), VEC_length(T,V),    \
449                                  sizeof (T), CMP)
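
/* Illustrative sketch of sorting a pointer vector with VEC_qsort.  The
   callback receives pointers to the vector slots; this particular
   comparison function is hypothetical.

     static int
     cmp_by_uid (const void *a_, const void *b_)
     {
       const_tree a = *(const_tree const *) a_;
       const_tree b = *(const_tree const *) b_;
       return DECL_UID (a) - DECL_UID (b);
     }

     // ...
     VEC_qsort (tree, v, cmp_by_uid);
*/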
450
451 /* Find the first index in the vector not less than the object.
452    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
453                                bool (*lessthan) (const T, const T)); // Integer
454    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
455                                bool (*lessthan) (const T, const T)); // Pointer
456    unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
457                                bool (*lessthan) (const T*, const T*)); // Object
458
459    Find the first position in which VAL could be inserted without
460    changing the ordering of V.  LESSTHAN is a function that returns
461    true if the first argument is strictly less than the second.  */
462
463 #define VEC_lower_bound(T,V,O,LT)    \
464        (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
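
/* Illustrative sketch: keeping a vector sorted by combining lower_bound with
   safe_insert.  The ordering callback and 'decl' are hypothetical.

     static bool
     uid_less (const tree a, const tree b)
     {
       return DECL_UID (a) < DECL_UID (b);
     }

     // ...
     unsigned pos = VEC_lower_bound (tree, v, decl, uid_less);
     VEC_safe_insert (tree, gc, v, pos, decl);
*/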
465
466 /* Reallocate an array of elements with prefix.  */
467 extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
468 extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
469 extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
470 extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
471                                      MEM_STAT_DECL);
472 extern void ggc_free (void *);
473 #define vec_gc_free(V) ggc_free (V)
474 extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
475 extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
476 extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
477 extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
478                                        MEM_STAT_DECL);
479 extern void dump_vec_loc_statistics (void);
480 #ifdef GATHER_STATISTICS
481 void vec_heap_free (void *);
482 #else
483 /* Avoid problems with frontends that #define free(x).  */
484 #define vec_heap_free(V) (free) (V)
485 #endif
486
487 #if ENABLE_CHECKING
488 #define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
489 #define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
490 #define VEC_CHECK_PASS ,file_,line_,function_
491
492 #define VEC_ASSERT(EXPR,OP,T,A) \
493   (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
494
495 extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
496      ATTRIBUTE_NORETURN;
497 #define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
498 #else
499 #define VEC_CHECK_INFO
500 #define VEC_CHECK_DECL
501 #define VEC_CHECK_PASS
502 #define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
503 #endif
504
505 /* Note: gengtype has hardwired knowledge of the expansions of the
506    VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros.  If you change the
507    expansions of these macros you may need to change gengtype too.  */
508
509 typedef struct GTY(()) vec_prefix
510 {
511   unsigned num;
512   unsigned alloc;
513 } vec_prefix;
514
515 #define VEC(T,A) VEC_##T##_##A
516 #define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP
517
518 /* Base of vector type, not user visible.  */
519 #define VEC_T(T,B)                                                        \
520 typedef struct VEC(T,B)                                                   \
521 {                                                                         \
522   struct vec_prefix prefix;                                               \
523   T vec[1];                                                               \
524 } VEC(T,B)
525
526 #define VEC_T_GTY(T,B)                                                    \
527 typedef struct GTY(()) VEC(T,B)                                           \
528 {                                                                         \
529   struct vec_prefix prefix;                                               \
530   T GTY ((length ("%h.prefix.num"))) vec[1];                              \
531 } VEC(T,B)
532
533 /* Derived vector type, user visible.  */
534 #define VEC_TA_GTY(T,B,A,GTY)                                             \
535 typedef struct GTY VEC(T,A)                                               \
536 {                                                                         \
537   VEC(T,B) base;                                                          \
538 } VEC(T,A)
539
540 #define VEC_TA(T,B,A)                                                     \
541 typedef struct VEC(T,A)                                                   \
542 {                                                                         \
543   VEC(T,B) base;                                                          \
544 } VEC(T,A)
545
546 /* Convert to base type.  */
547 #define VEC_BASE(P)  ((P) ? &(P)->base : 0)
548
549 /* Vector of integer-like object.  */
550 #define DEF_VEC_I(T)                                                      \
551 static inline void VEC_OP (T,must_be,integral_type) (void)                \
552 {                                                                         \
553   (void)~(T)0;                                                            \
554 }                                                                         \
555                                                                           \
556 VEC_T(T,base);                                                            \
557 VEC_TA(T,base,none);                                                      \
558 DEF_VEC_FUNC_P(T)                                                         \
559 struct vec_swallow_trailing_semi
560 #define DEF_VEC_ALLOC_I(T,A)                                              \
561 VEC_TA(T,base,A);                                                         \
562 DEF_VEC_ALLOC_FUNC_I(T,A)                                                 \
563 DEF_VEC_NONALLOC_FUNCS_I(T,A)                                             \
564 struct vec_swallow_trailing_semi
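
/* Illustrative sketch of defining and using an integer-flavored vector.
   Whether these exact definitions already exist elsewhere is not assumed;
   names are for illustration.

     DEF_VEC_I(int);                   // base type, at file scope
     DEF_VEC_ALLOC_I(int,heap);        // heap-allocated variant

     VEC(int,heap) *stack = VEC_alloc (int, heap, 4);
     VEC_safe_push (int, heap, stack, 42);
     int top = VEC_last (int, stack);  // 42, returned by value
     VEC_free (int, heap, stack);
*/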
565
566 /* Vector of pointer to object.  */
567 #define DEF_VEC_P(T)                                                      \
568 static inline void VEC_OP (T,must_be,pointer_type) (void)                 \
569 {                                                                         \
570   (void)((T)1 == (void *)1);                                              \
571 }                                                                         \
572                                                                           \
573 VEC_T_GTY(T,base);                                                        \
574 VEC_TA(T,base,none);                                                      \
575 DEF_VEC_FUNC_P(T)                                                         \
576 struct vec_swallow_trailing_semi
577 #define DEF_VEC_ALLOC_P(T,A)                                              \
578 VEC_TA(T,base,A);                                                         \
579 DEF_VEC_ALLOC_FUNC_P(T,A)                                                 \
580 DEF_VEC_NONALLOC_FUNCS_P(T,A)                                             \
581 struct vec_swallow_trailing_semi
582
583 #define DEF_VEC_FUNC_P(T)                                                 \
584 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_)   \
585 {                                                                         \
586   return vec_ ? vec_->prefix.num : 0;                                             \
587 }                                                                         \
588                                                                           \
589 static inline T VEC_OP (T,base,last)                                      \
590      (const VEC(T,base) *vec_ VEC_CHECK_DECL)                             \
591 {                                                                         \
592   VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base);                         \
593                                                                           \
594   return vec_->vec[vec_->prefix.num - 1];                                         \
595 }                                                                         \
596                                                                           \
597 static inline T VEC_OP (T,base,index)                                     \
598      (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)               \
599 {                                                                         \
600   VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base);                  \
601                                                                           \
602   return vec_->vec[ix_];                                                  \
603 }                                                                         \
604                                                                           \
605 static inline int VEC_OP (T,base,iterate)                                 \
606      (const VEC(T,base) *vec_, unsigned ix_, T *ptr)                      \
607 {                                                                         \
608   if (vec_ && ix_ < vec_->prefix.num)                                             \
609     {                                                                     \
610       *ptr = vec_->vec[ix_];                                              \
611       return 1;                                                           \
612     }                                                                     \
613   else                                                                    \
614     {                                                                     \
615       *ptr = (T) 0;                                                       \
616       return 0;                                                           \
617     }                                                                     \
618 }                                                                         \
619                                                                           \
620 static inline size_t VEC_OP (T,base,embedded_size)                        \
621      (int alloc_)                                                         \
622 {                                                                         \
623   return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);                 \
624 }                                                                         \
625                                                                           \
626 static inline void VEC_OP (T,base,embedded_init)                          \
627      (VEC(T,base) *vec_, int alloc_)                                      \
628 {                                                                         \
629   vec_->prefix.num = 0;                                                   \
630   vec_->prefix.alloc = alloc_;                                                    \
631 }                                                                         \
632                                                                           \
633 static inline int VEC_OP (T,base,space)                                   \
634      (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)                       \
635 {                                                                         \
636   VEC_ASSERT (alloc_ >= 0, "space", T, base);                             \
637   return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_;      \
638 }                                                                         \
639                                                                           \
640 static inline void VEC_OP(T,base,splice)                                  \
641      (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL)                \
642 {                                                                         \
643   if (src_)                                                               \
644     {                                                                     \
645       unsigned len_ = src_->prefix.num;                                   \
646       VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base);      \
647                                                                           \
648       memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T));    \
649       dst_->prefix.num += len_;                                           \
650     }                                                                     \
651 }                                                                         \
652                                                                           \
653 static inline T *VEC_OP (T,base,quick_push)                               \
654      (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL)                           \
655 {                                                                         \
656   T *slot_;                                                               \
657                                                                           \
658   VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base);            \
659   slot_ = &vec_->vec[vec_->prefix.num++];                                         \
660   *slot_ = obj_;                                                          \
661                                                                           \
662   return slot_;                                                           \
663 }                                                                         \
664                                                                           \
665 static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL)    \
666 {                                                                         \
667   T obj_;                                                                 \
668                                                                           \
669   VEC_ASSERT (vec_->prefix.num, "pop", T, base);                                  \
670   obj_ = vec_->vec[--vec_->prefix.num];                                   \
671                                                                           \
672   return obj_;                                                            \
673 }                                                                         \
674                                                                           \
675 static inline void VEC_OP (T,base,truncate)                               \
676      (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)                   \
677 {                                                                         \
678   VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base);    \
679   if (vec_)                                                               \
680     vec_->prefix.num = size_;                                                     \
681 }                                                                         \
682                                                                           \
683 static inline T VEC_OP (T,base,replace)                                   \
684      (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)             \
685 {                                                                         \
686   T old_obj_;                                                             \
687                                                                           \
688   VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base);                        \
689   old_obj_ = vec_->vec[ix_];                                              \
690   vec_->vec[ix_] = obj_;                                                  \
691                                                                           \
692   return old_obj_;                                                        \
693 }                                                                         \
694                                                                           \
695 static inline T *VEC_OP (T,base,quick_insert)                             \
696      (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)             \
697 {                                                                         \
698   T *slot_;                                                               \
699                                                                           \
700   VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base);                  \
701   VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base);                        \
702   slot_ = &vec_->vec[ix_];                                                \
703   memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T));            \
704   *slot_ = obj_;                                                          \
705                                                                           \
706   return slot_;                                                           \
707 }                                                                         \
708                                                                           \
709 static inline T VEC_OP (T,base,ordered_remove)                            \
710      (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
711 {                                                                         \
712   T *slot_;                                                               \
713   T obj_;                                                                 \
714                                                                           \
715   VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);                         \
716   slot_ = &vec_->vec[ix_];                                                \
717   obj_ = *slot_;                                                          \
718   memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T));            \
719                                                                           \
720   return obj_;                                                            \
721 }                                                                         \
722                                                                           \
723 static inline T VEC_OP (T,base,unordered_remove)                          \
724      (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
725 {                                                                         \
726   T *slot_;                                                               \
727   T obj_;                                                                 \
728                                                                           \
729   VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);                         \
730   slot_ = &vec_->vec[ix_];                                                \
731   obj_ = *slot_;                                                          \
732   *slot_ = vec_->vec[--vec_->prefix.num];                                         \
733                                                                           \
734   return obj_;                                                            \
735 }                                                                         \
736                                                                           \
737 static inline void VEC_OP (T,base,block_remove)                           \
738      (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL)      \
739 {                                                                         \
740   T *slot_;                                                               \
741                                                                           \
742   VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base);   \
743   slot_ = &vec_->vec[ix_];                                                \
744   vec_->prefix.num -= len_;                                                       \
745   memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T));   \
746 }                                                                         \
747                                                                           \
748 static inline T *VEC_OP (T,base,address)                                  \
749      (VEC(T,base) *vec_)                                                  \
750 {                                                                         \
751   return vec_ ? vec_->vec : 0;                                            \
752 }                                                                         \
753                                                                           \
754 static inline unsigned VEC_OP (T,base,lower_bound)                        \
755      (VEC(T,base) *vec_, const T obj_,                                    \
756       bool (*lessthan_)(const T, const T) VEC_CHECK_DECL)                 \
757 {                                                                         \
758    unsigned int len_ = VEC_OP (T,base, length) (vec_);                    \
759    unsigned int half_, middle_;                                           \
760    unsigned int first_ = 0;                                               \
761    while (len_ > 0)                                                       \
762      {                                                                    \
763         T middle_elem_;                                                   \
764         half_ = len_ >> 1;                                                \
765         middle_ = first_;                                                 \
766         middle_ += half_;                                                 \
767         middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
768         if (lessthan_ (middle_elem_, obj_))                               \
769           {                                                               \
770              first_ = middle_;                                            \
771              ++first_;                                                    \
772              len_ = len_ - half_ - 1;                                     \
773           }                                                               \
774         else                                                              \
775           len_ = half_;                                                   \
776      }                                                                    \
777    return first_;                                                         \
778 }
779
780 #define DEF_VEC_ALLOC_FUNC_P(T,A)                                         \
781 static inline VEC(T,A) *VEC_OP (T,A,alloc)                                \
782      (int alloc_ MEM_STAT_DECL)                                           \
783 {                                                                         \
784   return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_             \
785                                                  PASS_MEM_STAT);          \
786 }
787
788
789 #define DEF_VEC_NONALLOC_FUNCS_P(T,A)                                     \
790 static inline void VEC_OP (T,A,free)                                      \
791      (VEC(T,A) **vec_)                                                    \
792 {                                                                         \
793   if (*vec_)                                                              \
794     vec_##A##_free (*vec_);                                               \
795   *vec_ = NULL;                                                           \
796 }                                                                         \
797                                                                           \
798 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
799 {                                                                         \
800   size_t len_ = vec_ ? vec_->prefix.num : 0;                                      \
801   VEC (T,A) *new_vec_ = NULL;                                             \
802                                                                           \
803   if (len_)                                                               \
804     {                                                                     \
805       new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact                  \
806                                (NULL, len_ PASS_MEM_STAT));               \
807                                                                           \
808       new_vec_->base.prefix.num = len_;                                   \
809       memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_);          \
810     }                                                                     \
811   return new_vec_;                                                        \
812 }                                                                         \
813                                                                           \
814 static inline int VEC_OP (T,A,reserve)                                    \
815      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
816 {                                                                         \
817   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
818                                        VEC_CHECK_PASS);                   \
819                                                                           \
820   if (extend)                                                             \
821     *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
822                                                                           \
823   return extend;                                                          \
824 }                                                                         \
825                                                                           \
826 static inline int VEC_OP (T,A,reserve_exact)                              \
827      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
828 {                                                                         \
829   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
830                                        VEC_CHECK_PASS);                   \
831                                                                           \
832   if (extend)                                                             \
833     *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_         \
834                                                     PASS_MEM_STAT);       \
835                                                                           \
836   return extend;                                                          \
837 }                                                                         \
838                                                                           \
839 static inline void VEC_OP (T,A,safe_grow)                                 \
840      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
841 {                                                                         \
842   VEC_ASSERT (size_ >= 0                                                  \
843               && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
844                                                  "grow", T, A);           \
845   VEC_OP (T,A,reserve_exact) (vec_,                                       \
846                               size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
847                               VEC_CHECK_PASS PASS_MEM_STAT);              \
848   VEC_BASE (*vec_)->prefix.num = size_;                                   \
849 }                                                                         \
850                                                                           \
851 static inline void VEC_OP (T,A,safe_grow_cleared)                         \
852      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
853 {                                                                         \
854   int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_);                    \
855   VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT);      \
856   memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0,         \
857           sizeof (T) * (size_ - oldsize));                                \
858 }                                                                         \
859                                                                           \
860 static inline void VEC_OP(T,A,safe_splice)                                \
861      (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL)    \
862 {                                                                         \
863   if (src_)                                                               \
864     {                                                                     \
865       VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num                          \
866                                   VEC_CHECK_PASS MEM_STAT_INFO);          \
867                                                                           \
868       VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_                      \
869                               VEC_CHECK_PASS);                            \
870     }                                                                     \
871 }                                                                         \
872                                                                           \
873 static inline T *VEC_OP (T,A,safe_push)                                   \
874      (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL)               \
875 {                                                                         \
876   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
877                                                                           \
878   return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
879 }                                                                         \
880                                                                           \
881 static inline T *VEC_OP (T,A,safe_insert)                                 \
882      (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL)  \
883 {                                                                         \
884   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
885                                                                           \
886   return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_         \
887                                        VEC_CHECK_PASS);                   \
888 }
889
890 /* Vector of object.  */
891 #define DEF_VEC_O(T)                                                      \
892 VEC_T_GTY(T,base);                                                        \
893 VEC_TA(T,base,none);                                              \
894 DEF_VEC_FUNC_O(T)                                                         \
895 struct vec_swallow_trailing_semi
896 #define DEF_VEC_ALLOC_O(T,A)                                              \
897 VEC_TA(T,base,A);                                                         \
898 DEF_VEC_ALLOC_FUNC_O(T,A)                                                 \
899 DEF_VEC_NONALLOC_FUNCS_O(T,A)                                             \
900 struct vec_swallow_trailing_semi
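
/* Illustrative sketch of a vector of structure objects.  Note the GTY(())
   marker required by gengtype even for non-GC vectors, as described in the
   introduction; the structure itself is hypothetical.

     typedef struct GTY(()) edge_note_d {
       int from, to;
     } edge_note;

     DEF_VEC_O(edge_note);
     DEF_VEC_ALLOC_O(edge_note,heap);

     VEC(edge_note,heap) *notes = VEC_alloc (edge_note, heap, 8);
     edge_note tmp = { 1, 2 };
     VEC_safe_push (edge_note, heap, notes, &tmp);        // object is copied in
     edge_note *slot = VEC_index (edge_note, notes, 0);   // pointer into the vector
*/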
901
902 #define DEF_VEC_FUNC_O(T)                                                 \
903 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_)   \
904 {                                                                         \
905   return vec_ ? vec_->prefix.num : 0;                                             \
906 }                                                                         \
907                                                                           \
908 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL)  \
909 {                                                                         \
910   VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base);                         \
911                                                                           \
912   return &vec_->vec[vec_->prefix.num - 1];                                        \
913 }                                                                         \
914                                                                           \
915 static inline T *VEC_OP (T,base,index)                                    \
916      (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
917 {                                                                         \
918   VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base);                  \
919                                                                           \
920   return &vec_->vec[ix_];                                                 \
921 }                                                                         \
922                                                                           \
923 static inline int VEC_OP (T,base,iterate)                                 \
924      (VEC(T,base) *vec_, unsigned ix_, T **ptr)                           \
925 {                                                                         \
926   if (vec_ && ix_ < vec_->prefix.num)                                             \
927     {                                                                     \
928       *ptr = &vec_->vec[ix_];                                             \
929       return 1;                                                           \
930     }                                                                     \
931   else                                                                    \
932     {                                                                     \
933       *ptr = 0;                                                           \
934       return 0;                                                           \
935     }                                                                     \
936 }                                                                         \
937                                                                           \
938 static inline size_t VEC_OP (T,base,embedded_size)                        \
939      (int alloc_)                                                         \
940 {                                                                         \
941   return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);                 \
942 }                                                                         \
943                                                                           \
944 static inline void VEC_OP (T,base,embedded_init)                          \
945      (VEC(T,base) *vec_, int alloc_)                                      \
946 {                                                                         \
947   vec_->prefix.num = 0;                                                   \
948   vec_->prefix.alloc = alloc_;                                                    \
949 }                                                                         \
950                                                                           \
951 static inline int VEC_OP (T,base,space)                                   \
952      (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)                       \
953 {                                                                         \
954   VEC_ASSERT (alloc_ >= 0, "space", T, base);                             \
955   return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_;      \
956 }                                                                         \
957                                                                           \
958 static inline void VEC_OP(T,base,splice)                                  \
959      (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL)                \
960 {                                                                         \
961   if (src_)                                                               \
962     {                                                                     \
963       unsigned len_ = src_->prefix.num;                                   \
964       VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base);      \
965                                                                           \
966       memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T));    \
967       dst_->prefix.num += len_;                                           \
968     }                                                                     \
969 }                                                                         \
970                                                                           \
971 static inline T *VEC_OP (T,base,quick_push)                               \
972      (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL)                    \
973 {                                                                         \
974   T *slot_;                                                               \
975                                                                           \
976   VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base);            \
977   slot_ = &vec_->vec[vec_->prefix.num++];                                         \
978   if (obj_)                                                               \
979     *slot_ = *obj_;                                                       \
980                                                                           \
981   return slot_;                                                           \
982 }                                                                         \
983                                                                           \
984 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
985 {                                                                         \
986   VEC_ASSERT (vec_->prefix.num, "pop", T, base);                                  \
987   --vec_->prefix.num;                                                             \
988 }                                                                         \
989                                                                           \
990 static inline void VEC_OP (T,base,truncate)                               \
991      (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)                   \
992 {                                                                         \
993   VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base);    \
994   if (vec_)                                                               \
995     vec_->prefix.num = size_;                                                     \
996 }                                                                         \
997                                                                           \
998 static inline T *VEC_OP (T,base,replace)                                  \
999      (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL)      \
1000 {                                                                         \
1001   T *slot_;                                                               \
1002                                                                           \
1003   VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base);                        \
1004   slot_ = &vec_->vec[ix_];                                                \
1005   if (obj_)                                                               \
1006     *slot_ = *obj_;                                                       \
1007                                                                           \
1008   return slot_;                                                           \
1009 }                                                                         \
1010                                                                           \
1011 static inline T *VEC_OP (T,base,quick_insert)                             \
1012      (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL)      \
1013 {                                                                         \
1014   T *slot_;                                                               \
1015                                                                           \
1016   VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base);                  \
1017   VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base);                        \
1018   slot_ = &vec_->vec[ix_];                                                \
1019   memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T));            \
1020   if (obj_)                                                               \
1021     *slot_ = *obj_;                                                       \
1022                                                                           \
1023   return slot_;                                                           \
1024 }                                                                         \
1025                                                                           \
1026 static inline void VEC_OP (T,base,ordered_remove)                         \
1027      (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
1028 {                                                                         \
1029   T *slot_;                                                               \
1030                                                                           \
1031   VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);                         \
1032   slot_ = &vec_->vec[ix_];                                                \
1033   memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T));            \
1034 }                                                                         \
1035                                                                           \
1036 static inline void VEC_OP (T,base,unordered_remove)                       \
1037      (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
1038 {                                                                         \
1039   VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);                         \
1040   vec_->vec[ix_] = vec_->vec[--vec_->prefix.num];                                 \
1041 }                                                                         \
1042                                                                           \
1043 static inline void VEC_OP (T,base,block_remove)                           \
1044      (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL)      \
1045 {                                                                         \
1046   T *slot_;                                                               \
1047                                                                           \
1048   VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base);   \
1049   slot_ = &vec_->vec[ix_];                                                \
1050   vec_->prefix.num -= len_;                                                       \
1051   memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T));   \
1052 }                                                                         \
1053                                                                           \
1054 static inline T *VEC_OP (T,base,address)                                  \
1055      (VEC(T,base) *vec_)                                                  \
1056 {                                                                         \
1057   return vec_ ? vec_->vec : 0;                                            \
1058 }                                                                         \
1059                                                                           \
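/* Binary search: return the index of the first element of VEC_ that    \
   does not compare less than *OBJ_ under LESSTHAN_; every element      \
   before that index is less than *OBJ_.  */                            \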
1060 static inline unsigned VEC_OP (T,base,lower_bound)                        \
1061      (VEC(T,base) *vec_, const T *obj_,                                   \
1062       bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL)             \
1063 {                                                                         \
1064    unsigned int len_ = VEC_OP (T, base, length) (vec_);                   \
1065    unsigned int half_, middle_;                                           \
1066    unsigned int first_ = 0;                                               \
1067    while (len_ > 0)                                                       \
1068      {                                                                    \
1069         T *middle_elem_;                                                  \
1070         half_ = len_ >> 1;                                                \
1071         middle_ = first_;                                                 \
1072         middle_ += half_;                                                 \
1073         middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
1074         if (lessthan_ (middle_elem_, obj_))                               \
1075           {                                                               \
1076              first_ = middle_;                                            \
1077              ++first_;                                                    \
1078              len_ = len_ - half_ - 1;                                     \
1079           }                                                               \
1080         else                                                              \
1081           len_ = half_;                                                   \
1082      }                                                                    \
1083    return first_;                                                         \
1084 }
1085
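/* Allocator for object ('O') vectors under allocation strategy A:
   create a new vector with exact room for ALLOC_ elements.  */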
1086 #define DEF_VEC_ALLOC_FUNC_O(T,A)                                         \
1087 static inline VEC(T,A) *VEC_OP (T,A,alloc)                                \
1088      (int alloc_ MEM_STAT_DECL)                                           \
1089 {                                                                         \
1090   return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_,            \
1091                                                  offsetof (VEC(T,A),base.vec), \
1092                                                  sizeof (T)               \
1093                                                  PASS_MEM_STAT);          \
1094 }
1095
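/* The remaining allocation-strategy-specific operations for object
   vectors: copy, free, reserve, reserve_exact, safe_grow,
   safe_grow_cleared, safe_splice, safe_push and safe_insert.  */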
1096 #define DEF_VEC_NONALLOC_FUNCS_O(T,A)                                     \
1097 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1098 {                                                                         \
1099   size_t len_ = vec_ ? vec_->prefix.num : 0;                                      \
1100   VEC (T,A) *new_vec_ = NULL;                                             \
1101                                                                           \
1102   if (len_)                                                               \
1103     {                                                                     \
1104       new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact                  \
1105                                (NULL, len_,                               \
1106                                 offsetof (VEC(T,A),base.vec), sizeof (T)  \
1107                                 PASS_MEM_STAT));                          \
1108                                                                           \
1109       new_vec_->base.prefix.num = len_;                                   \
1110       memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_);          \
1111     }                                                                     \
1112   return new_vec_;                                                        \
1113 }                                                                         \
1114                                                                           \
1115 static inline void VEC_OP (T,A,free)                                      \
1116      (VEC(T,A) **vec_)                                                    \
1117 {                                                                         \
1118   if (*vec_)                                                              \
1119     vec_##A##_free (*vec_);                                               \
1120   *vec_ = NULL;                                                           \
1121 }                                                                         \
1122                                                                           \
1123 static inline int VEC_OP (T,A,reserve)                                    \
1124      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
1125 {                                                                         \
1126   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
1127                                        VEC_CHECK_PASS);                   \
1128                                                                           \
1129   if (extend)                                                             \
1130     *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_,              \
1131                                               offsetof (VEC(T,A),base.vec),\
1132                                               sizeof (T)                  \
1133                                               PASS_MEM_STAT);             \
1134                                                                           \
1135   return extend;                                                          \
1136 }                                                                         \
1137                                                                           \
1138 static inline int VEC_OP (T,A,reserve_exact)                              \
1139      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
1140 {                                                                         \
1141   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
1142                                        VEC_CHECK_PASS);                   \
1143                                                                           \
1144   if (extend)                                                             \
1145     *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact                        \
1146                          (*vec_, alloc_,                                  \
1147                           offsetof (VEC(T,A),base.vec),                   \
1148                           sizeof (T) PASS_MEM_STAT);                      \
1149                                                                           \
1150   return extend;                                                          \
1151 }                                                                         \
1152                                                                           \
1153 static inline void VEC_OP (T,A,safe_grow)                                 \
1154      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
1155 {                                                                         \
1156   VEC_ASSERT (size_ >= 0                                                  \
1157               && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1158                                                  "grow", T, A);           \
1159   VEC_OP (T,A,reserve_exact) (vec_,                                       \
1160                               size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1161                               VEC_CHECK_PASS PASS_MEM_STAT);              \
1162   VEC_BASE (*vec_)->prefix.num = size_;                                   \
1163 }                                                                         \
1164                                                                           \
1165 static inline void VEC_OP (T,A,safe_grow_cleared)                         \
1166      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
1167 {                                                                         \
1168   int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_);                    \
1169   VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT);      \
1170   memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0,         \
1171           sizeof (T) * (size_ - oldsize));                                \
1172 }                                                                         \
1173                                                                           \
1174 static inline void VEC_OP(T,A,safe_splice)                                \
1175      (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL)    \
1176 {                                                                         \
1177   if (src_)                                                               \
1178     {                                                                     \
1179       VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num                          \
1180                                   VEC_CHECK_PASS PASS_MEM_STAT);          \
1181                                                                           \
1182       VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_                      \
1183                               VEC_CHECK_PASS);                            \
1184     }                                                                     \
1185 }                                                                         \
1186                                                                           \
1187 static inline T *VEC_OP (T,A,safe_push)                                   \
1188      (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL)        \
1189 {                                                                         \
1190   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
1191                                                                           \
1192   return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS);  \
1193 }                                                                         \
1194                                                                           \
1195 static inline T *VEC_OP (T,A,safe_insert)                                 \
1196      (VEC(T,A) **vec_, unsigned ix_, const T *obj_                        \
1197                 VEC_CHECK_DECL MEM_STAT_DECL)                             \
1198 {                                                                         \
1199   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
1200                                                                           \
1201   return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_         \
1202                                        VEC_CHECK_PASS);                   \
1203 }
1204
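/* Allocator for integer-like ('I') vectors; structurally identical to
   the object-vector allocator above.  */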
1205 #define DEF_VEC_ALLOC_FUNC_I(T,A)                                         \
1206 static inline VEC(T,A) *VEC_OP (T,A,alloc)                                \
1207      (int alloc_ MEM_STAT_DECL)                                           \
1208 {                                                                         \
1209   return (VEC(T,A) *) vec_##A##_o_reserve_exact                           \
1210                       (NULL, alloc_, offsetof (VEC(T,A),base.vec),        \
1211                        sizeof (T) PASS_MEM_STAT);                         \
1212 }
1213
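/* Allocation-strategy-specific operations for integer-like vectors.
   These mirror the object-vector set above, except that safe_push and
   safe_insert take the new element by value rather than through a
   pointer.  */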
1214 #define DEF_VEC_NONALLOC_FUNCS_I(T,A)                                     \
1215 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1216 {                                                                         \
1217   size_t len_ = vec_ ? vec_->prefix.num : 0;                                      \
1218   VEC (T,A) *new_vec_ = NULL;                                             \
1219                                                                           \
1220   if (len_)                                                               \
1221     {                                                                     \
1222       new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact                  \
1223                                (NULL, len_,                               \
1224                                 offsetof (VEC(T,A),base.vec), sizeof (T)  \
1225                                 PASS_MEM_STAT));                          \
1226                                                                           \
1227       new_vec_->base.prefix.num = len_;                                   \
1228       memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_);          \
1229     }                                                                     \
1230   return new_vec_;                                                        \
1231 }                                                                         \
1232                                                                           \
1233 static inline void VEC_OP (T,A,free)                                      \
1234      (VEC(T,A) **vec_)                                                    \
1235 {                                                                         \
1236   if (*vec_)                                                              \
1237     vec_##A##_free (*vec_);                                               \
1238   *vec_ = NULL;                                                           \
1239 }                                                                         \
1240                                                                           \
1241 static inline int VEC_OP (T,A,reserve)                                    \
1242      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
1243 {                                                                         \
1244   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
1245                                        VEC_CHECK_PASS);                   \
1246                                                                           \
1247   if (extend)                                                             \
1248     *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_,              \
1249                                               offsetof (VEC(T,A),base.vec),\
1250                                               sizeof (T)                  \
1251                                               PASS_MEM_STAT);             \
1252                                                                           \
1253   return extend;                                                          \
1254 }                                                                         \
1255                                                                           \
1256 static inline int VEC_OP (T,A,reserve_exact)                              \
1257      (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)           \
1258 {                                                                         \
1259   int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_            \
1260                                        VEC_CHECK_PASS);                   \
1261                                                                           \
1262   if (extend)                                                             \
1263     *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact                        \
1264                          (*vec_, alloc_, offsetof (VEC(T,A),base.vec),    \
1265                           sizeof (T) PASS_MEM_STAT);                      \
1266                                                                           \
1267   return extend;                                                          \
1268 }                                                                         \
1269                                                                           \
1270 static inline void VEC_OP (T,A,safe_grow)                                 \
1271      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
1272 {                                                                         \
1273   VEC_ASSERT (size_ >= 0                                                  \
1274               && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1275                                                  "grow", T, A);           \
1276   VEC_OP (T,A,reserve_exact) (vec_,                                       \
1277                               size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1278                               VEC_CHECK_PASS PASS_MEM_STAT);              \
1279   VEC_BASE (*vec_)->prefix.num = size_;                                   \
1280 }                                                                         \
1281                                                                           \
1282 static inline void VEC_OP (T,A,safe_grow_cleared)                         \
1283      (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)            \
1284 {                                                                         \
1285   int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_);                    \
1286   VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT);      \
1287   memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0,         \
1288           sizeof (T) * (size_ - oldsize));                                \
1289 }                                                                         \
1290                                                                           \
1291 static inline void VEC_OP(T,A,safe_splice)                                \
1292      (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL)    \
1293 {                                                                         \
1294   if (src_)                                                               \
1295     {                                                                     \
1296       VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num                          \
1297                                   VEC_CHECK_PASS PASS_MEM_STAT);          \
1298                                                                           \
1299       VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_                      \
1300                               VEC_CHECK_PASS);                            \
1301     }                                                                     \
1302 }                                                                         \
1303                                                                           \
1304 static inline T *VEC_OP (T,A,safe_push)                                   \
1305      (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL)         \
1306 {                                                                         \
1307   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
1308                                                                           \
1309   return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS);  \
1310 }                                                                         \
1311                                                                           \
1312 static inline T *VEC_OP (T,A,safe_insert)                                 \
1313      (VEC(T,A) **vec_, unsigned ix_, const T obj_                         \
1314                 VEC_CHECK_DECL MEM_STAT_DECL)                             \
1315 {                                                                         \
1316   VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);            \
1317                                                                           \
1318   return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_         \
1319                                        VEC_CHECK_PASS);                   \
1320 }
1321
1322 /* We support a vector which starts out with space on the stack and
1323    switches to heap space when forced to reallocate.  This works a
1324    little differently.  Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
1325    DEF_VEC_ALLOC_P_STACK(TYPE).  This uses alloca to get the initial
1326    space; because alloca cannot be usefully called in an inline
1327    function, and because a macro cannot define a macro, you must then
1328    write a #define for each type:
1329
1330    #define VEC_{TYPE}_stack_alloc(alloc)                          \
1331      VEC_stack_alloc({TYPE}, alloc)
1332
1333    This is really a hack and perhaps can be made better.  Note that
1334    this macro will wind up evaluating the ALLOC parameter twice.
1335
1336    Only the initial allocation will be made using alloca, so pass a
1337    reasonable estimate that doesn't use too much stack space; don't
1338    pass zero.  Don't return a VEC(TYPE,stack) vector from the function
1339    which allocated it.  */
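
/* A minimal usage sketch (hypothetical code: the function and variable
   names below are illustrative only, and the element type is assumed
   to already have its DEF_VEC_P definition in scope):

     DEF_VEC_ALLOC_P_STACK (tree);
     #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

     static void
     walk_some_trees (tree t)
     {
       VEC(tree,stack) *scratch = VEC_alloc (tree, stack, 16);

       VEC_safe_push (tree, stack, scratch, t);
       ...index, iterate, pop, etc. work as for any other vector...
       VEC_free (tree, stack, scratch);
     }

   The initial 16 slots come from alloca in the frame of the function
   that invokes VEC_alloc; a push or reserve that outgrows them moves
   the vector to the heap, and VEC_free then releases that heap
   copy.  */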
1340
1341 extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
1342 extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
1343 extern void *vec_stack_p_reserve_exact_1 (int, void *);
1344 extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
1345 extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
1346                                          MEM_STAT_DECL);
1347 extern void vec_stack_free (void *);
1348
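/* VEC_stack_alloc obtains the initial storage for a stack vector, with
   room for ALLOC elements, from alloca (via XALLOCAVAR) and hands it to
   the type's alloc1 function.  The two variants below expand
   identically; the GATHER_STATISTICS form merely accepts (and ignores)
   the extra location arguments.  */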
1349 #ifdef GATHER_STATISTICS
1350 #define VEC_stack_alloc(T,alloc,name,line,function)                       \
1351   (VEC_OP (T,stack,alloc1)                                                \
1352    (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1353 #else
1354 #define VEC_stack_alloc(T,alloc)                                          \
1355   (VEC_OP (T,stack,alloc1)                                                \
1356    (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1357 #endif
1358
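/* Instantiate the stack-allocated variants.  Each DEF_VEC_ALLOC_*_STACK
   macro declares the VEC(T,stack) type and instantiates the usual
   reserve/safe_push/free family for the 'stack' strategy (dispatching
   to the vec_stack_* routines above); only the alloc1 helper, which
   receives the alloca'd block as its second argument, is specific to
   stack vectors.  */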
1359 #define DEF_VEC_ALLOC_P_STACK(T)                                          \
1360 VEC_TA(T,base,stack);                                                     \
1361 DEF_VEC_ALLOC_FUNC_P_STACK(T)                                             \
1362 DEF_VEC_NONALLOC_FUNCS_P(T,stack)                                         \
1363 struct vec_swallow_trailing_semi
1364
1365 #define DEF_VEC_ALLOC_FUNC_P_STACK(T)                                     \
1366 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)                       \
1367      (int alloc_, VEC(T,stack)* space)                                    \
1368 {                                                                         \
1369   return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space);    \
1370 }
1371
1372 #define DEF_VEC_ALLOC_O_STACK(T)                                          \
1373 VEC_TA(T,base,stack);                                                     \
1374 DEF_VEC_ALLOC_FUNC_O_STACK(T)                                             \
1375 DEF_VEC_NONALLOC_FUNCS_O(T,stack)                                         \
1376 struct vec_swallow_trailing_semi
1377
1378 #define DEF_VEC_ALLOC_FUNC_O_STACK(T)                                     \
1379 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)                       \
1380      (int alloc_, VEC(T,stack)* space)                                    \
1381 {                                                                         \
1382   return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space);    \
1383 }
1384
1385 #define DEF_VEC_ALLOC_I_STACK(T)                                          \
1386 VEC_TA(T,base,stack);                                                     \
1387 DEF_VEC_ALLOC_FUNC_I_STACK(T)                                             \
1388 DEF_VEC_NONALLOC_FUNCS_I(T,stack)                                         \
1389 struct vec_swallow_trailing_semi
1390
1391 #define DEF_VEC_ALLOC_FUNC_I_STACK(T)                                     \
1392 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)                       \
1393      (int alloc_, VEC(T,stack)* space)                                    \
1394 {                                                                         \
1395   return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space);   \
1396 }
1397
1398 #endif /* GCC_VEC_H */