vec.h (DEF_VEC_ALLOC_I): Use DEF_VEC_NONALLOC_FUNCS_I.

* vec.h (DEF_VEC_ALLOC_I): Use DEF_VEC_NONALLOC_FUNCS_I.
	(DEF_VEC_ALLOC_P): Use DEF_VEC_NONALLOC_FUNCS_P.
	(DEF_VEC_ALLOC_O): Use DEF_VEC_NONALLOC_FUNCS_O.
	(DEF_VEC_ALLOC_FUNC_P): Only define VEC_OP (T,A,alloc).
	(DEF_VEC_NONALLOC_FUNCS_P): New macro, broken out of old
	DEF_VEC_ALLOC_FUNC_P.
	(DEF_VEC_ALLOC_FUNC_O): Only define VEC_OP (T,A,alloc).
	(DEF_VEC_NONALLOC_FUNCS_O): New macro, broken out of old
	DEF_VEC_ALLOC_FUNC_O.
	(DEF_VEC_ALLOC_FUNC_I): Only define VEC_OP (T,A,alloc).
	(DEF_VEC_NONALLOC_FUNCS_I): New macro, broken out of old
	DEF_VEC_ALLOC_FUNC_I.
	(vec_stack_p_reserve, vec_stack_p_reserve_exact): Declare.
	(vec_stack_p_reserve_exact_1): Declare.
	(vec_stack_o_reserve, vec_stack_o_reserve_exact): Declare.
	(vec_stack_free): Declare.
	(VEC_stack_alloc): Define.
	(DEF_VEC_ALLOC_P_STACK, DEF_VEC_ALLOC_FUNC_P_STACK): Define.
	(DEF_VEC_ALLOC_O_STACK, DEF_VEC_ALLOC_FUNC_O_STACK): Define.
	(DEF_VEC_ALLOC_I_STACK, DEF_VEC_ALLOC_FUNC_I_STACK): Define.
	* vec.c (void_p): New type.  Call DEF_VEC_P and DEF_VEC_ALLOC_P
	for void_p.
	(stack_vecs): New static variable.
	(vec_stack_p_reserve_exact_1): New function.
	(vec_stack_o_reserve_1): New static function.
	(vec_stack_p_reserve, vec_stack_p_reserve_exact): New functions.
	(vec_stack_o_reserve, vec_stack_o_reserve_exact): New functions.
	(vec_stack_free): New function.
	* df-scan.c (df_ref): Use DEF_VEC_P and DEF_VEC_ALLOC_P_STACK.
	(VEC_df_ref_stack_alloc): Define.
	(df_mw_hardreg_ptr): New type.  Use DEF_VEC_P and
	DEF_VEC_ALLOC_P_STACK.
	(VEC_df_mw_hardreg_ptr_stack_alloc): Define.
	(struct df_collection_rec): Change _vec fields to VEC.  Remove
	next_* fields.
	(df_free_collection_rec): Adjust for new fields.
	(df_insn_rescan): Use new df_collection_rec fields.
	(df_notes_rescan, df_canonize_collection_rec): Likewise.
	(df_ref_create_structure, df_ref_record): Likewise.
	(df_get_conditional_uses, df_get_call_refs): Likewise.
	(df_insn_refs_collect, df_bb_refs_collect): Likewise.
	(df_bb_refs_record, df_record_entry_block_defs): Likewise.
	(df_record_exit_block_uses, df_bb_verify): Likewise.
	(df_swap_refs): Change ref_vec parameter to VEC.  Change all
	callers.
	(df_sort_and_compress_refs): Change ref_vec parameter to VEC.
	Remove count parameter.  Change return type to void.  Change all
	callers.
	(df_sort_and_compress_mws): Change mw_vec parameter to VEC.
	Remove count parameter.  Change return type to void.  Change all
	callers.
	(df_install_refs): Change old_vec parameter to VEC.  Remove count
	parameter.  Change all callers.
	(df_install_mws): Change old_vec parameter to VEC.  Remove count
	parameter.  Change all callers.
	(df_refs_verify): Change new_rec parameter to VEC.  Change all
	callers.
	(df_mws_verify): Likewise.

From-SVN: r148347
Ian Lance Taylor <iant@google.com>  2009-06-10 16:21:03 +0000
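In short, the patch replaces the fixed-size XALLOCAVEC buffers in df-scan.c
with stack vectors that spill to the heap when they fill up.  A condensed
sketch of the pattern, using only interfaces defined in this patch (the real
code is in the diffs below):

    /* Before: fixed-size alloca'd array plus a manual counter.  */
    collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
    collection_rec.def_vec[collection_rec.next_def++] = this_ref;

    /* After: a stack vector that reallocates on the heap if 128
       entries are not enough, then is explicitly freed.  */
    collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
    VEC_safe_push (df_ref, stack, collection_rec.def_vec, this_ref);
    ...
    VEC_free (df_ref, stack, collection_rec.def_vec);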

gcc/df-scan.c

@@ -46,6 +46,19 @@ along with GCC; see the file COPYING3. If not see
#include "df.h"
#include "tree-pass.h"
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);
#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
@@ -84,14 +97,10 @@ static HARD_REG_SET elim_reg_set;
struct df_collection_rec
{
df_ref * def_vec;
df_ref * use_vec;
unsigned int next_def;
unsigned int next_use;
df_ref * eq_use_vec;
struct df_mw_hardreg **mw_vec;
unsigned int next_eq_use;
unsigned int next_mw;
VEC(df_ref,stack) *def_vec;
VEC(df_ref,stack) *use_vec;
VEC(df_ref,stack) *eq_use_vec;
VEC(df_mw_hardreg_ptr,stack) *mw_vec;
};
static df_ref df_null_ref_rec[1];
@@ -1180,26 +1189,29 @@ df_insn_delete (basic_block bb, unsigned int uid)
static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
unsigned int ix;
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
df_ref *ref;
struct df_mw_hardreg **mw;
df_ref ref;
struct df_mw_hardreg *mw;
if (collection_rec->def_vec)
for (ref = collection_rec->def_vec; *ref; ref++)
df_free_ref (*ref);
if (collection_rec->use_vec)
for (ref = collection_rec->use_vec; *ref; ref++)
df_free_ref (*ref);
if (collection_rec->eq_use_vec)
for (ref = collection_rec->eq_use_vec; *ref; ref++)
df_free_ref (*ref);
if (collection_rec->mw_vec)
for (mw = collection_rec->mw_vec; *mw; mw++)
pool_free (problem_data->mw_reg_pool, *mw);
for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
df_free_ref (ref);
for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
df_free_ref (ref);
for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
df_free_ref (ref);
for (ix = 0;
VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
++ix)
pool_free (problem_data->mw_reg_pool, mw);
VEC_free (df_ref, stack, collection_rec->def_vec);
VEC_free (df_ref, stack, collection_rec->use_vec);
VEC_free (df_ref, stack, collection_rec->eq_use_vec);
VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}
/* Rescan INSN. Return TRUE if the rescanning produced any changes. */
bool
@@ -1209,10 +1221,6 @@ df_insn_rescan (rtx insn)
struct df_insn_info *insn_info = NULL;
basic_block bb = BLOCK_FOR_INSN (insn);
struct df_collection_rec collection_rec;
collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if ((!df) || (!INSN_P (insn)))
return false;
@@ -1253,6 +1261,11 @@ df_insn_rescan (rtx insn)
return false;
}
collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
bitmap_clear_bit (df->insns_to_delete, uid);
bitmap_clear_bit (df->insns_to_rescan, uid);
bitmap_clear_bit (df->insns_to_notes_rescan, uid);
@@ -1288,6 +1301,12 @@ df_insn_rescan (rtx insn)
df_refs_add_to_chains (&collection_rec, bb, insn);
df_set_bb_dirty (bb);
VEC_free (df_ref, stack, collection_rec.def_vec);
VEC_free (df_ref, stack, collection_rec.use_vec);
VEC_free (df_ref, stack, collection_rec.eq_use_vec);
VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
return true;
}
@@ -2131,10 +2150,11 @@ df_notes_rescan (rtx insn)
rtx note;
struct df_collection_rec collection_rec;
unsigned int num_deleted;
unsigned int mw_len;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
df_ref_chain_delete (insn_info->eq_uses);
@@ -2158,7 +2178,8 @@ df_notes_rescan (rtx insn)
/* Find some place to put any new mw_hardregs. */
df_canonize_collection_rec (&collection_rec);
if (collection_rec.next_mw)
mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
if (mw_len)
{
unsigned int count = 0;
struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
@@ -2172,33 +2193,36 @@ df_notes_rescan (rtx insn)
{
/* Append to the end of the existing record after
expanding it if necessary. */
if (collection_rec.next_mw > num_deleted)
if (mw_len > num_deleted)
{
insn_info->mw_hardregs =
XRESIZEVEC (struct df_mw_hardreg *,
insn_info->mw_hardregs,
count + 1 + collection_rec.next_mw);
insn_info->mw_hardregs,
count + 1 + mw_len);
}
memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
(collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
memcpy (&insn_info->mw_hardregs[count],
VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
mw_len * sizeof (struct df_mw_hardreg *));
insn_info->mw_hardregs[count + mw_len] = NULL;
qsort (insn_info->mw_hardregs, count + mw_len,
sizeof (struct df_mw_hardreg *), df_mw_compare);
}
else
{
/* No vector there. */
insn_info->mw_hardregs
= XNEWVEC (struct df_mw_hardreg*,
count + 1 + collection_rec.next_mw);
memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
(collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
= XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
memcpy (insn_info->mw_hardregs,
VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
mw_len * sizeof (struct df_mw_hardreg *));
insn_info->mw_hardregs[mw_len] = NULL;
}
}
/* Get rid of the mw_rec so that df_refs_add_to_chains will
ignore it. */
collection_rec.mw_vec = NULL;
collection_rec.next_mw = 0;
VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
df_refs_add_to_chains (&collection_rec, bb, insn);
VEC_free (df_ref, stack, collection_rec.eq_use_vec);
}
else
df_insn_rescan (insn);
@@ -2316,35 +2340,43 @@ df_ref_compare (const void *r1, const void *r2)
}
static void
df_swap_refs (df_ref *ref_vec, int i, int j)
df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
{
df_ref tmp = ref_vec[i];
ref_vec[i] = ref_vec[j];
ref_vec[j] = tmp;
df_ref tmp = VEC_index (df_ref, *ref_vec, i);
VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
VEC_replace (df_ref, *ref_vec, j, tmp);
}
/* Sort and compress a set of refs. */
static unsigned int
df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
unsigned int count;
unsigned int i;
unsigned int dist = 0;
ref_vec[count] = NULL;
count = VEC_length (df_ref, *ref_vec);
/* If there are 1 or 0 elements, there is nothing to do. */
if (count < 2)
return count;
return;
else if (count == 2)
{
if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
if (df_ref_compare (&r0, &r1) > 0)
df_swap_refs (ref_vec, 0, 1);
}
else
{
for (i = 0; i < count - 1; i++)
if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
break;
{
df_ref r0 = VEC_index (df_ref, *ref_vec, i);
df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
if (df_ref_compare (&r0, &r1) >= 0)
break;
}
/* If the array is already strictly ordered,
which is the most common case for large COUNT case
(which happens for CALL INSNs),
@@ -2353,26 +2385,29 @@ df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
Make sure DF_GET_ADD_REFS adds refs in the increasing order
of DF_REF_COMPARE. */
if (i == count - 1)
return count;
qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
return;
qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
df_ref_compare);
}
for (i=0; i<count-dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
while (i + dist + 1 < count
&& df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
VEC_index (df_ref, *ref_vec, i + dist + 1)))
{
df_free_ref (ref_vec[i + dist + 1]);
df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
dist++;
}
/* Copy it down to the next position. */
if (dist)
ref_vec[i+1] = ref_vec[i + dist + 1];
if (dist && i + dist + 1 < count)
VEC_replace (df_ref, *ref_vec, i + 1,
VEC_index (df_ref, *ref_vec, i + dist + 1));
}
count -= dist;
ref_vec[count] = NULL;
return count;
VEC_truncate (df_ref, *ref_vec, count);
}
@@ -2425,45 +2460,55 @@ df_mw_compare (const void *m1, const void *m2)
/* Sort and compress a set of refs. */
static unsigned int
df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
unsigned int count;
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int dist = 0;
mw_vec[count] = NULL;
count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
if (count < 2)
return count;
return;
else if (count == 2)
{
if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
if (df_mw_compare (&m0, &m1) > 0)
{
struct df_mw_hardreg *tmp = mw_vec[0];
mw_vec[0] = mw_vec[1];
mw_vec[1] = tmp;
struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
*mw_vec, 0);
VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
}
}
else
qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
sizeof (struct df_mw_hardreg *), df_mw_compare);
for (i=0; i<count-dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
while (i + dist + 1 < count
&& df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
VEC_index (df_mw_hardreg_ptr, *mw_vec,
i + dist + 1)))
{
pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
pool_free (problem_data->mw_reg_pool,
VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
dist++;
}
/* Copy it down to the next position. */
if (dist)
mw_vec[i+1] = mw_vec[i + dist + 1];
if (dist && i + dist + 1 < count)
VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
}
count -= dist;
mw_vec[count] = NULL;
return count;
VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
@@ -2472,22 +2517,10 @@ df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
if (collection_rec->def_vec)
collection_rec->next_def
= df_sort_and_compress_refs (collection_rec->def_vec,
collection_rec->next_def);
if (collection_rec->use_vec)
collection_rec->next_use
= df_sort_and_compress_refs (collection_rec->use_vec,
collection_rec->next_use);
if (collection_rec->eq_use_vec)
collection_rec->next_eq_use
= df_sort_and_compress_refs (collection_rec->eq_use_vec,
collection_rec->next_eq_use);
if (collection_rec->mw_vec)
collection_rec->next_mw
= df_sort_and_compress_mws (collection_rec->mw_vec,
collection_rec->next_mw);
df_sort_and_compress_refs (&collection_rec->def_vec);
df_sort_and_compress_refs (&collection_rec->use_vec);
df_sort_and_compress_refs (&collection_rec->eq_use_vec);
df_sort_and_compress_mws (&collection_rec->mw_vec);
}
@@ -2545,16 +2578,20 @@ df_install_ref (df_ref this_ref,
static df_ref *
df_install_refs (basic_block bb,
df_ref *old_vec, unsigned int count,
VEC(df_ref,stack)* old_vec,
struct df_reg_info **reg_info,
struct df_ref_info *ref_info,
bool is_notes)
{
unsigned int count;
count = VEC_length (df_ref, old_vec);
if (count)
{
unsigned int i;
df_ref *new_vec = XNEWVEC (df_ref, count + 1);
bool add_to_table;
df_ref this_ref;
unsigned int ix;
switch (ref_info->ref_order)
{
@@ -2579,10 +2616,9 @@ df_install_refs (basic_block bb,
if (add_to_table && df->analyze_subset)
add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
for (i = 0; i < count; i++)
for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
{
df_ref this_ref = old_vec[i];
new_vec[i] = this_ref;
new_vec[ix] = this_ref;
df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
ref_info, add_to_table);
}
@@ -2599,14 +2635,18 @@
insn. */
static struct df_mw_hardreg **
df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
{
unsigned int count;
count = VEC_length (df_mw_hardreg_ptr, old_vec);
if (count)
{
struct df_mw_hardreg **new_vec
= XNEWVEC (struct df_mw_hardreg*, count + 1);
memcpy (new_vec, old_vec,
sizeof (struct df_mw_hardreg*) * (count + 1));
memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
sizeof (struct df_mw_hardreg*) * count);
new_vec[count] = NULL;
return new_vec;
}
else
@@ -2631,8 +2671,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
{
df_scan_free_ref_vec (insn_rec->defs);
insn_rec->defs
= df_install_refs (bb, collection_rec->def_vec,
collection_rec->next_def,
= df_install_refs (bb, collection_rec->def_vec,
df->def_regs,
&df->def_info, false);
}
@@ -2641,7 +2680,6 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df_scan_free_ref_vec (insn_rec->uses);
insn_rec->uses
= df_install_refs (bb, collection_rec->use_vec,
collection_rec->next_use,
df->use_regs,
&df->use_info, false);
}
@@ -2650,7 +2688,6 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df_scan_free_ref_vec (insn_rec->eq_uses);
insn_rec->eq_uses
= df_install_refs (bb, collection_rec->eq_use_vec,
collection_rec->next_eq_use,
df->eq_use_regs,
&df->use_info, true);
}
@@ -2658,8 +2695,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
{
df_scan_free_mws_vec (insn_rec->mw_hardregs);
insn_rec->mw_hardregs
= df_install_mws (collection_rec->mw_vec,
collection_rec->next_mw);
= df_install_mws (collection_rec->mw_vec);
}
}
else
@@ -2668,14 +2704,12 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df_scan_free_ref_vec (bb_info->artificial_defs);
bb_info->artificial_defs
= df_install_refs (bb, collection_rec->def_vec,
collection_rec->next_def,
= df_install_refs (bb, collection_rec->def_vec,
df->def_regs,
&df->def_info, false);
df_scan_free_ref_vec (bb_info->artificial_uses);
bb_info->artificial_uses
= df_install_refs (bb, collection_rec->use_vec,
collection_rec->next_use,
df->use_regs,
&df->use_info, false);
}
@@ -2767,11 +2801,11 @@ df_ref_create_structure (enum df_ref_class cl,
if (collection_rec)
{
if (DF_REF_REG_DEF_P (this_ref))
collection_rec->def_vec[collection_rec->next_def++] = this_ref;
VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
else
collection_rec->use_vec[collection_rec->next_use++] = this_ref;
VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
}
return this_ref;
@@ -2837,7 +2871,8 @@ df_ref_record (enum df_ref_class cl,
hardreg->start_regno = regno;
hardreg->end_regno = endregno - 1;
hardreg->mw_order = df->ref_order++;
collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
hardreg);
}
for (i = regno; i < endregno; i++)
@@ -3291,10 +3326,11 @@ df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
unsigned int i;
for (i = 0; i < collection_rec->next_def; i++)
unsigned int ix;
df_ref ref;
for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
{
df_ref ref = collection_rec->def_vec[i];
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
int width = -1;
@@ -3333,16 +3369,14 @@ df_get_call_refs (struct df_collection_rec * collection_rec,
unsigned int ui;
bool is_sibling_call;
unsigned int i;
df_ref def;
bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
/* Do not generate clobbers for registers that are the result of the
call. This causes ordering problems in the chain building code
depending on which def is seen first. */
for (i=0; i<collection_rec->next_def; i++)
{
df_ref def = collection_rec->def_vec[i];
bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
}
for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
/* Record the registers used to pass arguments, and explicitly
noted as clobbered. */
@@ -3420,10 +3454,10 @@ df_insn_refs_collect (struct df_collection_rec* collection_rec,
bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
/* Clear out the collection record. */
collection_rec->next_def = 0;
collection_rec->next_use = 0;
collection_rec->next_eq_use = 0;
collection_rec->next_mw = 0;
VEC_truncate (df_ref, collection_rec->def_vec, 0);
VEC_truncate (df_ref, collection_rec->use_vec, 0);
VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
/* Record register defs. */
df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
@@ -3521,10 +3555,10 @@ df_need_static_chain_reg (struct function *fun)
static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
collection_rec->next_def = 0;
collection_rec->next_use = 0;
collection_rec->next_eq_use = 0;
collection_rec->next_mw = 0;
VEC_truncate (df_ref, collection_rec->def_vec, 0);
VEC_truncate (df_ref, collection_rec->use_vec, 0);
VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
if (bb->index == ENTRY_BLOCK)
{
@@ -3590,10 +3624,6 @@ df_bb_refs_record (int bb_index, bool scan_insns)
int luid = 0;
struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if (!df)
return;
@@ -3609,6 +3639,11 @@ df_bb_refs_record (int bb_index, bool scan_insns)
bb_info->artificial_uses = NULL;
}
collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
if (scan_insns)
/* Scan the block an insn at a time from beginning to end. */
FOR_BB_INSNS (bb, insn)
@@ -3631,6 +3666,11 @@ df_bb_refs_record (int bb_index, bool scan_insns)
df_bb_refs_collect (&collection_rec, bb);
df_refs_add_to_chains (&collection_rec, bb, NULL);
VEC_free (df_ref, stack, collection_rec.def_vec);
VEC_free (df_ref, stack, collection_rec.use_vec);
VEC_free (df_ref, stack, collection_rec.eq_use_vec);
VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
/* Now that the block has been processed, set the block as dirty so
LR and LIVE will get it processed. */
df_set_bb_dirty (bb);
@@ -3889,12 +3929,12 @@ df_record_entry_block_defs (bitmap entry_block_defs)
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
collection_rec.def_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
VEC_free (df_ref, stack, collection_rec.def_vec);
}
@@ -4060,12 +4100,13 @@ df_record_exit_block_uses (bitmap exit_block_uses)
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
collection_rec.use_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
VEC_free (df_ref, stack, collection_rec.use_vec);
}
@@ -4242,7 +4283,7 @@ df_compute_regs_ever_live (bool reset)
df_reg_chain_mark (refs, regno, is_def, is_eq_use)
df_reg_chain_verify_unmarked (refs)
df_refs_verify (ref*, ref*, bool)
df_refs_verify (VEC(df_ref,stack) *, ref*, bool)
df_mws_verify (mw*, mw*, bool)
df_insn_refs_verify (collection_rec, bb, insn, bool)
df_bb_refs_verify (bb, refs, bool)
@@ -4306,12 +4347,15 @@ df_reg_chain_verify_unmarked (df_ref refs)
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
df_refs_verify (df_ref *new_rec, df_ref *old_rec,
df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
bool abort_if_fail)
{
while ((*new_rec) && (*old_rec))
unsigned int ix;
df_ref new_ref;
for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
{
if (!df_ref_equal_p (*new_rec, *old_rec))
if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
{
if (abort_if_fail)
gcc_assert (0);
@@ -4327,14 +4371,13 @@ df_refs_verify (df_ref *new_rec, df_ref *old_rec,
DF_REF_REG_UNMARK (*old_rec);
}
new_rec++;
old_rec++;
}
if (abort_if_fail)
gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
gcc_assert (*old_rec == NULL);
else
return ((*new_rec == NULL) && (*old_rec == NULL));
return *old_rec == NULL;
return false;
}
@@ -4342,26 +4385,29 @@ df_refs_verify (df_ref *new_rec, df_ref *old_rec,
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
struct df_mw_hardreg **old_rec,
bool abort_if_fail)
{
while ((*new_rec) && (*old_rec))
unsigned int ix;
struct df_mw_hardreg *new_reg;
for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
{
if (!df_mw_equal_p (*new_rec, *old_rec))
if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
{
if (abort_if_fail)
gcc_assert (0);
else
return false;
}
new_rec++;
old_rec++;
}
if (abort_if_fail)
gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
gcc_assert (*old_rec == NULL);
else
return ((*new_rec == NULL) && (*old_rec == NULL));
return *old_rec == NULL;
return false;
}
@@ -4424,10 +4470,10 @@ df_bb_verify (basic_block bb)
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
gcc_assert (bb_info);

gcc/vec.c

@@ -372,6 +372,147 @@ vec_heap_o_reserve_exact (void *vec, int reserve, size_t vec_offset,
PASS_MEM_STAT);
}
/* Stack vectors are a little different. VEC_alloc turns into a call
to vec_stack_p_reserve_exact_1 and passes in space allocated via a
call to alloca. We record that pointer so that we know that we
shouldn't free it. If the vector is resized, we resize it on the
heap. We record the pointers in a vector and search it in LIFO
order--i.e., we look for the newest stack vectors first. We don't
expect too many stack vectors at any one level, and searching from
the end should normally be efficient even if they are used in a
recursive function. */
typedef void *void_p;
DEF_VEC_P(void_p);
DEF_VEC_ALLOC_P(void_p,heap);
static VEC(void_p,heap) *stack_vecs;
/* Allocate a vector which uses alloca for the initial allocation.
SPACE is space allocated using alloca, ALLOC is the number of
entries allocated. */
void *
vec_stack_p_reserve_exact_1 (int alloc, void *space)
{
struct vec_prefix *pfx = (struct vec_prefix *) space;
VEC_safe_push (void_p, heap, stack_vecs, space);
pfx->num = 0;
pfx->alloc = alloc;
return space;
}
/* Grow a vector allocated using alloca. When this happens, we switch
back to heap allocation. We remove the vector from stack_vecs, if
it is there, since we no longer need to avoid freeing it. */
static void *
vec_stack_o_reserve_1 (void *vec, int reserve, size_t vec_offset,
size_t elt_size, bool exact MEM_STAT_DECL)
{
bool found;
unsigned int ix;
void *newvec;
found = false;
for (ix = VEC_length (void_p, stack_vecs); ix > 0; --ix)
{
if (VEC_index (void_p, stack_vecs, ix - 1) == vec)
{
VEC_unordered_remove (void_p, stack_vecs, ix - 1);
found = true;
break;
}
}
if (!found)
{
/* VEC is already on the heap. */
return vec_heap_o_reserve_1 (vec, reserve, vec_offset, elt_size,
exact PASS_MEM_STAT);
}
/* Move VEC to the heap. */
reserve += ((struct vec_prefix *) vec)->num;
newvec = vec_heap_o_reserve_1 (NULL, reserve, vec_offset, elt_size,
exact PASS_MEM_STAT);
if (newvec && vec)
{
((struct vec_prefix *) newvec)->num = ((struct vec_prefix *) vec)->num;
memcpy (((struct vec_prefix *) newvec)->vec,
((struct vec_prefix *) vec)->vec,
((struct vec_prefix *) vec)->num * elt_size);
}
return newvec;
}
/* Grow a vector allocated on the stack. */
void *
vec_stack_p_reserve (void *vec, int reserve MEM_STAT_DECL)
{
return vec_stack_o_reserve_1 (vec, reserve,
offsetof (struct vec_prefix, vec),
sizeof (void *), false
PASS_MEM_STAT);
}
/* Exact version of vec_stack_p_reserve. */
void *
vec_stack_p_reserve_exact (void *vec, int reserve MEM_STAT_DECL)
{
return vec_stack_o_reserve_1 (vec, reserve,
offsetof (struct vec_prefix, vec),
sizeof (void *), true
PASS_MEM_STAT);
}
/* Like vec_stack_p_reserve, but for objects. */
void *
vec_stack_o_reserve (void *vec, int reserve, size_t vec_offset,
size_t elt_size MEM_STAT_DECL)
{
return vec_stack_o_reserve_1 (vec, reserve, vec_offset, elt_size, false
PASS_MEM_STAT);
}
/* Like vec_stack_p_reserve_exact, but for objects. */
void *
vec_stack_o_reserve_exact (void *vec, int reserve, size_t vec_offset,
size_t elt_size MEM_STAT_DECL)
{
return vec_stack_o_reserve_1 (vec, reserve, vec_offset, elt_size, true
PASS_MEM_STAT);
}
/* Free a vector allocated on the stack. Don't actually free it if we
find it in the list of stack vectors. */
void
vec_stack_free (void *vec)
{
unsigned int ix;
for (ix = VEC_length (void_p, stack_vecs); ix > 0; --ix)
{
if (VEC_index (void_p, stack_vecs, ix - 1) == vec)
{
VEC_unordered_remove (void_p, stack_vecs, ix - 1);
return;
}
}
/* VEC was not on the list of vecs allocated on the stack, so it
must be allocated on the heap. */
vec_heap_free (vec);
}
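Taken together, the intended lifecycle looks like the sketch below.  This is
not code from the patch; it reuses the df_ref instantiation that df-scan.c
sets up above:

    static void
    example (df_ref ref)
    {
      /* Initial storage comes from alloca in this frame and is
         recorded in stack_vecs.  */
      VEC(df_ref,stack) *v = VEC_alloc (df_ref, stack, 16);
      int i;

      /* The 17th push overflows the alloca'd space; the vector moves
         to the heap and is removed from stack_vecs.  */
      for (i = 0; i < 100; i++)
        VEC_safe_push (df_ref, stack, v, ref);

      /* Frees the heap copy; had V never grown, this would find it in
         stack_vecs and let the stack space simply unwind.  */
      VEC_free (df_ref, stack, v);
    }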
#if ENABLE_CHECKING
/* Issue a vector domain error, and then fall over. */

gcc/vec.h

@@ -508,6 +508,7 @@ struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_I(T,A) \
VEC_TA(T,base,A); \
DEF_VEC_ALLOC_FUNC_I(T,A) \
DEF_VEC_NONALLOC_FUNCS_I(T,A) \
struct vec_swallow_trailing_semi
/* Vector of pointer to object. */
@@ -524,6 +525,7 @@ struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_P(T,A) \
VEC_TA(T,base,A); \
DEF_VEC_ALLOC_FUNC_P(T,A) \
DEF_VEC_NONALLOC_FUNCS_P(T,A) \
struct vec_swallow_trailing_semi
#define DEF_VEC_FUNC_P(T) \
@@ -716,8 +718,10 @@ static inline VEC(T,A) *VEC_OP (T,A,alloc) \
{ \
return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \
PASS_MEM_STAT); \
} \
\
}
#define DEF_VEC_NONALLOC_FUNCS_P(T,A) \
static inline void VEC_OP (T,A,free) \
(VEC(T,A) **vec_) \
{ \
@@ -814,6 +818,7 @@ struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_O(T,A) \
VEC_TA(T,base,A); \
DEF_VEC_ALLOC_FUNC_O(T,A) \
DEF_VEC_NONALLOC_FUNCS_O(T,A) \
struct vec_swallow_trailing_semi
#define DEF_VEC_FUNC_O(T) \
@@ -995,8 +1000,9 @@ static inline VEC(T,A) *VEC_OP (T,A,alloc) \
offsetof (VEC(T,A),base.vec), \
sizeof (T) \
PASS_MEM_STAT); \
} \
\
}
#define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
{ \
size_t len_ = vec_ ? vec_->num : 0; \
@@ -1099,8 +1105,9 @@ static inline VEC(T,A) *VEC_OP (T,A,alloc) \
return (VEC(T,A) *) vec_##A##_o_reserve_exact \
(NULL, alloc_, offsetof (VEC(T,A),base.vec), \
sizeof (T) PASS_MEM_STAT); \
} \
\
}
#define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
{ \
size_t len_ = vec_ ? vec_->num : 0; \
@@ -1195,4 +1202,74 @@ static inline T *VEC_OP (T,A,safe_insert) \
VEC_CHECK_PASS); \
}
/* We support a vector which starts out with space on the stack and
switches to heap space when forced to reallocate. This works a
little differently. Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
DEF_VEC_ALLOC_P_STACK(TYPE). This uses alloca to get the initial
space; because alloca can not be usefully called in an inline
function, and because a macro can not define a macro, you must then
write a #define for each type:
#define VEC_{TYPE}_stack_alloc(alloc) \
VEC_stack_alloc({TYPE}, alloc)
This is really a hack and perhaps can be made better. Note that
this macro will wind up evaluating the ALLOC parameter twice.
Only the initial allocation will be made using alloca, so pass a
reasonable estimate that doesn't use too much stack space; don't
pass zero. Don't return a VEC(TYPE,stack) vector from the function
which allocated it. */
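df-scan.c above follows this recipe for df_ref and df_mw_hardreg_ptr; spelled
out for a hypothetical pointer type foo_ptr it would read:

    typedef struct foo *foo_ptr;
    DEF_VEC_P(foo_ptr);
    DEF_VEC_ALLOC_P_STACK(foo_ptr);
    /* The hand-written per-type macro described above, so that
       VEC_alloc (foo_ptr, stack, n) expands alloca in the caller.  */
    #define VEC_foo_ptr_stack_alloc(alloc) \
      VEC_stack_alloc (foo_ptr, alloc)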
extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact_1 (int, void *);
extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
MEM_STAT_DECL);
extern void vec_stack_free (void *);
#define VEC_stack_alloc(T,alloc) \
(VEC_OP (T,stack,alloc1) \
(alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
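For reference, the macro chain that VEC_alloc (T, stack, N) goes through, as
reconstructed from the definitions above and the generic VEC_alloc macro (a
sketch, not text from the patch):

    /* VEC_alloc (T, stack, N)
       => VEC_T_stack_alloc (N)            the hand-written per-type macro
       => VEC_stack_alloc (T, N)           the macro just above
       => VEC_T_stack_alloc1 (N, XALLOCAVAR (VEC(T,stack),
                                             VEC_embedded_size (T, N)))
       => vec_stack_p_reserve_exact_1 (N, space)
       The alloca behind XALLOCAVAR therefore runs in the caller's frame,
       which is why this cannot be an inline function, and N is evaluated
       twice, as the comment above warns.  */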
#define DEF_VEC_ALLOC_P_STACK(T) \
VEC_TA(T,base,stack); \
DEF_VEC_ALLOC_FUNC_P_STACK(T) \
DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
(int alloc_, VEC(T,stack)* space MEM_STAT_DECL) \
{ \
return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}
#define DEF_VEC_ALLOC_O_STACK(T) \
VEC_TA(T,base,stack); \
DEF_VEC_ALLOC_FUNC_O_STACK(T) \
DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
(int alloc_, VEC(T,stack)* space MEM_STAT_DECL) \
{ \
return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}
#define DEF_VEC_ALLOC_I_STACK(T) \
VEC_TA(T,base,stack); \
DEF_VEC_ALLOC_FUNC_I_STACK(T) \
DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
(int alloc_, VEC(T,stack)* space MEM_STAT_DECL) \
{ \
return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}
#endif /* GCC_VEC_H */