vec.h: Update API to separate allocation mechanism from type.
* vec.h: Update API to separate allocation mechanism from type. (VEC_safe_grow): New. * vec.c (calculate_allocation): New. (vec_gc_o_reserve, vec_heap_o_reserve): Adjust. (vec_gc_free, vec_heap_free): Remove. * gengtype-lex.l (DEF_VEC_): Process mult-argument macros. Adjust. (VEC): Likewise. (mangle_macro_name): New. (struct macro_def): New. (struct macro): Add multiple argument values. (macro_expans_end): New. (push_macro_expansion): Chain on new macro. Process multiple args, create follow on expansion. Return follow on argument. (macro_input): Deal with multiple arguments. * tree.h: Define VEC(tree,heap) and VEC(tree,gc). (struct tree_binfo): Adjust. * basic-block.h: Define VEC(edge,gc). (struct edge_def): Adjust. (struct basic_block_def, struct edge_iterator): Likewise. (ei_container, ei_start_1, ei_last_1): Likewise. * cfg.c (connect_src, connect_dest): Likewise. * cfgrtl.c (force_nonfallthru_and_redirect) * dbxout.c (dbxout_type) * dwarf2out.c (gen_member_die) * lambda-code.c: Define VEC(int,gc), VEC(lambda_loop,gc). (gcc_tree_to_linear_expression): Adjust. (gcc_loop_to_lambda_loop, gcc_loopnest_to_lambda_loopnest, lbv_to_gcc_expression, lle_to_gcc_expression, lambda_loopnest_to_gcc_loopnest, can_convert_to_perfect_nest, perfect_nestify): Likewise. * lambda.h (gcc_loopnest_to_lambda_loopnest, lambda_loopnest_to_gcc_loopnest): Adjust prototypes. * profile.c (instrument_values): Adjust. * tree-cfg.c (modified_noreturn_calls): Adjust. (remove_fallthru_edge): Likewise. * tree-dump.c (dequeue_and_dump): Adjust. * tree-flow-inline.h (mark_stmt_modified): Adjust. * tree-flow.h (modified_noreturn_calls): Adjust. (tree_on_heap): Remove. (yay!) (register_new_def): Adjust. * tree-into-ssa.c: Define VEC(int,heap). (block_defs_stack): Adjust. 
(find_idf, insert_phi_nodes, register_new_def, rewrite_initialize_block, rewrite_finalize_block, register_new_update_single, rewrite_update_init_block, rewrite_update_fini_block, rewrite_blocks, ssa_rewrite_finalize_block, ssa_register_new_def, ssa_rewrite_initialize_block, rewrite_ssa_into_ssa): Likewise. * tree-loop-linear.c (linear_transform_loops): Adjust. * tree-ssa-alias.c: Define VEC(fieldoff_t,heap). (push_fields_onto_fieldstack, create_overlap_variables_for): Adjust. * tree-ssa-dom.c (avail_exprs_stack, block_defs_stack, stmts_to_rescan, const_and_copies_stack, nonzero_vars_stack, vrp_variables_stack): Adjust declarations. (tree_ssa_dominator_optimize): Adjust. (dom_opt_initialize_block, remove_local_expressions_from_table, restore_nonzero_vars_to_original_value, restore_vars_to_original_value, restore_currdefs_to_original_value, dom_opt_finalize_block, record_var_is_nonzero, record_cond, record_const_or_copy_1, optimize_stmt, update_rhs_and_lookup_avail_expr, lookup_avail_expr, record_range): Likewise. * tree-ssa-pre.c: Define VEC(basic_block,heap). (compute_antic_aux): Adjust. (inserted_exprs, create_expression_by_pieces, insert_into_preds_of_block, eliminate, mark_operand_necessary, remove_dead_inserted_code, fini_pre): Likewise. * tree-ssa-propagate.c (interesting_ssa_edges): Adjust. (varying_ssa_edges, add_ssa_edge, process_ssa_edge_worklist. ssa_prop_init): Likewise. * tree-ssa.c: Define VEC(bitmap,heap). (verify_name_tags): Adjust. * value-prof.c (rtl_divmod_values_to_profile): Adjust. (insn_prefetch_values_to_profile, rtl_find_values_to_profile, tree_divmod_values_to_profile, tree_find_values_to_profile, value_profile_transformations): Likewise. * value-prof.h: Define VEC(histogram_value,heap). * varasm.c: Remove alias_pair pointer typedef, define VEC(alias_pair,gc). (finish_aliases_1, finish_aliases_2, assemble_alias): Adjust. * config/pa/pa.c (typedef extern_symbol): Typedef the structure, not a pointer to it. Create an object vector. 
(extern_symbols): Turn into an object vector. (pa_hpux_asm_output_external, pa_hpux_file_end): Adjust. * cp/cp-tree.h: Adjust for new VEC API. Define VEC(tree_pair_s,gc). (struct save_scope): Adjust. (struct lang_type_class): Adjust. (unemitted_tinfo_decls): Adjust. * cp/class.c (add_method, resort_type_method_vec, finish_struct_methods, struct find_final_overrider_data, dfs_find_final_overrider_pre, find_final_overrider, get_vcall_index, warn_hidden, walk_subobject_offsets, check_methods, fixup_inline_methods, end_of_class, warn_about_ambiguous_bases, finish_struct, build_vtbl_initializer, add_vcall_offset): Adjust. * cp/decl.c (xref_basetypes, finish_method): Adjust. * cp/decl2.c (check_classfn): Adjust. * cp/init.c (sort_mem_initializers, push_base_cleanups): Adjust. * cp/method.c (do_build_copy_constructor): Adjust. * cp/name-lookup.c (new_class_binding, store_binding, store_bindings, store_class_bindings): Adjust. * cp/name-lookup.h: Define VEC(cxx_saved_binding,gc), VEC(cp_class_binding,gc). (struct cp_binding_level): Adjust. * cp/parser.c: Define VEC(cp_token_position,heap). (struct cp_lexer): Adjust. (cp_lexer_new_main, cp_lexer_new_from_tokens, cp_lexer_destroy, cp_lexer_save_tokens): Adjust. * cp/pt.c (retrieve_specialization, check_explicit_specialization): Adjust. * cp/rtti.c (unemitted_tinfo_decls): Adjust. (init_rtti_processing, get_tinfo_decl, get_pseudo_ti_init, get_pseudo_ti_desc): Adjust. * cp/search.c (dfs_access_in_type, lookup_conversion_operator, lookup_fnfields_1, dfs_walk_once, dfs_walk_once_accessible, dfs_get_pure_virtuals, lookup_conversions_r, binfo_for_vbase): Adjust. * cp/semantics.c: Define VEC(deferred_access,gc). (push_deferring_access_checks): Adjust. * cp/typeck2.c (abstract_virtuals_error): Adjust. From-SVN: r98498
This commit is contained in:
parent
2412d35ca6
commit
d4e6fecb8f
|
@ -1,3 +1,92 @@
|
|||
2005-04-21 Nathan Sidwell <nathan@codesourcery.com>
|
||||
|
||||
* vec.h: Update API to separate allocation mechanism from type.
|
||||
(VEC_safe_grow): New.
|
||||
* vec.c (calculate_allocation): New.
|
||||
(vec_gc_o_reserve, vec_heap_o_reserve): Adjust.
|
||||
(vec_gc_free, vec_heap_free): Remove.
|
||||
* gengtype-lex.l (DEF_VEC_): Process multi-argument macros. Adjust.
|
||||
(VEC): Likewise.
|
||||
(mangle_macro_name): New.
|
||||
(struct macro_def): New.
|
||||
(struct macro): Add multiple argument values.
|
||||
(macro_expans_end): New.
|
||||
(push_macro_expansion): Chain on new macro. Process multiple
|
||||
args, create follow on expansion. Return follow on argument.
|
||||
(macro_input): Deal with multiple arguments.
|
||||
|
||||
* tree.h: Define VEC(tree,heap) and VEC(tree,gc).
|
||||
(struct tree_binfo): Adjust.
|
||||
* basic-block.h: Define VEC(edge,gc).
|
||||
(struct edge_def): Adjust.
|
||||
(struct basic_block_def, struct edge_iterator): Likewise.
|
||||
(ei_container, ei_start_1, ei_last_1): Likewise.
|
||||
* cfg.c (connect_src, connect_dest): Likewise.
|
||||
* cfgrtl.c (force_nonfallthru_and_redirect)
|
||||
* dbxout.c (dbxout_type)
|
||||
* dwarf2out.c (gen_member_die)
|
||||
* lambda-code.c: Define VEC(int,gc), VEC(lambda_loop,gc).
|
||||
(gcc_tree_to_linear_expression): Adjust.
|
||||
(gcc_loop_to_lambda_loop, gcc_loopnest_to_lambda_loopnest,
|
||||
lbv_to_gcc_expression, lle_to_gcc_expression,
|
||||
lambda_loopnest_to_gcc_loopnest, can_convert_to_perfect_nest,
|
||||
perfect_nestify): Likewise.
|
||||
* lambda.h (gcc_loopnest_to_lambda_loopnest,
|
||||
lambda_loopnest_to_gcc_loopnest): Adjust prototypes.
|
||||
* profile.c (instrument_values): Adjust.
|
||||
* tree-cfg.c (modified_noreturn_calls): Adjust.
|
||||
(remove_fallthru_edge): Likewise.
|
||||
* tree-dump.c (dequeue_and_dump): Adjust.
|
||||
* tree-flow-inline.h (mark_stmt_modified): Adjust.
|
||||
* tree-flow.h (modified_noreturn_calls): Adjust.
|
||||
(tree_on_heap): Remove. (yay!)
|
||||
(register_new_def): Adjust.
|
||||
* tree-into-ssa.c: Define VEC(int,heap).
|
||||
(block_defs_stack): Adjust.
|
||||
(find_idf, insert_phi_nodes, register_new_def,
|
||||
rewrite_initialize_block, rewrite_finalize_block,
|
||||
register_new_update_single, rewrite_update_init_block,
|
||||
rewrite_update_fini_block, rewrite_blocks,
|
||||
ssa_rewrite_finalize_block, ssa_register_new_def,
|
||||
ssa_rewrite_initialize_block, rewrite_ssa_into_ssa): Likewise.
|
||||
* tree-loop-linear.c (linear_transform_loops): Adjust.
|
||||
* tree-ssa-alias.c: Define VEC(fieldoff_t,heap).
|
||||
(push_fields_onto_fieldstack, create_overlap_variables_for): Adjust.
|
||||
* tree-ssa-dom.c (avail_exprs_stack, block_defs_stack,
|
||||
stmts_to_rescan, const_and_copies_stack, nonzero_vars_stack,
|
||||
vrp_variables_stack): Adjust declarations.
|
||||
(tree_ssa_dominator_optimize): Adjust.
|
||||
(dom_opt_initialize_block, remove_local_expressions_from_table,
|
||||
restore_nonzero_vars_to_original_value,
|
||||
restore_vars_to_original_value,
|
||||
restore_currdefs_to_original_value, dom_opt_finalize_block,
|
||||
record_var_is_nonzero, record_cond, record_const_or_copy_1,
|
||||
optimize_stmt, update_rhs_and_lookup_avail_expr,
|
||||
lookup_avail_expr, record_range): Likewise.
|
||||
* tree-ssa-pre.c: Define VEC(basic_block,heap).
|
||||
(compute_antic_aux): Adjust.
|
||||
(inserted_exprs, create_expression_by_pieces,
|
||||
insert_into_preds_of_block, eliminate, mark_operand_necessary,
|
||||
remove_dead_inserted_code, fini_pre): Likewise.
|
||||
* tree-ssa-propagate.c (interesting_ssa_edges): Adjust.
|
||||
(varying_ssa_edges, add_ssa_edge, process_ssa_edge_worklist,
|
||||
ssa_prop_init): Likewise.
|
||||
* tree-ssa.c: Define VEC(bitmap,heap).
|
||||
(verify_name_tags): Adjust.
|
||||
* value-prof.c (rtl_divmod_values_to_profile): Adjust.
|
||||
(insn_prefetch_values_to_profile, rtl_find_values_to_profile,
|
||||
tree_divmod_values_to_profile, tree_find_values_to_profile,
|
||||
value_profile_transformations): Likewise.
|
||||
* value-prof.h: Define VEC(histogram_value,heap).
|
||||
* varasm.c: Remove alias_pair pointer typedef, define
|
||||
VEC(alias_pair,gc).
|
||||
(finish_aliases_1, finish_aliases_2, assemble_alias): Adjust.
|
||||
|
||||
* config/pa/pa.c (typedef extern_symbol): Typedef the structure,
|
||||
not a pointer to it. Create an object vector.
|
||||
(extern_symbols): Turn into an object vector.
|
||||
(pa_hpux_asm_output_external, pa_hpux_file_end): Adjust.
|
||||
|
||||
2005-04-21 Sebastian Pop <pop@cri.ensmp.fr>
|
||||
|
||||
PR/20742
|
||||
|
|
|
@ -145,7 +145,8 @@ struct edge_def GTY(())
|
|||
};
|
||||
|
||||
typedef struct edge_def *edge;
|
||||
DEF_VEC_GC_P(edge);
|
||||
DEF_VEC_P(edge);
|
||||
DEF_VEC_ALLOC_P(edge,gc);
|
||||
|
||||
#define EDGE_FALLTHRU 1 /* 'Straight line' flow */
|
||||
#define EDGE_ABNORMAL 2 /* Strange flow, like computed
|
||||
|
@ -220,8 +221,8 @@ struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")
|
|||
tree stmt_list;
|
||||
|
||||
/* The edges into and out of the block. */
|
||||
VEC(edge) *preds;
|
||||
VEC(edge) *succs;
|
||||
VEC(edge,gc) *preds;
|
||||
VEC(edge,gc) *succs;
|
||||
|
||||
/* The registers that are live on entry to this block. */
|
||||
bitmap GTY ((skip (""))) global_live_at_start;
|
||||
|
@ -650,10 +651,10 @@ single_pred (basic_block bb)
|
|||
|
||||
typedef struct {
|
||||
unsigned index;
|
||||
VEC(edge) **container;
|
||||
VEC(edge,gc) **container;
|
||||
} edge_iterator;
|
||||
|
||||
static inline VEC(edge) *
|
||||
static inline VEC(edge,gc) *
|
||||
ei_container (edge_iterator i)
|
||||
{
|
||||
gcc_assert (i.container);
|
||||
|
@ -665,7 +666,7 @@ ei_container (edge_iterator i)
|
|||
|
||||
/* Return an iterator pointing to the start of an edge vector. */
|
||||
static inline edge_iterator
|
||||
ei_start_1 (VEC(edge) **ev)
|
||||
ei_start_1 (VEC(edge,gc) **ev)
|
||||
{
|
||||
edge_iterator i;
|
||||
|
||||
|
@ -678,7 +679,7 @@ ei_start_1 (VEC(edge) **ev)
|
|||
/* Return an iterator pointing to the last element of an edge
|
||||
vector. */
|
||||
static inline edge_iterator
|
||||
ei_last_1 (VEC(edge) **ev)
|
||||
ei_last_1 (VEC(edge,gc) **ev)
|
||||
{
|
||||
edge_iterator i;
|
||||
|
||||
|
|
|
@ -205,7 +205,7 @@ expunge_block (basic_block b)
|
|||
static inline void
|
||||
connect_src (edge e)
|
||||
{
|
||||
VEC_safe_push (edge, e->src->succs, e);
|
||||
VEC_safe_push (edge, gc, e->src->succs, e);
|
||||
}
|
||||
|
||||
/* Connect E to E->dest. */
|
||||
|
@ -214,7 +214,7 @@ static inline void
|
|||
connect_dest (edge e)
|
||||
{
|
||||
basic_block dest = e->dest;
|
||||
VEC_safe_push (edge, dest->preds, e);
|
||||
VEC_safe_push (edge, gc, dest->preds, e);
|
||||
e->dest_idx = EDGE_COUNT (dest->preds) - 1;
|
||||
}
|
||||
|
||||
|
|
|
@ -1065,7 +1065,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
|
|||
|
||||
gcc_assert (found);
|
||||
|
||||
VEC_safe_push (edge, bb->succs, e);
|
||||
VEC_safe_push (edge, gc, bb->succs, e);
|
||||
make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9022,18 +9022,18 @@ pa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
|
|||
at the end of the file if and only if SYMBOL_REF_REFERENCED_P is true.
|
||||
This avoids putting out names that are never really used. */
|
||||
|
||||
struct extern_symbol GTY(())
|
||||
typedef struct extern_symbol GTY(())
|
||||
{
|
||||
tree decl;
|
||||
const char *name;
|
||||
};
|
||||
typedef struct extern_symbol *extern_symbol;
|
||||
} extern_symbol;
|
||||
|
||||
/* Define gc'd vector type for extern_symbol. */
|
||||
DEF_VEC_GC_P(extern_symbol);
|
||||
DEF_VEC_O(extern_symbol);
|
||||
DEF_VEC_ALLOC_O(extern_symbol,gc);
|
||||
|
||||
/* Vector of extern_symbol pointers. */
|
||||
static GTY(()) VEC(extern_symbol) *extern_symbols;
|
||||
static GTY(()) VEC(extern_symbol,gc) *extern_symbols;
|
||||
|
||||
#ifdef ASM_OUTPUT_EXTERNAL_REAL
|
||||
/* Mark DECL (name NAME) as an external reference (assembler output
|
||||
|
@ -9043,12 +9043,11 @@ static GTY(()) VEC(extern_symbol) *extern_symbols;
|
|||
void
|
||||
pa_hpux_asm_output_external (FILE *file, tree decl, const char *name)
|
||||
{
|
||||
extern_symbol p = ggc_alloc (sizeof (struct extern_symbol));
|
||||
extern_symbol * p = VEC_safe_push (extern_symbol, gc, extern_symbols, NULL);
|
||||
|
||||
gcc_assert (file == asm_out_file);
|
||||
p->decl = decl;
|
||||
p->name = name;
|
||||
VEC_safe_push (extern_symbol, extern_symbols, p);
|
||||
}
|
||||
|
||||
/* Output text required at the end of an assembler file.
|
||||
|
@ -9059,7 +9058,7 @@ static void
|
|||
pa_hpux_file_end (void)
|
||||
{
|
||||
unsigned int i;
|
||||
extern_symbol p;
|
||||
extern_symbol *p;
|
||||
|
||||
output_deferred_plabels ();
|
||||
|
||||
|
@ -9072,7 +9071,7 @@ pa_hpux_file_end (void)
|
|||
ASM_OUTPUT_EXTERNAL_REAL (asm_out_file, decl, p->name);
|
||||
}
|
||||
|
||||
extern_symbols = NULL;
|
||||
VEC_free (extern_symbol, gc, extern_symbols);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
|
|
@ -1,3 +1,42 @@
|
|||
2005-04-21 Nathan Sidwell <nathan@codesourcery.com>
|
||||
|
||||
* cp-tree.h: Adjust for new VEC API.
|
||||
Define VEC(tree_pair_s,gc).
|
||||
(struct save_scope): Adjust.
|
||||
(struct lang_type_class): Adjust.
|
||||
(unemitted_tinfo_decls): Adjust.
|
||||
* class.c (add_method, resort_type_method_vec,
|
||||
finish_struct_methods, struct find_final_overrider_data,
|
||||
dfs_find_final_overrider_pre, find_final_overrider,
|
||||
get_vcall_index, warn_hidden, walk_subobject_offsets,
|
||||
check_methods, fixup_inline_methods, end_of_class,
|
||||
warn_about_ambiguous_bases, finish_struct, build_vtbl_initializer,
|
||||
add_vcall_offset): Adjust.
|
||||
* decl.c (xref_basetypes, finish_method): Adjust.
|
||||
* decl2.c (check_classfn): Adjust.
|
||||
* init.c (sort_mem_initializers, push_base_cleanups): Adjust.
|
||||
* method.c (do_build_copy_constructor): Adjust.
|
||||
* name-lookup.c (new_class_binding, store_binding,
|
||||
store_bindings, store_class_bindings): Adjust.
|
||||
* name-lookup.h: Define VEC(cxx_saved_binding,gc),
|
||||
VEC(cp_class_binding,gc).
|
||||
(struct cp_binding_level): Adjust.
|
||||
* parser.c: Define VEC(cp_token_position,heap).
|
||||
(struct cp_lexer): Adjust.
|
||||
(cp_lexer_new_main, cp_lexer_new_from_tokens, cp_lexer_destroy,
|
||||
cp_lexer_save_tokens): Adjust.
|
||||
* pt.c (retrieve_specialization,
|
||||
check_explicit_specialization): Adjust.
|
||||
* rtti.c (unemitted_tinfo_decls): Adjust.
|
||||
(init_rtti_processing, get_tinfo_decl, get_pseudo_ti_init,
|
||||
get_pseudo_ti_desc): Adjust.
|
||||
* search.c (dfs_access_in_type, lookup_conversion_operator,
|
||||
lookup_fnfields_1, dfs_walk_once, dfs_walk_once_accessible,
|
||||
dfs_get_pure_virtuals, lookup_conversions_r, binfo_for_vbase): Adjust.
|
||||
* semantics.c: Define VEC(deferred_access,gc).
|
||||
(push_deferring_access_checks): Adjust.
|
||||
* typeck2.c (abstract_virtuals_error): Adjust.
|
||||
|
||||
2005-04-20 Ian Lance Taylor <ian@airs.com>
|
||||
|
||||
* cp-tree.def: Add STMT_EXPR.
|
||||
|
|
|
@ -887,7 +887,7 @@ add_method (tree type, tree method)
|
|||
tree overload;
|
||||
bool template_conv_p = false;
|
||||
bool conv_p;
|
||||
VEC(tree) *method_vec;
|
||||
VEC(tree,gc) *method_vec;
|
||||
bool complete_p;
|
||||
bool insert_p = false;
|
||||
tree current_fns;
|
||||
|
@ -909,7 +909,7 @@ add_method (tree type, tree method)
|
|||
allocate at least two (for constructors and destructors), and
|
||||
we're going to end up with an assignment operator at some
|
||||
point as well. */
|
||||
method_vec = VEC_alloc (tree, 8);
|
||||
method_vec = VEC_alloc (tree, gc, 8);
|
||||
/* Create slots for constructors and destructors. */
|
||||
VEC_quick_push (tree, method_vec, NULL_TREE);
|
||||
VEC_quick_push (tree, method_vec, NULL_TREE);
|
||||
|
@ -1052,7 +1052,7 @@ add_method (tree type, tree method)
|
|||
{
|
||||
/* We only expect to add few methods in the COMPLETE_P case, so
|
||||
just make room for one more method in that case. */
|
||||
if (VEC_reserve (tree, method_vec, complete_p ? 1 : -1))
|
||||
if (VEC_reserve (tree, gc, method_vec, complete_p ? -1 : 1))
|
||||
CLASSTYPE_METHOD_VEC (type) = method_vec;
|
||||
if (slot == VEC_length (tree, method_vec))
|
||||
VEC_quick_push (tree, method_vec, overload);
|
||||
|
@ -1663,7 +1663,7 @@ resort_type_method_vec (void* obj,
|
|||
gt_pointer_operator new_value,
|
||||
void* cookie)
|
||||
{
|
||||
VEC(tree) *method_vec = (VEC(tree) *) obj;
|
||||
VEC(tree,gc) *method_vec = (VEC(tree,gc) *) obj;
|
||||
int len = VEC_length (tree, method_vec);
|
||||
size_t slot;
|
||||
tree fn;
|
||||
|
@ -1695,7 +1695,7 @@ static void
|
|||
finish_struct_methods (tree t)
|
||||
{
|
||||
tree fn_fields;
|
||||
VEC(tree) *method_vec;
|
||||
VEC(tree,gc) *method_vec;
|
||||
int slot, len;
|
||||
|
||||
method_vec = CLASSTYPE_METHOD_VEC (t);
|
||||
|
@ -1814,7 +1814,7 @@ typedef struct find_final_overrider_data_s {
|
|||
/* The candidate overriders. */
|
||||
tree candidates;
|
||||
/* Path to most derived. */
|
||||
VEC (tree) *path;
|
||||
VEC(tree,heap) *path;
|
||||
} find_final_overrider_data;
|
||||
|
||||
/* Add the overrider along the current path to FFOD->CANDIDATES.
|
||||
|
@ -1873,7 +1873,7 @@ dfs_find_final_overrider_pre (tree binfo, void *data)
|
|||
|
||||
if (binfo == ffod->declaring_base)
|
||||
dfs_find_final_overrider_1 (binfo, ffod, VEC_length (tree, ffod->path));
|
||||
VEC_safe_push (tree, ffod->path, binfo);
|
||||
VEC_safe_push (tree, heap, ffod->path, binfo);
|
||||
|
||||
return NULL_TREE;
|
||||
}
|
||||
|
@ -1922,12 +1922,12 @@ find_final_overrider (tree derived, tree binfo, tree fn)
|
|||
ffod.fn = fn;
|
||||
ffod.declaring_base = binfo;
|
||||
ffod.candidates = NULL_TREE;
|
||||
ffod.path = VEC_alloc (tree, 30);
|
||||
ffod.path = VEC_alloc (tree, heap, 30);
|
||||
|
||||
dfs_walk_all (derived, dfs_find_final_overrider_pre,
|
||||
dfs_find_final_overrider_post, &ffod);
|
||||
|
||||
VEC_free (tree, ffod.path);
|
||||
VEC_free (tree, heap, ffod.path);
|
||||
|
||||
/* If there was no winner, issue an error message. */
|
||||
if (!ffod.candidates || TREE_CHAIN (ffod.candidates))
|
||||
|
@ -1946,7 +1946,7 @@ find_final_overrider (tree derived, tree binfo, tree fn)
|
|||
static tree
|
||||
get_vcall_index (tree fn, tree type)
|
||||
{
|
||||
VEC (tree_pair_s) *indices = CLASSTYPE_VCALL_INDICES (type);
|
||||
VEC(tree_pair_s,gc) *indices = CLASSTYPE_VCALL_INDICES (type);
|
||||
tree_pair_p p;
|
||||
unsigned ix;
|
||||
|
||||
|
@ -2350,7 +2350,7 @@ check_for_override (tree decl, tree ctype)
|
|||
void
|
||||
warn_hidden (tree t)
|
||||
{
|
||||
VEC(tree) *method_vec = CLASSTYPE_METHOD_VEC (t);
|
||||
VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (t);
|
||||
tree fns;
|
||||
size_t i;
|
||||
|
||||
|
@ -3200,7 +3200,7 @@ walk_subobject_offsets (tree type,
|
|||
if (abi_version_at_least (2) && CLASSTYPE_VBASECLASSES (type))
|
||||
{
|
||||
unsigned ix;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
|
||||
/* Iterate through the virtual base classes of TYPE. In G++
|
||||
3.2, we included virtual bases in the direct base class
|
||||
|
@ -3678,7 +3678,7 @@ check_methods (tree t)
|
|||
{
|
||||
TYPE_POLYMORPHIC_P (t) = 1;
|
||||
if (DECL_PURE_VIRTUAL_P (x))
|
||||
VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (t), x);
|
||||
VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
|
||||
}
|
||||
/* All user-declared destructors are non-trivial. */
|
||||
if (DECL_DESTRUCTOR_P (x))
|
||||
|
@ -4202,7 +4202,7 @@ static void
|
|||
fixup_inline_methods (tree type)
|
||||
{
|
||||
tree method = TYPE_METHODS (type);
|
||||
VEC (tree) *friends;
|
||||
VEC(tree,gc) *friends;
|
||||
unsigned ix;
|
||||
|
||||
if (method && TREE_CODE (method) == TREE_VEC)
|
||||
|
@ -4370,7 +4370,7 @@ static tree
|
|||
end_of_class (tree t, int include_virtuals_p)
|
||||
{
|
||||
tree result = size_zero_node;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
tree binfo;
|
||||
tree base_binfo;
|
||||
tree offset;
|
||||
|
@ -4417,7 +4417,7 @@ static void
|
|||
warn_about_ambiguous_bases (tree t)
|
||||
{
|
||||
int i;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
tree basetype;
|
||||
tree binfo;
|
||||
tree base_binfo;
|
||||
|
@ -5129,7 +5129,7 @@ finish_struct (tree t, tree attributes)
|
|||
CLASSTYPE_PURE_VIRTUALS (t) = NULL;
|
||||
for (x = TYPE_METHODS (t); x; x = TREE_CHAIN (x))
|
||||
if (DECL_PURE_VIRTUAL_P (x))
|
||||
VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (t), x);
|
||||
VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
|
||||
complete_vars (t);
|
||||
}
|
||||
else
|
||||
|
@ -7130,7 +7130,7 @@ build_vtbl_initializer (tree binfo,
|
|||
vtbl_init_data vid;
|
||||
unsigned ix;
|
||||
tree vbinfo;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
|
||||
/* Initialize VID. */
|
||||
memset (&vid, 0, sizeof (vid));
|
||||
|
@ -7598,7 +7598,7 @@ add_vcall_offset (tree orig_fn, tree binfo, vtbl_init_data *vid)
|
|||
offset. */
|
||||
if (vid->binfo == TYPE_BINFO (vid->derived))
|
||||
{
|
||||
tree_pair_p elt = VEC_safe_push (tree_pair_s,
|
||||
tree_pair_p elt = VEC_safe_push (tree_pair_s, gc,
|
||||
CLASSTYPE_VCALL_INDICES (vid->derived),
|
||||
NULL);
|
||||
elt->purpose = orig_fn;
|
||||
|
|
|
@ -656,7 +656,7 @@ extern GTY(()) tree cp_global_trees[CPTI_MAX];
|
|||
|
||||
struct saved_scope GTY(())
|
||||
{
|
||||
VEC(cxx_saved_binding) *old_bindings;
|
||||
VEC(cxx_saved_binding,gc) *old_bindings;
|
||||
tree old_namespace;
|
||||
tree decl_ns_list;
|
||||
tree class_name;
|
||||
|
@ -972,7 +972,8 @@ typedef struct tree_pair_s GTY (())
|
|||
tree value;
|
||||
} tree_pair_s;
|
||||
typedef tree_pair_s *tree_pair_p;
|
||||
DEF_VEC_GC_O (tree_pair_s);
|
||||
DEF_VEC_O (tree_pair_s);
|
||||
DEF_VEC_ALLOC_O (tree_pair_s,gc);
|
||||
|
||||
/* This is a few header flags for 'struct lang_type'. Actually,
|
||||
all but the first are used only for lang_type_class; they
|
||||
|
@ -1059,15 +1060,15 @@ struct lang_type_class GTY(())
|
|||
unsigned dummy : 12;
|
||||
|
||||
tree primary_base;
|
||||
VEC (tree_pair_s) *vcall_indices;
|
||||
VEC(tree_pair_s,gc) *vcall_indices;
|
||||
tree vtables;
|
||||
tree typeinfo_var;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
binding_table nested_udts;
|
||||
tree as_base;
|
||||
VEC (tree) *pure_virtuals;
|
||||
VEC(tree,gc) *pure_virtuals;
|
||||
tree friend_classes;
|
||||
VEC (tree) * GTY((reorder ("resort_type_method_vec"))) methods;
|
||||
VEC(tree,gc) * GTY((reorder ("resort_type_method_vec"))) methods;
|
||||
tree key_method;
|
||||
tree decl_list;
|
||||
tree template_info;
|
||||
|
@ -4022,7 +4023,7 @@ extern void finish_repo (void);
|
|||
|
||||
/* in rtti.c */
|
||||
/* A vector of all tinfo decls that haven't been emitted yet. */
|
||||
extern GTY(()) VEC(tree) *unemitted_tinfo_decls;
|
||||
extern GTY(()) VEC(tree,gc) *unemitted_tinfo_decls;
|
||||
|
||||
extern void init_rtti_processing (void);
|
||||
extern tree build_typeid (tree);
|
||||
|
|
|
@ -9340,7 +9340,7 @@ xref_basetypes (tree ref, tree base_list)
|
|||
|
||||
if (max_bases)
|
||||
{
|
||||
BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, max_bases);
|
||||
BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, gc, max_bases);
|
||||
/* An aggregate cannot have baseclasses. */
|
||||
CLASSTYPE_NON_AGGREGATE (ref) = 1;
|
||||
|
||||
|
@ -9356,7 +9356,7 @@ xref_basetypes (tree ref, tree base_list)
|
|||
|
||||
if (max_vbases)
|
||||
{
|
||||
CLASSTYPE_VBASECLASSES (ref) = VEC_alloc (tree, max_vbases);
|
||||
CLASSTYPE_VBASECLASSES (ref) = VEC_alloc (tree, gc, max_vbases);
|
||||
|
||||
if (TYPE_FOR_JAVA (ref))
|
||||
error ("Java class %qT cannot have virtual bases", ref);
|
||||
|
@ -10866,7 +10866,7 @@ finish_method (tree decl)
|
|||
for String.cc in libg++. */
|
||||
if (DECL_FRIEND_P (fndecl))
|
||||
{
|
||||
VEC_safe_push (tree, CLASSTYPE_INLINE_FRIENDS (current_class_type),
|
||||
VEC_safe_push (tree, gc, CLASSTYPE_INLINE_FRIENDS (current_class_type),
|
||||
fndecl);
|
||||
decl = void_type_node;
|
||||
}
|
||||
|
|
|
@ -622,7 +622,7 @@ check_classfn (tree ctype, tree function, tree template_parms)
|
|||
ix = class_method_index_for_fn (complete_type (ctype), function);
|
||||
if (ix >= 0)
|
||||
{
|
||||
VEC(tree) *methods = CLASSTYPE_METHOD_VEC (ctype);
|
||||
VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (ctype);
|
||||
tree fndecls, fndecl = 0;
|
||||
bool is_conv_op;
|
||||
tree pushed_scope;
|
||||
|
|
|
@ -465,7 +465,7 @@ sort_mem_initializers (tree t, tree mem_inits)
|
|||
tree base, binfo, base_binfo;
|
||||
tree sorted_inits;
|
||||
tree next_subobject;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
int i;
|
||||
int uses_unions_p;
|
||||
|
||||
|
@ -2867,7 +2867,7 @@ push_base_cleanups (void)
|
|||
int i;
|
||||
tree member;
|
||||
tree expr;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
|
||||
/* Run destructors for all virtual baseclasses. */
|
||||
if (CLASSTYPE_VBASECLASSES (current_class_type))
|
||||
|
|
|
@ -537,7 +537,7 @@ do_build_copy_constructor (tree fndecl)
|
|||
int cvquals = cp_type_quals (TREE_TYPE (parm));
|
||||
int i;
|
||||
tree binfo, base_binfo;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
|
||||
/* Initialize all the base-classes with the parameter converted
|
||||
to their type so that we get their copy constructor and not
|
||||
|
|
|
@ -306,7 +306,7 @@ new_class_binding (tree name, tree value, tree type, cxx_scope *scope)
|
|||
{
|
||||
cp_class_binding *old_base;
|
||||
old_base = VEC_index (cp_class_binding, scope->class_shadowed, 0);
|
||||
if (VEC_reserve (cp_class_binding, scope->class_shadowed, -1))
|
||||
if (VEC_reserve (cp_class_binding, gc, scope->class_shadowed, 1))
|
||||
{
|
||||
/* Fixup the current bindings, as they might have moved. */
|
||||
size_t i;
|
||||
|
@ -325,7 +325,7 @@ new_class_binding (tree name, tree value, tree type, cxx_scope *scope)
|
|||
cb = VEC_quick_push (cp_class_binding, scope->class_shadowed, NULL);
|
||||
}
|
||||
else
|
||||
cb = VEC_safe_push (cp_class_binding, scope->class_shadowed, NULL);
|
||||
cb = VEC_safe_push (cp_class_binding, gc, scope->class_shadowed, NULL);
|
||||
|
||||
cb->identifier = name;
|
||||
binding = &cb->base;
|
||||
|
@ -4741,7 +4741,7 @@ struct saved_scope *scope_chain;
|
|||
*OLD_BINDINGS. */
|
||||
|
||||
static void
|
||||
store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
|
||||
store_binding (tree id, VEC(cxx_saved_binding,gc) **old_bindings)
|
||||
{
|
||||
cxx_saved_binding *saved;
|
||||
|
||||
|
@ -4753,7 +4753,7 @@ store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
|
|||
|
||||
IDENTIFIER_MARKED (id) = 1;
|
||||
|
||||
saved = VEC_safe_push (cxx_saved_binding, *old_bindings, NULL);
|
||||
saved = VEC_safe_push (cxx_saved_binding, gc, *old_bindings, NULL);
|
||||
saved->identifier = id;
|
||||
saved->binding = IDENTIFIER_BINDING (id);
|
||||
saved->real_type_value = REAL_IDENTIFIER_TYPE_VALUE (id);
|
||||
|
@ -4761,7 +4761,7 @@ store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
|
|||
}
|
||||
|
||||
static void
|
||||
store_bindings (tree names, VEC(cxx_saved_binding) **old_bindings)
|
||||
store_bindings (tree names, VEC(cxx_saved_binding,gc) **old_bindings)
|
||||
{
|
||||
tree t;
|
||||
|
||||
|
@ -4784,8 +4784,8 @@ store_bindings (tree names, VEC(cxx_saved_binding) **old_bindings)
|
|||
objects, rather than a TREE_LIST. */
|
||||
|
||||
static void
|
||||
store_class_bindings (VEC(cp_class_binding) *names,
|
||||
VEC(cxx_saved_binding) **old_bindings)
|
||||
store_class_bindings (VEC(cp_class_binding,gc) *names,
|
||||
VEC(cxx_saved_binding,gc) **old_bindings)
|
||||
{
|
||||
size_t i;
|
||||
cp_class_binding *cb;
|
||||
|
|
|
@ -89,7 +89,8 @@ typedef struct cxx_saved_binding GTY(())
|
|||
tree real_type_value;
|
||||
} cxx_saved_binding;
|
||||
|
||||
DEF_VEC_GC_O(cxx_saved_binding);
|
||||
DEF_VEC_O(cxx_saved_binding);
|
||||
DEF_VEC_ALLOC_O(cxx_saved_binding,gc);
|
||||
|
||||
extern tree identifier_type_value (tree);
|
||||
extern void set_identifier_type_value (tree, tree);
|
||||
|
@ -145,7 +146,8 @@ typedef struct cp_class_binding GTY(())
|
|||
tree identifier;
|
||||
} cp_class_binding;
|
||||
|
||||
DEF_VEC_GC_O(cp_class_binding);
|
||||
DEF_VEC_O(cp_class_binding);
|
||||
DEF_VEC_ALLOC_O(cp_class_binding,gc);
|
||||
|
||||
/* For each binding contour we allocate a binding_level structure
|
||||
which records the names defined in that contour.
|
||||
|
@ -200,7 +202,7 @@ struct cp_binding_level GTY(())
|
|||
|
||||
/* For the binding level corresponding to a class, the entities
|
||||
declared in the class or its base classes. */
|
||||
VEC(cp_class_binding) *class_shadowed;
|
||||
VEC(cp_class_binding,gc) *class_shadowed;
|
||||
|
||||
/* Similar to class_shadowed, but for IDENTIFIER_TYPE_VALUE, and
|
||||
is used for all binding levels. The TREE_PURPOSE is the name of
|
||||
|
|
|
@ -66,7 +66,8 @@ typedef struct cp_token GTY (())
|
|||
|
||||
/* We use a stack of token pointer for saving token sets. */
|
||||
typedef struct cp_token *cp_token_position;
|
||||
DEF_VEC_MALLOC_P (cp_token_position);
|
||||
DEF_VEC_P (cp_token_position);
|
||||
DEF_VEC_ALLOC_P (cp_token_position,heap);
|
||||
|
||||
static const cp_token eof_token =
|
||||
{
|
||||
|
@ -104,7 +105,7 @@ typedef struct cp_lexer GTY (())
|
|||
called. The top entry is the most recent position at which we
|
||||
began saving tokens. If the stack is non-empty, we are saving
|
||||
tokens. */
|
||||
VEC (cp_token_position) *GTY ((skip)) saved_tokens;
|
||||
VEC(cp_token_position,heap) *GTY ((skip)) saved_tokens;
|
||||
|
||||
/* True if we should output debugging information. */
|
||||
bool debugging_p;
|
||||
|
@ -260,7 +261,8 @@ cp_lexer_new_main (void)
|
|||
/* Initially we are not debugging. */
|
||||
lexer->debugging_p = false;
|
||||
#endif /* ENABLE_CHECKING */
|
||||
lexer->saved_tokens = VEC_alloc (cp_token_position, CP_SAVED_TOKEN_STACK);
|
||||
lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
|
||||
CP_SAVED_TOKEN_STACK);
|
||||
|
||||
/* Create the buffer. */
|
||||
alloc = CP_LEXER_BUFFER_SIZE;
|
||||
|
@ -314,7 +316,8 @@ cp_lexer_new_from_tokens (cp_token_cache *cache)
|
|||
lexer->next_token = first == last ? (cp_token *)&eof_token : first;
|
||||
lexer->last_token = last;
|
||||
|
||||
lexer->saved_tokens = VEC_alloc (cp_token_position, CP_SAVED_TOKEN_STACK);
|
||||
lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
|
||||
CP_SAVED_TOKEN_STACK);
|
||||
|
||||
#ifdef ENABLE_CHECKING
|
||||
/* Initially we are not debugging. */
|
||||
|
@ -332,7 +335,7 @@ cp_lexer_destroy (cp_lexer *lexer)
|
|||
{
|
||||
if (lexer->buffer)
|
||||
ggc_free (lexer->buffer);
|
||||
VEC_free (cp_token_position, lexer->saved_tokens);
|
||||
VEC_free (cp_token_position, heap, lexer->saved_tokens);
|
||||
ggc_free (lexer);
|
||||
}
|
||||
|
||||
|
@ -621,7 +624,8 @@ cp_lexer_save_tokens (cp_lexer* lexer)
|
|||
if (cp_lexer_debugging_p (lexer))
|
||||
fprintf (cp_lexer_debug_stream, "cp_lexer: saving tokens\n");
|
||||
|
||||
VEC_safe_push (cp_token_position, lexer->saved_tokens, lexer->next_token);
|
||||
VEC_safe_push (cp_token_position, heap,
|
||||
lexer->saved_tokens, lexer->next_token);
|
||||
}
|
||||
|
||||
/* Commit to the portion of the token stream most recently saved. */
|
||||
|
|
|
@ -822,7 +822,7 @@ retrieve_specialization (tree tmpl, tree args,
|
|||
{
|
||||
tree class_template;
|
||||
tree class_specialization;
|
||||
VEC(tree) *methods;
|
||||
VEC(tree,gc) *methods;
|
||||
tree fns;
|
||||
int idx;
|
||||
|
||||
|
@ -1967,7 +1967,7 @@ check_explicit_specialization (tree declarator,
|
|||
}
|
||||
else
|
||||
{
|
||||
VEC(tree) *methods;
|
||||
VEC(tree,gc) *methods;
|
||||
tree ovl;
|
||||
|
||||
/* For a type-conversion operator, we cannot do a
|
||||
|
|
|
@ -75,7 +75,7 @@ Boston, MA 02111-1307, USA. */
|
|||
#define TINFO_REAL_NAME(NODE) TREE_PURPOSE (NODE)
|
||||
|
||||
/* A vector of all tinfo decls that haven't yet been emitted. */
|
||||
VEC (tree) *unemitted_tinfo_decls;
|
||||
VEC(tree,gc) *unemitted_tinfo_decls;
|
||||
|
||||
static tree build_headof (tree);
|
||||
static tree ifnonnull (tree, tree);
|
||||
|
@ -119,7 +119,7 @@ init_rtti_processing (void)
|
|||
= build_qualified_type (type_info_type, TYPE_QUAL_CONST);
|
||||
type_info_ptr_type = build_pointer_type (const_type_info_type_node);
|
||||
|
||||
unemitted_tinfo_decls = VEC_alloc (tree, 124);
|
||||
unemitted_tinfo_decls = VEC_alloc (tree, gc, 124);
|
||||
|
||||
create_tinfo_types ();
|
||||
}
|
||||
|
@ -364,7 +364,7 @@ get_tinfo_decl (tree type)
|
|||
pushdecl_top_level_and_finish (d, NULL_TREE);
|
||||
|
||||
/* Add decl to the global array of tinfo decls. */
|
||||
VEC_safe_push (tree, unemitted_tinfo_decls, d);
|
||||
VEC_safe_push (tree, gc, unemitted_tinfo_decls, d);
|
||||
}
|
||||
|
||||
return d;
|
||||
|
@ -1003,7 +1003,7 @@ get_pseudo_ti_init (tree type, tree var_desc)
|
|||
| (CLASSTYPE_DIAMOND_SHAPED_P (type) << 1));
|
||||
tree binfo = TYPE_BINFO (type);
|
||||
int nbases = BINFO_N_BASE_BINFOS (binfo);
|
||||
VEC (tree) *base_accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
tree base_inits = NULL_TREE;
|
||||
int ix;
|
||||
|
||||
|
@ -1152,7 +1152,7 @@ get_pseudo_ti_desc (tree type)
|
|||
else
|
||||
{
|
||||
tree binfo = TYPE_BINFO (type);
|
||||
VEC (tree) *base_accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
|
||||
int num_bases = BINFO_N_BASE_BINFOS (binfo);
|
||||
|
||||
|
|
|
@ -641,7 +641,7 @@ dfs_access_in_type (tree binfo, void *data)
|
|||
{
|
||||
int i;
|
||||
tree base_binfo;
|
||||
VEC (tree) *accesses;
|
||||
VEC(tree,gc) *accesses;
|
||||
|
||||
/* Otherwise, scan our baseclasses, and pick the most favorable
|
||||
access. */
|
||||
|
@ -1314,7 +1314,7 @@ lookup_conversion_operator (tree class_type, tree type)
|
|||
{
|
||||
int i;
|
||||
tree fn;
|
||||
VEC(tree) *methods = CLASSTYPE_METHOD_VEC (class_type);
|
||||
VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (class_type);
|
||||
|
||||
for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
|
||||
VEC_iterate (tree, methods, i, fn); ++i)
|
||||
|
@ -1345,7 +1345,7 @@ lookup_conversion_operator (tree class_type, tree type)
|
|||
int
|
||||
lookup_fnfields_1 (tree type, tree name)
|
||||
{
|
||||
VEC(tree) *method_vec;
|
||||
VEC(tree,gc) *method_vec;
|
||||
tree fn;
|
||||
tree tmp;
|
||||
size_t i;
|
||||
|
@ -1658,7 +1658,7 @@ dfs_walk_once (tree binfo, tree (*pre_fn) (tree, void *),
|
|||
/* We are at the top of the hierarchy, and can use the
|
||||
CLASSTYPE_VBASECLASSES list for unmarking the virtual
|
||||
bases. */
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
unsigned ix;
|
||||
tree base_binfo;
|
||||
|
||||
|
@ -1766,7 +1766,7 @@ dfs_walk_once_accessible (tree binfo, bool friends_p,
|
|||
/* We are at the top of the hierarchy, and can use the
|
||||
CLASSTYPE_VBASECLASSES list for unmarking the virtual
|
||||
bases. */
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
unsigned ix;
|
||||
tree base_binfo;
|
||||
|
||||
|
@ -2000,7 +2000,7 @@ dfs_get_pure_virtuals (tree binfo, void *data)
|
|||
virtuals;
|
||||
virtuals = TREE_CHAIN (virtuals))
|
||||
if (DECL_PURE_VIRTUAL_P (BV_FN (virtuals)))
|
||||
VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (type),
|
||||
VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (type),
|
||||
BV_FN (virtuals));
|
||||
}
|
||||
|
||||
|
@ -2270,7 +2270,7 @@ lookup_conversions_r (tree binfo,
|
|||
tree child_tpl_convs = NULL_TREE;
|
||||
unsigned i;
|
||||
tree base_binfo;
|
||||
VEC(tree) *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
|
||||
VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
|
||||
tree conv;
|
||||
|
||||
/* If we have no conversion operators, then don't look. */
|
||||
|
@ -2523,7 +2523,7 @@ binfo_for_vbase (tree base, tree t)
|
|||
{
|
||||
unsigned ix;
|
||||
tree binfo;
|
||||
VEC (tree) *vbases;
|
||||
VEC(tree,gc) *vbases;
|
||||
|
||||
for (vbases = CLASSTYPE_VBASECLASSES (t), ix = 0;
|
||||
VEC_iterate (tree, vbases, ix, binfo); ix++)
|
||||
|
|
|
@ -139,10 +139,11 @@ typedef struct deferred_access GTY(())
|
|||
enum deferring_kind deferring_access_checks_kind;
|
||||
|
||||
} deferred_access;
|
||||
DEF_VEC_GC_O (deferred_access);
|
||||
DEF_VEC_O (deferred_access);
|
||||
DEF_VEC_ALLOC_O (deferred_access,gc);
|
||||
|
||||
/* Data for deferred access checking. */
|
||||
static GTY(()) VEC (deferred_access) *deferred_access_stack;
|
||||
static GTY(()) VEC(deferred_access,gc) *deferred_access_stack;
|
||||
static GTY(()) unsigned deferred_access_no_check;
|
||||
|
||||
/* Save the current deferred access states and start deferred
|
||||
|
@ -159,7 +160,7 @@ push_deferring_access_checks (deferring_kind deferring)
|
|||
{
|
||||
deferred_access *ptr;
|
||||
|
||||
ptr = VEC_safe_push (deferred_access, deferred_access_stack, NULL);
|
||||
ptr = VEC_safe_push (deferred_access, gc, deferred_access_stack, NULL);
|
||||
ptr->deferred_access_checks = NULL_TREE;
|
||||
ptr->deferring_access_checks_kind = deferring;
|
||||
}
|
||||
|
|
|
@ -235,7 +235,7 @@ complete_type_check_abstract (tree type)
|
|||
int
|
||||
abstract_virtuals_error (tree decl, tree type)
|
||||
{
|
||||
VEC (tree) *pure;
|
||||
VEC(tree,gc) *pure;
|
||||
|
||||
/* This function applies only to classes. Any other entity can never
|
||||
be abstract. */
|
||||
|
|
|
@ -2063,7 +2063,7 @@ dbxout_type (tree type, int full)
|
|||
{
|
||||
int i;
|
||||
tree child;
|
||||
VEC (tree) *accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
|
||||
if (use_gnu_debug_info_extensions)
|
||||
{
|
||||
|
|
|
@ -12004,7 +12004,7 @@ gen_member_die (tree type, dw_die_ref context_die)
|
|||
/* First output info about the base classes. */
|
||||
if (binfo)
|
||||
{
|
||||
VEC (tree) *accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
|
||||
int i;
|
||||
tree base;
|
||||
|
||||
|
|
|
@ -33,8 +33,10 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
|||
#define YY_INPUT(BUF,RESULT,SIZE) ((RESULT) = macro_input (BUF,SIZE))
|
||||
|
||||
static unsigned macro_input (char *buffer, unsigned);
|
||||
static void push_macro_expansion (const char *, unsigned,
|
||||
const char *, unsigned);
|
||||
static const char *push_macro_expansion (const char *, unsigned,
|
||||
const char *, unsigned);
|
||||
static char *mangle_macro_name (const char *, unsigned,
|
||||
const char *, unsigned);
|
||||
static void update_lineno (const char *l, size_t len);
|
||||
|
||||
struct fileloc lexer_line;
|
||||
|
@ -237,33 +239,40 @@ ITYPE {IWORD}({WS}{IWORD})*
|
|||
return ENT_YACCUNION;
|
||||
}
|
||||
|
||||
^"DEF_VEC_"[[:alnum:]_]*{WS}?"("{WS}?{ID}{WS}?")" {
|
||||
^"DEF_VEC_"[[:alnum:]_]*{WS}?"("{WS}?{ID}{WS}?(","{WS}?{ID}{WS}?)*")" {
|
||||
char *macro, *arg;
|
||||
unsigned macro_len, arg_len;
|
||||
char *ptr = yytext;
|
||||
const char *additional;
|
||||
type_p t;
|
||||
|
||||
/* Locate the macro and argument strings. */
|
||||
macro = ptr;
|
||||
while (*ptr != '(' && !ISSPACE (*ptr))
|
||||
ptr++;
|
||||
macro_len = ptr - macro;
|
||||
while (*ptr == '(' || ISSPACE (*ptr))
|
||||
ptr++;
|
||||
arg = ptr;
|
||||
while (*ptr != ')' && !ISSPACE (*ptr))
|
||||
ptr++;
|
||||
/* Find the macro name. */
|
||||
for (macro = ptr; *ptr != '(' && !ISSPACE (*ptr); ptr++)
|
||||
continue;
|
||||
for (macro_len = ptr - macro; !(ISALNUM (*ptr) || *ptr == '_'); ptr++)
|
||||
continue;
|
||||
|
||||
/* Find the argument(s). */
|
||||
for (arg = ptr; *ptr != ')'; ptr++)
|
||||
continue;
|
||||
arg_len = ptr - arg;
|
||||
|
||||
/* Push the macro for later expansion. */
|
||||
push_macro_expansion (macro, macro_len, arg, arg_len);
|
||||
|
||||
/* Create the struct and typedef. */
|
||||
ptr = (char *) xmemdup ("VEC_", 4, 4 + arg_len + 1);
|
||||
memcpy (&ptr[4], arg, arg_len);
|
||||
ptr[4 + arg_len] = 0;
|
||||
ptr = mangle_macro_name ("VEC", 3, arg, arg_len);
|
||||
|
||||
t = find_structure (ptr, 0);
|
||||
do_typedef (ptr, t, &lexer_line);
|
||||
|
||||
/* Push the macro for later expansion. */
|
||||
additional = push_macro_expansion (macro, macro_len, arg, arg_len);
|
||||
|
||||
if (additional)
|
||||
{
|
||||
ptr = mangle_macro_name (ptr, strlen (ptr),
|
||||
additional, strlen (additional));
|
||||
t = find_structure (ptr, 0);
|
||||
do_typedef (ptr, t, &lexer_line);
|
||||
}
|
||||
}
|
||||
|
||||
<in_struct>{
|
||||
|
@ -299,24 +308,23 @@ ITYPE {IWORD}({WS}{IWORD})*
|
|||
return SCALAR;
|
||||
}
|
||||
|
||||
"VEC"{WS}?"("{WS}?{ID}{WS}?")" {
|
||||
"VEC"{WS}?"("{WS}?{ID}{WS}?(","{WS}?{ID}{WS}?)*")" {
|
||||
char *macro, *arg;
|
||||
unsigned macro_len, arg_len;
|
||||
char *ptr = yytext;
|
||||
|
||||
macro = ptr;
|
||||
while (*ptr != '(' && !ISSPACE (*ptr)) /* )*/
|
||||
ptr++;
|
||||
macro_len = ptr - macro;
|
||||
while (*ptr == '(' || ISSPACE (*ptr))
|
||||
ptr++;
|
||||
arg = ptr;
|
||||
while (*ptr != ')' && !ISSPACE (*ptr))
|
||||
ptr++;
|
||||
/* Find the macro name */
|
||||
for (macro = ptr; *ptr != '(' && !ISSPACE (*ptr); ptr++)
|
||||
continue;
|
||||
for (macro_len = ptr - macro; !(ISALNUM(*ptr) || *ptr == '_'); ptr++)
|
||||
continue;
|
||||
|
||||
/* Find the arguments. */
|
||||
for (arg = ptr; *ptr != ')'; ptr++)
|
||||
continue;
|
||||
arg_len = ptr - arg;
|
||||
ptr = (char *) xmemdup (macro, macro_len, macro_len + arg_len + 2);
|
||||
ptr[macro_len] = '_';
|
||||
memcpy (&ptr[macro_len+1], arg, arg_len);
|
||||
|
||||
ptr = mangle_macro_name (macro, macro_len, arg, arg_len);
|
||||
yylval.s = ptr;
|
||||
return ID;
|
||||
}
|
||||
|
@ -411,14 +419,43 @@ ITYPE {IWORD}({WS}{IWORD})*
|
|||
|
||||
/* Deal with the expansion caused by the DEF_VEC_x macros. */
|
||||
|
||||
typedef struct macro
|
||||
/* Mangle a macro and argument list as done by cpp concatenation in
|
||||
the compiler proper. */
|
||||
static char *
|
||||
mangle_macro_name (const char *macro, unsigned macro_len,
|
||||
const char *arg, unsigned arg_len)
|
||||
{
|
||||
char *ptr = (char *) xmemdup (macro, macro_len, macro_len + arg_len + 2);
|
||||
|
||||
/* Now copy and concatenate each argument */
|
||||
while (arg_len)
|
||||
{
|
||||
ptr[macro_len++] = '_';
|
||||
for (; arg_len && (ISALNUM(*arg) || *arg == '_'); arg_len--)
|
||||
ptr[macro_len++] = *arg++;
|
||||
for (; arg_len && !(ISALNUM(*arg) || *arg == '_'); arg_len--)
|
||||
arg++;
|
||||
}
|
||||
ptr[macro_len] = 0;
|
||||
|
||||
return ptr;
|
||||
}
|
||||
|
||||
typedef struct macro_def
|
||||
{
|
||||
const char *name;
|
||||
const char *expansion;
|
||||
const char *additional;
|
||||
} macro_def_t;
|
||||
|
||||
typedef struct macro
|
||||
{
|
||||
const macro_def_t *def;
|
||||
struct macro *next;
|
||||
const char *args[10];
|
||||
} macro_t;
|
||||
|
||||
static const macro_t macro_defs[] =
|
||||
static const macro_def_t macro_defs[] =
|
||||
{
|
||||
#define IN_GENGTYPE 1
|
||||
#include "vec.h"
|
||||
|
@ -427,11 +464,12 @@ static const macro_t macro_defs[] =
|
|||
|
||||
/* Chain of macro expansions to do at end of scanning. */
|
||||
static macro_t *macro_expns;
|
||||
static macro_t *macro_expns_end;
|
||||
|
||||
/* Push macro NAME (NAME_LEN) with argument ARG (ARG_LEN) onto the
|
||||
expansion queue. We ensure NAME is known at this point. */
|
||||
|
||||
static void
|
||||
static const char *
|
||||
push_macro_expansion (const char *name, unsigned name_len,
|
||||
const char *arg, unsigned arg_len)
|
||||
{
|
||||
|
@ -442,15 +480,51 @@ push_macro_expansion (const char *name, unsigned name_len,
|
|||
&& !memcmp (name, macro_defs[ix].name, name_len))
|
||||
{
|
||||
macro_t *expansion = XNEW (macro_t);
|
||||
char *args;
|
||||
unsigned argno, last_arg;
|
||||
|
||||
expansion->next = macro_expns;
|
||||
expansion->name = (char *) xmemdup (arg, arg_len, arg_len+1);
|
||||
expansion->expansion = macro_defs[ix].expansion;
|
||||
macro_expns = expansion;
|
||||
return;
|
||||
expansion->def = ¯o_defs[ix];
|
||||
expansion->next = NULL;
|
||||
args = (char *) xmemdup (arg, arg_len, arg_len+1);
|
||||
args[arg_len] = 0;
|
||||
for (argno = 0; *args;)
|
||||
{
|
||||
expansion->args[argno++] = args;
|
||||
while (*args && (ISALNUM (*args) || *args == '_'))
|
||||
args++;
|
||||
if (argno == 1)
|
||||
expansion->args[argno++] = "base";
|
||||
if (!*args)
|
||||
break;
|
||||
*args++ = 0;
|
||||
while (*args && !(ISALNUM (*args) || *args == '_'))
|
||||
args++;
|
||||
}
|
||||
last_arg = argno;
|
||||
for (; argno != 10; argno++)
|
||||
expansion->args[argno] = NULL;
|
||||
if (macro_expns_end)
|
||||
macro_expns_end->next = expansion;
|
||||
else
|
||||
macro_expns = expansion;
|
||||
macro_expns_end = expansion;
|
||||
if (macro_defs[ix].additional)
|
||||
{
|
||||
macro_t *expn2 = XNEW (macro_t);
|
||||
memcpy (expn2, expansion, sizeof (*expn2));
|
||||
expansion = expn2;
|
||||
expansion->def += 1;
|
||||
expansion->args[last_arg++] = macro_defs[ix].additional;
|
||||
macro_expns_end->next = expansion;
|
||||
macro_expns_end = expansion;
|
||||
}
|
||||
if (last_arg > 2 && strcmp (expansion->args[last_arg - 1], "heap"))
|
||||
expansion->args[last_arg++] = "GTY (())";
|
||||
return macro_defs[ix].additional;
|
||||
}
|
||||
error_at_line (&lexer_line, "unrecognized macro `%.*s(%.*s)'",
|
||||
name_len, name, arg_len, arg);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* Attempt to read some input. Use fread until we're at the end of
|
||||
|
@ -472,15 +546,30 @@ macro_input (char *buffer, unsigned size)
|
|||
const char *expn;
|
||||
unsigned len;
|
||||
|
||||
for (expn = macro_expns->expansion; *expn; expn++)
|
||||
for (expn = macro_expns->def->expansion; *expn; expn++)
|
||||
{
|
||||
if (*expn == '#')
|
||||
{
|
||||
int argno;
|
||||
|
||||
argno = expn[1] - '0';
|
||||
expn += 1;
|
||||
|
||||
/* Remove inserted space? */
|
||||
if (buffer[result-1] == ' ' && buffer[result-2] == '_')
|
||||
result--;
|
||||
len = strlen (macro_expns->name);
|
||||
memcpy (&buffer[result], macro_expns->name, len);
|
||||
result += len;
|
||||
|
||||
/* Insert the argument value */
|
||||
if (macro_expns->args[argno])
|
||||
{
|
||||
len = strlen (macro_expns->args[argno]);
|
||||
memcpy (&buffer[result], macro_expns->args[argno], len);
|
||||
result += len;
|
||||
}
|
||||
|
||||
/* Skip next space? */
|
||||
if (expn[1] == ' ' && expn[2] == '_')
|
||||
expn++;
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -492,6 +581,8 @@ macro_input (char *buffer, unsigned size)
|
|||
if (result > size)
|
||||
YY_FATAL_ERROR ("buffer too small to expand macro");
|
||||
macro_expns = macro_expns->next;
|
||||
if (!macro_expns)
|
||||
macro_expns_end = NULL;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -115,12 +115,16 @@
|
|||
Fourier-Motzkin elimination is used to compute the bounds of the base space
|
||||
of the lattice. */
|
||||
|
||||
/* FIXME: I'm sure the vectors used here could be heap allocated.
|
||||
There certainly should be explicit VEC_frees, either way. (nathan
|
||||
2005/04/14) */
|
||||
|
||||
DEF_VEC_GC_P(int);
|
||||
DEF_VEC_P(int);
|
||||
DEF_VEC_ALLOC_P(int,gc);
|
||||
|
||||
static bool perfect_nestify (struct loops *,
|
||||
struct loop *, VEC (tree) *,
|
||||
VEC (tree) *, VEC (int) *, VEC (tree) *);
|
||||
struct loop *, VEC(tree,gc) *,
|
||||
VEC(tree,gc) *, VEC(int,gc) *, VEC(tree,gc) *);
|
||||
/* Lattice stuff that is internal to the code generation algorithm. */
|
||||
|
||||
typedef struct
|
||||
|
@ -1152,8 +1156,8 @@ lambda_loopnest_transform (lambda_loopnest nest, lambda_trans_matrix trans)
|
|||
|
||||
static lambda_linear_expression
|
||||
gcc_tree_to_linear_expression (int depth, tree expr,
|
||||
VEC(tree) *outerinductionvars,
|
||||
VEC(tree) *invariants, int extra)
|
||||
VEC(tree,gc) *outerinductionvars,
|
||||
VEC(tree,gc) *invariants, int extra)
|
||||
{
|
||||
lambda_linear_expression lle = NULL;
|
||||
switch (TREE_CODE (expr))
|
||||
|
@ -1248,12 +1252,12 @@ invariant_in_loop_and_outer_loops (struct loop *loop, tree op)
|
|||
|
||||
static lambda_loop
|
||||
gcc_loop_to_lambda_loop (struct loop *loop, int depth,
|
||||
VEC (tree) ** invariants,
|
||||
VEC(tree,gc) ** invariants,
|
||||
tree * ourinductionvar,
|
||||
VEC (tree) * outerinductionvars,
|
||||
VEC (tree) ** lboundvars,
|
||||
VEC (tree) ** uboundvars,
|
||||
VEC (int) ** steps)
|
||||
VEC(tree,gc) * outerinductionvars,
|
||||
VEC(tree,gc) ** lboundvars,
|
||||
VEC(tree,gc) ** uboundvars,
|
||||
VEC(int,gc) ** steps)
|
||||
{
|
||||
tree phi;
|
||||
tree exit_cond;
|
||||
|
@ -1403,10 +1407,10 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
|
|||
/* One part of the test may be a loop invariant tree. */
|
||||
if (TREE_CODE (TREE_OPERAND (test, 1)) == SSA_NAME
|
||||
&& invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 1)))
|
||||
VEC_safe_push (tree, *invariants, TREE_OPERAND (test, 1));
|
||||
VEC_safe_push (tree, gc, *invariants, TREE_OPERAND (test, 1));
|
||||
else if (TREE_CODE (TREE_OPERAND (test, 0)) == SSA_NAME
|
||||
&& invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 0)))
|
||||
VEC_safe_push (tree, *invariants, TREE_OPERAND (test, 0));
|
||||
VEC_safe_push (tree, gc, *invariants, TREE_OPERAND (test, 0));
|
||||
|
||||
/* The non-induction variable part of the test is the upper bound variable.
|
||||
*/
|
||||
|
@ -1438,9 +1442,9 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
|
|||
*invariants, extra);
|
||||
uboundresult = build (PLUS_EXPR, TREE_TYPE (uboundvar), uboundvar,
|
||||
build_int_cst (TREE_TYPE (uboundvar), extra));
|
||||
VEC_safe_push (tree, *uboundvars, uboundresult);
|
||||
VEC_safe_push (tree, *lboundvars, lboundvar);
|
||||
VEC_safe_push (int, *steps, stepint);
|
||||
VEC_safe_push (tree, gc, *uboundvars, uboundresult);
|
||||
VEC_safe_push (tree, gc, *lboundvars, lboundvar);
|
||||
VEC_safe_push (int, gc, *steps, stepint);
|
||||
if (!ubound)
|
||||
{
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
|
@ -1488,7 +1492,9 @@ find_induction_var_from_exit_cond (struct loop *loop)
|
|||
return ivarop;
|
||||
}
|
||||
|
||||
DEF_VEC_GC_P(lambda_loop);
|
||||
DEF_VEC_P(lambda_loop);
|
||||
DEF_VEC_ALLOC_P(lambda_loop,gc);
|
||||
|
||||
/* Generate a lambda loopnest from a gcc loopnest LOOP_NEST.
|
||||
Return the new loop nest.
|
||||
INDUCTIONVARS is a pointer to an array of induction variables for the
|
||||
|
@ -1499,18 +1505,18 @@ DEF_VEC_GC_P(lambda_loop);
|
|||
lambda_loopnest
|
||||
gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
|
||||
struct loop * loop_nest,
|
||||
VEC (tree) **inductionvars,
|
||||
VEC (tree) **invariants,
|
||||
VEC(tree,gc) **inductionvars,
|
||||
VEC(tree,gc) **invariants,
|
||||
bool need_perfect_nest)
|
||||
{
|
||||
lambda_loopnest ret;
|
||||
struct loop *temp;
|
||||
int depth = 0;
|
||||
size_t i;
|
||||
VEC (lambda_loop) *loops = NULL;
|
||||
VEC (tree) *uboundvars = NULL;
|
||||
VEC (tree) *lboundvars = NULL;
|
||||
VEC (int) *steps = NULL;
|
||||
VEC(lambda_loop,gc) *loops = NULL;
|
||||
VEC(tree,gc) *uboundvars = NULL;
|
||||
VEC(tree,gc) *lboundvars = NULL;
|
||||
VEC(int,gc) *steps = NULL;
|
||||
lambda_loop newloop;
|
||||
tree inductionvar = NULL;
|
||||
|
||||
|
@ -1524,8 +1530,8 @@ gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
|
|||
&steps);
|
||||
if (!newloop)
|
||||
return NULL;
|
||||
VEC_safe_push (tree, *inductionvars, inductionvar);
|
||||
VEC_safe_push (lambda_loop, loops, newloop);
|
||||
VEC_safe_push (tree, gc, *inductionvars, inductionvar);
|
||||
VEC_safe_push (lambda_loop, gc, loops, newloop);
|
||||
temp = temp->inner;
|
||||
}
|
||||
if (need_perfect_nest)
|
||||
|
@ -1559,7 +1565,7 @@ gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
|
|||
|
||||
static tree
|
||||
lbv_to_gcc_expression (lambda_body_vector lbv,
|
||||
tree type, VEC (tree) *induction_vars,
|
||||
tree type, VEC(tree,gc) *induction_vars,
|
||||
tree * stmts_to_insert)
|
||||
{
|
||||
tree stmts, stmt, resvar, name;
|
||||
|
@ -1642,15 +1648,15 @@ static tree
|
|||
lle_to_gcc_expression (lambda_linear_expression lle,
|
||||
lambda_linear_expression offset,
|
||||
tree type,
|
||||
VEC(tree) *induction_vars,
|
||||
VEC(tree) *invariants,
|
||||
VEC(tree,gc) *induction_vars,
|
||||
VEC(tree,gc) *invariants,
|
||||
enum tree_code wrap, tree * stmts_to_insert)
|
||||
{
|
||||
tree stmts, stmt, resvar, name;
|
||||
size_t i;
|
||||
tree_stmt_iterator tsi;
|
||||
tree iv, invar;
|
||||
VEC(tree) *results = NULL;
|
||||
VEC(tree,gc) *results = NULL;
|
||||
|
||||
name = NULL_TREE;
|
||||
/* Create a statement list and a linear expression temporary. */
|
||||
|
@ -1801,7 +1807,7 @@ lle_to_gcc_expression (lambda_linear_expression lle,
|
|||
tsi = tsi_last (stmts);
|
||||
tsi_link_after (&tsi, stmt, TSI_CONTINUE_LINKING);
|
||||
}
|
||||
VEC_safe_push (tree, results, name);
|
||||
VEC_safe_push (tree, gc, results, name);
|
||||
}
|
||||
|
||||
/* Again, out of laziness, we don't handle this case yet. It's not
|
||||
|
@ -1839,8 +1845,8 @@ lle_to_gcc_expression (lambda_linear_expression lle,
|
|||
|
||||
void
|
||||
lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
|
||||
VEC(tree) *old_ivs,
|
||||
VEC(tree) *invariants,
|
||||
VEC(tree,gc) *old_ivs,
|
||||
VEC(tree,gc) *invariants,
|
||||
lambda_loopnest new_loopnest,
|
||||
lambda_trans_matrix transform)
|
||||
{
|
||||
|
@ -1848,7 +1854,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
|
|||
struct loop *temp;
|
||||
size_t i = 0;
|
||||
size_t depth = 0;
|
||||
VEC(tree) *new_ivs = NULL;
|
||||
VEC(tree,gc) *new_ivs = NULL;
|
||||
tree oldiv;
|
||||
|
||||
block_stmt_iterator bsi;
|
||||
|
@ -1883,7 +1889,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
|
|||
ivvar = create_tmp_var (type, "lnivtmp");
|
||||
add_referenced_tmp_var (ivvar);
|
||||
|
||||
VEC_safe_push (tree, new_ivs, ivvar);
|
||||
VEC_safe_push (tree, gc, new_ivs, ivvar);
|
||||
|
||||
newloop = LN_LOOPS (new_loopnest)[i];
|
||||
|
||||
|
@ -2207,7 +2213,7 @@ stmt_uses_op (tree stmt, tree op)
|
|||
|
||||
static bool
|
||||
can_convert_to_perfect_nest (struct loop *loop,
|
||||
VEC (tree) *loopivs)
|
||||
VEC(tree,gc) *loopivs)
|
||||
{
|
||||
basic_block *bbs;
|
||||
tree exit_condition, phi;
|
||||
|
@ -2307,10 +2313,10 @@ can_convert_to_perfect_nest (struct loop *loop,
|
|||
static bool
|
||||
perfect_nestify (struct loops *loops,
|
||||
struct loop *loop,
|
||||
VEC (tree) *lbounds,
|
||||
VEC (tree) *ubounds,
|
||||
VEC (int) *steps,
|
||||
VEC (tree) *loopivs)
|
||||
VEC(tree,gc) *lbounds,
|
||||
VEC(tree,gc) *ubounds,
|
||||
VEC(int,gc) *steps,
|
||||
VEC(tree,gc) *loopivs)
|
||||
{
|
||||
basic_block *bbs;
|
||||
tree exit_condition;
|
||||
|
@ -2325,7 +2331,7 @@ perfect_nestify (struct loops *loops,
|
|||
tree uboundvar;
|
||||
tree stmt;
|
||||
tree oldivvar, ivvar, ivvarinced;
|
||||
VEC (tree) *phis = NULL;
|
||||
VEC(tree,gc) *phis = NULL;
|
||||
|
||||
if (!can_convert_to_perfect_nest (loop, loopivs))
|
||||
return false;
|
||||
|
@ -2339,8 +2345,9 @@ perfect_nestify (struct loops *loops,
|
|||
/* Push the exit phi nodes that we are moving. */
|
||||
for (phi = phi_nodes (olddest); phi; phi = PHI_CHAIN (phi))
|
||||
{
|
||||
VEC_safe_push (tree, phis, PHI_RESULT (phi));
|
||||
VEC_safe_push (tree, phis, PHI_ARG_DEF (phi, 0));
|
||||
VEC_reserve (tree, gc, phis, 2);
|
||||
VEC_quick_push (tree, phis, PHI_RESULT (phi));
|
||||
VEC_quick_push (tree, phis, PHI_ARG_DEF (phi, 0));
|
||||
}
|
||||
e = redirect_edge_and_branch (single_succ_edge (preheaderbb), headerbb);
|
||||
|
||||
|
|
|
@ -196,11 +196,11 @@ lambda_body_vector lambda_body_vector_compute_new (lambda_trans_matrix,
|
|||
void print_lambda_body_vector (FILE *, lambda_body_vector);
|
||||
lambda_loopnest gcc_loopnest_to_lambda_loopnest (struct loops *,
|
||||
struct loop *,
|
||||
VEC(tree) **,
|
||||
VEC(tree) **,
|
||||
VEC(tree,gc) **,
|
||||
VEC(tree,gc) **,
|
||||
bool);
|
||||
void lambda_loopnest_to_gcc_loopnest (struct loop *, VEC(tree) *,
|
||||
VEC(tree) *,
|
||||
void lambda_loopnest_to_gcc_loopnest (struct loop *, VEC(tree,gc) *,
|
||||
VEC(tree,gc) *,
|
||||
lambda_loopnest,
|
||||
lambda_trans_matrix);
|
||||
|
||||
|
|
|
@ -223,7 +223,7 @@ instrument_values (histogram_values values)
|
|||
gcc_unreachable ();
|
||||
}
|
||||
}
|
||||
VEC_free (histogram_value, values);
|
||||
VEC_free (histogram_value, heap, values);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -110,7 +110,7 @@ static void make_goto_expr_edges (basic_block);
|
|||
static edge tree_redirect_edge_and_branch (edge, basic_block);
|
||||
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
|
||||
static void split_critical_edges (void);
|
||||
static bool remove_fallthru_edge (VEC(edge) *);
|
||||
static bool remove_fallthru_edge (VEC(edge,gc) *);
|
||||
|
||||
/* Various helpers. */
|
||||
static inline bool stmt_starts_bb_p (tree, tree);
|
||||
|
@ -2015,7 +2015,7 @@ remove_bb (basic_block bb)
|
|||
happens, all the instructions after the call are no longer
|
||||
reachable and must be deleted as dead. */
|
||||
|
||||
VEC(tree) *modified_noreturn_calls;
|
||||
VEC(tree,gc) *modified_noreturn_calls;
|
||||
|
||||
/* Try to remove superfluous control structures. */
|
||||
|
||||
|
@ -2172,7 +2172,7 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
|
|||
/* Remove any fallthru edge from EV. Return true if an edge was removed. */
|
||||
|
||||
static bool
|
||||
remove_fallthru_edge (VEC(edge) *ev)
|
||||
remove_fallthru_edge (VEC(edge,gc) *ev)
|
||||
{
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
|
|
|
@ -254,7 +254,7 @@ dequeue_and_dump (dump_info_p di)
|
|||
{
|
||||
unsigned ix;
|
||||
tree base;
|
||||
VEC (tree) *accesses = BINFO_BASE_ACCESSES (t);
|
||||
VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (t);
|
||||
|
||||
dump_child ("type", BINFO_TYPE (t));
|
||||
|
||||
|
|
|
@ -151,7 +151,7 @@ mark_stmt_modified (tree t)
|
|||
if (ann == NULL)
|
||||
ann = create_stmt_ann (t);
|
||||
else if (noreturn_call_p (t))
|
||||
VEC_safe_push (tree, modified_noreturn_calls, t);
|
||||
VEC_safe_push (tree, gc, modified_noreturn_calls, t);
|
||||
ann->modified = 1;
|
||||
}
|
||||
|
||||
|
|
|
@ -340,7 +340,7 @@ union tree_ann_d GTY((desc ("ann_type ((tree_ann_t)&%h)")))
|
|||
struct stmt_ann_d GTY((tag ("STMT_ANN"))) stmt;
|
||||
};
|
||||
|
||||
extern GTY(()) VEC(tree) *modified_noreturn_calls;
|
||||
extern GTY(()) VEC(tree,gc) *modified_noreturn_calls;
|
||||
|
||||
typedef union tree_ann_d *tree_ann_t;
|
||||
typedef struct var_ann_d *var_ann_t;
|
||||
|
@ -581,9 +581,6 @@ extern tree make_rename_temp (tree, const char *);
|
|||
extern void record_vars (tree);
|
||||
extern bool block_may_fallthru (tree block);
|
||||
|
||||
typedef tree tree_on_heap;
|
||||
DEF_VEC_MALLOC_P (tree_on_heap);
|
||||
|
||||
/* In tree-ssa-alias.c */
|
||||
extern void dump_may_aliases_for (FILE *, tree);
|
||||
extern void debug_may_aliases_for (tree);
|
||||
|
@ -623,7 +620,7 @@ extern bool tree_ssa_useless_type_conversion (tree);
|
|||
extern bool tree_ssa_useless_type_conversion_1 (tree, tree);
|
||||
extern void verify_ssa (bool);
|
||||
extern void delete_tree_ssa (void);
|
||||
extern void register_new_def (tree, VEC (tree_on_heap) **);
|
||||
extern void register_new_def (tree, VEC(tree,heap) **);
|
||||
extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
|
||||
extern bool stmt_references_memory_p (tree);
|
||||
|
||||
|
|
|
@ -98,10 +98,13 @@ static htab_t def_blocks;
|
|||
|
||||
- A NULL node at the top entry is used to mark the last node
|
||||
associated with the current block. */
|
||||
static VEC(tree_on_heap) *block_defs_stack;
|
||||
static VEC(tree,heap) *block_defs_stack;
|
||||
|
||||
/* Basic block vectors used in this file ought to be allocated in the heap. */
|
||||
DEF_VEC_MALLOC_P(int);
|
||||
/* Basic block vectors used in this file ought to be allocated in the
|
||||
heap. We use pointer vector, because ints can be easily passed by
|
||||
value. */
|
||||
DEF_VEC_P(int);
|
||||
DEF_VEC_ALLOC_P(int,heap);
|
||||
|
||||
/* Set of existing SSA names being replaced by update_ssa. */
|
||||
static sbitmap old_ssa_names;
|
||||
|
@ -683,10 +686,10 @@ find_idf (bitmap def_blocks, bitmap *dfs)
|
|||
{
|
||||
bitmap_iterator bi;
|
||||
unsigned bb_index;
|
||||
VEC(int) *work_stack;
|
||||
VEC(int,heap) *work_stack;
|
||||
bitmap phi_insertion_points;
|
||||
|
||||
work_stack = VEC_alloc (int, n_basic_blocks);
|
||||
work_stack = VEC_alloc (int, heap, n_basic_blocks);
|
||||
phi_insertion_points = BITMAP_ALLOC (NULL);
|
||||
|
||||
/* Seed the work list with all the blocks in DEF_BLOCKS. */
|
||||
|
@ -719,12 +722,12 @@ find_idf (bitmap def_blocks, bitmap *dfs)
|
|||
/* Use a safe push because if there is a definition of VAR
|
||||
in every basic block, then WORK_STACK may eventually have
|
||||
more than N_BASIC_BLOCK entries. */
|
||||
VEC_safe_push (int, work_stack, bb_index);
|
||||
VEC_safe_push (int, heap, work_stack, bb_index);
|
||||
bitmap_set_bit (phi_insertion_points, bb_index);
|
||||
}
|
||||
}
|
||||
|
||||
VEC_free (int, work_stack);
|
||||
VEC_free (int, heap, work_stack);
|
||||
|
||||
return phi_insertion_points;
|
||||
}
|
||||
|
@ -892,7 +895,7 @@ insert_phi_nodes (bitmap *dfs, bitmap names_to_rename)
|
|||
into the stack pointed by BLOCK_DEFS_P. */
|
||||
|
||||
void
|
||||
register_new_def (tree def, VEC (tree_on_heap) **block_defs_p)
|
||||
register_new_def (tree def, VEC(tree,heap) **block_defs_p)
|
||||
{
|
||||
tree var = SSA_NAME_VAR (def);
|
||||
tree currdef;
|
||||
|
@ -918,7 +921,7 @@ register_new_def (tree def, VEC (tree_on_heap) **block_defs_p)
|
|||
definitions for all the variables defined in the block after a recursive
|
||||
visit to all its immediately dominated blocks. If there is no current
|
||||
reaching definition, then just record the underlying _DECL node. */
|
||||
VEC_safe_push (tree_on_heap, *block_defs_p, currdef ? currdef : var);
|
||||
VEC_safe_push (tree, heap, *block_defs_p, currdef ? currdef : var);
|
||||
|
||||
/* Set the current reaching definition for VAR to be DEF. */
|
||||
set_current_def (var, def);
|
||||
|
@ -963,7 +966,7 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
|
||||
|
||||
/* Mark the unwind point for this block. */
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
|
||||
|
||||
/* Step 1. Register new definitions for every PHI node in the block.
|
||||
Conceptually, all the PHI nodes are executed in parallel and each PHI
|
||||
|
@ -1089,9 +1092,9 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
basic_block bb ATTRIBUTE_UNUSED)
|
||||
{
|
||||
/* Restore CURRDEFS to its original state. */
|
||||
while (VEC_length (tree_on_heap, block_defs_stack) > 0)
|
||||
while (VEC_length (tree, block_defs_stack) > 0)
|
||||
{
|
||||
tree tmp = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
tree tmp = VEC_pop (tree, block_defs_stack);
|
||||
tree saved_def, var;
|
||||
|
||||
if (tmp == NULL_TREE)
|
||||
|
@ -1249,8 +1252,9 @@ register_new_update_single (tree new_name, tree old_name)
|
|||
restore the reaching definitions for all the variables
|
||||
defined in the block after a recursive visit to all its
|
||||
immediately dominated blocks. */
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, currdef);
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, old_name);
|
||||
VEC_reserve (tree, heap, block_defs_stack, 2);
|
||||
VEC_quick_push (tree, block_defs_stack, currdef);
|
||||
VEC_quick_push (tree, block_defs_stack, old_name);
|
||||
|
||||
/* Set the current reaching definition for OLD_NAME to be
|
||||
NEW_NAME. */
|
||||
|
@ -1292,7 +1296,7 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
bb->index);
|
||||
|
||||
/* Mark the unwind point for this block. */
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
|
||||
|
||||
/* Mark the LHS if any of the arguments flows through an abnormal
|
||||
edge. */
|
||||
|
@ -1362,9 +1366,9 @@ static void
|
|||
rewrite_update_fini_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
||||
basic_block bb ATTRIBUTE_UNUSED)
|
||||
{
|
||||
while (VEC_length (tree_on_heap, block_defs_stack) > 0)
|
||||
while (VEC_length (tree, block_defs_stack) > 0)
|
||||
{
|
||||
tree var = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
tree var = VEC_pop (tree, block_defs_stack);
|
||||
tree saved_def;
|
||||
|
||||
/* NULL indicates the unwind stop point for this block (see
|
||||
|
@ -1372,7 +1376,7 @@ rewrite_update_fini_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
if (var == NULL)
|
||||
return;
|
||||
|
||||
saved_def = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
saved_def = VEC_pop (tree, block_defs_stack);
|
||||
set_current_def (var, saved_def);
|
||||
}
|
||||
}
|
||||
|
@ -1603,7 +1607,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
|
|||
else
|
||||
gcc_unreachable ();
|
||||
|
||||
block_defs_stack = VEC_alloc (tree_on_heap, 10);
|
||||
block_defs_stack = VEC_alloc (tree, heap, 10);
|
||||
|
||||
/* Initialize the dominator walker. */
|
||||
init_walk_dominator_tree (&walk_data);
|
||||
|
@ -1629,8 +1633,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
|
|||
def_blocks = NULL;
|
||||
}
|
||||
|
||||
VEC_free (tree_on_heap, block_defs_stack);
|
||||
block_defs_stack = NULL;
|
||||
VEC_free (tree, heap, block_defs_stack);
|
||||
|
||||
timevar_pop (TV_TREE_SSA_REWRITE_BLOCKS);
|
||||
}
|
||||
|
@ -2855,15 +2858,15 @@ ssa_rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
|
||||
/* Step 5. Restore the current reaching definition for each variable
|
||||
referenced in the block (in reverse order). */
|
||||
while (VEC_length (tree_on_heap, block_defs_stack) > 0)
|
||||
while (VEC_length (tree, block_defs_stack) > 0)
|
||||
{
|
||||
tree var = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
tree var = VEC_pop (tree, block_defs_stack);
|
||||
tree saved_def;
|
||||
|
||||
if (var == NULL)
|
||||
break;
|
||||
|
||||
saved_def = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
saved_def = VEC_pop (tree, block_defs_stack);
|
||||
set_current_def (var, saved_def);
|
||||
}
|
||||
}
|
||||
|
@ -2894,8 +2897,9 @@ ssa_register_new_def (tree var, tree def)
|
|||
later used by the dominator tree callbacks to restore the reaching
|
||||
definitions for all the variables defined in the block after a recursive
|
||||
visit to all its immediately dominated blocks. */
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, currdef);
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, var);
|
||||
VEC_reserve (tree, heap, block_defs_stack, 2);
|
||||
VEC_quick_push (tree, block_defs_stack, currdef);
|
||||
VEC_quick_push (tree, block_defs_stack, var);
|
||||
|
||||
/* Set the current reaching definition for VAR to be DEF. */
|
||||
set_current_def (var, def);
|
||||
|
@ -2999,7 +3003,7 @@ ssa_rewrite_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
|
|||
fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
|
||||
|
||||
/* Mark the unwind point for this block. */
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (e->flags & EDGE_ABNORMAL)
|
||||
|
@ -3194,7 +3198,7 @@ rewrite_ssa_into_ssa (void)
|
|||
mark_def_sites_global_data.names_to_rename = snames_to_rename;
|
||||
walk_data.global_data = &mark_def_sites_global_data;
|
||||
|
||||
block_defs_stack = VEC_alloc (tree_on_heap, 10);
|
||||
block_defs_stack = VEC_alloc (tree, heap, 10);
|
||||
|
||||
/* We do not have any local data. */
|
||||
walk_data.block_local_data_size = 0;
|
||||
|
@ -3284,7 +3288,6 @@ rewrite_ssa_into_ssa (void)
|
|||
|
||||
BITMAP_FREE (to_rename);
|
||||
|
||||
VEC_free (tree_on_heap, block_defs_stack);
|
||||
block_defs_stack = NULL;
|
||||
VEC_free (tree, heap, block_defs_stack);
|
||||
timevar_pop (TV_TREE_SSA_OTHER);
|
||||
}
|
||||
|
|
|
@ -243,6 +243,9 @@ void
|
|||
linear_transform_loops (struct loops *loops)
|
||||
{
|
||||
unsigned int i;
|
||||
VEC(tree,gc) *oldivs = NULL; /* FIXME:These should really be on the
|
||||
heap. (nathan 2005/04/15)*/
|
||||
VEC(tree,gc) *invariants = NULL; /* FIXME:Likewise. */
|
||||
|
||||
for (i = 1; i < loops->num; i++)
|
||||
{
|
||||
|
@ -251,8 +254,6 @@ linear_transform_loops (struct loops *loops)
|
|||
varray_type dependence_relations;
|
||||
struct loop *loop_nest = loops->parray[i];
|
||||
struct loop *temp;
|
||||
VEC (tree) *oldivs = NULL;
|
||||
VEC (tree) *invariants = NULL;
|
||||
lambda_loopnest before, after;
|
||||
lambda_trans_matrix trans;
|
||||
bool problem = false;
|
||||
|
@ -273,6 +274,8 @@ linear_transform_loops (struct loops *loops)
|
|||
} */
|
||||
if (!loop_nest || !loop_nest->inner)
|
||||
continue;
|
||||
VEC_truncate (tree, oldivs, 0);
|
||||
VEC_truncate (tree, invariants, 0);
|
||||
depth = 1;
|
||||
for (temp = loop_nest->inner; temp; temp = temp->inner)
|
||||
{
|
||||
|
@ -365,11 +368,11 @@ linear_transform_loops (struct loops *loops)
|
|||
after, trans);
|
||||
if (dump_file)
|
||||
fprintf (dump_file, "Successfully transformed loop.\n");
|
||||
oldivs = NULL;
|
||||
invariants = NULL;
|
||||
free_dependence_relations (dependence_relations);
|
||||
free_data_refs (datarefs);
|
||||
}
|
||||
VEC_free (tree, gc, oldivs);
|
||||
VEC_free (tree, gc, invariants);
|
||||
scev_reset ();
|
||||
update_ssa (TODO_update_ssa);
|
||||
rewrite_into_loop_closed_ssa (NULL);
|
||||
|
|
|
@ -2791,7 +2791,9 @@ typedef struct fieldoff
|
|||
HOST_WIDE_INT offset;
|
||||
} *fieldoff_t;
|
||||
|
||||
DEF_VEC_MALLOC_P(fieldoff_t);
|
||||
DEF_VEC_P (fieldoff_t); /* FIXME: This can be a vector of struct
|
||||
fieldoff objects (nathan 2005/04/15) */
|
||||
DEF_VEC_ALLOC_P(fieldoff_t,heap);
|
||||
|
||||
/* Return the position, in bits, of FIELD_DECL from the beginning of its
|
||||
structure.
|
||||
|
@ -2816,7 +2818,7 @@ bitpos_of_field (const tree fdecl)
|
|||
than just the immediately containing structure. */
|
||||
|
||||
static void
|
||||
push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
|
||||
push_fields_onto_fieldstack (tree type, VEC(fieldoff_t,heap) **fieldstack,
|
||||
HOST_WIDE_INT offset)
|
||||
{
|
||||
fieldoff_t pair;
|
||||
|
@ -2838,7 +2840,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
|
|||
pair = xmalloc (sizeof (struct fieldoff));
|
||||
pair->field = field;
|
||||
pair->offset = offset;
|
||||
VEC_safe_push (fieldoff_t, *fieldstack, pair);
|
||||
VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
|
||||
}
|
||||
}
|
||||
else if (TREE_CODE (field) == FIELD_DECL)
|
||||
|
@ -2846,7 +2848,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
|
|||
pair = xmalloc (sizeof (struct fieldoff));
|
||||
pair->field = field;
|
||||
pair->offset = offset + bitpos_of_field (field);
|
||||
VEC_safe_push (fieldoff_t, *fieldstack, pair);
|
||||
VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
|
||||
}
|
||||
for (field = TREE_CHAIN (field); field; field = TREE_CHAIN (field))
|
||||
{
|
||||
|
@ -2867,7 +2869,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
|
|||
pair = xmalloc (sizeof (struct fieldoff));
|
||||
pair->field = field;
|
||||
pair->offset = offset + bitpos_of_field (field);
|
||||
VEC_safe_push (fieldoff_t, *fieldstack, pair);
|
||||
VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -2875,7 +2877,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
|
|||
pair = xmalloc (sizeof (struct fieldoff));
|
||||
pair->field = field;
|
||||
pair->offset = offset + bitpos_of_field (field);
|
||||
VEC_safe_push (fieldoff_t, *fieldstack, pair);
|
||||
VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2944,7 +2946,7 @@ fieldoff_compare (const void *pa, const void *pb)
|
|||
static void
|
||||
create_overlap_variables_for (tree var)
|
||||
{
|
||||
VEC(fieldoff_t) *fieldstack = NULL;
|
||||
VEC(fieldoff_t,heap) *fieldstack = NULL;
|
||||
used_part_t up;
|
||||
size_t uid = var_ann (var)->uid;
|
||||
|
||||
|
@ -3019,7 +3021,7 @@ create_overlap_variables_for (tree var)
|
|||
fo = VEC_pop (fieldoff_t, fieldstack);
|
||||
free (fo);
|
||||
}
|
||||
VEC_free (fieldoff_t, fieldstack);
|
||||
VEC_free (fieldoff_t, heap, fieldstack);
|
||||
return;
|
||||
}
|
||||
/* Otherwise, create the variables. */
|
||||
|
@ -3109,7 +3111,7 @@ create_overlap_variables_for (tree var)
|
|||
|
||||
}
|
||||
|
||||
VEC_free (fieldoff_t, fieldstack);
|
||||
VEC_free (fieldoff_t, heap, fieldstack);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -94,7 +94,7 @@ static htab_t avail_exprs;
|
|||
(null). When we finish processing the block, we pop off entries and
|
||||
remove the expressions from the global hash table until we hit the
|
||||
marker. */
|
||||
static VEC(tree_on_heap) *avail_exprs_stack;
|
||||
static VEC(tree,heap) *avail_exprs_stack;
|
||||
|
||||
/* Stack of trees used to restore the global currdefs to its original
|
||||
state after completing optimization of a block and its dominator children.
|
||||
|
@ -107,7 +107,7 @@ static VEC(tree_on_heap) *avail_exprs_stack;
|
|||
|
||||
A NULL node is used to mark the last node associated with the
|
||||
current block. */
|
||||
static VEC(tree_on_heap) *block_defs_stack;
|
||||
static VEC(tree,heap) *block_defs_stack;
|
||||
|
||||
/* Stack of statements we need to rescan during finalization for newly
|
||||
exposed variables.
|
||||
|
@ -116,7 +116,7 @@ static VEC(tree_on_heap) *block_defs_stack;
|
|||
expressions are removed from AVAIL_EXPRS. Else we may change the
|
||||
hash code for an expression and be unable to find/remove it from
|
||||
AVAIL_EXPRS. */
|
||||
static VEC(tree_on_heap) *stmts_to_rescan;
|
||||
static VEC(tree,heap) *stmts_to_rescan;
|
||||
|
||||
/* Structure for entries in the expression hash table.
|
||||
|
||||
|
@ -148,7 +148,7 @@ struct expr_hash_elt
|
|||
|
||||
A NULL entry is used to mark the end of pairs which need to be
|
||||
restored during finalization of this block. */
|
||||
static VEC(tree_on_heap) *const_and_copies_stack;
|
||||
static VEC(tree,heap) *const_and_copies_stack;
|
||||
|
||||
/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
|
||||
know their exact value. */
|
||||
|
@ -159,7 +159,7 @@ static bitmap nonzero_vars;
|
|||
|
||||
A NULL entry is used to mark the end of names needing their
|
||||
entry in NONZERO_VARS cleared during finalization of this block. */
|
||||
static VEC(tree_on_heap) *nonzero_vars_stack;
|
||||
static VEC(tree,heap) *nonzero_vars_stack;
|
||||
|
||||
/* Track whether or not we have changed the control flow graph. */
|
||||
static bool cfg_altered;
|
||||
|
@ -254,7 +254,7 @@ struct vrp_hash_elt
|
|||
list to determine which variables need their VRP data updated.
|
||||
|
||||
A NULL entry marks the end of the SSA_NAMEs associated with this block. */
|
||||
static VEC(tree_on_heap) *vrp_variables_stack;
|
||||
static VEC(tree,heap) *vrp_variables_stack;
|
||||
|
||||
struct eq_expr_value
|
||||
{
|
||||
|
@ -382,12 +382,12 @@ tree_ssa_dominator_optimize (void)
|
|||
/* Create our hash tables. */
|
||||
avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
|
||||
vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
|
||||
avail_exprs_stack = VEC_alloc (tree_on_heap, 20);
|
||||
block_defs_stack = VEC_alloc (tree_on_heap, 20);
|
||||
const_and_copies_stack = VEC_alloc (tree_on_heap, 20);
|
||||
nonzero_vars_stack = VEC_alloc (tree_on_heap, 20);
|
||||
vrp_variables_stack = VEC_alloc (tree_on_heap, 20);
|
||||
stmts_to_rescan = VEC_alloc (tree_on_heap, 20);
|
||||
avail_exprs_stack = VEC_alloc (tree, heap, 20);
|
||||
block_defs_stack = VEC_alloc (tree, heap, 20);
|
||||
const_and_copies_stack = VEC_alloc (tree, heap, 20);
|
||||
nonzero_vars_stack = VEC_alloc (tree, heap, 20);
|
||||
vrp_variables_stack = VEC_alloc (tree, heap, 20);
|
||||
stmts_to_rescan = VEC_alloc (tree, heap, 20);
|
||||
nonzero_vars = BITMAP_ALLOC (NULL);
|
||||
need_eh_cleanup = BITMAP_ALLOC (NULL);
|
||||
|
||||
|
@ -545,12 +545,12 @@ tree_ssa_dominator_optimize (void)
|
|||
BITMAP_FREE (nonzero_vars);
|
||||
BITMAP_FREE (need_eh_cleanup);
|
||||
|
||||
VEC_free (tree_on_heap, block_defs_stack);
|
||||
VEC_free (tree_on_heap, avail_exprs_stack);
|
||||
VEC_free (tree_on_heap, const_and_copies_stack);
|
||||
VEC_free (tree_on_heap, nonzero_vars_stack);
|
||||
VEC_free (tree_on_heap, vrp_variables_stack);
|
||||
VEC_free (tree_on_heap, stmts_to_rescan);
|
||||
VEC_free (tree, heap, block_defs_stack);
|
||||
VEC_free (tree, heap, avail_exprs_stack);
|
||||
VEC_free (tree, heap, const_and_copies_stack);
|
||||
VEC_free (tree, heap, nonzero_vars_stack);
|
||||
VEC_free (tree, heap, vrp_variables_stack);
|
||||
VEC_free (tree, heap, stmts_to_rescan);
|
||||
}
|
||||
|
||||
static bool
|
||||
|
@ -850,11 +850,11 @@ dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
|
|||
|
||||
/* Push a marker on the stacks of local information so that we know how
|
||||
far to unwind when we finalize this block. */
|
||||
VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, nonzero_vars_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, vrp_variables_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);
|
||||
|
||||
record_equivalences_from_incoming_edge (bb);
|
||||
|
||||
|
@ -910,10 +910,10 @@ static void
|
|||
remove_local_expressions_from_table (void)
|
||||
{
|
||||
/* Remove all the expressions made available in this block. */
|
||||
while (VEC_length (tree_on_heap, avail_exprs_stack) > 0)
|
||||
while (VEC_length (tree, avail_exprs_stack) > 0)
|
||||
{
|
||||
struct expr_hash_elt element;
|
||||
tree expr = VEC_pop (tree_on_heap, avail_exprs_stack);
|
||||
tree expr = VEC_pop (tree, avail_exprs_stack);
|
||||
|
||||
if (expr == NULL_TREE)
|
||||
break;
|
||||
|
@ -929,9 +929,9 @@ remove_local_expressions_from_table (void)
|
|||
static void
|
||||
restore_nonzero_vars_to_original_value (void)
|
||||
{
|
||||
while (VEC_length (tree_on_heap, nonzero_vars_stack) > 0)
|
||||
while (VEC_length (tree, nonzero_vars_stack) > 0)
|
||||
{
|
||||
tree name = VEC_pop (tree_on_heap, nonzero_vars_stack);
|
||||
tree name = VEC_pop (tree, nonzero_vars_stack);
|
||||
|
||||
if (name == NULL)
|
||||
break;
|
||||
|
@ -947,16 +947,16 @@ restore_nonzero_vars_to_original_value (void)
|
|||
static void
|
||||
restore_vars_to_original_value (void)
|
||||
{
|
||||
while (VEC_length (tree_on_heap, const_and_copies_stack) > 0)
|
||||
while (VEC_length (tree, const_and_copies_stack) > 0)
|
||||
{
|
||||
tree prev_value, dest;
|
||||
|
||||
dest = VEC_pop (tree_on_heap, const_and_copies_stack);
|
||||
dest = VEC_pop (tree, const_and_copies_stack);
|
||||
|
||||
if (dest == NULL)
|
||||
break;
|
||||
|
||||
prev_value = VEC_pop (tree_on_heap, const_and_copies_stack);
|
||||
prev_value = VEC_pop (tree, const_and_copies_stack);
|
||||
SSA_NAME_VALUE (dest) = prev_value;
|
||||
}
|
||||
}
|
||||
|
@ -967,9 +967,9 @@ static void
|
|||
restore_currdefs_to_original_value (void)
|
||||
{
|
||||
/* Restore CURRDEFS to its original state. */
|
||||
while (VEC_length (tree_on_heap, block_defs_stack) > 0)
|
||||
while (VEC_length (tree, block_defs_stack) > 0)
|
||||
{
|
||||
tree tmp = VEC_pop (tree_on_heap, block_defs_stack);
|
||||
tree tmp = VEC_pop (tree, block_defs_stack);
|
||||
tree saved_def, var;
|
||||
|
||||
if (tmp == NULL_TREE)
|
||||
|
@ -1050,9 +1050,9 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
|
|||
/* Push a marker onto the available expression stack so that we
|
||||
unwind any expressions related to the TRUE arm before processing
|
||||
the false arm below. */
|
||||
VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
|
||||
VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
|
||||
|
||||
edge_info = true_edge->aux;
|
||||
|
||||
|
@ -1154,9 +1154,9 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
|
|||
To be efficient, we note which variables have had their values
|
||||
constrained in this block. So walk over each variable in the
|
||||
VRP_VARIABLEs array. */
|
||||
while (VEC_length (tree_on_heap, vrp_variables_stack) > 0)
|
||||
while (VEC_length (tree, vrp_variables_stack) > 0)
|
||||
{
|
||||
tree var = VEC_pop (tree_on_heap, vrp_variables_stack);
|
||||
tree var = VEC_pop (tree, vrp_variables_stack);
|
||||
struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
|
||||
void **slot;
|
||||
|
||||
|
@ -1192,15 +1192,15 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
|
|||
|
||||
/* If we queued any statements to rescan in this block, then
|
||||
go ahead and rescan them now. */
|
||||
while (VEC_length (tree_on_heap, stmts_to_rescan) > 0)
|
||||
while (VEC_length (tree, stmts_to_rescan) > 0)
|
||||
{
|
||||
tree stmt = VEC_last (tree_on_heap, stmts_to_rescan);
|
||||
tree stmt = VEC_last (tree, stmts_to_rescan);
|
||||
basic_block stmt_bb = bb_for_stmt (stmt);
|
||||
|
||||
if (stmt_bb != bb)
|
||||
break;
|
||||
|
||||
VEC_pop (tree_on_heap, stmts_to_rescan);
|
||||
VEC_pop (tree, stmts_to_rescan);
|
||||
mark_new_vars_to_rename (stmt);
|
||||
}
|
||||
}
|
||||
|
@ -1436,7 +1436,7 @@ record_var_is_nonzero (tree var)
|
|||
|
||||
/* Record this SSA_NAME so that we can reset the global table
|
||||
when we leave this block. */
|
||||
VEC_safe_push (tree_on_heap, nonzero_vars_stack, var);
|
||||
VEC_safe_push (tree, heap, nonzero_vars_stack, var);
|
||||
}
|
||||
|
||||
/* Enter a statement into the true/false expression hash table indicating
|
||||
|
@ -1455,7 +1455,7 @@ record_cond (tree cond, tree value)
|
|||
if (*slot == NULL)
|
||||
{
|
||||
*slot = (void *) element;
|
||||
VEC_safe_push (tree_on_heap, avail_exprs_stack, cond);
|
||||
VEC_safe_push (tree, heap, avail_exprs_stack, cond);
|
||||
}
|
||||
else
|
||||
free (element);
|
||||
|
@ -1594,8 +1594,9 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
|
|||
{
|
||||
SSA_NAME_VALUE (x) = y;
|
||||
|
||||
VEC_safe_push (tree_on_heap, const_and_copies_stack, prev_x);
|
||||
VEC_safe_push (tree_on_heap, const_and_copies_stack, x);
|
||||
VEC_reserve (tree, heap, const_and_copies_stack, 2);
|
||||
VEC_quick_push (tree, const_and_copies_stack, prev_x);
|
||||
VEC_quick_push (tree, const_and_copies_stack, x);
|
||||
}
|
||||
|
||||
|
||||
|
@ -3125,7 +3126,7 @@ optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
|
|||
}
|
||||
|
||||
if (may_have_exposed_new_symbols)
|
||||
VEC_safe_push (tree_on_heap, stmts_to_rescan, bsi_stmt (si));
|
||||
VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
|
||||
}
|
||||
|
||||
/* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
|
||||
|
@ -3177,7 +3178,7 @@ update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
|
|||
we found a copy of this statement in the second hash table lookup
|
||||
we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
|
||||
if (insert)
|
||||
VEC_pop (tree_on_heap, avail_exprs_stack);
|
||||
VEC_pop (tree, avail_exprs_stack);
|
||||
|
||||
/* And make sure we record the fact that we modified this
|
||||
statement. */
|
||||
|
@ -3253,7 +3254,7 @@ lookup_avail_expr (tree stmt, bool insert)
|
|||
if (*slot == NULL)
|
||||
{
|
||||
*slot = (void *) element;
|
||||
VEC_safe_push (tree_on_heap, avail_exprs_stack,
|
||||
VEC_safe_push (tree, heap, avail_exprs_stack,
|
||||
stmt ? stmt : element->rhs);
|
||||
return NULL_TREE;
|
||||
}
|
||||
|
@ -3393,7 +3394,7 @@ record_range (tree cond, basic_block bb)
|
|||
VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
|
||||
|
||||
VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
|
||||
VEC_safe_push (tree_on_heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
|
||||
VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1112,7 +1112,8 @@ clean (value_set_t set)
|
|||
}
|
||||
}
|
||||
|
||||
DEF_VEC_MALLOC_P (basic_block);
|
||||
DEF_VEC_P (basic_block);
|
||||
DEF_VEC_ALLOC_P (basic_block, heap);
|
||||
static sbitmap has_abnormal_preds;
|
||||
|
||||
/* Compute the ANTIC set for BLOCK.
|
||||
|
@ -1162,15 +1163,15 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
|
|||
them. */
|
||||
else
|
||||
{
|
||||
VEC (basic_block) * worklist;
|
||||
VEC(basic_block, heap) * worklist;
|
||||
edge e;
|
||||
size_t i;
|
||||
basic_block bprime, first;
|
||||
edge_iterator ei;
|
||||
|
||||
worklist = VEC_alloc (basic_block, 2);
|
||||
worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
|
||||
FOR_EACH_EDGE (e, ei, block->succs)
|
||||
VEC_safe_push (basic_block, worklist, e->dest);
|
||||
VEC_quick_push (basic_block, worklist, e->dest);
|
||||
first = VEC_index (basic_block, worklist, 0);
|
||||
set_copy (ANTIC_OUT, ANTIC_IN (first));
|
||||
|
||||
|
@ -1187,7 +1188,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
|
|||
node = next;
|
||||
}
|
||||
}
|
||||
VEC_free (basic_block, worklist);
|
||||
VEC_free (basic_block, heap, worklist);
|
||||
}
|
||||
|
||||
/* Generate ANTIC_OUT - TMP_GEN. */
|
||||
|
@ -1271,7 +1272,7 @@ compute_antic (void)
|
|||
fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
|
||||
}
|
||||
|
||||
static VEC(tree_on_heap) *inserted_exprs;
|
||||
static VEC(tree,heap) *inserted_exprs;
|
||||
/* Find a leader for an expression, or generate one using
|
||||
create_expression_by_pieces if it's ANTIC but
|
||||
complex.
|
||||
|
@ -1367,7 +1368,7 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
|
|||
TREE_OPERAND (newexpr, 0) = name;
|
||||
tsi = tsi_last (stmts);
|
||||
tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
|
||||
VEC_safe_push (tree_on_heap, inserted_exprs, newexpr);
|
||||
VEC_safe_push (tree, heap, inserted_exprs, newexpr);
|
||||
pre_stats.insertions++;
|
||||
break;
|
||||
}
|
||||
|
@ -1415,7 +1416,7 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
|
|||
NECESSARY (newexpr) = 0;
|
||||
tsi = tsi_last (stmts);
|
||||
tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
|
||||
VEC_safe_push (tree_on_heap, inserted_exprs, newexpr);
|
||||
VEC_safe_push (tree, heap, inserted_exprs, newexpr);
|
||||
pre_stats.insertions++;
|
||||
|
||||
break;
|
||||
|
@ -1533,7 +1534,7 @@ insert_into_preds_of_block (basic_block block, value_set_node_t node,
|
|||
add_referenced_tmp_var (temp);
|
||||
temp = create_phi_node (temp, block);
|
||||
NECESSARY (temp) = 0;
|
||||
VEC_safe_push (tree_on_heap, inserted_exprs, temp);
|
||||
VEC_safe_push (tree, heap, inserted_exprs, temp);
|
||||
FOR_EACH_EDGE (pred, ei, block->preds)
|
||||
add_phi_arg (temp, avail[pred->src->index], pred);
|
||||
|
||||
|
@ -2111,10 +2112,11 @@ eliminate (void)
|
|||
this may be a bit faster, and we may want critical edges kept split. */
|
||||
|
||||
/* If OP's defining statement has not already been determined to be necessary,
|
||||
mark that statement necessary. and place it on the WORKLIST. */
|
||||
mark that statement necessary. Return the stmt, if it is newly
|
||||
necessary. */
|
||||
|
||||
static inline void
|
||||
mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
|
||||
static inline tree
|
||||
mark_operand_necessary (tree op)
|
||||
{
|
||||
tree stmt;
|
||||
|
||||
|
@ -2125,10 +2127,10 @@ mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
|
|||
|
||||
if (NECESSARY (stmt)
|
||||
|| IS_EMPTY_STMT (stmt))
|
||||
return;
|
||||
return NULL;
|
||||
|
||||
NECESSARY (stmt) = 1;
|
||||
VEC_safe_push (tree_on_heap, *worklist, stmt);
|
||||
return stmt;
|
||||
}
|
||||
|
||||
/* Because we don't follow exactly the standard PRE algorithm, and decide not
|
||||
|
@ -2139,18 +2141,19 @@ mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
|
|||
static void
|
||||
remove_dead_inserted_code (void)
|
||||
{
|
||||
VEC (tree_on_heap) *worklist = NULL;
|
||||
VEC(tree,heap) *worklist = NULL;
|
||||
int i;
|
||||
tree t;
|
||||
|
||||
for (i = 0; VEC_iterate (tree_on_heap, inserted_exprs, i, t); i++)
|
||||
worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
|
||||
for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
|
||||
{
|
||||
if (NECESSARY (t))
|
||||
VEC_safe_push (tree_on_heap, worklist, t);
|
||||
VEC_quick_push (tree, worklist, t);
|
||||
}
|
||||
while (VEC_length (tree_on_heap, worklist) > 0)
|
||||
while (VEC_length (tree, worklist) > 0)
|
||||
{
|
||||
t = VEC_pop (tree_on_heap, worklist);
|
||||
t = VEC_pop (tree, worklist);
|
||||
if (TREE_CODE (t) == PHI_NODE)
|
||||
{
|
||||
/* PHI nodes are somewhat special in that each PHI alternative has
|
||||
|
@ -2160,11 +2163,17 @@ remove_dead_inserted_code (void)
|
|||
predecessor block associated with each PHI alternative as
|
||||
necessary. */
|
||||
int k;
|
||||
|
||||
VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
|
||||
for (k = 0; k < PHI_NUM_ARGS (t); k++)
|
||||
{
|
||||
tree arg = PHI_ARG_DEF (t, k);
|
||||
if (TREE_CODE (arg) == SSA_NAME)
|
||||
mark_operand_necessary (arg, &worklist);
|
||||
{
|
||||
arg = mark_operand_necessary (arg);
|
||||
if (arg)
|
||||
VEC_quick_push (tree, worklist, arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -2181,10 +2190,14 @@ remove_dead_inserted_code (void)
|
|||
links). */
|
||||
|
||||
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
|
||||
mark_operand_necessary (use, &worklist);
|
||||
{
|
||||
tree n = mark_operand_necessary (use);
|
||||
if (n)
|
||||
VEC_safe_push (tree, heap, worklist, n);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (i = 0; VEC_iterate (tree_on_heap, inserted_exprs, i, t); i++)
|
||||
for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
|
||||
{
|
||||
if (!NECESSARY (t))
|
||||
{
|
||||
|
@ -2205,7 +2218,7 @@ remove_dead_inserted_code (void)
|
|||
}
|
||||
}
|
||||
}
|
||||
VEC_free (tree_on_heap, worklist);
|
||||
VEC_free (tree, heap, worklist);
|
||||
}
|
||||
/* Initialize data structures used by PRE. */
|
||||
|
||||
|
@ -2272,7 +2285,7 @@ fini_pre (bool do_fre)
|
|||
basic_block bb;
|
||||
unsigned int i;
|
||||
|
||||
VEC_free (tree_on_heap, inserted_exprs);
|
||||
VEC_free (tree, heap, inserted_exprs);
|
||||
bitmap_obstack_release (&grand_bitmap_obstack);
|
||||
free_alloc_pool (value_set_pool);
|
||||
free_alloc_pool (bitmap_set_pool);
|
||||
|
|
|
@ -143,7 +143,7 @@ static sbitmap bb_in_list;
|
|||
definition has changed. SSA edges are def-use edges in the SSA
|
||||
web. For each D-U edge, we store the target statement or PHI node
|
||||
U. */
|
||||
static GTY(()) VEC(tree) *interesting_ssa_edges;
|
||||
static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
|
||||
|
||||
/* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
|
||||
list of SSA edges is split into two. One contains all SSA edges
|
||||
|
@ -159,7 +159,7 @@ static GTY(()) VEC(tree) *interesting_ssa_edges;
|
|||
don't use a separate worklist for VARYING edges, we end up with
|
||||
situations where lattice values move from
|
||||
UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
|
||||
static GTY(()) VEC(tree) *varying_ssa_edges;
|
||||
static GTY(()) VEC(tree,gc) *varying_ssa_edges;
|
||||
|
||||
|
||||
/* Return true if the block worklist empty. */
|
||||
|
@ -244,9 +244,9 @@ add_ssa_edge (tree var, bool is_varying)
|
|||
{
|
||||
STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
|
||||
if (is_varying)
|
||||
VEC_safe_push (tree, varying_ssa_edges, use_stmt);
|
||||
VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
|
||||
else
|
||||
VEC_safe_push (tree, interesting_ssa_edges, use_stmt);
|
||||
VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -342,7 +342,7 @@ simulate_stmt (tree stmt)
|
|||
SSA edge is added to it in simulate_stmt. */
|
||||
|
||||
static void
|
||||
process_ssa_edge_worklist (VEC(tree) **worklist)
|
||||
process_ssa_edge_worklist (VEC(tree,gc) **worklist)
|
||||
{
|
||||
/* Drain the entire worklist. */
|
||||
while (VEC_length (tree, *worklist) > 0)
|
||||
|
@ -462,8 +462,8 @@ ssa_prop_init (void)
|
|||
size_t i;
|
||||
|
||||
/* Worklists of SSA edges. */
|
||||
interesting_ssa_edges = VEC_alloc (tree, 20);
|
||||
varying_ssa_edges = VEC_alloc (tree, 20);
|
||||
interesting_ssa_edges = VEC_alloc (tree, gc, 20);
|
||||
varying_ssa_edges = VEC_alloc (tree, gc, 20);
|
||||
|
||||
executable_blocks = sbitmap_alloc (last_basic_block);
|
||||
sbitmap_zero (executable_blocks);
|
||||
|
@ -506,8 +506,8 @@ ssa_prop_init (void)
|
|||
static void
|
||||
ssa_prop_fini (void)
|
||||
{
|
||||
VEC_free (tree, interesting_ssa_edges);
|
||||
VEC_free (tree, varying_ssa_edges);
|
||||
VEC_free (tree, gc, interesting_ssa_edges);
|
||||
VEC_free (tree, gc, varying_ssa_edges);
|
||||
cfg_blocks = NULL;
|
||||
sbitmap_free (bb_in_list);
|
||||
sbitmap_free (executable_blocks);
|
||||
|
|
|
@ -493,7 +493,8 @@ err:
|
|||
internal_error ("verify_flow_sensitive_alias_info failed.");
|
||||
}
|
||||
|
||||
DEF_VEC_MALLOC_P (bitmap);
|
||||
DEF_VEC_P (bitmap);
|
||||
DEF_VEC_ALLOC_P (bitmap,heap);
|
||||
|
||||
/* Verify that all name tags have different points to sets.
|
||||
This algorithm takes advantage of the fact that every variable with the
|
||||
|
@ -512,8 +513,8 @@ verify_name_tags (void)
|
|||
size_t i;
|
||||
size_t j;
|
||||
bitmap first, second;
|
||||
VEC (tree) *name_tag_reps = NULL;
|
||||
VEC (bitmap) *pt_vars_for_reps = NULL;
|
||||
VEC(tree,heap) *name_tag_reps = NULL;
|
||||
VEC(bitmap,heap) *pt_vars_for_reps = NULL;
|
||||
bitmap type_aliases = BITMAP_ALLOC (NULL);
|
||||
|
||||
/* First we compute the name tag representatives and their points-to sets. */
|
||||
|
@ -539,8 +540,8 @@ verify_name_tags (void)
|
|||
if (pi->pt_vars == NULL)
|
||||
continue;
|
||||
|
||||
VEC_safe_push (tree, name_tag_reps, ptr);
|
||||
VEC_safe_push (bitmap, pt_vars_for_reps, pi->pt_vars);
|
||||
VEC_safe_push (tree, heap, name_tag_reps, ptr);
|
||||
VEC_safe_push (bitmap, heap, pt_vars_for_reps, pi->pt_vars);
|
||||
|
||||
/* Verify that alias set of PTR's type tag is a superset of the
|
||||
alias set of PTR's name tag. */
|
||||
|
@ -605,7 +606,10 @@ verify_name_tags (void)
|
|||
}
|
||||
}
|
||||
|
||||
VEC_free (bitmap, pt_vars_for_reps);
|
||||
/* We do not have to free the bitmaps or trees in the vectors, as
|
||||
they are not owned by us. */
|
||||
VEC_free (bitmap, heap, pt_vars_for_reps);
|
||||
VEC_free (tree, heap, name_tag_reps);
|
||||
BITMAP_FREE (type_aliases);
|
||||
return;
|
||||
|
||||
|
|
10
gcc/tree.h
10
gcc/tree.h
|
@ -158,8 +158,10 @@ extern const unsigned char tree_code_length[];
|
|||
|
||||
extern const char *const tree_code_name[];
|
||||
|
||||
/* A garbage collected vector of trees. */
|
||||
DEF_VEC_GC_P(tree);
|
||||
/* A vectors of trees. */
|
||||
DEF_VEC_P(tree);
|
||||
DEF_VEC_ALLOC_P(tree,gc);
|
||||
DEF_VEC_ALLOC_P(tree,heap);
|
||||
|
||||
|
||||
/* Classify which part of the compiler has defined a given builtin function.
|
||||
|
@ -1853,13 +1855,13 @@ struct tree_binfo GTY (())
|
|||
tree vtable;
|
||||
tree virtuals;
|
||||
tree vptr_field;
|
||||
VEC(tree) *base_accesses;
|
||||
VEC(tree,gc) *base_accesses;
|
||||
tree inheritance;
|
||||
|
||||
tree vtt_subvtt;
|
||||
tree vtt_vptr;
|
||||
|
||||
VEC(tree) base_binfos;
|
||||
VEC(tree,none) base_binfos;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -182,7 +182,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
|
|||
hist->hvalue.rtl.insn = insn;
|
||||
hist->type = HIST_TYPE_POW2;
|
||||
hist->hdata.pow2.may_be_other = 1;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_safe_push (histogram_value, heap, *values, hist);
|
||||
}
|
||||
|
||||
/* Check whether the divisor is not in fact a constant. */
|
||||
|
@ -194,7 +194,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
|
|||
hist->hvalue.rtl.seq = NULL_RTX;
|
||||
hist->hvalue.rtl.insn = insn;
|
||||
hist->type = HIST_TYPE_SINGLE_VALUE;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_safe_push (histogram_value, heap, *values, hist);
|
||||
}
|
||||
|
||||
/* For mod, check whether it is not often a noop (or replaceable by
|
||||
|
@ -214,7 +214,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
|
|||
hist->type = HIST_TYPE_INTERVAL;
|
||||
hist->hdata.intvl.int_start = 0;
|
||||
hist->hdata.intvl.steps = 2;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_safe_push (histogram_value, heap, *values, hist);
|
||||
}
|
||||
return;
|
||||
|
||||
|
@ -305,7 +305,7 @@ insn_prefetch_values_to_profile (rtx insn, histogram_values* values)
|
|||
hist->hvalue.rtl.seq = NULL_RTX;
|
||||
hist->hvalue.rtl.insn = insn;
|
||||
hist->type = HIST_TYPE_CONST_DELTA;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_safe_push (histogram_value, heap, *values, hist);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -330,19 +330,18 @@ rtl_find_values_to_profile (histogram_values *values)
|
|||
{
|
||||
rtx insn;
|
||||
unsigned i, libcall_level;
|
||||
histogram_value hist;
|
||||
|
||||
life_analysis (NULL, PROP_DEATH_NOTES);
|
||||
|
||||
*values = VEC_alloc (histogram_value, 0);
|
||||
*values = NULL;
|
||||
libcall_level = 0;
|
||||
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
||||
rtl_values_to_profile (insn, values);
|
||||
static_values = *values;
|
||||
|
||||
for (i = 0; i < VEC_length (histogram_value, *values); i++)
|
||||
for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
|
||||
{
|
||||
histogram_value hist = VEC_index (histogram_value, *values, i);
|
||||
|
||||
switch (hist->type)
|
||||
{
|
||||
case HIST_TYPE_INTERVAL:
|
||||
|
@ -1665,6 +1664,8 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
|
|||
op1 = TREE_OPERAND (op, 0);
|
||||
op2 = TREE_OPERAND (op, 1);
|
||||
|
||||
VEC_reserve (histogram_value, heap, *values, 3);
|
||||
|
||||
/* Check for a special case where the divisor is power(s) of 2.
|
||||
This is more aggressive than the RTL version, under the
|
||||
assumption that later phases will reduce / or % by power of 2
|
||||
|
@ -1676,7 +1677,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
|
|||
hist->hvalue.tree.stmt = stmt;
|
||||
hist->type = HIST_TYPE_POW2;
|
||||
hist->hdata.pow2.may_be_other = 1;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_quick_push (histogram_value, *values, hist);
|
||||
}
|
||||
|
||||
/* Check for the case where the divisor is the same value most
|
||||
|
@ -1687,7 +1688,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
|
|||
hist->hvalue.tree.value = op2;
|
||||
hist->hvalue.tree.stmt = stmt;
|
||||
hist->type = HIST_TYPE_SINGLE_VALUE;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_quick_push (histogram_value, *values, hist);
|
||||
}
|
||||
|
||||
/* For mod, check whether it is not often a noop (or replaceable by
|
||||
|
@ -1700,7 +1701,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
|
|||
hist->type = HIST_TYPE_INTERVAL;
|
||||
hist->hdata.intvl.int_start = 0;
|
||||
hist->hdata.intvl.steps = 2;
|
||||
VEC_safe_push (histogram_value, *values, hist);
|
||||
VEC_quick_push (histogram_value, *values, hist);
|
||||
}
|
||||
return;
|
||||
|
||||
|
@ -1725,8 +1726,9 @@ tree_find_values_to_profile (histogram_values *values)
|
|||
block_stmt_iterator bsi;
|
||||
tree stmt;
|
||||
unsigned int i;
|
||||
|
||||
*values = VEC_alloc (histogram_value, 0);
|
||||
histogram_value hist;
|
||||
|
||||
*values = NULL;
|
||||
FOR_EACH_BB (bb)
|
||||
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
|
||||
{
|
||||
|
@ -1735,10 +1737,8 @@ tree_find_values_to_profile (histogram_values *values)
|
|||
}
|
||||
static_values = *values;
|
||||
|
||||
for (i = 0; i < VEC_length (histogram_value, *values); i++)
|
||||
for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
|
||||
{
|
||||
histogram_value hist = VEC_index (histogram_value, *values, i);
|
||||
|
||||
switch (hist->type)
|
||||
{
|
||||
case HIST_TYPE_INTERVAL:
|
||||
|
@ -1817,6 +1817,6 @@ bool
|
|||
value_profile_transformations (void)
|
||||
{
|
||||
bool retval = (value_prof_hooks->value_profile_transformations) ();
|
||||
VEC_free (histogram_value, static_values);
|
||||
VEC_free (histogram_value, heap, static_values);
|
||||
return retval;
|
||||
}
|
||||
|
|
|
@ -75,9 +75,10 @@ struct histogram_value_t
|
|||
|
||||
typedef struct histogram_value_t *histogram_value;
|
||||
|
||||
DEF_VEC_MALLOC_P(histogram_value);
|
||||
DEF_VEC_P(histogram_value);
|
||||
DEF_VEC_ALLOC_P(histogram_value,heap);
|
||||
|
||||
typedef VEC(histogram_value) *histogram_values;
|
||||
typedef VEC(histogram_value,heap) *histogram_values;
|
||||
|
||||
/* Hooks registration. */
|
||||
extern void rtl_register_value_prof_hooks (void);
|
||||
|
|
21
gcc/varasm.c
21
gcc/varasm.c
|
@ -4426,17 +4426,17 @@ globalize_decl (tree decl)
|
|||
of an alias. This requires that the decl have been defined. Aliases
|
||||
that precede their definition have to be queued for later processing. */
|
||||
|
||||
struct alias_pair GTY(())
|
||||
typedef struct alias_pair GTY(())
|
||||
{
|
||||
tree decl;
|
||||
tree target;
|
||||
};
|
||||
typedef struct alias_pair *alias_pair;
|
||||
} alias_pair;
|
||||
|
||||
/* Define gc'd vector type. */
|
||||
DEF_VEC_GC_P(alias_pair);
|
||||
DEF_VEC_O(alias_pair);
|
||||
DEF_VEC_ALLOC_O(alias_pair,gc);
|
||||
|
||||
static GTY(()) VEC(alias_pair) *alias_pairs;
|
||||
static GTY(()) VEC(alias_pair,gc) *alias_pairs;
|
||||
|
||||
/* Given an assembly name, find the decl it is associated with. At the
|
||||
same time, mark it needed for cgraph. */
|
||||
|
@ -4534,7 +4534,7 @@ void
|
|||
finish_aliases_1 (void)
|
||||
{
|
||||
unsigned i;
|
||||
alias_pair p;
|
||||
alias_pair *p;
|
||||
|
||||
for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
|
||||
{
|
||||
|
@ -4558,12 +4558,12 @@ void
|
|||
finish_aliases_2 (void)
|
||||
{
|
||||
unsigned i;
|
||||
alias_pair p;
|
||||
alias_pair *p;
|
||||
|
||||
for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
|
||||
do_assemble_alias (p->decl, p->target);
|
||||
|
||||
alias_pairs = NULL;
|
||||
VEC_truncate (alias_pair, alias_pairs, 0);
|
||||
}
|
||||
|
||||
/* Emit an assembler directive to make the symbol for DECL an alias to
|
||||
|
@ -4610,12 +4610,9 @@ assemble_alias (tree decl, tree target)
|
|||
do_assemble_alias (decl, target);
|
||||
else
|
||||
{
|
||||
alias_pair p;
|
||||
|
||||
p = ggc_alloc (sizeof (struct alias_pair));
|
||||
alias_pair *p = VEC_safe_push (alias_pair, gc, alias_pairs, NULL);
|
||||
p->decl = decl;
|
||||
p->target = target;
|
||||
VEC_safe_push (alias_pair, alias_pairs, p);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
111
gcc/vec.c
111
gcc/vec.c
|
@ -34,9 +34,54 @@ struct vec_prefix
|
|||
void *vec[1];
|
||||
};
|
||||
|
||||
/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
|
||||
0. If RESERVE < 0 increase the current allocation exponentially.
|
||||
VEC can be NULL, to create a new vector. */
|
||||
/* Calculate the new ALLOC value, making sure that abs(RESERVE) slots
|
||||
are free. If RESERVE < 0 grow exactly, otherwise grow
|
||||
exponentially. */
|
||||
|
||||
static inline unsigned
|
||||
calculate_allocation (const struct vec_prefix *pfx, int reserve)
|
||||
{
|
||||
unsigned alloc = 0;
|
||||
unsigned num = 0;
|
||||
|
||||
if (pfx)
|
||||
{
|
||||
alloc = pfx->alloc;
|
||||
num = pfx->num;
|
||||
}
|
||||
else if (!reserve)
|
||||
/* If there's no prefix, and we've not requested anything, then we
|
||||
will create a NULL vector. */
|
||||
return 0;
|
||||
|
||||
/* We must have run out of room. */
|
||||
gcc_assert (alloc - num < (unsigned)(reserve < 0 ? -reserve : reserve));
|
||||
|
||||
if (reserve < 0)
|
||||
/* Exact size. */
|
||||
alloc = num + -reserve;
|
||||
else
|
||||
{
|
||||
/* Exponential growth. */
|
||||
if (!alloc)
|
||||
alloc = 4;
|
||||
else if (alloc < 16)
|
||||
/* Double when small. */
|
||||
alloc = alloc * 2;
|
||||
else
|
||||
/* Grow slower when large. */
|
||||
alloc = (alloc * 3 / 2);
|
||||
|
||||
/* If this is still too small, set it to the right size. */
|
||||
if (alloc < num + reserve)
|
||||
alloc = num + reserve;
|
||||
}
|
||||
return alloc;
|
||||
}
|
||||
|
||||
/* Ensure there are at least abs(RESERVE) free slots in VEC. If
|
||||
RESERVE < 0 grow exactly, else grow exponentially. As a special
|
||||
case, if VEC is NULL, and RESERVE is 0, no vector will be created. */
|
||||
|
||||
void *
|
||||
vec_gc_p_reserve (void *vec, int reserve MEM_STAT_DECL)
|
||||
|
@ -46,27 +91,19 @@ vec_gc_p_reserve (void *vec, int reserve MEM_STAT_DECL)
|
|||
PASS_MEM_STAT);
|
||||
}
|
||||
|
||||
/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
|
||||
0. If RESERVE < 0, increase the current allocation exponentially.
|
||||
VEC can be NULL, in which case a new vector is created. The
|
||||
vector's trailing array is at VEC_OFFSET offset and consists of
|
||||
ELT_SIZE sized elements. */
|
||||
/* As vec_gc_p_reserve, but for object vectors. The vector's trailing
|
||||
array is at VEC_OFFSET offset and consists of ELT_SIZE sized
|
||||
elements. */
|
||||
|
||||
void *
|
||||
vec_gc_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
|
||||
MEM_STAT_DECL)
|
||||
{
|
||||
struct vec_prefix *pfx = vec;
|
||||
unsigned alloc = pfx ? pfx->num : 0;
|
||||
|
||||
if (reserve >= 0)
|
||||
alloc += reserve;
|
||||
else if (alloc)
|
||||
alloc *= 2;
|
||||
else
|
||||
alloc = 4;
|
||||
|
||||
gcc_assert (!pfx || pfx->alloc < alloc);
|
||||
unsigned alloc = alloc = calculate_allocation (pfx, reserve);
|
||||
|
||||
if (!alloc)
|
||||
return NULL;
|
||||
|
||||
vec = ggc_realloc_stat (vec, vec_offset + alloc * elt_size PASS_MEM_STAT);
|
||||
((struct vec_prefix *)vec)->alloc = alloc;
|
||||
|
@ -76,17 +113,7 @@ vec_gc_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
|
|||
return vec;
|
||||
}
|
||||
|
||||
/* Explicitly release a vector. */
|
||||
|
||||
void
|
||||
vec_gc_free (void *vec)
|
||||
{
|
||||
ggc_free (vec);
|
||||
}
|
||||
|
||||
/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
|
||||
0. If RESERVE < 0 increase the current allocation exponentially.
|
||||
VEC can be NULL, to create a new vector. */
|
||||
/* As for vec_gc_p_reserve, but for heap allocated vectors. */
|
||||
|
||||
void *
|
||||
vec_heap_p_reserve (void *vec, int reserve MEM_STAT_DECL)
|
||||
|
@ -96,27 +123,17 @@ vec_heap_p_reserve (void *vec, int reserve MEM_STAT_DECL)
|
|||
PASS_MEM_STAT);
|
||||
}
|
||||
|
||||
/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
|
||||
0. If RESERVE < 0, increase the current allocation exponentially.
|
||||
VEC can be NULL, in which case a new vector is created. The
|
||||
vector's trailing array is at VEC_OFFSET offset and consists of
|
||||
ELT_SIZE sized elements. */
|
||||
/* As for vec_gc_o_reserve, but for heap allocated vectors. */
|
||||
|
||||
void *
|
||||
vec_heap_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
|
||||
MEM_STAT_DECL)
|
||||
{
|
||||
struct vec_prefix *pfx = vec;
|
||||
unsigned alloc = pfx ? pfx->num : 0;
|
||||
unsigned alloc = calculate_allocation (pfx, reserve);
|
||||
|
||||
if (reserve >= 0)
|
||||
alloc += reserve;
|
||||
else if (alloc)
|
||||
alloc *= 2;
|
||||
else
|
||||
alloc = 4;
|
||||
|
||||
gcc_assert (!pfx || pfx->alloc < alloc);
|
||||
if (!alloc)
|
||||
return NULL;
|
||||
|
||||
vec = xrealloc (vec, vec_offset + alloc * elt_size);
|
||||
((struct vec_prefix *)vec)->alloc = alloc;
|
||||
|
@ -126,14 +143,6 @@ vec_heap_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
|
|||
return vec;
|
||||
}
|
||||
|
||||
/* Explicitly release a vector. */
|
||||
|
||||
void
|
||||
vec_heap_free (void *vec)
|
||||
{
|
||||
free (vec);
|
||||
}
|
||||
|
||||
#if ENABLE_CHECKING
|
||||
/* Issue a vector domain error, and then fall over. */
|
||||
|
||||
|
|
Loading…
Reference in New Issue