Adjust by-value function vec arguments to by-reference.

gcc/c-family/ChangeLog:

	* c-common.c (c_build_shufflevector): Adjust by-value argument to
	by-const-reference.
	* c-common.h (c_build_shufflevector): Same.

gcc/c/ChangeLog:

	* c-tree.h (c_build_function_call_vec): Adjust by-value argument to
	by-const-reference.
	* c-typeck.c (c_build_function_call_vec): Same.

gcc/ChangeLog:

	* cfgloop.h (single_likely_exit): Adjust by-value argument to
	by-const-reference.
	* cfgloopanal.c (single_likely_exit): Same.
	* cgraph.h (struct cgraph_node): Same.
	* cgraphclones.c (cgraph_node::create_virtual_clone): Same.
	* genautomata.c (merge_states): Same.
	* genextract.c (VEC_char_to_string): Same.
	* genmatch.c (dt_node::gen_kids_1): Same.
	(walk_captures): Adjust by-value argument to by-reference.
	* gimple-ssa-store-merging.c (check_no_overlap): Adjust by-value argument
	to by-const-reference.
	* gimple.c (gimple_build_call_vec): Same.
	(gimple_build_call_internal_vec): Same.
	(gimple_build_switch): Same.
	(sort_case_labels): Same.
	(preprocess_case_label_vec_for_gimple): Adjust by-value argument to
	by-reference.
	* gimple.h (gimple_build_call_vec): Adjust by-value argument to
	by-const-reference.
	(gimple_build_call_internal_vec): Same.
	(gimple_build_switch): Same.
	(sort_case_labels): Same.
	(preprocess_case_label_vec_for_gimple): Adjust by-value argument to
	by-reference.
	* haifa-sched.c (calc_priorities): Adjust by-value argument to
	by-const-reference.
	(sched_init_luids): Same.
	(haifa_init_h_i_d): Same.
	* ipa-cp.c (ipa_get_indirect_edge_target_1): Same.
	(adjust_callers_for_value_intersection): Adjust by-value argument to
	by-reference.
	(find_more_scalar_values_for_callers_subset): Adjust by-value argument to
	by-const-reference.
	(find_more_contexts_for_caller_subset): Same.
	(find_aggregate_values_for_callers_subset): Same.
	(copy_useful_known_contexts): Same.
	* ipa-fnsummary.c (remap_edge_summaries): Same.
	(remap_freqcounting_predicate): Same.
	* ipa-inline.c (add_new_edges_to_heap): Adjust by-value argument to
	by-reference.
	* ipa-predicate.c (predicate::remap_after_inlining): Adjust by-value argument
	to by-const-reference.
	* ipa-predicate.h (predicate::remap_after_inlining): Same.
	* ipa-prop.c (ipa_find_agg_cst_for_param): Same.
	* ipa-prop.h (ipa_find_agg_cst_for_param): Same.
	* ira-build.c (ira_loop_tree_body_rev_postorder): Same.
	* read-rtl.c (add_overload_instance): Same.
	* rtl.h (native_decode_rtx): Same.
	(native_decode_vector_rtx): Same.
	* sched-int.h (sched_init_luids): Same.
	(haifa_init_h_i_d): Same.
	* simplify-rtx.c (native_decode_vector_rtx): Same.
	(native_decode_rtx): Same.
	* tree-call-cdce.c (gen_shrink_wrap_conditions): Same.
	(shrink_wrap_one_built_in_call_with_conds): Same.
	(shrink_wrap_conditional_dead_built_in_calls): Same.
	* tree-data-ref.c (create_runtime_alias_checks): Same.
	(compute_all_dependences): Same.
	* tree-data-ref.h (compute_all_dependences): Same.
	(create_runtime_alias_checks): Same.
	(index_in_loop_nest): Same.
	* tree-if-conv.c (mask_exists): Same.
	* tree-loop-distribution.c (class loop_distribution): Same.
	(loop_distribution::create_rdg_vertices): Same.
	(dump_rdg_partitions): Same.
	(debug_rdg_partitions): Same.
	(partition_contains_all_rw): Same.
	(loop_distribution::distribute_loop): Same.
	* tree-parloops.c (oacc_entry_exit_ok_1): Same.
	(oacc_entry_exit_single_gang): Same.
	* tree-ssa-loop-im.c (hoist_memory_references): Same.
	(loop_suitable_for_sm): Same.
	* tree-ssa-loop-niter.c (bound_index): Same.
	* tree-ssa-reassoc.c (update_ops): Same.
	(swap_ops_for_binary_stmt): Same.
	(rewrite_expr_tree): Same.
	(rewrite_expr_tree_parallel): Same.
	* tree-ssa-sccvn.c (ao_ref_init_from_vn_reference): Same.
	* tree-ssa-sccvn.h (ao_ref_init_from_vn_reference): Same.
	* tree-ssa-structalias.c (process_all_all_constraints): Same.
	(make_constraints_to): Same.
	(handle_lhs_call): Same.
	(find_func_aliases_for_builtin_call): Same.
	(sort_fieldstack): Same.
	(check_for_overlaps): Same.
	* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Same.
	(vect_create_cond_for_unequal_addrs): Same.
	(vect_create_cond_for_lower_bounds): Same.
	(vect_create_cond_for_alias_checks): Same.
	* tree-vect-slp-patterns.c (vect_validate_multiplication): Same.
	* tree-vect-slp.c (vect_analyze_slp_instance): Same.
	(vect_make_slp_decision): Same.
	(vect_slp_bbs): Same.
	(duplicate_and_interleave): Same.
	(vect_transform_slp_perm_load): Same.
	(vect_schedule_slp): Same.
	* tree-vectorizer.h (vect_transform_slp_perm_load): Same.
	(vect_schedule_slp): Same.
	(duplicate_and_interleave): Same.
	* tree.c (build_vector_from_ctor): Same.
	(build_vector): Same.
	(check_vector_cst): Same.
	(check_vector_cst_duplicate): Same.
	(check_vector_cst_fill): Same.
	(check_vector_cst_stepped): Same.
	* tree.h (build_vector_from_ctor): Same.
This commit is contained in:
Martin Sebor 2021-07-20 11:14:19 -06:00
parent 7fcb33455c
commit 00dcc88a0e
45 changed files with 146 additions and 127 deletions

View File

@@ -1115,8 +1115,8 @@ c_build_vec_perm_expr (location_t loc, tree v0, tree v1, tree mask,
and have vector types, V0 has the same element type as V1, and the
number of elements the result is that of MASK. */
tree
c_build_shufflevector (location_t loc, tree v0, tree v1, vec<tree> mask,
bool complain)
c_build_shufflevector (location_t loc, tree v0, tree v1,
const vec<tree> &mask, bool complain)
{
tree ret;
bool wrap = true;

View File

@@ -1049,7 +1049,7 @@ extern bool vector_targets_convertible_p (const_tree t1, const_tree t2);
extern bool vector_types_convertible_p (const_tree t1, const_tree t2, bool emit_lax_note);
extern tree c_build_vec_perm_expr (location_t, tree, tree, tree, bool = true);
extern tree c_build_shufflevector (location_t, tree, tree,
vec<tree>, bool = true);
const vec<tree> &, bool = true);
extern tree c_build_vec_convert (location_t, tree, location_t, tree, bool = true);
extern void init_c_lex (void);

View File

@@ -759,8 +759,9 @@ extern tree c_finish_omp_clauses (tree, enum c_omp_region_type);
extern tree c_build_va_arg (location_t, tree, location_t, tree);
extern tree c_finish_transaction (location_t, tree, int);
extern bool c_tree_equal (tree, tree);
extern tree c_build_function_call_vec (location_t, vec<location_t>, tree,
vec<tree, va_gc> *, vec<tree, va_gc> *);
extern tree c_build_function_call_vec (location_t, const vec<location_t>&,
tree, vec<tree, va_gc> *,
vec<tree, va_gc> *);
extern tree c_omp_clause_copy_ctor (tree, tree, tree);
/* Set to 0 at beginning of a function definition, set to 1 if

View File

@@ -3240,7 +3240,7 @@ build_function_call_vec (location_t loc, vec<location_t> arg_loc,
/* Like build_function_call_vec, but call also resolve_overloaded_builtin. */
tree
c_build_function_call_vec (location_t loc, vec<location_t> arg_loc,
c_build_function_call_vec (location_t loc, const vec<location_t> &arg_loc,
tree function, vec<tree, va_gc> *params,
vec<tree, va_gc> *origtypes)
{

View File

@@ -385,7 +385,7 @@ extern basic_block *get_loop_body_in_custom_order (const class loop *, void *,
extern auto_vec<edge> get_loop_exit_edges (const class loop *, basic_block * = NULL);
extern edge single_exit (const class loop *);
extern edge single_likely_exit (class loop *loop, vec<edge>);
extern edge single_likely_exit (class loop *loop, const vec<edge> &);
extern unsigned num_loop_branches (const class loop *);
extern edge loop_preheader_edge (const class loop *);

View File

@@ -470,7 +470,7 @@ mark_loop_exit_edges (void)
to noreturn call. */
edge
single_likely_exit (class loop *loop, vec<edge> exits)
single_likely_exit (class loop *loop, const vec<edge> &exits)
{
edge found = single_exit (loop);
unsigned i;

View File

@@ -949,7 +949,7 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
/* Create callgraph node clone with new declaration. The actual body will be
copied later at compilation stage. The name of the new clone will be
constructed from the name of the original node, SUFFIX and NUM_SUFFIX. */
cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
cgraph_node *create_virtual_clone (const vec<cgraph_edge *> &redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map,
ipa_param_adjustments *param_adjustments,
const char * suffix, unsigned num_suffix);

View File

@@ -564,7 +564,7 @@ clone_function_name (tree decl, const char *suffix)
bitmap interface.
*/
cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
cgraph_node::create_virtual_clone (const vec<cgraph_edge *> &redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map,
ipa_param_adjustments *param_adjustments,
const char * suffix, unsigned num_suffix)

View File

@@ -6137,7 +6137,7 @@ evaluate_equiv_classes (automaton_t automaton, vec<state_t> *equiv_classes)
/* The function merges equivalent states of AUTOMATON. */
static void
merge_states (automaton_t automaton, vec<state_t> equiv_classes)
merge_states (automaton_t automaton, const vec<state_t> &equiv_classes)
{
state_t curr_state;
state_t new_state;

View File

@@ -214,7 +214,7 @@ VEC_safe_set_locstr (md_rtx_info *info, vec<locstr> *vp,
/* Another helper subroutine of walk_rtx: given a vec<char>, convert it
to a NUL-terminated string in malloc memory. */
static char *
VEC_char_to_string (vec<char> v)
VEC_char_to_string (const vec<char> &v)
{
size_t n = v.length ();
char *s = XNEWVEC (char, n + 1);

View File

@@ -1632,8 +1632,9 @@ public:
void gen_kids (FILE *, int, bool, int);
void gen_kids_1 (FILE *, int, bool, int,
vec<dt_operand *>, vec<dt_operand *>, vec<dt_operand *>,
vec<dt_operand *>, vec<dt_operand *>, vec<dt_node *>);
const vec<dt_operand *> &, const vec<dt_operand *> &,
const vec<dt_operand *> &, const vec<dt_operand *> &,
const vec<dt_operand *> &, const vec<dt_node *> &);
void analyze (sinfo_map_t &);
};
@ -2983,12 +2984,12 @@ dt_node::gen_kids (FILE *f, int indent, bool gimple, int depth)
void
dt_node::gen_kids_1 (FILE *f, int indent, bool gimple, int depth,
vec<dt_operand *> gimple_exprs,
vec<dt_operand *> generic_exprs,
vec<dt_operand *> fns,
vec<dt_operand *> generic_fns,
vec<dt_operand *> preds,
vec<dt_node *> others)
const vec<dt_operand *> &gimple_exprs,
const vec<dt_operand *> &generic_exprs,
const vec<dt_operand *> &fns,
const vec<dt_operand *> &generic_fns,
const vec<dt_operand *> &preds,
const vec<dt_node *> &others)
{
char buf[128];
char *kid_opname = buf;
@ -5031,7 +5032,7 @@ parser::parse_pattern ()
recursively. */
static void
walk_captures (operand *op, vec<vec<capture *> > cpts)
walk_captures (operand *op, vec<vec<capture *> > &cpts)
{
if (! op)
return;

View File

@@ -2654,7 +2654,8 @@ gather_bswap_load_refs (vec<tree> *refs, tree val)
go after the = _5 store and thus change behavior. */
static bool
check_no_overlap (vec<store_immediate_info *> m_store_info, unsigned int i,
check_no_overlap (const vec<store_immediate_info *> &m_store_info,
unsigned int i,
bool all_integer_cst_p, unsigned int first_order,
unsigned int last_order, unsigned HOST_WIDE_INT start,
unsigned HOST_WIDE_INT end, unsigned int first_earlier,

View File

@@ -241,7 +241,7 @@ gimple_build_call_1 (tree fn, unsigned nargs)
specified in vector ARGS. */
gcall *
gimple_build_call_vec (tree fn, vec<tree> args)
gimple_build_call_vec (tree fn, const vec<tree> &args)
{
unsigned i;
unsigned nargs = args.length ();
@ -338,7 +338,7 @@ gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
specified in vector ARGS. */
gcall *
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
gimple_build_call_internal_vec (enum internal_fn fn, const vec<tree> &args)
{
unsigned i, nargs;
gcall *call;
@ -802,7 +802,7 @@ gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
ARGS is a vector of labels excluding the default. */
gswitch *
gimple_build_switch (tree index, tree default_label, vec<tree> args)
gimple_build_switch (tree index, tree default_label, const vec<tree> &args)
{
unsigned i, nlabels = args.length ();
@ -3051,7 +3051,7 @@ compare_case_labels (const void *p1, const void *p2)
/* Sort the case labels in LABEL_VEC in place in ascending order. */
void
sort_case_labels (vec<tree> label_vec)
sort_case_labels (vec<tree> &label_vec)
{
label_vec.qsort (compare_case_labels);
}
@ -3076,7 +3076,7 @@ sort_case_labels (vec<tree> label_vec)
found or not. */
void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
preprocess_case_label_vec_for_gimple (vec<tree> &labels,
tree index_type,
tree *default_casep)
{

View File

@@ -1516,11 +1516,11 @@ void gimple_init (gimple *g, enum gimple_code code, unsigned num_ops);
gimple *gimple_alloc (enum gimple_code, unsigned CXX_MEM_STAT_INFO);
greturn *gimple_build_return (tree);
void gimple_call_reset_alias_info (gcall *);
gcall *gimple_build_call_vec (tree, vec<tree> );
gcall *gimple_build_call_vec (tree, const vec<tree> &);
gcall *gimple_build_call (tree, unsigned, ...);
gcall *gimple_build_call_valist (tree, unsigned, va_list);
gcall *gimple_build_call_internal (enum internal_fn, unsigned, ...);
gcall *gimple_build_call_internal_vec (enum internal_fn, vec<tree> );
gcall *gimple_build_call_internal_vec (enum internal_fn, const vec<tree> &);
gcall *gimple_build_call_from_tree (tree, tree);
gassign *gimple_build_assign (tree, tree CXX_MEM_STAT_INFO);
gassign *gimple_build_assign (tree, enum tree_code,
@ -1547,7 +1547,7 @@ gtry *gimple_build_try (gimple_seq, gimple_seq,
gimple *gimple_build_wce (gimple_seq);
gresx *gimple_build_resx (int);
gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
gswitch *gimple_build_switch (tree, tree, vec<tree> );
gswitch *gimple_build_switch (tree, tree, const vec<tree> &);
geh_dispatch *gimple_build_eh_dispatch (int);
gdebug *gimple_build_debug_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
gdebug *gimple_build_debug_source_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
@ -1626,8 +1626,8 @@ extern bool nonbarrier_call_p (gimple *);
extern bool infer_nonnull_range (gimple *, tree);
extern bool infer_nonnull_range_by_dereference (gimple *, tree);
extern bool infer_nonnull_range_by_attribute (gimple *, tree);
extern void sort_case_labels (vec<tree>);
extern void preprocess_case_label_vec_for_gimple (vec<tree>, tree, tree *);
extern void sort_case_labels (vec<tree> &);
extern void preprocess_case_label_vec_for_gimple (vec<tree> &, tree, tree *);
extern void gimple_seq_set_location (gimple_seq, location_t);
extern void gimple_seq_discard (gimple_seq);
extern void maybe_remove_unused_call_args (struct function *, gimple *);

View File

@@ -891,7 +891,7 @@ static void move_block_after_check (rtx_insn *);
static void move_succs (vec<edge, va_gc> **, basic_block);
static void sched_remove_insn (rtx_insn *);
static void clear_priorities (rtx_insn *, rtx_vec_t *);
static void calc_priorities (rtx_vec_t);
static void calc_priorities (const rtx_vec_t &);
static void add_jump_dependencies (rtx_insn *, rtx_insn *);
#endif /* INSN_SCHEDULING */
@ -8923,7 +8923,7 @@ clear_priorities (rtx_insn *insn, rtx_vec_t *roots_ptr)
changed. ROOTS is a vector of instructions whose priority computation will
trigger initialization of all cleared priorities. */
static void
calc_priorities (rtx_vec_t roots)
calc_priorities (const rtx_vec_t &roots)
{
int i;
rtx_insn *insn;
@ -8988,7 +8988,7 @@ sched_init_insn_luid (rtx_insn *insn)
The hook common_sched_info->luid_for_non_insn () is used to determine
if notes, labels, etc. need luids. */
void
sched_init_luids (bb_vec_t bbs)
sched_init_luids (const bb_vec_t &bbs)
{
int i;
basic_block bb;
@ -9062,7 +9062,7 @@ init_h_i_d (rtx_insn *insn)
/* Initialize haifa_insn_data for BBS. */
void
haifa_init_h_i_d (bb_vec_t bbs)
haifa_init_h_i_d (const bb_vec_t &bbs)
{
int i;
basic_block bb;

View File

@@ -2946,9 +2946,9 @@ propagate_constants_across_call (struct cgraph_edge *cs)
static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
vec<tree> known_csts,
vec<ipa_polymorphic_call_context> known_contexts,
vec<ipa_agg_value_set> known_aggs,
const vec<tree> &known_csts,
const vec<ipa_polymorphic_call_context> &known_contexts,
const vec<ipa_agg_value_set> &known_aggs,
struct ipa_agg_replacement_value *agg_reps,
bool *speculative)
{
@ -2985,7 +2985,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
}
if (!t)
{
struct ipa_agg_value_set *agg;
const ipa_agg_value_set *agg;
if (known_aggs.length () > (unsigned int) param_index)
agg = &known_aggs[param_index];
else
@ -3045,7 +3045,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
if (!t && known_aggs.length () > (unsigned int) param_index
&& !ie->indirect_info->by_ref)
{
struct ipa_agg_value_set *agg = &known_aggs[param_index];
const ipa_agg_value_set *agg = &known_aggs[param_index];
t = ipa_find_agg_cst_for_param (agg,
(unsigned) param_index
< known_csts.length ()
@ -4267,7 +4267,7 @@ get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
this kind of adjustment is possible. */
static bool
adjust_callers_for_value_intersection (vec<cgraph_edge *> callers,
adjust_callers_for_value_intersection (vec<cgraph_edge *> &callers,
cgraph_node *node)
{
for (unsigned i = 0; i < callers.length (); i++)
@ -4725,8 +4725,8 @@ self_recursive_agg_pass_through_p (cgraph_edge *cs, ipa_agg_jf_item *jfunc,
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
vec<tree> known_csts,
vec<cgraph_edge *> callers)
vec<tree> &known_csts,
const vec<cgraph_edge *> &callers)
{
ipa_node_params *info = ipa_node_params_sum->get (node);
int i, count = ipa_get_param_count (info);
@ -4818,7 +4818,7 @@ static void
find_more_contexts_for_caller_subset (cgraph_node *node,
vec<ipa_polymorphic_call_context>
*known_contexts,
vec<cgraph_edge *> callers)
const vec<cgraph_edge *> &callers)
{
ipa_node_params *info = ipa_node_params_sum->get (node);
int i, count = ipa_get_param_count (info);
@ -5179,7 +5179,7 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
vec<cgraph_edge *> callers)
const vec<cgraph_edge *> &callers)
{
ipa_node_params *dest_info = ipa_node_params_sum->get (node);
struct ipa_agg_replacement_value *res;
@ -5413,7 +5413,7 @@ known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
/* Return a copy of KNOWN_CSTS if it is not empty, otherwise return vNULL. */
static vec<ipa_polymorphic_call_context>
copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
copy_useful_known_contexts (const vec<ipa_polymorphic_call_context> &known_contexts)
{
if (known_contexts_useful_p (known_contexts))
return known_contexts.copy ();

View File

@@ -3967,8 +3967,8 @@ remap_edge_summaries (struct cgraph_edge *inlined_edge,
class ipa_fn_summary *info,
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
vec<int> operand_map,
vec<HOST_WIDE_INT> offset_map,
const vec<int> &operand_map,
const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
predicate *toplev_predicate)
{
@ -4028,8 +4028,8 @@ remap_freqcounting_predicate (class ipa_fn_summary *info,
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
vec<ipa_freqcounting_predicate, va_gc> *v,
vec<int> operand_map,
vec<HOST_WIDE_INT> offset_map,
const vec<int> &operand_map,
const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
predicate *toplev_predicate)

View File

@@ -1774,7 +1774,7 @@ compute_max_insns (cgraph_node *node, int insns)
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
add_new_edges_to_heap (edge_heap_t *heap, vec<cgraph_edge *> new_edges)
add_new_edges_to_heap (edge_heap_t *heap, vec<cgraph_edge *> &new_edges)
{
while (new_edges.length () > 0)
{

View File

@@ -507,8 +507,8 @@ predicate
predicate::remap_after_inlining (class ipa_fn_summary *info,
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
vec<int> operand_map,
vec<HOST_WIDE_INT> offset_map,
const vec<int> &operand_map,
const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
const predicate &toplev_predicate)
{

View File

@@ -243,7 +243,7 @@ public:
predicate remap_after_inlining (class ipa_fn_summary *,
class ipa_node_params *params_summary,
class ipa_fn_summary *,
vec<int>, vec<HOST_WIDE_INT>,
const vec<int> &, const vec<HOST_WIDE_INT> &,
clause_t, const predicate &);
void stream_in (class lto_input_block *);

View File

@@ -3562,7 +3562,7 @@ ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
initializer of a constant. */
tree
ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
HOST_WIDE_INT offset, bool by_ref,
bool *from_global_constant)
{

View File

@@ -1092,7 +1092,7 @@ ipa_bits *ipa_get_ipa_bits_for_value (const widest_int &value,
void ipa_analyze_node (struct cgraph_node *);
/* Aggregate jump function related functions. */
tree ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
tree ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
HOST_WIDE_INT offset, bool by_ref,
bool *from_global_constant = NULL);
bool ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,

View File

@@ -1672,7 +1672,7 @@ finish_cost_vectors (void)
static vec<ira_loop_tree_node_t>
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
vec<ira_loop_tree_node_t> loop_preorder)
const vec<ira_loop_tree_node_t> &loop_preorder)
{
vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
unsigned int n_loop_preorder;

View File

@@ -835,7 +835,7 @@ md_reader::handle_overloaded_name (rtx original, vec<mapping *> *iterators)
gives the iterator associated with argument I of ONAME. */
static void
add_overload_instance (overloaded_name *oname, vec<mapping *> iterators, rtx x)
add_overload_instance (overloaded_name *oname, const vec<mapping *> &iterators, rtx x)
{
/* Create the instance. */
overloaded_instance *instance = new overloaded_instance;

View File

@@ -2416,9 +2416,9 @@ extern void get_full_rtx_cost (rtx, machine_mode, enum rtx_code, int,
struct full_rtx_costs *);
extern bool native_encode_rtx (machine_mode, rtx, vec<target_unit> &,
unsigned int, unsigned int);
extern rtx native_decode_rtx (machine_mode, vec<target_unit>,
extern rtx native_decode_rtx (machine_mode, const vec<target_unit> &,
unsigned int);
extern rtx native_decode_vector_rtx (machine_mode, vec<target_unit>,
extern rtx native_decode_vector_rtx (machine_mode, const vec<target_unit> &,
unsigned int, unsigned int, unsigned int);
extern poly_uint64 subreg_lsb (const_rtx);
extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64);

View File

@@ -43,12 +43,12 @@ extern void sched_init_bbs (void);
extern void sched_extend_luids (void);
extern void sched_init_insn_luid (rtx_insn *);
extern void sched_init_luids (bb_vec_t);
extern void sched_init_luids (const bb_vec_t &);
extern void sched_finish_luids (void);
extern void sched_extend_target (void);
extern void haifa_init_h_i_d (bb_vec_t);
extern void haifa_init_h_i_d (const bb_vec_t &);
extern void haifa_finish_h_i_d (void);
/* Hooks that are common to all the schedulers. */

View File

@@ -6752,7 +6752,7 @@ native_encode_rtx (machine_mode mode, rtx x, vec<target_unit> &bytes,
Return the vector on success, otherwise return NULL_RTX. */
rtx
native_decode_vector_rtx (machine_mode mode, vec<target_unit> bytes,
native_decode_vector_rtx (machine_mode mode, const vec<target_unit> &bytes,
unsigned int first_byte, unsigned int npatterns,
unsigned int nelts_per_pattern)
{
@ -6797,7 +6797,7 @@ native_decode_vector_rtx (machine_mode mode, vec<target_unit> bytes,
Return the rtx on success, otherwise return NULL_RTX. */
rtx
native_decode_rtx (machine_mode mode, vec<target_unit> bytes,
native_decode_rtx (machine_mode mode, const vec<target_unit> &bytes,
unsigned int first_byte)
{
if (VECTOR_MODE_P (mode))

View File

@@ -761,7 +761,7 @@ get_no_error_domain (enum built_in_function fnc)
condition are separated by NULL tree in the vector. */
static void
gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple *> conds,
gen_shrink_wrap_conditions (gcall *bi_call, const vec<gimple *> &conds,
unsigned int *nconds)
{
gcall *call;
@ -797,7 +797,8 @@ gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple *> conds,
when it is non-null, it is called while all of the CONDS are true. */
static void
shrink_wrap_one_built_in_call_with_conds (gcall *bi_call, vec <gimple *> conds,
shrink_wrap_one_built_in_call_with_conds (gcall *bi_call,
const vec <gimple *> &conds,
unsigned int nconds,
gcall *bi_newcall = NULL)
{
@ -1132,7 +1133,7 @@ use_internal_fn (gcall *call)
wrapping transformation. */
static void
shrink_wrap_conditional_dead_built_in_calls (vec<gcall *> calls)
shrink_wrap_conditional_dead_built_in_calls (const vec<gcall *> &calls)
{
unsigned i = 0;

View File

@@ -2643,7 +2643,7 @@ create_intersect_range_checks (class loop *loop, tree *cond_expr,
void
create_runtime_alias_checks (class loop *loop,
vec<dr_with_seg_len_pair_t> *alias_pairs,
const vec<dr_with_seg_len_pair_t> *alias_pairs,
tree * cond_expr)
{
tree part_cond_expr;
@ -5635,9 +5635,9 @@ compute_affine_dependence (struct data_dependence_relation *ddr,
is small enough to be handled. */
bool
compute_all_dependences (vec<data_reference_p> datarefs,
compute_all_dependences (const vec<data_reference_p> &datarefs,
vec<ddr_p> *dependence_relations,
vec<loop_p> loop_nest,
const vec<loop_p> &loop_nest,
bool compute_self_and_rr)
{
struct data_dependence_relation *ddr;

View File

@@ -551,9 +551,9 @@ extern struct data_dependence_relation *initialize_data_dependence_relation
extern void compute_affine_dependence (struct data_dependence_relation *,
loop_p);
extern void compute_self_dependence (struct data_dependence_relation *);
extern bool compute_all_dependences (vec<data_reference_p> ,
extern bool compute_all_dependences (const vec<data_reference_p> &,
vec<ddr_p> *,
vec<loop_p>, bool);
const vec<loop_p> &, bool);
extern tree find_data_references_in_bb (class loop *, basic_block,
vec<data_reference_p> *);
extern unsigned int dr_alignment (innermost_loop_behavior *);
@ -578,7 +578,8 @@ extern int data_ref_compare_tree (tree, tree);
extern void prune_runtime_alias_test_list (vec<dr_with_seg_len_pair_t> *,
poly_uint64);
extern void create_runtime_alias_checks (class loop *,
vec<dr_with_seg_len_pair_t> *, tree*);
const vec<dr_with_seg_len_pair_t> *,
tree*);
extern tree dr_direction_indicator (struct data_reference *);
extern tree dr_zero_step_indicator (struct data_reference *);
extern bool dr_known_forward_stride_p (struct data_reference *);
@ -666,7 +667,7 @@ ddr_dependence_level (ddr_p ddr)
/* Return the index of the variable VAR in the LOOP_NEST array. */
static inline int
index_in_loop_nest (int var, vec<loop_p> loop_nest)
index_in_loop_nest (int var, const vec<loop_p> &loop_nest)
{
class loop *loopi;
int var_index;

View File

@@ -2208,7 +2208,7 @@ insert_gimplified_predicates (loop_p loop)
mask if it was created for given SIZE and -1 otherwise. */
static int
mask_exists (int size, vec<int> vec)
mask_exists (int size, const vec<int> &vec)
{
unsigned int ix;
int v;

View File

@@ -527,7 +527,8 @@ class loop_distribution
/* Build the vertices of the reduced dependence graph RDG. Return false
if that failed. */
bool create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts, loop_p loop);
bool create_rdg_vertices (struct graph *rdg, const vec<gimple *> &stmts,
loop_p loop);
/* Initialize STMTS with all the statements of LOOP. We use topological
order to discover all statements. The order is important because
@ -646,7 +647,7 @@ class loop_distribution
statements from STMTS into separate loops. Returns the number of
distributed loops. Set NB_CALLS to number of generated builtin calls.
Set *DESTROY_P to whether LOOP needs to be destroyed. */
int distribute_loop (class loop *loop, vec<gimple *> stmts,
int distribute_loop (class loop *loop, const vec<gimple *> &stmts,
control_dependences *cd, int *nb_calls, bool *destroy_p,
bool only_patterns_p);
@ -699,7 +700,8 @@ bb_top_order_cmp_r (const void *x, const void *y, void *loop)
}
bool
loop_distribution::create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts,
loop_distribution::create_rdg_vertices (struct graph *rdg,
const vec<gimple *> &stmts,
loop_p loop)
{
int i;
@ -1953,7 +1955,7 @@ loop_distribution::rdg_build_partitions (struct graph *rdg,
/* Dump to FILE the PARTITIONS. */
static void
dump_rdg_partitions (FILE *file, vec<partition *> partitions)
dump_rdg_partitions (FILE *file, const vec<partition *> &partitions)
{
int i;
partition *partition;
@ -1963,10 +1965,10 @@ dump_rdg_partitions (FILE *file, vec<partition *> partitions)
}
/* Debug PARTITIONS. */
extern void debug_rdg_partitions (vec<partition *> );
extern void debug_rdg_partitions (const vec<partition *> &);
DEBUG_FUNCTION void
debug_rdg_partitions (vec<partition *> partitions)
debug_rdg_partitions (const vec<partition *> &partitions)
{
dump_rdg_partitions (stderr, partitions);
}
@ -2017,7 +2019,7 @@ number_of_rw_in_partition (struct graph *rdg, partition *partition)
static bool
partition_contains_all_rw (struct graph *rdg,
vec<partition *> partitions)
const vec<partition *> &partitions)
{
int i;
partition *partition;
@ -2921,7 +2923,8 @@ loop_distribution::finalize_partitions (class loop *loop,
Set *DESTROY_P to whether LOOP needs to be destroyed. */
int
loop_distribution::distribute_loop (class loop *loop, vec<gimple *> stmts,
loop_distribution::distribute_loop (class loop *loop,
const vec<gimple *> &stmts,
control_dependences *cd, int *nb_calls, bool *destroy_p,
bool only_patterns_p)
{

View File

@@ -3713,7 +3713,7 @@ ref_conflicts_with_region (gimple_stmt_iterator gsi, ao_ref *ref,
reduction results in REDUCTION_STORES. */
static bool
oacc_entry_exit_ok_1 (bitmap in_loop_bbs, vec<basic_block> region_bbs,
oacc_entry_exit_ok_1 (bitmap in_loop_bbs, const vec<basic_block> &region_bbs,
reduction_info_table_type *reduction_list,
bitmap reduction_stores)
{
@ -3828,7 +3828,8 @@ oacc_entry_exit_ok_1 (bitmap in_loop_bbs, vec<basic_block> region_bbs,
if any changes were made. */
static bool
oacc_entry_exit_single_gang (bitmap in_loop_bbs, vec<basic_block> region_bbs,
oacc_entry_exit_single_gang (bitmap in_loop_bbs,
const vec<basic_block> &region_bbs,
bitmap reduction_stores)
{
tree gang_pos = NULL_TREE;

View File

@@ -2557,7 +2557,7 @@ sm_seq_valid_bb (class loop *loop, basic_block bb, tree vdef,
static void
hoist_memory_references (class loop *loop, bitmap mem_refs,
vec<edge> exits)
const vec<edge> &exits)
{
im_mem_ref *ref;
unsigned i;
@ -2970,7 +2970,7 @@ find_refs_for_sm (class loop *loop, bitmap sm_executed, bitmap refs_to_sm)
static bool
loop_suitable_for_sm (class loop *loop ATTRIBUTE_UNUSED,
vec<edge> exits)
const vec<edge> &exits)
{
unsigned i;
edge ex;

View File

@@ -3929,7 +3929,7 @@ wide_int_cmp (const void *p1, const void *p2)
Lookup by binary search. */
static int
bound_index (vec<widest_int> bounds, const widest_int &bound)
bound_index (const vec<widest_int> &bounds, const widest_int &bound)
{
unsigned int end = bounds.length ();
unsigned int begin = 0;

View File

@@ -4486,7 +4486,7 @@ get_ops (tree var, enum tree_code code, vec<operand_entry *> *ops,
stmts. */
static tree
update_ops (tree var, enum tree_code code, vec<operand_entry *> ops,
update_ops (tree var, enum tree_code code, const vec<operand_entry *> &ops,
unsigned int *pidx, class loop *loop)
{
gimple *stmt = SSA_NAME_DEF_STMT (var);
@ -5033,7 +5033,7 @@ remove_visited_stmt_chain (tree var)
cases, but it is unlikely to be worth it. */
static void
swap_ops_for_binary_stmt (vec<operand_entry *> ops,
swap_ops_for_binary_stmt (const vec<operand_entry *> &ops,
unsigned int opindex, gimple *stmt)
{
operand_entry *oe1, *oe2, *oe3;
@ -5104,7 +5104,8 @@ insert_stmt_before_use (gimple *stmt, gimple *stmt_to_insert)
static tree
rewrite_expr_tree (gimple *stmt, enum tree_code rhs_code, unsigned int opindex,
vec<operand_entry *> ops, bool changed, bool next_changed)
const vec<operand_entry *> &ops, bool changed,
bool next_changed)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
tree rhs2 = gimple_assign_rhs2 (stmt);
@@ -5326,7 +5327,7 @@ get_reassociation_width (int ops_num, enum tree_code opc,
static void
rewrite_expr_tree_parallel (gassign *stmt, int width,
vec<operand_entry *> ops)
const vec<operand_entry *> &ops)
{
enum tree_code opcode = gimple_assign_rhs_code (stmt);
int op_num = ops.length ();

View File

@@ -1042,9 +1042,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, alias_set_type base_set,
tree type, vec<vn_reference_op_s> ops)
tree type, const vec<vn_reference_op_s> &ops)
{
vn_reference_op_t op;
unsigned i;
tree base = NULL_TREE;
tree *op0_p = &base;
@@ -1067,7 +1066,10 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
size = wi::to_poly_offset (size_tree);
/* Lower the final access size from the outermost expression. */
op = &ops[0];
const_vn_reference_op_t cst_op = &ops[0];
/* Cast away constness for the sake of the const-unsafe
FOR_EACH_VEC_ELT(). */
vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
size_tree = NULL_TREE;
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
@@ -1098,7 +1100,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
vn_reference_op_t pop = &ops[i-1];
const_vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (known_eq (pop->off, -1))
{

View File

@@ -254,7 +254,7 @@ tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
tree, tree *, tree, unsigned int);
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, alias_set_type,
tree, vec<vn_reference_op_s> );
tree, const vec<vn_reference_op_s> &);
vec<vn_reference_op_s> vn_reference_operands_for_lookup (tree);
tree vn_reference_lookup_pieces (tree, alias_set_type, alias_set_type, tree,
vec<vn_reference_op_s> ,

View File

@@ -3713,8 +3713,8 @@ get_constraint_for_rhs (tree t, vec<ce_s> *results)
entries in *LHSC. */
static void
process_all_all_constraints (vec<ce_s> lhsc,
vec<ce_s> rhsc)
process_all_all_constraints (const vec<ce_s> &lhsc,
const vec<ce_s> &rhsc)
{
struct constraint_expr *lhsp, *rhsp;
unsigned i, j;
@@ -3814,7 +3814,7 @@ do_structure_copy (tree lhsop, tree rhsop)
/* Create constraints ID = { rhsc }. */
static void
make_constraints_to (unsigned id, vec<ce_s> rhsc)
make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
{
struct constraint_expr *c;
struct constraint_expr includes;
@@ -4162,7 +4162,7 @@ handle_rhs_call (gcall *stmt, vec<ce_s> *results)
the LHS point to global and escaped variables. */
static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
tree fndecl)
{
auto_vec<ce_s> lhsc;
@@ -4623,9 +4623,10 @@ find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
case BUILT_IN_REALLOC:
if (gimple_call_lhs (t))
{
auto_vec<ce_s> rhsc;
handle_lhs_call (t, gimple_call_lhs (t),
gimple_call_return_flags (t) | ERF_NOALIAS,
vNULL, fndecl);
rhsc, fndecl);
get_constraint_for_ptr_offset (gimple_call_lhs (t),
NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
@@ -5696,7 +5697,7 @@ fieldoff_compare (const void *pa, const void *pb)
/* Sort a fieldstack according to the field offset and sizes. */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
fieldstack.qsort (fieldoff_compare);
}
@@ -6106,7 +6107,7 @@ create_function_info_for (tree decl, const char *name, bool add_id,
FIELDSTACK is assumed to be sorted by offset. */
static bool
check_for_overlaps (vec<fieldoff_s> fieldstack)
check_for_overlaps (const vec<fieldoff_s> &fieldstack)
{
fieldoff_s *fo = NULL;
unsigned int i;

View File

@@ -3192,7 +3192,7 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
tree *cond_expr,
gimple_seq *cond_expr_stmt_list)
{
vec<stmt_vec_info> may_misalign_stmts
const vec<stmt_vec_info> &may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
stmt_vec_info stmt_info;
int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
@@ -3283,7 +3283,8 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
static void
vect_create_cond_for_unequal_addrs (loop_vec_info loop_vinfo, tree *cond_expr)
{
vec<vec_object_pair> pairs = LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo);
const vec<vec_object_pair> &pairs
= LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo);
unsigned int i;
vec_object_pair *pair;
FOR_EACH_VEC_ELT (pairs, i, pair)
@@ -3302,7 +3303,8 @@ vect_create_cond_for_unequal_addrs (loop_vec_info loop_vinfo, tree *cond_expr)
static void
vect_create_cond_for_lower_bounds (loop_vec_info loop_vinfo, tree *cond_expr)
{
vec<vec_lower_bound> lower_bounds = LOOP_VINFO_LOWER_BOUNDS (loop_vinfo);
const vec<vec_lower_bound> &lower_bounds
= LOOP_VINFO_LOWER_BOUNDS (loop_vinfo);
for (unsigned int i = 0; i < lower_bounds.length (); ++i)
{
tree expr = lower_bounds[i].expr;
@@ -3344,7 +3346,7 @@ vect_create_cond_for_lower_bounds (loop_vec_info loop_vinfo, tree *cond_expr)
void
vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo, tree * cond_expr)
{
vec<dr_with_seg_len_pair_t> comp_alias_ddrs =
const vec<dr_with_seg_len_pair_t> &comp_alias_ddrs =
LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo);
if (comp_alias_ddrs.is_empty ())

View File

@@ -790,7 +790,8 @@ is_eq_or_top (complex_perm_kinds_t perm, complex_perm_kinds_t kind)
static inline bool
vect_validate_multiplication (slp_tree_to_load_perm_map_t *perm_cache,
vec<slp_tree> left_op, vec<slp_tree> right_op,
const vec<slp_tree> &left_op,
const vec<slp_tree> &right_op,
bool neg_first, bool *conj_first_operand,
bool fms)
{
@@ -862,7 +863,8 @@ vect_validate_multiplication (slp_tree_to_load_perm_map_t *perm_cache,
static inline bool
vect_validate_multiplication (slp_tree_to_load_perm_map_t *perm_cache,
vec<slp_tree> op, complex_perm_kinds_t permKind)
const vec<slp_tree> &op,
complex_perm_kinds_t permKind)
{
/* The left node is the more common case, test it first. */
if (!is_eq_or_top (linear_loads_p (perm_cache, op[0]), permKind))

View File

@@ -3354,7 +3354,8 @@ vect_analyze_slp_instance (vec_info *vinfo,
else if (kind == slp_inst_kind_reduc_group)
{
/* Collect reduction statements. */
vec<stmt_vec_info> reductions = as_a <loop_vec_info> (vinfo)->reductions;
const vec<stmt_vec_info> &reductions
= as_a <loop_vec_info> (vinfo)->reductions;
scalar_stmts.create (reductions.length ());
for (i = 0; reductions.iterate (i, &next_info); i++)
if (STMT_VINFO_RELEVANT_P (next_info)
@@ -4172,7 +4173,8 @@ vect_make_slp_decision (loop_vec_info loop_vinfo)
{
unsigned int i;
poly_uint64 unrolling_factor = 1;
vec<slp_instance> slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
const vec<slp_instance> &slp_instances
= LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
slp_instance instance;
int decided_to_slp = 0;
@@ -5939,7 +5941,7 @@ vect_slp_region (vec<basic_block> bbs, vec<data_reference_p> datarefs,
true if anything in the basic-block was vectorized. */
static bool
vect_slp_bbs (vec<basic_block> bbs)
vect_slp_bbs (const vec<basic_block> &bbs)
{
vec<data_reference_p> datarefs = vNULL;
auto_vec<int> dataref_groups;
@@ -6084,7 +6086,7 @@ vect_slp_function (function *fun)
void
duplicate_and_interleave (vec_info *vinfo, gimple_seq *seq, tree vector_type,
vec<tree> elts, unsigned int nresults,
const vec<tree> &elts, unsigned int nresults,
vec<tree> &results)
{
unsigned int nelts = elts.length ();
@@ -6440,7 +6442,7 @@ vect_get_slp_defs (vec_info *,
bool
vect_transform_slp_perm_load (vec_info *vinfo,
slp_tree node, vec<tree> dr_chain,
slp_tree node, const vec<tree> &dr_chain,
gimple_stmt_iterator *gsi, poly_uint64 vf,
bool analyze_only, unsigned *n_perms,
unsigned int *n_loads, bool dce_chain)
@@ -7469,7 +7471,7 @@ vect_schedule_scc (vec_info *vinfo, slp_tree node, slp_instance instance,
/* Generate vector code for SLP_INSTANCES in the loop/basic block. */
void
vect_schedule_slp (vec_info *vinfo, vec<slp_instance> slp_instances)
vect_schedule_slp (vec_info *vinfo, const vec<slp_instance> &slp_instances)
{
slp_instance instance;
unsigned int i;

View File

@@ -2072,12 +2072,12 @@ extern tree cse_and_gimplify_to_preheader (loop_vec_info, tree);
extern void vect_slp_init (void);
extern void vect_slp_fini (void);
extern void vect_free_slp_instance (slp_instance);
extern bool vect_transform_slp_perm_load (vec_info *, slp_tree, vec<tree>,
extern bool vect_transform_slp_perm_load (vec_info *, slp_tree, const vec<tree> &,
gimple_stmt_iterator *, poly_uint64,
bool, unsigned *,
unsigned * = nullptr, bool = false);
extern bool vect_slp_analyze_operations (vec_info *);
extern void vect_schedule_slp (vec_info *, vec<slp_instance>);
extern void vect_schedule_slp (vec_info *, const vec<slp_instance> &);
extern opt_result vect_analyze_slp (vec_info *, unsigned);
extern bool vect_make_slp_decision (loop_vec_info);
extern void vect_detect_hybrid_slp (loop_vec_info);
@@ -2095,7 +2095,7 @@ extern bool can_duplicate_and_interleave_p (vec_info *, unsigned int, tree,
unsigned int * = NULL,
tree * = NULL, tree * = NULL);
extern void duplicate_and_interleave (vec_info *, gimple_seq *, tree,
vec<tree>, unsigned int, vec<tree> &);
const vec<tree> &, unsigned int, vec<tree> &);
extern int vect_get_place_in_interleaving_chain (stmt_vec_info, stmt_vec_info);
extern bool vect_update_shared_vectype (stmt_vec_info, tree);
extern slp_tree vect_create_new_slp_node (unsigned, tree_code);

View File

@@ -2047,7 +2047,7 @@ make_vector (unsigned log2_npatterns,
are extracted from V, a vector of CONSTRUCTOR_ELT. */
tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
if (vec_safe_length (v) == 0)
return build_zero_cst (type);
@@ -14428,7 +14428,7 @@ test_labels ()
are given by VALS. */
static tree
build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
{
gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
tree_vector_builder builder (type, vals.length (), 1);
@@ -14439,7 +14439,7 @@ build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
/* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
static void
check_vector_cst (vec<tree> expected, tree actual)
check_vector_cst (const vec<tree> &expected, tree actual)
{
ASSERT_KNOWN_EQ (expected.length (),
TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
@@ -14452,7 +14452,7 @@ check_vector_cst (vec<tree> expected, tree actual)
and that its elements match EXPECTED. */
static void
check_vector_cst_duplicate (vec<tree> expected, tree actual,
check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
@@ -14468,7 +14468,7 @@ check_vector_cst_duplicate (vec<tree> expected, tree actual,
EXPECTED. */
static void
check_vector_cst_fill (vec<tree> expected, tree actual,
check_vector_cst_fill (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
@@ -14483,7 +14483,7 @@ check_vector_cst_fill (vec<tree> expected, tree actual,
and that its elements match EXPECTED. */
static void
check_vector_cst_stepped (vec<tree> expected, tree actual,
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));

View File

@@ -4475,7 +4475,7 @@ extern tree build_int_cst (tree, poly_int64);
extern tree build_int_cstu (tree type, poly_uint64);
extern tree build_int_cst_type (tree, poly_int64);
extern tree make_vector (unsigned, unsigned CXX_MEM_STAT_INFO);
extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_ctor (tree, const vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
extern tree build_uniform_cst (tree, tree);
extern tree build_vec_series (tree, tree, tree);