alias.c (rtx_equal_for_memref_p): Constify.

	* alias.c (rtx_equal_for_memref_p): Constify.
	* basic-block.h (const_edge, const_basic_block): New.
	(reg_set_to_hard_reg_set, dfs_enumerate_from, single_succ_p,
	single_pred_p, single_succ_edge, single_pred_edge, single_succ,
	single_pred, maybe_hot_bb_p, probably_cold_bb_p,
	probably_never_executed_bb_p, edge_probability_reliable_p,
	br_prob_note_reliable_p, forwarder_block_p, flow_nodes_print,
	inside_basic_block_p, control_flow_insn_p, dominated_by_p):
	Likewise.
	* bb-reorder.c (better_edge_p, push_to_next_round_p): Likewise.
	* bt-load.c (basic_block_freq, insn_sets_btr_p, can_move_up):
	Likewise.
	* cfganal.c (flow_active_insn_p, forwarder_block_p,
	flow_nodes_print, dfs_enumerate_from): Likewise.
	* cfgbuild.c (count_basic_blocks, inside_basic_block_p,
	control_flow_insn_p, count_basic_blocks): Likewise.
	* cfgloop.c (flow_bb_inside_loop_p, glb_enum_p,
	get_loop_body_with_size, loop_exit_edge_p): Likewise.
	* cfgloop.h (flow_bb_inside_loop_p, num_loop_insns,
	average_num_loop_insns, loop_exit_edge_p,
	just_once_each_iteration_p, can_duplicate_loop_p): Likewise.
	* cfgloopanal.c (just_once_each_iteration_p, num_loop_insns,
	average_num_loop_insns, seq_cost): Likewise.
	* cfgloopmanip.c (rpe_enum_p, can_duplicate_loop_p): Likewise.
	* dominance.c (dominated_by_p): Likewise.
	* emit-rtl.c (validate_subreg): Likewise.
	* except.c (can_throw_internal, can_throw_external): Likewise.
	* except.h (can_throw_internal, can_throw_external): Likewise.
	* gcse.c (gcse_constant_p, oprs_unchanged_p, oprs_anticipatable_p,
	oprs_available_p, hash_expr, expr_equiv_p, oprs_not_set_p,
	compute_transp, load_killed_in_block_p, reg_killed_on_edge,
	simple_mem, store_ops_ok, load_kills_store, find_loads,
	store_killed_in_insn, store_killed_after, store_killed_before,
	gcse_mem_operand, implicit_set_cond_p, store_killed_in_pat):
	Likewise.
	* ifcvt.c (count_bb_insns, cheap_bb_rtx_cost_p, noce_operand_ok,
	noce_mem_write_may_trap_or_fault_p): Likewise.
	* pointer-set.c (pointer_set_contains, pointer_map_contains):
	Likewise.
	* pointer-set.h (pointer_set_contains, pointer_map_contains):
	Likewise.
	* predict.c (can_predict_insn_p, maybe_hot_bb_p,
	probably_cold_bb_p, probably_never_executed_bb_p,
	edge_probability_reliable_p, br_prob_note_reliable_p,
	can_predict_insn_p): Likewise.
	* regclass.c (reg_set_to_hard_reg_set): Likewise.
	* resource.c (return_insn_p): Likewise.
	* rtl.h (reg_set_between_p, reg_set_p, validate_subreg):
	Likewise.
	* rtlanal.c (reg_set_between_p, reg_set_p): Likewise.
	* tracer.c (count_insns, ignore_bb_p, better_p): Likewise.
	* tree-cfg.c (verify_gimple_unary_expr, verify_gimple_binary_expr,
	verify_gimple_modify_stmt): Likewise.
	* tree-chrec.c (is_not_constant_evolution,
	is_multivariate_chrec_rec, is_multivariate_chrec,
	chrec_contains_symbols, chrec_contains_undetermined,
	tree_contains_chrecs, evolution_function_is_affine_multivariate_p,
	evolution_function_is_univariate_p, avoid_arithmetics_in_type_p,
	eq_evolutions_p, scev_direction): Likewise.
	* tree-chrec.h (automatically_generated_chrec_p, tree_is_chrec,
	eq_evolutions_p, is_multivariate_chrec, chrec_contains_symbols,
	chrec_contains_symbols_defined_in_loop,
	chrec_contains_undetermined, tree_contains_chrecs,
	evolution_function_is_affine_multivariate_p,
	evolution_function_is_univariate_p, chrec_zerop,
	evolution_function_is_constant_p, evolution_function_is_affine_p,
	evolution_function_is_affine_or_constant_p,
	tree_does_not_contain_chrecs, chrec_type): Likewise.
	* tree-data-ref.c (tree_fold_divides_p,
	object_address_invariant_in_loop_p, dr_may_alias_p,
	ziv_subscript_p, siv_subscript_p, gcd_of_steps_may_divide_p,
	same_access_functions, constant_access_functions,
	access_functions_are_affine_or_constant_p, find_vertex_for_stmt):
	Likewise.
	* tree-flow.h (scev_direction): Likewise.
	* tree-gimple.c (is_gimple_stmt): Likewise.
	* tree-outof-ssa.c (identical_copies_p, identical_stmt_lists_p):
	Likewise.
	* tree-pretty-print.c (op_prio): Likewise.
	* tree-scalar-evolution.c (chrec_contains_symbols_defined_in_loop,
	analyzable_condition, backedge_phi_arg_p): Likewise.
	* tree-scalar-evolution.h (get_chrec_loop): Likewise.
	* tree-ssa-operands.c (get_name_decl, operand_build_cmp): Likewise.
	* tree-ssa-threadupdate.c (dbds_continue_enumeration_p):
	Likewise.

From-SVN: r127404
Author: Kaveh R. Ghazi <ghazi@caip.rutgers.edu>
Date:   2007-08-14 04:44:35 +0000 (committed by Kaveh Ghazi)
Commit: ed7a4b4b30 (parent a0b7c7aef6)
37 changed files with 296 additions and 211 deletions
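
Here "constify" means moving read-only predicates onto GCC's const-qualified pointer typedefs (const_rtx, const_tree, const_bitmap, const_sbitmap, and the const_edge / const_basic_block added below), so callers that only hold const pointers no longer need casts. A minimal sketch of the pattern, condensed from the basic-block.h hunk and assuming the usual GCC definitions of struct basic_block_def and EDGE_COUNT:

/* Const-qualified CFG handles, added to basic-block.h by this patch.  */
typedef const struct edge_def *const_edge;
typedef const struct basic_block_def *const_basic_block;

/* Before: bool single_succ_p (basic_block bb);
   after, the block is only inspected, so the parameter can be const.  */
static inline bool
single_succ_p (const_basic_block bb)
{
  return EDGE_COUNT (bb->succs) == 1;
}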

gcc/ChangeLog

@ -1,3 +1,91 @@
2007-08-14 Kaveh R. Ghazi <ghazi@caip.rutgers.edu>
* alias.c (rtx_equal_for_memref_p): Constify.
* basic-block.h (const_edge, const_basic_block): New.
(reg_set_to_hard_reg_set, dfs_enumerate_from, single_succ_p,
single_pred_p, single_succ_edge, single_pred_edge, single_succ,
single_pred, maybe_hot_bb_p, probably_cold_bb_p,
probably_never_executed_bb_p, edge_probability_reliable_p,
br_prob_note_reliable_p, forwarder_block_p, flow_nodes_print,
inside_basic_block_p, control_flow_insn_p, dominated_by_p):
Likewise.
* bb-reorder.c (better_edge_p, push_to_next_round_p): Likewise.
* bt-load.c (basic_block_freq, insn_sets_btr_p, can_move_up):
Likewise.
* cfganal.c (flow_active_insn_p, forwarder_block_p,
flow_nodes_print, dfs_enumerate_from): Likewise.
* cfgbuild.c (count_basic_blocks, inside_basic_block_p,
control_flow_insn_p, count_basic_blocks): Likewise.
* cfgloop.c (flow_bb_inside_loop_p, glb_enum_p,
get_loop_body_with_size, loop_exit_edge_p): Likewise.
* cfgloop.h (flow_bb_inside_loop_p, num_loop_insns,
average_num_loop_insns, loop_exit_edge_p,
just_once_each_iteration_p, can_duplicate_loop_p): Likewise.
* cfgloopanal.c (just_once_each_iteration_p, num_loop_insns,
average_num_loop_insns, seq_cost): Likewise.
* cfgloopmanip.c (rpe_enum_p, can_duplicate_loop_p): Likewise.
* dominance.c (dominated_by_p): Likewise.
* emit-rtl.c (validate_subreg): Likewise.
* except.c (can_throw_internal, can_throw_external): Likewise.
* except.h (can_throw_internal, can_throw_external): Likewise.
* gcse.c (gcse_constant_p, oprs_unchanged_p, oprs_anticipatable_p,
oprs_available_p, hash_expr, expr_equiv_p, oprs_not_set_p,
compute_transp, load_killed_in_block_p, reg_killed_on_edge,
simple_mem, store_ops_ok, load_kills_store, find_loads,
store_killed_in_insn, store_killed_after, store_killed_before,
gcse_mem_operand, implicit_set_cond_p, store_killed_in_pat):
Likewise.
* ifcvt.c (count_bb_insns, cheap_bb_rtx_cost_p, noce_operand_ok,
noce_mem_write_may_trap_or_fault_p): Likewise.
* pointer-set.c (pointer_set_contains, pointer_map_contains):
Likewise.
* pointer-set.h (pointer_set_contains, pointer_map_contains):
Likewise.
* predict.c (can_predict_insn_p, maybe_hot_bb_p,
probably_cold_bb_p, probably_never_executed_bb_p,
edge_probability_reliable_p, br_prob_note_reliable_p,
can_predict_insn_p): Likewise.
* regclass.c (reg_set_to_hard_reg_set): Likewise.
* resource.c (return_insn_p): Likewise.
* rtl.h (reg_set_between_p, reg_set_p, validate_subreg):
Likewise.
* rtlanal.c (reg_set_between_p, reg_set_p): Likewise.
* tracer.c (count_insns, ignore_bb_p, better_p): Likewise.
* tree-cfg.c (verify_gimple_unary_expr, verify_gimple_binary_expr,
verify_gimple_modify_stmt): Likewise.
* tree-chrec.c (is_not_constant_evolution,
is_multivariate_chrec_rec, is_multivariate_chrec,
chrec_contains_symbols, chrec_contains_undetermined,
tree_contains_chrecs, evolution_function_is_affine_multivariate_p,
evolution_function_is_univariate_p, avoid_arithmetics_in_type_p,
eq_evolutions_p, scev_direction): Likewise.
* tree-chrec.h (automatically_generated_chrec_p, tree_is_chrec,
eq_evolutions_p, is_multivariate_chrec, chrec_contains_symbols,
chrec_contains_symbols_defined_in_loop,
chrec_contains_undetermined, tree_contains_chrecs,
evolution_function_is_affine_multivariate_p,
evolution_function_is_univariate_p, chrec_zerop,
evolution_function_is_constant_p, evolution_function_is_affine_p,
evolution_function_is_affine_or_constant_p,
tree_does_not_contain_chrecs, chrec_type): Likewise.
* tree-data-ref.c (tree_fold_divides_p,
object_address_invariant_in_loop_p, dr_may_alias_p,
ziv_subscript_p, siv_subscript_p, gcd_of_steps_may_divide_p,
same_access_functions, constant_access_functions,
access_functions_are_affine_or_constant_p, find_vertex_for_stmt):
Likewise.
* tree-flow.h (scev_direction): Likewise.
* tree-gimple.c (is_gimple_stmt): Likewise.
* tree-outof-ssa.c (identical_copies_p, identical_stmt_lists_p):
Likewise.
* tree-pretty-print.c (op_prio): Likewise.
* tree-scalar-evolution.c (chrec_contains_symbols_defined_in_loop,
analyzable_condition, backedge_phi_arg_p): Likewise.
* tree-scalar-evolution.h (get_chrec_loop): Likewise.
* tree-ssa-operands.c (get_name_decl, operand_build_cmp): Likewise.
* tree-ssa-threadupdate.c (dbds_continue_enumeration_p):
Likewise.
2007-08-13 Dan Hipschman <dsh@google.com>
PR c/32953

gcc/alias.c

@ -148,7 +148,7 @@ struct alias_set_entry GTY(())
};
typedef struct alias_set_entry *alias_set_entry;
static int rtx_equal_for_memref_p (rtx, rtx);
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
@ -1192,7 +1192,7 @@ canon_rtx (rtx x)
different numbers are, in fact, equivalent. */
static int
rtx_equal_for_memref_p (rtx x, rtx y)
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
int i;
int j;

gcc/basic-block.h

@ -80,7 +80,7 @@ typedef bitmap regset;
#define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)
/* Copy the hard registers in a register set to the hard register set. */
extern void reg_set_to_hard_reg_set (HARD_REG_SET *, bitmap);
extern void reg_set_to_hard_reg_set (HARD_REG_SET *, const_bitmap);
#define REG_SET_TO_HARD_REG_SET(TO, FROM) \
do { \
CLEAR_HARD_REG_SET (TO); \
@ -143,6 +143,7 @@ struct edge_def GTY(())
};
typedef struct edge_def *edge;
typedef const struct edge_def *const_edge;
DEF_VEC_P(edge);
DEF_VEC_ALLOC_P(edge,gc);
DEF_VEC_ALLOC_P(edge,heap);
@ -275,6 +276,7 @@ struct tree_bb_info GTY(())
};
typedef struct basic_block_def *basic_block;
typedef const struct basic_block_def *const_basic_block;
DEF_VEC_P(basic_block);
DEF_VEC_ALLOC_P(basic_block,gc);
@ -514,8 +516,8 @@ extern int post_order_compute (int *, bool, bool);
extern int inverted_post_order_compute (int *);
extern int pre_and_rev_post_order_compute (int *, int *, bool);
extern int dfs_enumerate_from (basic_block, int,
bool (*)(basic_block, void *),
basic_block *, int, void *);
bool (*)(const_basic_block, const void *),
basic_block *, int, const void *);
extern void compute_dominance_frontiers (bitmap *);
extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
extern void dump_edge_info (FILE *, edge, int);
@ -607,7 +609,7 @@ struct edge_list
/* Returns true if BB has precisely one successor. */
static inline bool
single_succ_p (basic_block bb)
single_succ_p (const_basic_block bb)
{
return EDGE_COUNT (bb->succs) == 1;
}
@ -615,7 +617,7 @@ single_succ_p (basic_block bb)
/* Returns true if BB has precisely one predecessor. */
static inline bool
single_pred_p (basic_block bb)
single_pred_p (const_basic_block bb)
{
return EDGE_COUNT (bb->preds) == 1;
}
@ -624,7 +626,7 @@ single_pred_p (basic_block bb)
BB does not have exactly one successor. */
static inline edge
single_succ_edge (basic_block bb)
single_succ_edge (const_basic_block bb)
{
gcc_assert (single_succ_p (bb));
return EDGE_SUCC (bb, 0);
@ -634,7 +636,7 @@ single_succ_edge (basic_block bb)
if BB does not have exactly one predecessor. */
static inline edge
single_pred_edge (basic_block bb)
single_pred_edge (const_basic_block bb)
{
gcc_assert (single_pred_p (bb));
return EDGE_PRED (bb, 0);
@ -644,7 +646,7 @@ single_pred_edge (basic_block bb)
if BB does not have exactly one successor. */
static inline basic_block
single_succ (basic_block bb)
single_succ (const_basic_block bb)
{
return single_succ_edge (bb)->dest;
}
@ -653,7 +655,7 @@ single_succ (basic_block bb)
if BB does not have exactly one predecessor.*/
static inline basic_block
single_pred (basic_block bb)
single_pred (const_basic_block bb)
{
return single_pred_edge (bb)->src;
}
@ -825,9 +827,9 @@ extern struct edge_list *pre_edge_rev_lcm (int, sbitmap *,
extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
/* In predict.c */
extern bool maybe_hot_bb_p (basic_block);
extern bool probably_cold_bb_p (basic_block);
extern bool probably_never_executed_bb_p (basic_block);
extern bool maybe_hot_bb_p (const_basic_block);
extern bool probably_cold_bb_p (const_basic_block);
extern bool probably_never_executed_bb_p (const_basic_block);
extern bool tree_predicted_by_p (basic_block, enum br_predictor);
extern bool rtl_predicted_by_p (basic_block, enum br_predictor);
extern void tree_predict_edge (edge, enum br_predictor, int);
@ -835,8 +837,8 @@ extern void rtl_predict_edge (edge, enum br_predictor, int);
extern void predict_edge_def (edge, enum br_predictor, enum prediction);
extern void guess_outgoing_edge_probabilities (basic_block);
extern void remove_predictions_associated_with_edge (edge);
extern bool edge_probability_reliable_p (edge);
extern bool br_prob_note_reliable_p (rtx);
extern bool edge_probability_reliable_p (const_edge);
extern bool br_prob_note_reliable_p (const_rtx);
/* In cfg.c */
extern void dump_regset (regset, FILE *);
@ -862,10 +864,10 @@ extern void free_aux_for_edges (void);
/* In cfganal.c */
extern void find_unreachable_blocks (void);
extern bool forwarder_block_p (basic_block);
extern bool forwarder_block_p (const_basic_block);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_nodes_print (const char *, const_sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
/* In cfgrtl.c */
@ -887,8 +889,8 @@ extern bool mark_dfs_back_edges (void);
extern void set_edge_can_fallthru_flag (void);
extern void update_br_prob_note (basic_block);
extern void fixup_abnormal_edges (void);
extern bool inside_basic_block_p (rtx);
extern bool control_flow_insn_p (rtx);
extern bool inside_basic_block_p (const_rtx);
extern bool control_flow_insn_p (const_rtx);
extern rtx get_last_bb_insn (basic_block);
/* In bb-reorder.c */
@ -914,7 +916,7 @@ extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
extern void set_immediate_dominator (enum cdi_direction, basic_block,
basic_block);
extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
extern bool dominated_by_p (enum cdi_direction, basic_block, basic_block);
extern bool dominated_by_p (enum cdi_direction, const_basic_block, const_basic_block);
extern VEC (basic_block, heap) *get_dominated_by (enum cdi_direction, basic_block);
extern VEC (basic_block, heap) *get_dominated_by_region (enum cdi_direction,
basic_block *,
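
Note the asymmetry in the hunk above: the inline accessors now take a const_basic_block, yet single_succ_edge and single_succ still return plain edge and basic_block, so constness only promises that the argument itself is not modified. A hypothetical caller (not part of this patch) showing that a const block still yields a modifiable edge:

static edge
sole_outgoing_edge (const_basic_block bb)
{
  return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
}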

gcc/bb-reorder.c

@ -174,11 +174,11 @@ static void find_traces_1_round (int, int, gcov_type, struct trace *, int *,
int, fibheap_t *, int);
static basic_block copy_bb (basic_block, edge, basic_block, int);
static fibheapkey_t bb_to_key (basic_block);
static bool better_edge_p (basic_block, edge, int, int, int, int, edge);
static bool better_edge_p (const_basic_block, const_edge, int, int, int, int, const_edge);
static void connect_traces (int, struct trace *);
static bool copy_bb_p (basic_block, int);
static int get_uncond_jump_length (void);
static bool push_to_next_round_p (basic_block, int, int, int, gcov_type);
static bool push_to_next_round_p (const_basic_block, int, int, int, gcov_type);
static void find_rarely_executed_basic_blocks_and_crossing_edges (edge **,
int *,
int *);
@ -198,7 +198,7 @@ static void fix_crossing_unconditional_branches (void);
current round of trace collection. */
static bool
push_to_next_round_p (basic_block bb, int round, int number_of_rounds,
push_to_next_round_p (const_basic_block bb, int round, int number_of_rounds,
int exec_th, gcov_type count_th)
{
bool there_exists_another_round;
@ -847,8 +847,8 @@ bb_to_key (basic_block bb)
BEST_PROB; similarly for frequency. */
static bool
better_edge_p (basic_block bb, edge e, int prob, int freq, int best_prob,
int best_freq, edge cur_best_edge)
better_edge_p (const_basic_block bb, const_edge e, int prob, int freq, int best_prob,
int best_freq, const_edge cur_best_edge)
{
bool is_better_edge;

gcc/bt-load.c

@ -111,8 +111,8 @@ typedef struct btr_def_s
static int issue_rate;
static int basic_block_freq (basic_block);
static int insn_sets_btr_p (rtx, int, int *);
static int basic_block_freq (const_basic_block);
static int insn_sets_btr_p (const_rtx, int, int *);
static rtx *find_btr_use (rtx);
static int btr_referenced_p (rtx, rtx *);
static int find_btr_reference (rtx *, void *);
@ -140,7 +140,7 @@ static void btr_def_live_range (btr_def, HARD_REG_SET *);
static void move_btr_def (basic_block, int, btr_def, bitmap, HARD_REG_SET *);
static int migrate_btr_def (btr_def, int);
static void migrate_btr_defs (enum reg_class, int);
static int can_move_up (basic_block, rtx, int);
static int can_move_up (const_basic_block, const_rtx, int);
static void note_btr_set (rtx, const_rtx, void *);
/* The following code performs code motion of target load instructions
@ -179,7 +179,7 @@ static int first_btr, last_btr;
/* Return an estimate of the frequency of execution of block bb. */
static int
basic_block_freq (basic_block bb)
basic_block_freq (const_basic_block bb)
{
return bb->frequency;
}
@ -222,7 +222,7 @@ btr_referenced_p (rtx x, rtx *excludep)
If such a set is found and REGNO is nonzero, assign the register number
of the destination register to *REGNO. */
static int
insn_sets_btr_p (rtx insn, int check_const, int *regno)
insn_sets_btr_p (const_rtx insn, int check_const, int *regno)
{
rtx set;
@ -1235,7 +1235,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
/* We anticipate intra-block scheduling to be done. See if INSN could move
up within BB by N_INSNS. */
static int
can_move_up (basic_block bb, rtx insn, int n_insns)
can_move_up (const_basic_block bb, const_rtx insn, int n_insns)
{
while (insn != BB_HEAD (bb) && n_insns > 0)
{

gcc/cfganal.c

@ -53,13 +53,13 @@ static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static bool flow_active_insn_p (rtx);
static bool flow_active_insn_p (const_rtx);
/* Like active_insn_p, except keep the return value clobber around
even after reload. */
static bool
flow_active_insn_p (rtx insn)
flow_active_insn_p (const_rtx insn)
{
if (active_insn_p (insn))
return true;
@ -81,7 +81,7 @@ flow_active_insn_p (rtx insn)
its single destination. */
bool
forwarder_block_p (basic_block bb)
forwarder_block_p (const_basic_block bb)
{
rtx insn;
@ -518,7 +518,7 @@ find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ
/* Dump the list of basic blocks in the bitmap NODES. */
void
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
flow_nodes_print (const char *str, const_sbitmap nodes, FILE *file)
{
unsigned int node = 0;
sbitmap_iterator sbi;
@ -1147,8 +1147,8 @@ flow_dfs_compute_reverse_finish (depth_first_search_ds data)
found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
int
dfs_enumerate_from (basic_block bb, int reverse,
bool (*predicate) (basic_block, void *),
basic_block *rslt, int rslt_max, void *data)
bool (*predicate) (const_basic_block, const void *),
basic_block *rslt, int rslt_max, const void *data)
{
basic_block *st, lbb;
int sp = 0, tv = 0;

gcc/cfgbuild.c

@ -45,7 +45,7 @@ along with GCC; see the file COPYING3. If not see
#include "toplev.h"
#include "timevar.h"
static int count_basic_blocks (rtx);
static int count_basic_blocks (const_rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
@ -56,7 +56,7 @@ static void compute_outgoing_frequencies (basic_block);
block. */
bool
inside_basic_block_p (rtx insn)
inside_basic_block_p (const_rtx insn)
{
switch (GET_CODE (insn))
{
@ -88,7 +88,7 @@ inside_basic_block_p (rtx insn)
the basic block. */
bool
control_flow_insn_p (rtx insn)
control_flow_insn_p (const_rtx insn)
{
rtx note;
@ -140,11 +140,11 @@ control_flow_insn_p (rtx insn)
/* Count the basic blocks of the function. */
static int
count_basic_blocks (rtx f)
count_basic_blocks (const_rtx f)
{
int count = NUM_FIXED_BLOCKS;
bool saw_insn = false;
rtx insn;
const_rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
{

gcc/cfgloop.c

@ -765,7 +765,7 @@ disambiguate_loops_with_multiple_latches (void)
/* Return nonzero if basic block BB belongs to LOOP. */
bool
flow_bb_inside_loop_p (const struct loop *loop, const basic_block bb)
flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
{
struct loop *source_loop;
@ -778,9 +778,9 @@ flow_bb_inside_loop_p (const struct loop *loop, const basic_block bb)
/* Enumeration predicate for get_loop_body_with_size. */
static bool
glb_enum_p (basic_block bb, void *glb_loop)
glb_enum_p (const_basic_block bb, const void *glb_loop)
{
struct loop *loop = (struct loop *) glb_loop;
const struct loop *const loop = (const struct loop *) glb_loop;
return (bb != loop->header
&& dominated_by_p (CDI_DOMINATORS, bb, loop->header));
}
@ -797,7 +797,7 @@ get_loop_body_with_size (const struct loop *loop, basic_block *body,
unsigned max_size)
{
return dfs_enumerate_from (loop->header, 1, glb_enum_p,
body, max_size, (void *) loop);
body, max_size, loop);
}
/* Gets basic blocks of a LOOP. Header is the 0-th block, rest is in dfs
@ -1581,7 +1581,7 @@ loop_preheader_edge (const struct loop *loop)
/* Returns true if E is an exit of LOOP. */
bool
loop_exit_edge_p (const struct loop *loop, edge e)
loop_exit_edge_p (const struct loop *loop, const_edge e)
{
return (flow_bb_inside_loop_p (loop, e->src)
&& !flow_bb_inside_loop_p (loop, e->dest));

gcc/cfgloop.h

@ -218,15 +218,15 @@ extern void flow_loop_tree_node_add (struct loop *, struct loop *);
extern void flow_loop_tree_node_remove (struct loop *);
extern void add_loop (struct loop *, struct loop *);
extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
extern bool flow_bb_inside_loop_p (const struct loop *, const basic_block);
extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
extern struct loop * find_common_loop (struct loop *, struct loop *);
struct loop *superloop_at_depth (struct loop *, unsigned);
struct eni_weights_d;
extern unsigned tree_num_loop_insns (struct loop *, struct eni_weights_d *);
extern int num_loop_insns (struct loop *);
extern int average_num_loop_insns (struct loop *);
extern int num_loop_insns (const struct loop *);
extern int average_num_loop_insns (const struct loop *);
extern unsigned get_loop_level (const struct loop *);
extern bool loop_exit_edge_p (const struct loop *, edge);
extern bool loop_exit_edge_p (const struct loop *, const_edge);
extern void mark_loop_exit_edges (void);
/* Loops & cfg manipulation. */
@ -260,7 +260,7 @@ extern void force_single_succ_latches (void);
extern void verify_loop_structure (void);
/* Loop analysis. */
extern bool just_once_each_iteration_p (const struct loop *, basic_block);
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
gcov_type expected_loop_iterations_unbounded (const struct loop *);
extern unsigned expected_loop_iterations (const struct loop *);
extern rtx doloop_condition_get (rtx);
@ -270,7 +270,7 @@ HOST_WIDE_INT estimated_loop_iterations_int (struct loop *, bool);
bool estimated_loop_iterations (struct loop *, bool, double_int *);
/* Loop manipulation. */
extern bool can_duplicate_loop_p (struct loop *loop);
extern bool can_duplicate_loop_p (const struct loop *loop);
#define DLTHE_FLAG_UPDATE_FREQ 1 /* Update frequencies in
duplicate_loop_to_header_edge. */

gcc/cfgloopanal.c

@ -33,7 +33,7 @@ along with GCC; see the file COPYING3. If not see
/* Checks whether BB is executed exactly once in each LOOP iteration. */
bool
just_once_each_iteration_p (const struct loop *loop, basic_block bb)
just_once_each_iteration_p (const struct loop *loop, const_basic_block bb)
{
/* It must be executed at least once each iteration. */
if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
@ -161,7 +161,7 @@ mark_irreducible_loops (void)
/* Counts number of insns inside LOOP. */
int
num_loop_insns (struct loop *loop)
num_loop_insns (const struct loop *loop)
{
basic_block *bbs, bb;
unsigned i, ninsns = 0;
@ -183,7 +183,7 @@ num_loop_insns (struct loop *loop)
/* Counts number of insns executed on average per iteration LOOP. */
int
average_num_loop_insns (struct loop *loop)
average_num_loop_insns (const struct loop *loop)
{
basic_block *bbs, bb;
unsigned i, binsns, ninsns, ratio;
@ -294,7 +294,7 @@ get_loop_level (const struct loop *loop)
/* Returns estimate on cost of computing SEQ. */
static unsigned
seq_cost (rtx seq)
seq_cost (const_rtx seq)
{
unsigned cost = 0;
rtx set;

gcc/cfgloopmanip.c

@ -35,7 +35,7 @@ static void copy_loops_to (struct loop **, int,
struct loop *);
static void loop_redirect_edge (edge, basic_block);
static void remove_bbs (basic_block *, int);
static bool rpe_enum_p (basic_block, void *);
static bool rpe_enum_p (const_basic_block, const void *);
static int find_path (edge, basic_block **);
static void fix_loop_placements (struct loop *, bool *);
static bool fix_bb_placement (basic_block);
@ -46,9 +46,9 @@ static void unloop (struct loop *, bool *);
/* Checks whether basic block BB is dominated by DATA. */
static bool
rpe_enum_p (basic_block bb, void *data)
rpe_enum_p (const_basic_block bb, const void *data)
{
return dominated_by_p (CDI_DOMINATORS, bb, (basic_block) data);
return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
}
/* Remove basic blocks BBS. NBBS is the number of the basic blocks. */
@ -712,7 +712,7 @@ loop_redirect_edge (edge e, basic_block dest)
/* Check whether LOOP's body can be duplicated. */
bool
can_duplicate_loop_p (struct loop *loop)
can_duplicate_loop_p (const struct loop *loop)
{
int ret;
basic_block *bbs = get_loop_body (loop);

gcc/dominance.c

@ -922,7 +922,7 @@ nearest_common_dominator_for_set (enum cdi_direction dir, bitmap blocks)
/* Return TRUE in case BB1 is dominated by BB2. */
bool
dominated_by_p (enum cdi_direction dir, basic_block bb1, basic_block bb2)
dominated_by_p (enum cdi_direction dir, const_basic_block bb1, const_basic_block bb2)
{
unsigned int dir_index = dom_convert_dir_to_idx (dir);
struct et_node *n1 = bb1->dom[dir_index], *n2 = bb2->dom[dir_index];

gcc/emit-rtl.c

@ -622,7 +622,7 @@ gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
rtx reg, unsigned int offset)
const_rtx reg, unsigned int offset)
{
unsigned int isize = GET_MODE_SIZE (imode);
unsigned int osize = GET_MODE_SIZE (omode);

gcc/except.c

@ -2698,7 +2698,7 @@ can_throw_internal_1 (int region_number, bool is_resx)
}
bool
can_throw_internal (rtx insn)
can_throw_internal (const_rtx insn)
{
rtx note;
@ -2752,7 +2752,7 @@ can_throw_external_1 (int region_number, bool is_resx)
}
bool
can_throw_external (rtx insn)
can_throw_external (const_rtx insn)
{
rtx note;

gcc/except.h

@ -46,9 +46,9 @@ extern void for_each_eh_region (void (*) (struct eh_region *));
/* Determine if the given INSN can throw an exception. */
extern bool can_throw_internal_1 (int, bool);
extern bool can_throw_internal (rtx);
extern bool can_throw_internal (const_rtx);
extern bool can_throw_external_1 (int, bool);
extern bool can_throw_external (rtx);
extern bool can_throw_external (const_rtx);
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
extern unsigned int set_nothrow_function_flags (void);

gcc/gcse.c

@ -531,16 +531,16 @@ static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static bool gcse_constant_p (const_rtx);
static int oprs_unchanged_p (const_rtx, const_rtx, int);
static int oprs_anticipatable_p (const_rtx, const_rtx);
static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static int expr_equiv_p (const_rtx, const_rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
@ -552,14 +552,14 @@ static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static int oprs_not_set_p (const_rtx, const_rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transp (const_rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
struct hash_table *);
@ -569,7 +569,7 @@ static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
@ -577,7 +577,7 @@ static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static bool reg_killed_on_edge (const_rtx, const_edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
@ -611,23 +611,23 @@ static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, const_rtx, void *);
static void reg_clear_last_set (rtx, const_rtx, void *);
static bool store_ops_ok (rtx, int *);
static bool store_ops_ok (const_rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static bool load_kills_store (const_rtx, const_rtx, int);
static bool find_loads (const_rtx, const_rtx, int);
static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
static bool store_killed_after (const_rtx, const_rtx, const_rtx, const_basic_block, int *, rtx *);
static bool store_killed_before (const_rtx, const_rtx, const_rtx, const_basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
@ -1239,7 +1239,7 @@ can_assign_to_reg_p (rtx x)
or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
int i, j;
enum rtx_code code;
@ -1325,7 +1325,7 @@ static int gcse_mems_conflict_p;
load_killed_in_block_p. A memory reference for a load instruction,
mems_conflict_for_gcse_p will see if a memory store conflicts with
this memory load. */
static rtx gcse_mem_operand;
static const_rtx gcse_mem_operand;
/* DEST is the output of an instruction. If it is a memory reference, and
possibly conflicts with the load found in gcse_mem_operand, then set
@ -1370,7 +1370,7 @@ mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
AVAIL_P to 0. */
static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x, int avail_p)
{
rtx list_entry = modify_mem_list[bb->index];
@ -1418,7 +1418,7 @@ load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
the start of INSN's basic block up to but not including INSN. */
static int
oprs_anticipatable_p (rtx x, rtx insn)
oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
return oprs_unchanged_p (x, insn, 0);
}
@ -1427,7 +1427,7 @@ oprs_anticipatable_p (rtx x, rtx insn)
INSN to the end of INSN's basic block. */
static int
oprs_available_p (rtx x, rtx insn)
oprs_available_p (const_rtx x, const_rtx insn)
{
return oprs_unchanged_p (x, insn, 1);
}
@ -1440,7 +1440,7 @@ oprs_available_p (rtx x, rtx insn)
the current size of the hash table to be probed. */
static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
int hash_table_size)
{
unsigned int hash;
@ -1471,7 +1471,7 @@ hash_set (int regno, int hash_table_size)
/* Return nonzero if exp1 is equivalent to exp2. */
static int
expr_equiv_p (rtx x, rtx y)
expr_equiv_p (const_rtx x, const_rtx y)
{
return exp_equiv_p (x, y, 0, true);
}
@ -1660,7 +1660,7 @@ insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
the purposes of GCSE's constant propagation. */
static bool
gcse_constant_p (rtx x)
gcse_constant_p (const_rtx x)
{
/* Consider a COMPARE of two integers constant. */
if (GET_CODE (x) == COMPARE
@ -2248,7 +2248,7 @@ reset_opr_set_tables (void)
INSN's basic block. */
static int
oprs_not_set_p (rtx x, rtx insn)
oprs_not_set_p (const_rtx x, const_rtx insn)
{
int i, j;
enum rtx_code code;
@ -2426,7 +2426,7 @@ free_cprop_mem (void)
bit in BMAP. */
static void
compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
{
int i, j;
basic_block bb;
@ -3312,10 +3312,10 @@ fis_get_condition (rtx jump)
it. COND is either an EQ or NE comparison. */
static bool
implicit_set_cond_p (rtx cond)
implicit_set_cond_p (const_rtx cond)
{
enum machine_mode mode = GET_MODE (XEXP (cond, 0));
rtx cst = XEXP (cond, 1);
const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
const_rtx cst = XEXP (cond, 1);
/* We can't perform this optimization if either operand might be or might
contain a signed zero. */
@ -3503,7 +3503,7 @@ find_bypass_set (int regno, int bb)
valid prior to commit_edge_insertions. */
static bool
reg_killed_on_edge (rtx reg, edge e)
reg_killed_on_edge (const_rtx reg, const_edge e)
{
rtx insn;
@ -5206,7 +5206,7 @@ next_ls_expr (struct ls_expr * ptr)
ld_motion list, otherwise we let the usual aliasing take care of it. */
static int
simple_mem (rtx x)
simple_mem (const_rtx x)
{
if (! MEM_P (x))
return 0;
@ -5508,9 +5508,9 @@ reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
due to set of registers in bitmap REGS_SET. */
static bool
store_ops_ok (rtx x, int *regs_set)
store_ops_ok (const_rtx x, int *regs_set)
{
rtx reg;
const_rtx reg;
for (; x; x = XEXP (x, 1))
{
@ -5864,7 +5864,7 @@ compute_store_table (void)
after the X. */
static bool
load_kills_store (rtx x, rtx store_pattern, int after)
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
if (after)
return anti_dependence (x, store_pattern);
@ -5879,7 +5879,7 @@ load_kills_store (rtx x, rtx store_pattern, int after)
after the insn X. */
static bool
find_loads (rtx x, rtx store_pattern, int after)
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
const char * fmt;
int i, j;
@ -5912,7 +5912,7 @@ find_loads (rtx x, rtx store_pattern, int after)
}
static inline bool
store_killed_in_pat (rtx x, rtx pat, int after)
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
if (GET_CODE (pat) == SET)
{
@ -5949,9 +5949,9 @@ store_killed_in_pat (rtx x, rtx pat, int after)
after the insn. Return true if it does. */
static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
rtx reg, base, note, pat;
const_rtx reg, base, note, pat;
if (!INSN_P (insn))
return false;
@ -6017,7 +6017,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
is killed, return the last insn in that it occurs in FAIL_INSN. */
static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
int *regs_set_after, rtx *fail_insn)
{
rtx last = BB_END (bb), act;
@ -6046,7 +6046,7 @@ store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
within basic block BB. X_REGS is list of registers mentioned in X.
REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
int *regs_set_before)
{
rtx first = BB_HEAD (bb);

gcc/ifcvt.c

@ -87,15 +87,15 @@ static int num_true_changes;
static int cond_exec_changed_p;
/* Forward references. */
static int count_bb_insns (basic_block);
static bool cheap_bb_rtx_cost_p (basic_block, int);
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static rtx noce_get_condition (rtx, rtx *, bool);
static int noce_operand_ok (rtx);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
@ -113,7 +113,7 @@ static rtx block_has_only_trap (basic_block);
/* Count the number of non-jump active insns in BB. */
static int
count_bb_insns (basic_block bb)
count_bb_insns (const_basic_block bb)
{
int count = 0;
rtx insn = BB_HEAD (bb);
@ -136,7 +136,7 @@ count_bb_insns (basic_block bb)
false if the cost of any instruction could not be estimated. */
static bool
cheap_bb_rtx_cost_p (basic_block bb, int max_cost)
cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
{
int count = 0;
rtx insn = BB_HEAD (bb);
@ -2071,7 +2071,7 @@ noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
/* Return true if OP is ok for if-then-else processing. */
static int
noce_operand_ok (rtx op)
noce_operand_ok (const_rtx op)
{
/* We special-case memories, so handle any of them with
no address side effects. */
@ -2087,7 +2087,7 @@ noce_operand_ok (rtx op)
/* Return true if a write into MEM may trap or fault. */
static bool
noce_mem_write_may_trap_or_fault_p (rtx mem)
noce_mem_write_may_trap_or_fault_p (const_rtx mem)
{
rtx addr;

gcc/pointer-set.c

@ -93,7 +93,7 @@ pointer_set_destroy (struct pointer_set_t *pset)
Collisions are resolved by linear probing. */
int
pointer_set_contains (struct pointer_set_t *pset, void *p)
pointer_set_contains (struct pointer_set_t *pset, const void *p)
{
size_t n = hash1 (p, pset->n_slots, pset->log_slots);
@ -226,7 +226,7 @@ void pointer_map_destroy (struct pointer_map_t *pmap)
Collisions are resolved by linear probing. */
void **
pointer_map_contains (struct pointer_map_t *pmap, void *p)
pointer_map_contains (struct pointer_map_t *pmap, const void *p)
{
size_t n = hash1 (p, pmap->n_slots, pmap->log_slots);

gcc/pointer-set.h

@ -24,7 +24,7 @@ struct pointer_set_t;
struct pointer_set_t *pointer_set_create (void);
void pointer_set_destroy (struct pointer_set_t *pset);
int pointer_set_contains (struct pointer_set_t *pset, void *p);
int pointer_set_contains (struct pointer_set_t *pset, const void *p);
int pointer_set_insert (struct pointer_set_t *pset, void *p);
void pointer_set_traverse (struct pointer_set_t *, bool (*) (void *, void *),
void *);
@ -33,7 +33,7 @@ struct pointer_map_t;
struct pointer_map_t *pointer_map_create (void);
void pointer_map_destroy (struct pointer_map_t *pmap);
void **pointer_map_contains (struct pointer_map_t *pmap, void *p);
void **pointer_map_contains (struct pointer_map_t *pmap, const void *p);
void **pointer_map_insert (struct pointer_map_t *pmap, void *p);
void pointer_map_traverse (struct pointer_map_t *,
bool (*) (void *, void **, void *), void *);

gcc/predict.c

@ -77,7 +77,7 @@ static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
static bool can_predict_insn_p (const_rtx);
/* Information we hold about each branch predictor.
Filled using information from predict.def. */
@ -111,7 +111,7 @@ static const struct predictor_info predictor_info[]= {
for maximal performance. */
bool
maybe_hot_bb_p (basic_block bb)
maybe_hot_bb_p (const_basic_block bb)
{
if (profile_info && flag_branch_probabilities
&& (bb->count
@ -132,7 +132,7 @@ maybe_hot_bb_p (basic_block bb)
/* Return true in case BB is cold and should be optimized for size. */
bool
probably_cold_bb_p (basic_block bb)
probably_cold_bb_p (const_basic_block bb)
{
if (profile_info && flag_branch_probabilities
&& (bb->count
@ -148,7 +148,7 @@ probably_cold_bb_p (basic_block bb)
/* Return true in case BB is probably never executed. */
bool
probably_never_executed_bb_p (basic_block bb)
probably_never_executed_bb_p (const_basic_block bb)
{
if (profile_info && flag_branch_probabilities)
return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
@ -223,14 +223,14 @@ probability_reliable_p (int prob)
/* Same predicate as above, working on edges. */
bool
edge_probability_reliable_p (edge e)
edge_probability_reliable_p (const_edge e)
{
return probability_reliable_p (e->probability);
}
/* Same predicate as edge_probability_reliable_p, working on notes. */
bool
br_prob_note_reliable_p (rtx note)
br_prob_note_reliable_p (const_rtx note)
{
gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
return probability_reliable_p (INTVAL (XEXP (note, 0)));
@ -358,7 +358,7 @@ clear_bb_predictions (basic_block bb)
At the moment we represent predictions only on conditional
jumps, not at computed jump or other complicated cases. */
static bool
can_predict_insn_p (rtx insn)
can_predict_insn_p (const_rtx insn)
{
return (JUMP_P (insn)
&& any_condjump_p (insn)

gcc/regclass.c

@ -262,7 +262,7 @@ unsigned char hard_regno_nregs[FIRST_PSEUDO_REGISTER][MAX_MACHINE_MODE];
with moving single words, but probably isn't worth the trouble. */
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
reg_set_to_hard_reg_set (HARD_REG_SET *to, const_bitmap from)
{
unsigned i;
bitmap_iterator bi;

gcc/resource.c

@ -823,7 +823,7 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
/* Return TRUE if INSN is a return, possibly with a filled delay slot. */
static bool
return_insn_p (rtx insn)
return_insn_p (const_rtx insn)
{
if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
return true;

gcc/rtl.h

@ -1688,13 +1688,13 @@ extern int reg_mentioned_p (const_rtx, const_rtx);
extern int count_occurrences (const_rtx, const_rtx, int);
extern int reg_referenced_p (const_rtx, const_rtx);
extern int reg_used_between_p (const_rtx, const_rtx, const_rtx);
extern int reg_set_between_p (rtx, rtx, rtx);
extern int reg_set_between_p (const_rtx, const_rtx, const_rtx);
extern int commutative_operand_precedence (rtx);
extern bool swap_commutative_operands_p (rtx, rtx);
extern int modified_between_p (rtx, rtx, rtx);
extern int no_labels_between_p (const_rtx, const_rtx);
extern int modified_in_p (rtx, rtx);
extern int reg_set_p (rtx, rtx);
extern int reg_set_p (const_rtx, const_rtx);
extern rtx single_set_2 (const_rtx, const_rtx);
extern int multiple_sets (const_rtx);
extern int set_noop_p (const_rtx);
@ -2076,7 +2076,7 @@ extern rtx gen_const_mem (enum machine_mode, rtx);
extern rtx gen_frame_mem (enum machine_mode, rtx);
extern rtx gen_tmp_stack_mem (enum machine_mode, rtx);
extern bool validate_subreg (enum machine_mode, enum machine_mode,
rtx, unsigned int);
const_rtx, unsigned int);
/* In combine.c */
extern unsigned int extended_count (const_rtx, enum machine_mode, int);

gcc/rtlanal.c

@ -802,9 +802,9 @@ reg_referenced_p (const_rtx x, const_rtx body)
FROM_INSN and TO_INSN (exclusive of those two). */
int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
rtx insn;
const_rtx insn;
if (from_insn == to_insn)
return 0;
@ -817,7 +817,7 @@ reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
/* Internals of reg_set_between_p. */
int
reg_set_p (rtx reg, rtx insn)
reg_set_p (const_rtx reg, const_rtx insn)
{
/* We can be passed an insn or part of one. If we are passed an insn,
check if a side-effect of the insn clobbers REG. */

gcc/tracer.c

@ -50,9 +50,9 @@
#include "coverage.h"
#include "tree-pass.h"
static int count_insns (basic_block);
static bool ignore_bb_p (basic_block);
static bool better_p (edge, edge);
static int count_insns (const_basic_block);
static bool ignore_bb_p (const_basic_block);
static bool better_p (const_edge, const_edge);
static edge find_best_successor (basic_block);
static edge find_best_predecessor (basic_block);
static int find_trace (basic_block, basic_block *);
@ -70,7 +70,7 @@ static int branch_ratio_cutoff;
/* Return true if we should ignore the basic block for purposes of tracing. */
static bool
ignore_bb_p (basic_block bb)
ignore_bb_p (const_basic_block bb)
{
if (bb->index < NUM_FIXED_BLOCKS)
return true;
@ -82,9 +82,9 @@ ignore_bb_p (basic_block bb)
/* Return number of instructions in the block. */
static int
count_insns (basic_block bb)
count_insns (const_basic_block bb)
{
rtx insn;
const_rtx insn;
int n = 0;
for (insn = BB_HEAD (bb);
@ -97,7 +97,7 @@ count_insns (basic_block bb)
/* Return true if E1 is more frequent than E2. */
static bool
better_p (edge e1, edge e2)
better_p (const_edge e1, const_edge e2)
{
if (e1->count != e2->count)
return e1->count > e2->count;

gcc/tree-cfg.c

@ -3365,7 +3365,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
if there is an error, otherwise false. */
static bool
verify_gimple_unary_expr (tree expr)
verify_gimple_unary_expr (const_tree expr)
{
tree op = TREE_OPERAND (expr, 0);
tree type = TREE_TYPE (expr);
@ -3395,7 +3395,7 @@ verify_gimple_unary_expr (tree expr)
if there is an error, otherwise false. */
static bool
verify_gimple_binary_expr (tree expr)
verify_gimple_binary_expr (const_tree expr)
{
tree op0 = TREE_OPERAND (expr, 0);
tree op1 = TREE_OPERAND (expr, 1);
@ -3906,7 +3906,7 @@ verify_gimple_expr (tree expr)
is an error, otherwise false. */
static bool
verify_gimple_modify_stmt (tree stmt)
verify_gimple_modify_stmt (const_tree stmt)
{
tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

gcc/tree-chrec.c

@ -45,7 +45,7 @@ along with GCC; see the file COPYING3. If not see
/* Determines whether CST is not a constant evolution. */
static inline bool
is_not_constant_evolution (tree cst)
is_not_constant_evolution (const_tree cst)
{
return (TREE_CODE (cst) == POLYNOMIAL_CHREC);
}
@ -835,7 +835,7 @@ chrec_merge (tree chrec1,
/* Helper function for is_multivariate_chrec. */
static bool
is_multivariate_chrec_rec (tree chrec, unsigned int rec_var)
is_multivariate_chrec_rec (const_tree chrec, unsigned int rec_var)
{
if (chrec == NULL_TREE)
return false;
@ -855,7 +855,7 @@ is_multivariate_chrec_rec (tree chrec, unsigned int rec_var)
/* Determine whether the given chrec is multivariate or not. */
bool
is_multivariate_chrec (tree chrec)
is_multivariate_chrec (const_tree chrec)
{
if (chrec == NULL_TREE)
return false;
@ -872,7 +872,7 @@ is_multivariate_chrec (tree chrec)
/* Determines whether the chrec contains symbolic names or not. */
bool
chrec_contains_symbols (tree chrec)
chrec_contains_symbols (const_tree chrec)
{
int i, n;
@ -898,7 +898,7 @@ chrec_contains_symbols (tree chrec)
/* Determines whether the chrec contains undetermined coefficients. */
bool
chrec_contains_undetermined (tree chrec)
chrec_contains_undetermined (const_tree chrec)
{
int i, n;
@ -920,7 +920,7 @@ chrec_contains_undetermined (tree chrec)
the tree. */
bool
tree_contains_chrecs (tree expr, int *size)
tree_contains_chrecs (const_tree expr, int *size)
{
int i, n;
@ -995,7 +995,7 @@ evolution_function_is_invariant_p (tree chrec, int loopnum)
evolution. */
bool
evolution_function_is_affine_multivariate_p (tree chrec, int loopnum)
evolution_function_is_affine_multivariate_p (const_tree chrec, int loopnum)
{
if (chrec == NULL_TREE)
return false;
@ -1040,7 +1040,7 @@ evolution_function_is_affine_multivariate_p (tree chrec, int loopnum)
variables. */
bool
evolution_function_is_univariate_p (tree chrec)
evolution_function_is_univariate_p (const_tree chrec)
{
if (chrec == NULL_TREE)
return true;
@ -1103,7 +1103,7 @@ nb_vars_in_chrec (tree chrec)
arithmetics, even though it is a scalar type. */
static bool
avoid_arithmetics_in_type_p (tree type)
avoid_arithmetics_in_type_p (const_tree type)
{
/* Ada frontend uses subtypes -- an arithmetic cannot be directly performed
in the subtype, but a base type must be used, and the result then can
@ -1362,8 +1362,7 @@ chrec_convert_aggressive (tree type, tree chrec)
/* Returns true when CHREC0 == CHREC1. */
bool
eq_evolutions_p (tree chrec0,
tree chrec1)
eq_evolutions_p (const_tree chrec0, const_tree chrec1)
{
if (chrec0 == NULL_TREE
|| chrec1 == NULL_TREE
@ -1392,9 +1391,9 @@ eq_evolutions_p (tree chrec0,
which of these cases happens. */
enum ev_direction
scev_direction (tree chrec)
scev_direction (const_tree chrec)
{
tree step;
const_tree step;
if (!evolution_function_is_affine_p (chrec))
return EV_DIR_UNKNOWN;

gcc/tree-chrec.h

@ -33,7 +33,7 @@ extern GTY(()) tree chrec_known;
include it in the following function. */
static inline bool
automatically_generated_chrec_p (tree chrec)
automatically_generated_chrec_p (const_tree chrec)
{
return (chrec == chrec_dont_know
|| chrec == chrec_known);
@ -42,7 +42,7 @@ automatically_generated_chrec_p (tree chrec)
/* The tree nodes aka. CHRECs. */
static inline bool
tree_is_chrec (tree expr)
tree_is_chrec (const_tree expr)
{
if (TREE_CODE (expr) == POLYNOMIAL_CHREC
|| automatically_generated_chrec_p (expr))
@ -72,22 +72,22 @@ extern tree reset_evolution_in_loop (unsigned, tree, tree);
extern tree chrec_merge (tree, tree);
/* Observers. */
extern bool eq_evolutions_p (tree, tree);
extern bool is_multivariate_chrec (tree);
extern bool eq_evolutions_p (const_tree, const_tree);
extern bool is_multivariate_chrec (const_tree);
extern bool chrec_is_positive (tree, bool *);
extern bool chrec_contains_symbols (tree);
extern bool chrec_contains_symbols_defined_in_loop (tree, unsigned);
extern bool chrec_contains_undetermined (tree);
extern bool tree_contains_chrecs (tree, int *);
extern bool evolution_function_is_affine_multivariate_p (tree, int);
extern bool evolution_function_is_univariate_p (tree);
extern bool chrec_contains_symbols (const_tree);
extern bool chrec_contains_symbols_defined_in_loop (const_tree, unsigned);
extern bool chrec_contains_undetermined (const_tree);
extern bool tree_contains_chrecs (const_tree, int *);
extern bool evolution_function_is_affine_multivariate_p (const_tree, int);
extern bool evolution_function_is_univariate_p (const_tree);
extern unsigned nb_vars_in_chrec (tree);
extern bool evolution_function_is_invariant_p (tree, int);
/* Determines whether CHREC is equal to zero. */
static inline bool
chrec_zerop (tree chrec)
chrec_zerop (const_tree chrec)
{
if (chrec == NULL_TREE)
return false;
@ -147,7 +147,7 @@ build_polynomial_chrec (unsigned loop_num,
/* Determines whether the expression CHREC is a constant. */
static inline bool
evolution_function_is_constant_p (tree chrec)
evolution_function_is_constant_p (const_tree chrec)
{
if (chrec == NULL_TREE)
return false;
@ -166,7 +166,7 @@ evolution_function_is_constant_p (tree chrec)
/* Determine whether the given tree is an affine evolution function or not. */
static inline bool
evolution_function_is_affine_p (tree chrec)
evolution_function_is_affine_p (const_tree chrec)
{
if (chrec == NULL_TREE)
return false;
@ -191,7 +191,7 @@ evolution_function_is_affine_p (tree chrec)
function. */
static inline bool
evolution_function_is_affine_or_constant_p (tree chrec)
evolution_function_is_affine_or_constant_p (const_tree chrec)
{
return evolution_function_is_affine_p (chrec)
|| evolution_function_is_constant_p (chrec);
@ -200,7 +200,7 @@ evolution_function_is_affine_or_constant_p (tree chrec)
/* Determines whether EXPR does not contains chrec expressions. */
static inline bool
tree_does_not_contain_chrecs (tree expr)
tree_does_not_contain_chrecs (const_tree expr)
{
return !tree_contains_chrecs (expr, NULL);
}
@ -208,7 +208,7 @@ tree_does_not_contain_chrecs (tree expr)
/* Returns the type of the chrec. */
static inline tree
chrec_type (tree chrec)
chrec_type (const_tree chrec)
{
if (automatically_generated_chrec_p (chrec))
return NULL_TREE;

gcc/tree-data-ref.c

@ -128,7 +128,7 @@ static bool subscript_dependence_tester_1 (struct data_dependence_relation *,
/* Returns true iff A divides B. */
static inline bool
tree_fold_divides_p (tree a, tree b)
tree_fold_divides_p (const_tree a, const_tree b)
{
gcc_assert (TREE_CODE (a) == INTEGER_CST);
gcc_assert (TREE_CODE (b) == INTEGER_CST);
@ -1040,7 +1040,7 @@ conflict_fn_no_dependence (void)
/* Returns true if the address of OBJ is invariant in LOOP. */
static bool
object_address_invariant_in_loop_p (struct loop *loop, tree obj)
object_address_invariant_in_loop_p (const struct loop *loop, const_tree obj)
{
while (handled_component_p (obj))
{
@ -1159,12 +1159,12 @@ disjoint_objects_p (tree a, tree b)
true otherwise. */
static bool
dr_may_alias_p (struct data_reference *a, struct data_reference *b)
dr_may_alias_p (const struct data_reference *a, const struct data_reference *b)
{
tree addr_a = DR_BASE_ADDRESS (a);
tree addr_b = DR_BASE_ADDRESS (b);
tree type_a, type_b;
tree decl_a = NULL_TREE, decl_b = NULL_TREE;
const_tree addr_a = DR_BASE_ADDRESS (a);
const_tree addr_b = DR_BASE_ADDRESS (b);
const_tree type_a, type_b;
const_tree decl_a = NULL_TREE, decl_b = NULL_TREE;
/* If the sets of virtual operands are disjoint, the memory references do not
alias. */
@ -1355,8 +1355,7 @@ non_affine_dependence_relation (struct data_dependence_relation *ddr)
variables, i.e., if the ZIV (Zero Index Variable) test is true. */
static inline bool
ziv_subscript_p (tree chrec_a,
tree chrec_b)
ziv_subscript_p (const_tree chrec_a, const_tree chrec_b)
{
return (evolution_function_is_constant_p (chrec_a)
&& evolution_function_is_constant_p (chrec_b));
@ -1366,8 +1365,7 @@ ziv_subscript_p (tree chrec_a,
variable, i.e., if the SIV (Single Index Variable) test is true. */
static bool
siv_subscript_p (tree chrec_a,
tree chrec_b)
siv_subscript_p (const_tree chrec_a, const_tree chrec_b)
{
if ((evolution_function_is_constant_p (chrec_a)
&& evolution_function_is_univariate_p (chrec_b))
@ -2416,7 +2414,7 @@ analyze_siv_subscript (tree chrec_a,
of CHREC does not divide CST, false otherwise. */
static bool
gcd_of_steps_may_divide_p (tree chrec, tree cst)
gcd_of_steps_may_divide_p (const_tree chrec, const_tree cst)
{
HOST_WIDE_INT cd = 0, val;
tree step;
@ -2790,7 +2788,7 @@ build_classic_dist_vector_1 (struct data_dependence_relation *ddr,
same access functions. */
static bool
same_access_functions (struct data_dependence_relation *ddr)
same_access_functions (const struct data_dependence_relation *ddr)
{
unsigned i;
@ -2805,7 +2803,7 @@ same_access_functions (struct data_dependence_relation *ddr)
/* Return true when the DDR contains only constant access functions. */
static bool
constant_access_functions (struct data_dependence_relation *ddr)
constant_access_functions (const struct data_dependence_relation *ddr)
{
unsigned i;
@ -3212,8 +3210,8 @@ subscript_dependence_tester (struct data_dependence_relation *ddr,
constant with respect to LOOP_NEST. */
static bool
access_functions_are_affine_or_constant_p (struct data_reference *a,
struct loop *loop_nest)
access_functions_are_affine_or_constant_p (const struct data_reference *a,
const struct loop *loop_nest)
{
unsigned int i;
VEC(tree,heap) *fns = DR_ACCESS_FNS (a);
@ -4341,7 +4339,7 @@ free_data_refs (VEC (data_reference_p, heap) *datarefs)
/* Returns the index of STMT in RDG. */
static int
find_vertex_for_stmt (struct graph *rdg, tree stmt)
find_vertex_for_stmt (const struct graph *rdg, const_tree stmt)
{
int i;

gcc/tree-flow.h

@ -984,7 +984,7 @@ bool convert_affine_scev (struct loop *, tree, tree *, tree *, tree, bool);
bool nowrap_type_p (tree);
enum ev_direction {EV_DIR_GROWS, EV_DIR_DECREASES, EV_DIR_UNKNOWN};
enum ev_direction scev_direction (tree);
enum ev_direction scev_direction (const_tree);
void free_numbers_of_iterations_estimates (void);
void free_numbers_of_iterations_estimates_loop (struct loop *);

gcc/tree-gimple.c

@ -202,7 +202,7 @@ is_gimple_min_invariant (const_tree t)
bool
is_gimple_stmt (tree t)
{
enum tree_code code = TREE_CODE (t);
const enum tree_code code = TREE_CODE (t);
switch (code)
{

gcc/tree-outof-ssa.c

@ -795,7 +795,7 @@ same_stmt_list_p (edge e)
/* Return TRUE if S1 and S2 are equivalent copies. */
static inline bool
identical_copies_p (tree s1, tree s2)
identical_copies_p (const_tree s1, const_tree s2)
{
#ifdef ENABLE_CHECKING
gcc_assert (TREE_CODE (s1) == GIMPLE_MODIFY_STMT);
@ -821,7 +821,7 @@ identical_copies_p (tree s1, tree s2)
contain the same sequence of copies. */
static inline bool
identical_stmt_lists_p (edge e1, edge e2)
identical_stmt_lists_p (const_edge e1, const_edge e2)
{
tree t1 = PENDING_STMT (e1);
tree t2 = PENDING_STMT (e2);

gcc/tree-pretty-print.c

@ -36,7 +36,7 @@ along with GCC; see the file COPYING3. If not see
#include "value-prof.h"
/* Local functions, macros and variables. */
static int op_prio (tree);
static int op_prio (const_tree);
static const char *op_symbol (tree);
static void pretty_print_string (pretty_printer *, const char*);
static void print_call_name (pretty_printer *, tree);
@ -2291,7 +2291,7 @@ print_struct_decl (pretty_printer *buffer, tree node, int spc, int flags)
operators. */
static int
op_prio (tree op)
op_prio (const_tree op)
{
if (op == NULL)
return 9999;

gcc/tree-scalar-evolution.c

@ -353,7 +353,7 @@ find_var_scev_info (tree var)
LOOP_NB. */
bool
chrec_contains_symbols_defined_in_loop (tree chrec, unsigned loop_nb)
chrec_contains_symbols_defined_in_loop (const_tree chrec, unsigned loop_nb)
{
int i, n;
@ -898,7 +898,7 @@ set_nb_iterations_in_loop (struct loop *loop,
EXPR. */
static bool
analyzable_condition (tree expr)
analyzable_condition (const_tree expr)
{
tree condition;
@ -1201,9 +1201,9 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
/* Checks whether the I-th argument of a PHI comes from a backedge. */
static bool
backedge_phi_arg_p (tree phi, int i)
backedge_phi_arg_p (const_tree phi, int i)
{
edge e = PHI_ARG_EDGE (phi, i);
const_edge e = PHI_ARG_EDGE (phi, i);
/* We would in fact like to test EDGE_DFS_BACK here, but we do not care
about updating it anywhere, and this should work as well most of the

gcc/tree-scalar-evolution.h

@ -40,7 +40,7 @@ extern bool simple_iv (struct loop *, tree, tree, affine_iv *, bool);
/* Returns the loop of the polynomial chrec CHREC. */
static inline struct loop *
get_chrec_loop (tree chrec)
get_chrec_loop (const_tree chrec)
{
return get_loop (CHREC_VARIABLE (chrec));
}

gcc/tree-ssa-operands.c

@ -199,7 +199,7 @@ static VEC(scb_t,heap) *scb_stack;
/* Return the DECL_UID of the base variable of T. */
static inline unsigned
get_name_decl (tree t)
get_name_decl (const_tree t)
{
if (TREE_CODE (t) != SSA_NAME)
return DECL_UID (t);
@ -213,12 +213,10 @@ get_name_decl (tree t)
static int
operand_build_cmp (const void *p, const void *q)
{
tree e1 = *((const tree *)p);
tree e2 = *((const tree *)q);
unsigned int u1,u2;
u1 = get_name_decl (e1);
u2 = get_name_decl (e2);
const_tree const e1 = *((const_tree const *)p);
const_tree const e2 = *((const_tree const *)q);
const unsigned int u1 = get_name_decl (e1);
const unsigned int u2 = get_name_decl (e2);
/* We want to sort in ascending order. They can never be equal. */
#ifdef ENABLE_CHECKING

View File

@ -671,9 +671,9 @@ thread_single_edge (edge e)
static basic_block dbds_ce_stop;
static bool
dbds_continue_enumeration_p (basic_block bb, void *stop)
dbds_continue_enumeration_p (const_basic_block bb, const void *stop)
{
return (bb != (basic_block) stop
return (bb != (const_basic_block) stop
&& bb != dbds_ce_stop);
}