This patch normalizes more bitmap function names.

  sbitmap.h

    TEST_BIT -> bitmap_bit_p
    SET_BIT -> bitmap_set_bit
    SET_BIT_WITH_POPCOUNT -> bitmap_set_bit_with_popcount
    RESET_BIT -> bitmap_clear_bit
    RESET_BIT_WITH_POPCOUNT -> bitmap_clear_bit_with_popcount
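
For illustration, a minimal caller-side sketch of these renames (not part
of the patch; the helper name and the "visited" map are hypothetical, but
the pattern matches the cfganal.c hunks below):

    /* Assumes GCC's sbitmap.h.  The bit-index argument is now int.  */
    static void
    mark_visited_example (sbitmap visited, int index)
    {
      if (!bitmap_bit_p (visited, index))   /* was TEST_BIT */
        bitmap_set_bit (visited, index);    /* was SET_BIT */
      else
        bitmap_clear_bit (visited, index);  /* was RESET_BIT */
    }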

  basic-block.h

    sbitmap_intersection_of_succs -> bitmap_intersection_of_succs
    sbitmap_intersection_of_preds -> bitmap_intersection_of_preds
    sbitmap_union_of_succs -> bitmap_union_of_succs
    sbitmap_union_of_preds -> bitmap_union_of_preds
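
The basic-block.h dataflow helpers keep their signatures (destination
sbitmap, array of per-block sbitmaps, basic block) and change only in
name.  A hypothetical sketch, in the style of the compute_out hunk below:

    /* Assumes GCC's basic-block.h and sbitmap.h.  */
    static void
    union_preds_example (sbitmap bb_in, sbitmap *bb_out, basic_block bb)
    {
      /* Was: sbitmap_union_of_preds (bb_in, bb_out, bb);  */
      bitmap_union_of_preds (bb_in, bb_out, bb);
    }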

The sbitmap.h functions also needed their numeric parameter changed
from unsigned int to int to match the bitmap functions.

Callers updated to match.

Tested on x86-64, with additional config-list.mk testing.


Index: gcc/ChangeLog

2012-11-01  Lawrence Crowl  <crowl@google.com>

	* sbitmap.h (TEST_BIT): Rename bitmap_bit_p, normalizing parameter
	type. Update callers to match.
	(SET_BIT): Rename bitmap_set_bit, normalizing parameter type. Update
	callers to match.
	(SET_BIT_WITH_POPCOUNT): Rename bitmap_set_bit_with_popcount,
	normalizing parameter type. Update callers to match.
	(RESET_BIT): Rename bitmap_clear_bit, normalizing parameter type.
	Update callers to match.
	(RESET_BIT_WITH_POPCOUNT): Rename bitmap_clear_bit_with_popcount,
	normalizing parameter type. Update callers to match.
	* basic-block.h (sbitmap_intersection_of_succs): Rename
	bitmap_intersection_of_succs. Update callers to match.
	* basic-block.h (sbitmap_intersection_of_preds): Rename
	bitmap_intersection_of_preds. Update callers to match.
	* basic-block.h (sbitmap_union_of_succs): Rename
	bitmap_union_of_succs. Update callers to match.
	* basic-block.h (sbitmap_union_of_preds): Rename
	bitmap_union_of_preds. Update callers to match.

From-SVN: r193066
Lawrence Crowl 2012-11-01 19:23:35 +00:00 committed by Lawrence Crowl
parent 6cd1dd2675
commit d7c028c07b
66 changed files with 484 additions and 467 deletions


@ -1,3 +1,24 @@
2012-11-01 Lawrence Crowl <crowl@google.com>
* sbitmap.h (TEST_BIT): Rename bitmap_bit_p, normalizing parameter
type. Update callers to match.
(SET_BIT): Rename bitmap_set_bit, normalizing parameter type. Update
callers to match.
(SET_BIT_WITH_POPCOUNT): Rename bitmap_set_bit_with_popcount,
normalizing parameter type. Update callers to match.
(RESET_BIT): Rename bitmap_clear_bit, normalizing parameter type.
Update callers to match.
(RESET_BIT_WITH_POPCOUNT): Rename bitmap_clear_bit_with_popcount,
normalizing parameter type. Update callers to match.
* basic-block.h (sbitmap_intersection_of_succs): Rename
bitmap_intersection_of_succs. Update callers to match.
* basic-block.h (sbitmap_intersection_of_preds): Rename
bitmap_intersection_of_preds. Update callers to match.
* basic-block.h (sbitmap_union_of_succs): Rename
bitmap_union_of_succs. Update callers to match.
* basic-block.h (sbitmap_union_of_preds): Rename
bitmap_union_of_preds. Update callers to match.
2012-11-01 Vladimir Makarov <vmakarov@redhat.com>
PR middle-end/55150


@ -1245,7 +1245,7 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
while (--n >= 0)
{
SET_BIT (reg_seen, regno + n);
bitmap_set_bit (reg_seen, regno + n);
new_reg_base_value[regno + n] = 0;
}
return;
@ -1266,12 +1266,12 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
else
{
/* There's a REG_NOALIAS note against DEST. */
if (TEST_BIT (reg_seen, regno))
if (bitmap_bit_p (reg_seen, regno))
{
new_reg_base_value[regno] = 0;
return;
}
SET_BIT (reg_seen, regno);
bitmap_set_bit (reg_seen, regno);
new_reg_base_value[regno] = unique_base_value (unique_id++);
return;
}
@ -1327,10 +1327,10 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
}
/* If this is the first set of a register, record the value. */
else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
&& ! TEST_BIT (reg_seen, regno) && new_reg_base_value[regno] == 0)
&& ! bitmap_bit_p (reg_seen, regno) && new_reg_base_value[regno] == 0)
new_reg_base_value[regno] = find_base_value (src);
SET_BIT (reg_seen, regno);
bitmap_set_bit (reg_seen, regno);
}
/* Return REG_BASE_VALUE for REGNO. Selective scheduler uses this to avoid
@ -1377,7 +1377,7 @@ get_reg_known_equiv_p (unsigned int regno)
{
regno -= FIRST_PSEUDO_REGISTER;
if (regno < VEC_length (rtx, reg_known_value))
return TEST_BIT (reg_known_equiv_p, regno);
return bitmap_bit_p (reg_known_equiv_p, regno);
}
return false;
}
@ -1391,9 +1391,9 @@ set_reg_known_equiv_p (unsigned int regno, bool val)
if (regno < VEC_length (rtx, reg_known_value))
{
if (val)
SET_BIT (reg_known_equiv_p, regno);
bitmap_set_bit (reg_known_equiv_p, regno);
else
RESET_BIT (reg_known_equiv_p, regno);
bitmap_clear_bit (reg_known_equiv_p, regno);
}
}
}


@ -717,10 +717,10 @@ ei_cond (edge_iterator ei, edge *p)
#define CLEANUP_CFG_CHANGED 64 /* The caller changed the CFG. */
/* In cfganal.c */
extern void sbitmap_intersection_of_succs (sbitmap, sbitmap *, basic_block);
extern void sbitmap_intersection_of_preds (sbitmap, sbitmap *, basic_block);
extern void sbitmap_union_of_succs (sbitmap, sbitmap *, basic_block);
extern void sbitmap_union_of_preds (sbitmap, sbitmap *, basic_block);
extern void bitmap_intersection_of_succs (sbitmap, sbitmap *, basic_block);
extern void bitmap_intersection_of_preds (sbitmap, sbitmap *, basic_block);
extern void bitmap_union_of_succs (sbitmap, sbitmap *, basic_block);
extern void bitmap_union_of_preds (sbitmap, sbitmap *, basic_block);
/* In lcm.c */
extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,


@ -500,10 +500,10 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
SET_HARD_REG_BIT (info.btrs_live_in_block, regno);
bitmap_and_compl (bb_gen[i], bb_gen[i],
btr_defset[regno - first_btr]);
SET_BIT (bb_gen[i], insn_uid);
bitmap_set_bit (bb_gen[i], insn_uid);
def->next_this_bb = defs_this_bb;
defs_this_bb = def;
SET_BIT (btr_defset[regno - first_btr], insn_uid);
bitmap_set_bit (btr_defset[regno - first_btr], insn_uid);
note_other_use_this_block (regno, info.users_this_bb);
}
/* Check for the blockage emitted by expand_nl_goto_receiver. */
@ -652,7 +652,7 @@ compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
changed = 0;
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
sbitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
bb_in, bb_kill[i]);
}
@ -675,7 +675,7 @@ link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
rtx insn;
rtx last;
sbitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK (i));
bitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK (i));
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
insn != last;
insn = NEXT_INSN (insn))
@ -692,7 +692,7 @@ link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
for this one. */
bitmap_and_compl (reaching_defs, reaching_defs,
btr_defset[def->btr - first_btr]);
SET_BIT(reaching_defs, insn_uid);
bitmap_set_bit(reaching_defs, insn_uid);
}
if (user != NULL)


@ -1846,7 +1846,7 @@ mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
for (k = 0; k < size; k++)
if (i + k < SBITMAP_SIZE (stored_args_map)
&& TEST_BIT (stored_args_map, i + k))
&& bitmap_bit_p (stored_args_map, i + k))
return true;
}
@ -2133,7 +2133,7 @@ check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_
#endif
for (high = low + arg->locate.size.constant; low < high; low++)
SET_BIT (stored_args_map, low);
bitmap_set_bit (stored_args_map, low);
}
return insn != NULL_RTX;
}


@ -289,11 +289,11 @@ cached_make_edge (sbitmap edge_cache, basic_block src, basic_block dst, int flag
return make_edge (src, dst, flags);
/* Does the requested edge already exist? */
if (! TEST_BIT (edge_cache, dst->index))
if (! bitmap_bit_p (edge_cache, dst->index))
{
/* The edge does not exist. Create one and update the
cache. */
SET_BIT (edge_cache, dst->index);
bitmap_set_bit (edge_cache, dst->index);
return unchecked_make_edge (src, dst, flags);
}


@ -102,10 +102,10 @@ mark_dfs_back_edges (void)
ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
/* Check if the edge destination has been visited yet. */
if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
SET_BIT (visited, dest->index);
bitmap_set_bit (visited, dest->index);
pre[dest->index] = prenum++;
if (EDGE_COUNT (dest->succs) > 0)
@ -518,10 +518,10 @@ post_order_compute (int *post_order, bool include_entry_exit,
dest = ei_edge (ei)->dest;
/* Check if the edge destination has been visited yet. */
if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
SET_BIT (visited, dest->index);
bitmap_set_bit (visited, dest->index);
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
@ -560,7 +560,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
{
next_bb = b->next_bb;
if (!(TEST_BIT (visited, b->index)))
if (!(bitmap_bit_p (visited, b->index)))
delete_basic_block (b);
}
@ -664,7 +664,7 @@ inverted_post_order_compute (int *post_order)
if (EDGE_COUNT (bb->preds) > 0)
{
stack[sp++] = ei_start (bb->preds);
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
}
}
@ -684,10 +684,10 @@ inverted_post_order_compute (int *post_order)
pred = ei_edge (ei)->src;
/* Check if the predecessor has been visited yet. */
if (! TEST_BIT (visited, pred->index))
if (! bitmap_bit_p (visited, pred->index))
{
/* Mark that we have visited the destination. */
SET_BIT (visited, pred->index);
bitmap_set_bit (visited, pred->index);
if (EDGE_COUNT (pred->preds) > 0)
/* Since the predecessor node has been visited for the first
@ -712,7 +712,7 @@ inverted_post_order_compute (int *post_order)
Note that this doesn't check EXIT_BLOCK itself
since EXIT_BLOCK is always added after the outer do-while loop. */
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
if (!TEST_BIT (visited, bb->index))
if (!bitmap_bit_p (visited, bb->index))
{
has_unvisited_bb = true;
@ -725,7 +725,7 @@ inverted_post_order_compute (int *post_order)
/* Find an already visited predecessor. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (TEST_BIT (visited, e->src->index))
if (bitmap_bit_p (visited, e->src->index))
visited_pred = e->src;
}
@ -733,7 +733,7 @@ inverted_post_order_compute (int *post_order)
{
basic_block be = dfs_find_deadend (bb);
gcc_assert (be != NULL);
SET_BIT (visited, be->index);
bitmap_set_bit (visited, be->index);
stack[sp++] = ei_start (be->preds);
break;
}
@ -746,7 +746,7 @@ inverted_post_order_compute (int *post_order)
Find a dead-end from the ENTRY, and restart the iteration. */
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
gcc_assert (be != NULL);
SET_BIT (visited, be->index);
bitmap_set_bit (visited, be->index);
stack[sp++] = ei_start (be->preds);
}
@ -820,10 +820,10 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
dest = ei_edge (ei)->dest;
/* Check if the edge destination has been visited yet. */
if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
SET_BIT (visited, dest->index);
bitmap_set_bit (visited, dest->index);
if (pre_order)
pre_order[pre_order_num] = dest->index;
@ -929,7 +929,7 @@ static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
data->stack[data->sp++] = bb;
SET_BIT (data->visited_blocks, bb->index);
bitmap_set_bit (data->visited_blocks, bb->index);
}
/* Continue the depth-first search through the reverse graph starting with the
@ -951,13 +951,13 @@ flow_dfs_compute_reverse_execute (depth_first_search_ds data,
/* Perform depth-first search on adjacent vertices. */
FOR_EACH_EDGE (e, ei, bb->preds)
if (!TEST_BIT (data->visited_blocks, e->src->index))
if (!bitmap_bit_p (data->visited_blocks, e->src->index))
flow_dfs_compute_reverse_add_bb (data, e->src);
}
/* Determine if there are unvisited basic blocks. */
FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
if (!TEST_BIT (data->visited_blocks, bb->index))
if (!bitmap_bit_p (data->visited_blocks, bb->index))
return dfs_find_deadend (bb);
return NULL;
@ -993,9 +993,9 @@ dfs_enumerate_from (basic_block bb, int reverse,
static sbitmap visited;
static unsigned v_size;
#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define UNMARK_VISITED(BB) (bitmap_clear_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
/* Resize the VISITED sbitmap if necessary. */
size = last_basic_block;
@ -1193,8 +1193,7 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
basic block B. */
void
sbitmap_intersection_of_succs (sbitmap dst, sbitmap *src,
basic_block b)
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
@ -1235,8 +1234,7 @@ sbitmap_intersection_of_succs (sbitmap dst, sbitmap *src,
basic block B. */
void
sbitmap_intersection_of_preds (sbitmap dst, sbitmap *src,
basic_block b)
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
@ -1277,8 +1275,7 @@ sbitmap_intersection_of_preds (sbitmap dst, sbitmap *src,
basic block B. */
void
sbitmap_union_of_succs (sbitmap dst, sbitmap *src,
basic_block b)
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
@ -1319,8 +1316,7 @@ sbitmap_union_of_succs (sbitmap dst, sbitmap *src,
basic block B. */
void
sbitmap_union_of_preds (sbitmap dst, sbitmap *src,
basic_block b)
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;


@ -241,7 +241,7 @@ make_edges (basic_block min, basic_block max, int update_p)
{
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR)
SET_BIT (edge_cache, e->dest->index);
bitmap_set_bit (edge_cache, e->dest->index);
}
}
@ -605,7 +605,7 @@ find_many_sub_basic_blocks (sbitmap blocks)
FOR_EACH_BB (bb)
SET_STATE (bb,
TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
FOR_EACH_BB (bb)
if (STATE (bb) == BLOCK_TO_SPLIT)


@ -420,7 +420,7 @@ flow_loops_find (struct loops *loops)
&& dominated_by_p (CDI_DOMINATORS, latch, header))
{
/* Shared headers should be eliminated by now. */
SET_BIT (headers, header->index);
bitmap_set_bit (headers, header->index);
num_loops++;
}
}
@ -451,7 +451,7 @@ flow_loops_find (struct loops *loops)
/* Search the nodes of the CFG in reverse completion order
so that we can find outer loops first. */
if (!TEST_BIT (headers, rc_order[b]))
if (!bitmap_bit_p (headers, rc_order[b]))
continue;
header = BASIC_BLOCK (rc_order[b]);
@ -1361,9 +1361,9 @@ verify_loop_structure (void)
bb = bbs[j];
/* Ignore this block if it is in an inner loop. */
if (TEST_BIT (visited, bb->index))
if (bitmap_bit_p (visited, bb->index))
continue;
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
if (bb->loop_father != loop)
{
@ -1426,9 +1426,9 @@ verify_loop_structure (void)
{
edge_iterator ei;
if (bb->flags & BB_IRREDUCIBLE_LOOP)
SET_BIT (irreds, bb->index);
bitmap_set_bit (irreds, bb->index);
else
RESET_BIT (irreds, bb->index);
bitmap_clear_bit (irreds, bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
e->flags |= EDGE_ALL_FLAGS + 1;
@ -1443,13 +1443,13 @@ verify_loop_structure (void)
edge_iterator ei;
if ((bb->flags & BB_IRREDUCIBLE_LOOP)
&& !TEST_BIT (irreds, bb->index))
&& !bitmap_bit_p (irreds, bb->index))
{
error ("basic block %d should be marked irreducible", bb->index);
err = 1;
}
else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
&& TEST_BIT (irreds, bb->index))
&& bitmap_bit_p (irreds, bb->index))
{
error ("basic block %d should not be marked irreducible", bb->index);
err = 1;


@ -191,9 +191,9 @@ fix_bb_placements (basic_block from,
in_queue = sbitmap_alloc (last_basic_block);
bitmap_clear (in_queue);
SET_BIT (in_queue, from->index);
bitmap_set_bit (in_queue, from->index);
/* Prevent us from going out of the base_loop. */
SET_BIT (in_queue, base_loop->header->index);
bitmap_set_bit (in_queue, base_loop->header->index);
queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
qtop = queue + base_loop->num_nodes + 1;
@ -208,7 +208,7 @@ fix_bb_placements (basic_block from,
qbeg++;
if (qbeg == qtop)
qbeg = queue;
RESET_BIT (in_queue, from->index);
bitmap_clear_bit (in_queue, from->index);
if (from->loop_father->header == from)
{
@ -242,7 +242,7 @@ fix_bb_placements (basic_block from,
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
*irred_invalidated = true;
if (TEST_BIT (in_queue, pred->index))
if (bitmap_bit_p (in_queue, pred->index))
continue;
/* If it is subloop, then it either was not moved, or
@ -262,7 +262,7 @@ fix_bb_placements (basic_block from,
continue;
}
if (TEST_BIT (in_queue, pred->index))
if (bitmap_bit_p (in_queue, pred->index))
continue;
/* Schedule the basic block. */
@ -270,7 +270,7 @@ fix_bb_placements (basic_block from,
qend++;
if (qend == qtop)
qend = queue;
SET_BIT (in_queue, pred->index);
bitmap_set_bit (in_queue, pred->index);
}
}
free (in_queue);
@ -331,19 +331,19 @@ remove_path (edge e)
/* Find "border" hexes -- i.e. those with predecessor in removed path. */
for (i = 0; i < nrem; i++)
SET_BIT (seen, rem_bbs[i]->index);
bitmap_set_bit (seen, rem_bbs[i]->index);
if (!irred_invalidated)
FOR_EACH_EDGE (ae, ei, e->src->succs)
if (ae != e && ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index)
if (ae != e && ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index)
&& ae->flags & EDGE_IRREDUCIBLE_LOOP)
irred_invalidated = true;
for (i = 0; i < nrem; i++)
{
bb = rem_bbs[i];
FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
if (ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index))
if (ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index))
{
SET_BIT (seen, ae->dest->index);
bitmap_set_bit (seen, ae->dest->index);
bord_bbs[n_bord_bbs++] = ae->dest;
if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
@ -371,9 +371,9 @@ remove_path (edge e)
basic_block ldom;
bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
if (TEST_BIT (seen, bb->index))
if (bitmap_bit_p (seen, bb->index))
continue;
SET_BIT (seen, bb->index);
bitmap_set_bit (seen, bb->index);
for (ldom = first_dom_son (CDI_DOMINATORS, bb);
ldom;
@ -598,7 +598,7 @@ update_dominators_in_loop (struct loop *loop)
body = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
SET_BIT (seen, body[i]->index);
bitmap_set_bit (seen, body[i]->index);
for (i = 0; i < loop->num_nodes; i++)
{
@ -607,9 +607,9 @@ update_dominators_in_loop (struct loop *loop)
for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
ldom;
ldom = next_dom_son (CDI_DOMINATORS, ldom))
if (!TEST_BIT (seen, ldom->index))
if (!bitmap_bit_p (seen, ldom->index))
{
SET_BIT (seen, ldom->index);
bitmap_set_bit (seen, ldom->index);
VEC_safe_push (basic_block, heap, dom_bbs, ldom);
}
}
@ -1206,7 +1206,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
scale_step = XNEWVEC (int, ndupl);
for (i = 1; i <= ndupl; i++)
scale_step[i - 1] = TEST_BIT (wont_exit, i)
scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
? prob_pass_wont_exit
: prob_pass_thru;
@ -1233,7 +1233,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
}
else if (is_latch)
{
prob_pass_main = TEST_BIT (wont_exit, 0)
prob_pass_main = bitmap_bit_p (wont_exit, 0)
? prob_pass_wont_exit
: prob_pass_thru;
p = prob_pass_main;
@ -1342,7 +1342,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
}
/* Record exit edge in this copy. */
if (orig && TEST_BIT (wont_exit, j + 1))
if (orig && bitmap_bit_p (wont_exit, j + 1))
{
if (to_remove)
VEC_safe_push (edge, heap, *to_remove, new_spec_edges[SE_ORIG]);
@ -1378,7 +1378,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
free (orig_loops);
/* Record the exit edge in the original loop body, and update the frequencies. */
if (orig && TEST_BIT (wont_exit, 0))
if (orig && bitmap_bit_p (wont_exit, 0))
{
if (to_remove)
VEC_safe_push (edge, heap, *to_remove, orig);


@ -3734,7 +3734,7 @@ break_superblocks (void)
if (bb->flags & BB_SUPERBLOCK)
{
bb->flags &= ~BB_SUPERBLOCK;
SET_BIT (superblocks, bb->index);
bitmap_set_bit (superblocks, bb->index);
need = true;
}
@ -4255,7 +4255,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
if (! blocks)
check_last_block = true;
else
check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
@ -4305,7 +4305,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
if (!bb)
continue;
if (blocks && !TEST_BIT (blocks, i))
if (blocks && !bitmap_bit_p (blocks, i))
continue;
for (insn = BB_END (bb); ; insn = prev_insn)


@ -99,7 +99,7 @@ resolve_sw_modes (void)
selected_mode = (enum attr_fp_mode) epiphany_normal_fp_rounding;
VEC_quick_push (basic_block, todo, bb);
SET_BIT (pushed, bb->index);
bitmap_set_bit (pushed, bb->index);
}
XVECEXP (XEXP (src, 0), 0, 0) = GEN_INT (selected_mode);
SET_SRC (XVECEXP (PATTERN (insn), 0, 1)) = copy_rtx (src);
@ -114,8 +114,8 @@ resolve_sw_modes (void)
edge e;
edge_iterator ei;
SET_BIT (pushed, bb->index);
SET_BIT (pushed, bb->index);
bitmap_set_bit (pushed, bb->index);
bitmap_set_bit (pushed, bb->index);
if (epiphany_normal_fp_rounding == FP_MODE_ROUND_NEAREST)
{
@ -139,10 +139,10 @@ resolve_sw_modes (void)
continue;
if (REGNO_REG_SET_P (DF_LIVE_IN (succ), selected_reg))
{
if (TEST_BIT (pushed, succ->index))
if (bitmap_bit_p (pushed, succ->index))
continue;
VEC_quick_push (basic_block, todo, succ);
SET_BIT (pushed, bb->index);
bitmap_set_bit (pushed, bb->index);
continue;
}
start_sequence ();


@ -407,7 +407,7 @@ move_or_delete_vzeroupper (void)
bitmap_ones (in_pending);
FOR_EACH_BB (bb)
if (BLOCK_INFO (bb)->processed)
RESET_BIT (in_pending, bb->index);
bitmap_clear_bit (in_pending, bb->index);
else
{
move_or_delete_vzeroupper_1 (bb, false);
@ -433,13 +433,13 @@ move_or_delete_vzeroupper (void)
while (!fibheap_empty (worklist))
{
bb = (basic_block) fibheap_extract_min (worklist);
RESET_BIT (in_worklist, bb->index);
gcc_assert (!TEST_BIT (visited, bb->index));
if (!TEST_BIT (visited, bb->index))
bitmap_clear_bit (in_worklist, bb->index);
gcc_assert (!bitmap_bit_p (visited, bb->index));
if (!bitmap_bit_p (visited, bb->index))
{
edge_iterator ei;
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
if (move_or_delete_vzeroupper_1 (bb, false))
FOR_EACH_EDGE (e, ei, bb->succs)
@ -448,21 +448,21 @@ move_or_delete_vzeroupper (void)
|| BLOCK_INFO (e->dest)->processed)
continue;
if (TEST_BIT (visited, e->dest->index))
if (bitmap_bit_p (visited, e->dest->index))
{
if (!TEST_BIT (in_pending, e->dest->index))
if (!bitmap_bit_p (in_pending, e->dest->index))
{
/* Send E->DEST to next round. */
SET_BIT (in_pending, e->dest->index);
bitmap_set_bit (in_pending, e->dest->index);
fibheap_insert (pending,
bb_order[e->dest->index],
e->dest);
}
}
else if (!TEST_BIT (in_worklist, e->dest->index))
else if (!bitmap_bit_p (in_worklist, e->dest->index))
{
/* Add E->DEST to current round. */
SET_BIT (in_worklist, e->dest->index);
bitmap_set_bit (in_worklist, e->dest->index);
fibheap_insert (worklist, bb_order[e->dest->index],
e->dest);
}
@ -24740,7 +24740,7 @@ core2i7_first_cycle_multipass_filter_ready_try
ready_try[n_ready] = 1;
if (data->ready_try_change)
SET_BIT (data->ready_try_change, n_ready);
bitmap_set_bit (data->ready_try_change, n_ready);
}
}
}


@ -14799,7 +14799,7 @@ r10k_protected_bb_p (basic_block bb, sbitmap protected_bbs)
FOR_EACH_EDGE (e, ei, bb->preds)
if (!single_succ_p (e->src)
|| !TEST_BIT (protected_bbs, e->src->index)
|| !bitmap_bit_p (protected_bbs, e->src->index)
|| (e->flags & EDGE_COMPLEX) != 0)
return false;
return true;
@ -14893,7 +14893,7 @@ r10k_insert_cache_barriers (void)
/* Record whether the end of this block is protected. */
if (unprotected_region == NULL_RTX)
SET_BIT (protected_bbs, bb->index);
bitmap_set_bit (protected_bbs, bb->index);
}
XDELETEVEC (rev_post_order);


@ -2102,7 +2102,7 @@ spu_emit_branch_hint (rtx before, rtx branch, rtx target,
LABEL_PRESERVE_P (branch_label) = 1;
insn = emit_label_before (branch_label, branch);
branch_label = gen_rtx_LABEL_REF (VOIDmode, branch_label);
SET_BIT (blocks, BLOCK_FOR_INSN (branch)->index);
bitmap_set_bit (blocks, BLOCK_FOR_INSN (branch)->index);
hint = emit_insn_before (gen_hbr (branch_label, target), before);
recog_memoized (hint);


@ -617,20 +617,20 @@ compute_local_properties (sbitmap *kill, sbitmap *comp,
is killed in the block where the definition is. */
for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
def; def = DF_REF_NEXT_REG (def))
SET_BIT (kill[DF_REF_BB (def)->index], indx);
bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);
/* If the source is a pseudo-reg, for each definition of the source,
the expression is killed in the block where the definition is. */
if (REG_P (expr->src))
for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
def; def = DF_REF_NEXT_REG (def))
SET_BIT (kill[DF_REF_BB (def)->index], indx);
bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);
/* The occurrences recorded in avail_occr are exactly those that
are locally available in the block where they are. */
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
}
}
}
@ -658,7 +658,7 @@ compute_cprop_data (void)
{
int index = implicit_set_indexes[bb->index];
if (index != -1)
SET_BIT (cprop_avin[bb->index], index);
bitmap_set_bit (cprop_avin[bb->index], index);
}
}
@ -830,7 +830,7 @@ find_avail_set (int regno, rtx insn)
which contains INSN. */
while (set)
{
if (TEST_BIT (cprop_avin[BLOCK_FOR_INSN (insn)->index],
if (bitmap_bit_p (cprop_avin[BLOCK_FOR_INSN (insn)->index],
set->bitmap_index))
break;
set = next_set (regno, set);
@ -1447,7 +1447,7 @@ find_bypass_set (int regno, int bb)
while (set)
{
if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
if (bitmap_bit_p (cprop_avout[bb], set->bitmap_index))
break;
set = next_set (regno, set);
}


@ -6151,7 +6151,7 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
edge e;
int path_size;
SET_BIT (cse_visited_basic_blocks, first_bb->index);
bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
/* See if there is a previous path. */
path_size = data->path_size;
@ -6208,9 +6208,9 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
We still want to visit each basic block only once, so
halt the path here if we have already visited BB. */
&& !TEST_BIT (cse_visited_basic_blocks, bb->index))
&& !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
{
SET_BIT (cse_visited_basic_blocks, bb->index);
bitmap_set_bit (cse_visited_basic_blocks, bb->index);
data->path[path_size++].bb = bb;
break;
}
@ -6253,10 +6253,10 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
&& single_pred_p (e->dest)
/* Avoid visiting basic blocks twice. The large comment
above explains why this can happen. */
&& !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
&& !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
{
basic_block bb2 = e->dest;
SET_BIT (cse_visited_basic_blocks, bb2->index);
bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
data->path[path_size++].bb = bb2;
bb = bb2;
}
@ -6468,7 +6468,7 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
/* If we truncate the path, we must also reset the
visited bit on the remaining blocks in the path,
or we will never visit them at all. */
RESET_BIT (cse_visited_basic_blocks,
bitmap_clear_bit (cse_visited_basic_blocks,
ebb_data->path[path_size].bb->index);
ebb_data->path[path_size].bb = NULL;
}
@ -6560,7 +6560,7 @@ cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
{
bb = BASIC_BLOCK (rc_order[i++]);
}
while (TEST_BIT (cse_visited_basic_blocks, bb->index)
while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
&& i < n_blocks);
/* Find all paths starting with BB, and process them. */


@ -163,7 +163,7 @@ marked_insn_p (rtx insn)
/* Artificial defs are always needed and they do not have an insn.
We should never see them here. */
gcc_assert (insn);
return TEST_BIT (marked, INSN_UID (insn));
return bitmap_bit_p (marked, INSN_UID (insn));
}
@ -177,7 +177,7 @@ mark_insn (rtx insn, bool fast)
{
if (!fast)
VEC_safe_push (rtx, heap, worklist, insn);
SET_BIT (marked, INSN_UID (insn));
bitmap_set_bit (marked, INSN_UID (insn));
if (dump_file)
fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
if (CALL_P (insn)


@ -562,7 +562,7 @@ build_intra_loop_deps (ddg_ptr g)
{
/* Don't bother calculating inter-loop dep if an intra-loop dep
already exists. */
if (! TEST_BIT (dest_node->successors, j))
if (! bitmap_bit_p (dest_node->successors, j))
add_inter_loop_mem_dep (g, dest_node, j_node);
/* If -fmodulo-sched-allow-regmoves
is set certain anti-dep edges are not created.
@ -572,7 +572,7 @@ build_intra_loop_deps (ddg_ptr g)
memory dependencies. Thus we add intra edges between
every two memory instructions in this case. */
if (flag_modulo_sched_allow_regmoves
&& !TEST_BIT (dest_node->predecessors, j))
&& !bitmap_bit_p (dest_node->predecessors, j))
add_intra_loop_mem_dep (g, j_node, dest_node);
}
}
@ -838,8 +838,8 @@ add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
/* Should have allocated the sbitmaps. */
gcc_assert (src->successors && dest->predecessors);
SET_BIT (src->successors, dest->cuid);
SET_BIT (dest->predecessors, src->cuid);
bitmap_set_bit (src->successors, dest->cuid);
bitmap_set_bit (dest->predecessors, src->cuid);
e->next_in = dest->in;
dest->in = e;
e->next_out = src->out;
@ -899,7 +899,7 @@ create_scc (ddg_ptr g, sbitmap nodes)
ddg_node_ptr n = &g->nodes[u];
for (e = n->out; e; e = e->next_out)
if (TEST_BIT (nodes, e->dest->cuid))
if (bitmap_bit_p (nodes, e->dest->cuid))
{
e->aux.count = IN_SCC;
if (e->distance > 0)
@ -1079,8 +1079,8 @@ create_ddg_all_sccs (ddg_ptr g)
bitmap_clear (scc_nodes);
bitmap_clear (from);
bitmap_clear (to);
SET_BIT (from, dest->cuid);
SET_BIT (to, src->cuid);
bitmap_set_bit (from, dest->cuid);
bitmap_set_bit (to, src->cuid);
if (find_nodes_on_paths (scc_nodes, g, from, to))
{
@ -1151,10 +1151,10 @@ find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
ddg_node_ptr v_node = e->dest;
int v = v_node->cuid;
if (!TEST_BIT (reachable_from, v))
if (!bitmap_bit_p (reachable_from, v))
{
SET_BIT (reachable_from, v);
SET_BIT (tmp, v);
bitmap_set_bit (reachable_from, v);
bitmap_set_bit (tmp, v);
change = 1;
}
}
@ -1180,10 +1180,10 @@ find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
ddg_node_ptr v_node = e->src;
int v = v_node->cuid;
if (!TEST_BIT (reach_to, v))
if (!bitmap_bit_p (reach_to, v))
{
SET_BIT (reach_to, v);
SET_BIT (tmp, v);
bitmap_set_bit (reach_to, v);
bitmap_set_bit (tmp, v);
change = 1;
}
}
@ -1214,12 +1214,12 @@ update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
ddg_node_ptr v_node = e->dest;
int v = v_node->cuid;
if (TEST_BIT (nodes, v)
if (bitmap_bit_p (nodes, v)
&& (e->distance == 0)
&& (v_node->aux.count < u_node->aux.count + e->latency))
{
v_node->aux.count = u_node->aux.count + e->latency;
SET_BIT (tmp, v);
bitmap_set_bit (tmp, v);
result = 1;
}
}
@ -1248,7 +1248,7 @@ longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
g->nodes[src].aux.count = 0;
bitmap_clear (tmp);
SET_BIT (tmp, src);
bitmap_set_bit (tmp, src);
while (change)
{


@ -891,7 +891,7 @@ df_worklist_propagate_forward (struct dataflow *dataflow,
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (age <= BB_LAST_CHANGE_AGE (e->src)
&& TEST_BIT (considered, e->src->index))
&& bitmap_bit_p (considered, e->src->index))
changed |= dataflow->problem->con_fun_n (e);
}
else if (dataflow->problem->con_fun_0)
@ -906,7 +906,7 @@ df_worklist_propagate_forward (struct dataflow *dataflow,
{
unsigned ob_index = e->dest->index;
if (TEST_BIT (considered, ob_index))
if (bitmap_bit_p (considered, ob_index))
bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
}
return true;
@ -936,7 +936,7 @@ df_worklist_propagate_backward (struct dataflow *dataflow,
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (age <= BB_LAST_CHANGE_AGE (e->dest)
&& TEST_BIT (considered, e->dest->index))
&& bitmap_bit_p (considered, e->dest->index))
changed |= dataflow->problem->con_fun_n (e);
}
else if (dataflow->problem->con_fun_0)
@ -951,7 +951,7 @@ df_worklist_propagate_backward (struct dataflow *dataflow,
{
unsigned ob_index = e->src->index;
if (TEST_BIT (considered, ob_index))
if (bitmap_bit_p (considered, ob_index))
bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
}
return true;
@ -1086,7 +1086,7 @@ df_worklist_dataflow (struct dataflow *dataflow,
bitmap_clear (considered);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_consider, 0, index, bi)
{
SET_BIT (considered, index);
bitmap_set_bit (considered, index);
}
/* Initialize the mapping of block index to postorder. */


@ -146,7 +146,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
int sp = 0;
sbitmap visited = sbitmap_alloc (last_basic_block + 1);
bitmap_clear (visited);
SET_BIT (visited, ENTRY_BLOCK_PTR->index);
bitmap_set_bit (visited, ENTRY_BLOCK_PTR->index);
while (true)
{
@ -187,7 +187,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
if (walk_data->before_dom_children)
(*walk_data->before_dom_children) (walk_data, bb);
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
/* Mark the current BB to be popped out of the recursion stack
once children are processed. */
@ -233,7 +233,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (!dominated_by_p (CDI_DOMINATORS, e->src, e->dest)
&& !TEST_BIT (visited, e->src->index))
&& !bitmap_bit_p (visited, e->src->index))
{
found = false;
break;


@ -3363,9 +3363,9 @@ mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
edge e;
edge_iterator ei;
if (TEST_BIT (unreachable_blocks, bb->index))
if (bitmap_bit_p (unreachable_blocks, bb->index))
{
RESET_BIT (unreachable_blocks, bb->index);
bitmap_clear_bit (unreachable_blocks, bb->index);
FOR_EACH_EDGE (e, ei, bb->preds)
{
mark_reachable_blocks (unreachable_blocks, e->src);


@ -227,7 +227,7 @@ bitmap_clear_bit (ebitmap map, unsigned int bit)
return;
if (wordindex >= map->wordmask->n_bits
|| !TEST_BIT (map->wordmask, wordindex))
|| !bitmap_bit_p (map->wordmask, wordindex))
return;
if (map->cache != NULL && map->cacheindex == wordindex)
@ -258,7 +258,7 @@ bitmap_clear_bit (ebitmap map, unsigned int bit)
map->cache = map->cache - 1;
}
RESET_BIT_WITH_POPCOUNT (map->wordmask, wordindex);
bitmap_clear_bit_with_popcount (map->wordmask, wordindex);
memmove(&map->elts[eltwordindex], &map->elts[eltwordindex + 1],
sizeof (EBITMAP_ELT_TYPE) * (map->numwords - eltwordindex));
@ -288,12 +288,12 @@ bitmap_set_bit (ebitmap map, unsigned int bit)
/* Allocate a new word in the array and move whatever is in it's
place, if necessary. */
if (!TEST_BIT (map->wordmask, wordindex))
if (!bitmap_bit_p (map->wordmask, wordindex))
{
unsigned long count;
unsigned int i;
SET_BIT_WITH_POPCOUNT (map->wordmask, wordindex);
bitmap_set_bit_with_popcount (map->wordmask, wordindex);
count = sbitmap_popcount (map->wordmask, wordindex);
gcc_assert (count <= map->numwords);
@ -341,7 +341,7 @@ bitmap_bit_p (ebitmap map, unsigned int bit)
it's not set in the wordmask, this bit can't exist in our
ebitmap. */
if (wordindex >= map->wordmask->n_bits
|| !TEST_BIT (map->wordmask, wordindex))
|| !bitmap_bit_p (map->wordmask, wordindex))
return false;
/* Find the bit and test it. */
@ -449,7 +449,7 @@ bitmap_and_into (ebitmap dst, ebitmap src)
*dstplace = tmpword;
}
else
RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
#ifdef EBITMAP_DEBUGGING
{
@ -494,8 +494,8 @@ bitmap_and (ebitmap dst, ebitmap src1, ebitmap src2)
{
bool src1hasword, src2hasword;
src1hasword = TEST_BIT (src1->wordmask, i);
src2hasword = TEST_BIT (src2->wordmask, i);
src1hasword = bitmap_bit_p (src1->wordmask, i);
src2hasword = bitmap_bit_p (src2->wordmask, i);
if (src1hasword && src2hasword)
{
@ -508,7 +508,7 @@ bitmap_and (ebitmap dst, ebitmap src1, ebitmap src2)
*dstplace = tmpword;
}
else
RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
else if (src1hasword)
src1eltindex++;
@ -603,9 +603,9 @@ bitmap_ior_into (ebitmap dst, ebitmap src)
bool dsthasword, srchasword;
dsthasword = (i < dst->wordmask->n_bits
&& TEST_BIT (dst->wordmask, i));
&& bitmap_bit_p (dst->wordmask, i));
srchasword = (i < src->wordmask->n_bits
&& TEST_BIT (src->wordmask, i));
&& bitmap_bit_p (src->wordmask, i));
if (dsthasword && srchasword)
{
@ -624,7 +624,7 @@ bitmap_ior_into (ebitmap dst, ebitmap src)
{
newarray [neweltindex++] = ebitmap_array_get (src, srceltindex++);
gcc_assert (i < dst->wordmask->n_bits);
SET_BIT_WITH_POPCOUNT (dst->wordmask, i);
bitmap_set_bit_with_popcount (dst->wordmask, i);
changed |= true;
}
}
@ -712,9 +712,9 @@ bitmap_ior (ebitmap dst, ebitmap src1, ebitmap src2)
bool src1hasword, src2hasword;
EBITMAP_ELT_TYPE tmpword;
src1hasword = (i < src1->wordmask->n_bits
&& TEST_BIT (src1->wordmask, i));
&& bitmap_bit_p (src1->wordmask, i));
src2hasword = (i < src2->wordmask->n_bits
&& TEST_BIT (src2->wordmask, i));
&& bitmap_bit_p (src2->wordmask, i));
if (src1hasword && src2hasword)
{
@ -733,7 +733,7 @@ bitmap_ior (ebitmap dst, ebitmap src1, ebitmap src2)
newarray [neweltindex++] = tmpword;
}
if (i >= dst->wordmask->n_bits || !TEST_BIT (dst->wordmask, i))
if (i >= dst->wordmask->n_bits || !bitmap_bit_p (dst->wordmask, i))
{
changed = true;
}
@ -808,7 +808,7 @@ bitmap_and_compl_into (ebitmap dst, ebitmap src)
bool srchasword;
srchasword = (i < src->wordmask->n_bits
&& TEST_BIT (src->wordmask, i));
&& bitmap_bit_p (src->wordmask, i));
if (srchasword)
{
@ -825,7 +825,7 @@ bitmap_and_compl_into (ebitmap dst, ebitmap src)
*dstplace = tmpword;
}
else
RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
else
{
@ -892,7 +892,7 @@ bitmap_and_compl (ebitmap dst, ebitmap src1, ebitmap src2)
EBITMAP_ELT_TYPE tmpword;
src2hasword = (i < src2->wordmask->n_bits
&& TEST_BIT (src2->wordmask, i));
&& bitmap_bit_p (src2->wordmask, i));
if (src2hasword)
{
@ -904,7 +904,7 @@ bitmap_and_compl (ebitmap dst, ebitmap src1, ebitmap src2)
newarray[neweltindex++] = tmpword;
}
else
RESET_BIT_WITH_POPCOUNT (tempmask, i);
bitmap_clear_bit_with_popcount (tempmask, i);
}
else
@ -914,7 +914,7 @@ bitmap_and_compl (ebitmap dst, ebitmap src1, ebitmap src2)
newarray[neweltindex++] = tmpword;
}
if (i >= dst->wordmask->n_bits || !TEST_BIT (dst->wordmask, i))
if (i >= dst->wordmask->n_bits || !bitmap_bit_p (dst->wordmask, i))
{
changed = true;
}


@ -115,7 +115,7 @@ ebitmap_iter_init (ebitmap_iterator *i, ebitmap bmp, unsigned int min)
}
else
{
if (TEST_BIT (bmp->wordmask, min / EBITMAP_ELT_BITS) == 0)
if (bitmap_bit_p (bmp->wordmask, min / EBITMAP_ELT_BITS) == 0)
i->word = 0;
else
{


@ -614,14 +614,14 @@ eh_region_outermost (struct function *ifun, eh_region region_a,
do
{
SET_BIT (b_outer, region_b->index);
bitmap_set_bit (b_outer, region_b->index);
region_b = region_b->outer;
}
while (region_b);
do
{
if (TEST_BIT (b_outer, region_a->index))
if (bitmap_bit_p (b_outer, region_a->index))
break;
region_a = region_a->outer;
}


@ -6491,8 +6491,8 @@ epilogue_done:
/* Look for basic blocks within the prologue insns. */
blocks = sbitmap_alloc (last_basic_block);
bitmap_clear (blocks);
SET_BIT (blocks, entry_edge->dest->index);
SET_BIT (blocks, orig_entry_edge->dest->index);
bitmap_set_bit (blocks, entry_edge->dest->index);
bitmap_set_bit (blocks, orig_entry_edge->dest->index);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);


@ -686,7 +686,7 @@ compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
if (antloc)
for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
{
SET_BIT (antloc[BLOCK_FOR_INSN (occr->insn)->index], indx);
bitmap_set_bit (antloc[BLOCK_FOR_INSN (occr->insn)->index], indx);
/* While we're scanning the table, this is a good place to
initialize this. */
@ -698,7 +698,7 @@ compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
if (comp)
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
/* While we're scanning the table, this is a good place to
initialize this. */
@ -1662,7 +1662,7 @@ compute_transp (const_rtx x, int indx, sbitmap *bmap)
for (def = DF_REG_DEF_CHAIN (REGNO (x));
def;
def = DF_REF_NEXT_REG (def))
RESET_BIT (bmap[DF_REF_BB (def)->index], indx);
bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
}
return;
@ -1677,7 +1677,7 @@ compute_transp (const_rtx x, int indx, sbitmap *bmap)
do any list walking for them. */
EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
{
RESET_BIT (bmap[bb_index], indx);
bitmap_clear_bit (bmap[bb_index], indx);
}
/* Now iterate over the blocks which have memory modifications
@ -1698,7 +1698,7 @@ compute_transp (const_rtx x, int indx, sbitmap *bmap)
if (canon_true_dependence (dest, GET_MODE (dest),
dest_addr, x, NULL_RTX))
RESET_BIT (bmap[bb_index], indx);
bitmap_clear_bit (bmap[bb_index], indx);
}
}
}
@ -1831,7 +1831,7 @@ prune_expressions (bool pre_p)
/* Note potentially trapping expressions. */
if (may_trap_p (expr->expr))
{
SET_BIT (prune_exprs, expr->bitmap_index);
bitmap_set_bit (prune_exprs, expr->bitmap_index);
continue;
}
@ -1857,7 +1857,7 @@ prune_expressions (bool pre_p)
analysis to determine if this mem is actually killed
by this call. */
SET_BIT (prune_exprs, expr->bitmap_index);
bitmap_set_bit (prune_exprs, expr->bitmap_index);
}
}
}
@ -1949,16 +1949,16 @@ prune_insertions_deletions (int n_elems)
for (j = 0; j < (unsigned) n_elems; j++)
if (deletions[j]
&& ((unsigned) insertions[j] / deletions[j]) > MAX_GCSE_INSERTION_RATIO)
SET_BIT (prune_exprs, j);
bitmap_set_bit (prune_exprs, j);
/* Now prune PRE_INSERT_MAP and PRE_DELETE_MAP based on PRUNE_EXPRS. */
EXECUTE_IF_SET_IN_SBITMAP (prune_exprs, 0, j, sbi)
{
for (i = 0; i < (unsigned) n_edges; i++)
RESET_BIT (pre_insert_map[i], j);
bitmap_clear_bit (pre_insert_map[i], j);
for (i = 0; i < (unsigned) last_basic_block; i++)
RESET_BIT (pre_delete_map[i], j);
bitmap_clear_bit (pre_delete_map[i], j);
}
sbitmap_free (prune_exprs);
@ -2033,7 +2033,7 @@ pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
;/* Nothing to do. */
/* Does this predecessor generate this expression? */
else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
else if (bitmap_bit_p (comp[pred_bb->index], expr->bitmap_index))
{
/* Is this the occurrence we're looking for?
Note that there's only one generating occurrence per block
@ -2044,7 +2044,7 @@ pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
visited[pred_bb->index] = 1;
}
/* Ignore this predecessor if it kills the expression. */
else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
visited[pred_bb->index] = 1;
/* Neither gen nor kill. */
@ -2262,7 +2262,7 @@ pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
/* Insert this expression on this edge if it would
reach the deleted occurrence in BB. */
if (!TEST_BIT (inserted[e], j))
if (!bitmap_bit_p (inserted[e], j))
{
rtx insn;
edge eg = INDEX_EDGE (edge_list, e);
@ -2292,7 +2292,7 @@ pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
}
update_ld_motion_stores (expr);
SET_BIT (inserted[e], j);
bitmap_set_bit (inserted[e], j);
did_insert = 1;
gcse_create_count++;
}
@ -2532,7 +2532,7 @@ pre_delete (void)
basic_block bb = BLOCK_FOR_INSN (insn);
/* We only delete insns that have a single_set. */
if (TEST_BIT (pre_delete_map[bb->index], indx)
if (bitmap_bit_p (pre_delete_map[bb->index], indx)
&& (set = single_set (insn)) != 0
&& dbg_cnt (pre_insn))
{
@ -2802,8 +2802,8 @@ compute_code_hoist_vbeinout (void)
{
if (bb->next_bb != EXIT_BLOCK_PTR)
{
sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
hoist_vbein, bb);
bitmap_intersection_of_succs (hoist_vbeout[bb->index],
hoist_vbein, bb);
/* Include expressions in VBEout that are calculated
in BB and available at its end. */
@ -2922,14 +2922,14 @@ should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
break;
else if (pred_bb == expr_bb)
continue;
else if (TEST_BIT (visited, pred_bb->index))
else if (bitmap_bit_p (visited, pred_bb->index))
continue;
else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
break;
/* Not killed. */
else
{
SET_BIT (visited, pred_bb->index);
bitmap_set_bit (visited, pred_bb->index);
if (! should_hoist_expr_to_dom (expr_bb, expr, pred_bb,
visited, distance, bb_size,
pressure_class, nregs, hoisted_bbs))
@ -3084,7 +3084,7 @@ hoist_code (void)
block. These are the potentially hoistable expressions. */
for (i = 0; i < SBITMAP_SIZE (hoist_vbeout[bb->index]); i++)
{
if (TEST_BIT (hoist_vbeout[bb->index], i))
if (bitmap_bit_p (hoist_vbeout[bb->index], i))
{
int nregs = 0;
enum reg_class pressure_class = NO_REGS;
@ -3101,7 +3101,7 @@ hoist_code (void)
/* If an expression is computed in BB and is available at end of
BB, hoist all occurrences dominated by BB to BB. */
if (TEST_BIT (comp[bb->index], i))
if (bitmap_bit_p (comp[bb->index], i))
{
occr = find_occr_in_bb (expr->antic_occr, bb);
@ -3132,7 +3132,7 @@ hoist_code (void)
/* We've found a dominated block, now see if it computes
the busy expression and whether or not moving that
expression to the "beginning" of that block is safe. */
if (!TEST_BIT (antloc[dominated->index], i))
if (!bitmap_bit_p (antloc[dominated->index], i))
continue;
occr = find_occr_in_bb (expr->antic_occr, dominated);


@ -3424,7 +3424,7 @@ finish_alt_states (void)
/* Set bit number bitno in the bit string. The macro is not side
effect proof. */
#define SET_BIT(bitstring, bitno) \
#define bitmap_set_bit(bitstring, bitno) \
((bitstring)[(bitno) / (sizeof (*(bitstring)) * CHAR_BIT)] |= \
(HOST_WIDE_INT)1 << (bitno) % (sizeof (*(bitstring)) * CHAR_BIT))
@ -3434,7 +3434,7 @@ finish_alt_states (void)
/* Test if bit number bitno in the bitstring is set. The macro is not
side effect proof. */
#define TEST_BIT(bitstring, bitno) \
#define bitmap_bit_p(bitstring, bitno) \
((bitstring)[(bitno) / (sizeof (*(bitstring)) * CHAR_BIT)] >> \
(bitno) % (sizeof (*(bitstring)) * CHAR_BIT) & 1)
@ -3563,7 +3563,7 @@ static void
set_unit_reserv (reserv_sets_t reservs, int cycle_num, int unit_num)
{
gcc_assert (cycle_num < max_cycles_num);
SET_BIT (reservs, cycle_num * els_in_cycle_reserv
bitmap_set_bit (reservs, cycle_num * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num);
}
@ -3573,7 +3573,7 @@ static int
test_unit_reserv (reserv_sets_t reservs, int cycle_num, int unit_num)
{
gcc_assert (cycle_num < max_cycles_num);
return TEST_BIT (reservs, cycle_num * els_in_cycle_reserv
return bitmap_bit_p (reservs, cycle_num * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num);
}
@ -3677,7 +3677,7 @@ output_cycle_reservs (FILE *f, reserv_sets_t reservs, int start_cycle,
reserved_units_num = 0;
for (unit_num = 0; unit_num < description->units_num; unit_num++)
if (TEST_BIT (reservs, start_cycle * els_in_cycle_reserv
if (bitmap_bit_p (reservs, start_cycle * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num))
reserved_units_num++;
gcc_assert (repetition_num > 0);
@ -3687,7 +3687,7 @@ output_cycle_reservs (FILE *f, reserv_sets_t reservs, int start_cycle,
for (unit_num = 0;
unit_num < description->units_num;
unit_num++)
if (TEST_BIT (reservs, start_cycle * els_in_cycle_reserv
if (bitmap_bit_p (reservs, start_cycle * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num))
{
if (reserved_units_num != 0)
@ -4279,7 +4279,7 @@ initiate_excl_sets (void)
el != NULL;
el = el->next_unit_set_el)
{
SET_BIT (unit_excl_set, el->unit_decl->unit_num);
bitmap_set_bit (unit_excl_set, el->unit_decl->unit_num);
el->unit_decl->in_set_p = TRUE;
}
unit_excl_set_table [DECL_UNIT (decl)->unit_num] = unit_excl_set;
@ -4344,7 +4344,7 @@ form_reserv_sets_list (pattern_set_el_t pattern_list)
curr->next_pattern_reserv = NULL;
for (i = 0; i < el->units_num; i++)
{
SET_BIT (curr->reserv, el->unit_decls [i]->unit_num);
bitmap_set_bit (curr->reserv, el->unit_decls [i]->unit_num);
el->unit_decls [i]->in_set_p = TRUE;
}
if (prev != NULL)


@ -321,7 +321,7 @@ all_non_dominated_preds_marked_p (basic_block bb, sbitmap map)
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
if (!TEST_BIT (map, e->src->index)
if (!bitmap_bit_p (map, e->src->index)
&& !dominated_by_p (CDI_DOMINATORS, e->src, bb))
return false;
@ -365,13 +365,13 @@ build_scop_bbs_1 (scop_p scop, sbitmap visited, basic_block bb)
VEC (basic_block, heap) *dom;
poly_bb_p pbb;
if (TEST_BIT (visited, bb->index)
if (bitmap_bit_p (visited, bb->index)
|| !bb_in_sese_p (bb, region))
return;
pbb = new_poly_bb (scop, try_generate_gimple_bb (scop, bb));
VEC_safe_push (poly_bb_p, heap, SCOP_BBS (scop), pbb);
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
dom = get_dominated_by (CDI_DOMINATORS, bb);


@ -1471,8 +1471,8 @@ remove_some_program_points_and_update_live_ranges (void)
for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
{
ira_assert (r->start <= r->finish);
SET_BIT (born, r->start);
SET_BIT (dead, r->finish);
bitmap_set_bit (born, r->start);
bitmap_set_bit (dead, r->finish);
}
born_or_dead = sbitmap_alloc (ira_max_point);
@ -1482,8 +1482,8 @@ remove_some_program_points_and_update_live_ranges (void)
prev_born_p = prev_dead_p = false;
EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
{
born_p = TEST_BIT (born, i);
dead_p = TEST_BIT (dead, i);
born_p = bitmap_bit_p (born, i);
dead_p = bitmap_bit_p (dead, i);
if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
|| (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
map[i] = n;


@ -3620,7 +3620,7 @@ build_insn_chain (void)
while (start < last)
{
RESET_BIT (live_subregs[regno], start);
bitmap_clear_bit (live_subregs[regno], start);
start++;
}
@ -3706,7 +3706,7 @@ build_insn_chain (void)
while (start < last)
{
SET_BIT (live_subregs[regno], start);
bitmap_set_bit (live_subregs[regno], start);
start++;
}
}


@ -145,7 +145,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
bb->aux = NULL;
sbitmap_intersection_of_succs (antout[bb->index], antin, bb);
bitmap_intersection_of_succs (antout[bb->index], antin, bb);
}
if (bitmap_or_and (antin[bb->index], antloc[bb->index],
@ -526,7 +526,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
bb->aux = NULL;
sbitmap_intersection_of_preds (avin[bb->index], avout, bb);
bitmap_intersection_of_preds (avin[bb->index], avout, bb);
}
if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],


@ -466,9 +466,9 @@ peel_loop_completely (struct loop *loop)
wont_exit = sbitmap_alloc (npeel + 1);
bitmap_ones (wont_exit);
RESET_BIT (wont_exit, 0);
bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
RESET_BIT (wont_exit, 1);
bitmap_clear_bit (wont_exit, 1);
remove_edges = NULL;
@ -672,9 +672,9 @@ unroll_loop_constant_iterations (struct loop *loop)
fprintf (dump_file, ";; Condition at beginning of loop.\n");
/* Peel exit_mod iterations. */
RESET_BIT (wont_exit, 0);
bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
RESET_BIT (wont_exit, 1);
bitmap_clear_bit (wont_exit, 1);
if (exit_mod)
{
@ -703,7 +703,7 @@ unroll_loop_constant_iterations (struct loop *loop)
loop->any_estimate = false;
}
SET_BIT (wont_exit, 1);
bitmap_set_bit (wont_exit, 1);
}
else
{
@ -719,9 +719,9 @@ unroll_loop_constant_iterations (struct loop *loop)
if (exit_mod != max_unroll
|| desc->noloop_assumptions)
{
RESET_BIT (wont_exit, 0);
bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
RESET_BIT (wont_exit, 1);
bitmap_clear_bit (wont_exit, 1);
opt_info_start_duplication (opt_info);
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
@ -747,11 +747,11 @@ unroll_loop_constant_iterations (struct loop *loop)
loop->any_estimate = false;
desc->noloop_assumptions = NULL_RTX;
SET_BIT (wont_exit, 0);
SET_BIT (wont_exit, 1);
bitmap_set_bit (wont_exit, 0);
bitmap_set_bit (wont_exit, 1);
}
RESET_BIT (wont_exit, max_unroll);
bitmap_clear_bit (wont_exit, max_unroll);
}
/* Now unroll the loop. */
@ -1069,7 +1069,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
bitmap_clear (wont_exit);
if (extra_zero_check
&& !desc->noloop_assumptions)
SET_BIT (wont_exit, 1);
bitmap_set_bit (wont_exit, 1);
ezc_swtch = loop_preheader_edge (loop)->src;
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
1, wont_exit, desc->out_edge,
@ -1085,7 +1085,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
/* Peel the copy. */
bitmap_clear (wont_exit);
if (i != n_peel - 1 || !last_may_exit)
SET_BIT (wont_exit, 1);
bitmap_set_bit (wont_exit, 1);
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
1, wont_exit, desc->out_edge,
&remove_edges,
@ -1140,7 +1140,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
/* And unroll loop. */
bitmap_ones (wont_exit);
RESET_BIT (wont_exit, may_exit_copy);
bitmap_clear_bit (wont_exit, may_exit_copy);
opt_info_start_duplication (opt_info);
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),


@ -1544,7 +1544,7 @@ decompose_multiword_subregs (bool decompose_copies)
extract_insn (insn);
if (cfi)
SET_BIT (sub_blocks, bb->index);
bitmap_set_bit (sub_blocks, bb->index);
}
}
else


@ -780,8 +780,8 @@ remove_some_program_points_and_update_live_ranges (void)
for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
{
lra_assert (r->start <= r->finish);
SET_BIT (born, r->start);
SET_BIT (dead, r->finish);
bitmap_set_bit (born, r->start);
bitmap_set_bit (dead, r->finish);
}
}
born_or_dead = sbitmap_alloc (lra_live_max_point);
@ -791,8 +791,8 @@ remove_some_program_points_and_update_live_ranges (void)
prev_born_p = prev_dead_p = false;
EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
{
born_p = TEST_BIT (born, i);
dead_p = TEST_BIT (dead, i);
born_p = bitmap_bit_p (born, i);
dead_p = bitmap_bit_p (dead, i);
if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
|| (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
{


@ -1751,9 +1751,9 @@ lra_push_insn_1 (rtx insn, bool always_update)
if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
lra_constraint_insn_stack_bitmap =
sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
if (TEST_BIT (lra_constraint_insn_stack_bitmap, uid))
if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
return;
SET_BIT (lra_constraint_insn_stack_bitmap, uid);
bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
if (! always_update)
lra_update_insn_regno_info (insn);
VEC_safe_push (rtx, heap, lra_constraint_insn_stack, insn);
@ -1785,7 +1785,7 @@ rtx
lra_pop_insn (void)
{
rtx insn = VEC_pop (rtx, lra_constraint_insn_stack);
RESET_BIT (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
return insn;
}


@ -148,10 +148,10 @@ make_preds_opaque (basic_block b, int j)
{
basic_block pb = e->src;
if (e->aux || ! TEST_BIT (transp[pb->index], j))
if (e->aux || ! bitmap_bit_p (transp[pb->index], j))
continue;
RESET_BIT (transp[pb->index], j);
bitmap_clear_bit (transp[pb->index], j);
make_preds_opaque (pb, j);
}
}
@ -513,7 +513,7 @@ optimize_mode_switching (void)
{
ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
add_seginfo (info + bb->index, ptr);
RESET_BIT (transp[bb->index], j);
bitmap_clear_bit (transp[bb->index], j);
}
}
@ -530,7 +530,7 @@ optimize_mode_switching (void)
last_mode = mode;
ptr = new_seginfo (mode, insn, bb->index, live_now);
add_seginfo (info + bb->index, ptr);
RESET_BIT (transp[bb->index], j);
bitmap_clear_bit (transp[bb->index], j);
}
#ifdef MODE_AFTER
last_mode = MODE_AFTER (e, last_mode, insn);
@ -569,7 +569,7 @@ optimize_mode_switching (void)
an extra check in make_preds_opaque. We also
need this to avoid confusing pre_edge_lcm when
antic is cleared but transp and comp are set. */
RESET_BIT (transp[bb->index], j);
bitmap_clear_bit (transp[bb->index], j);
/* Insert a fake computing definition of MODE into entry
blocks which compute no mode. This represents the mode on
@ -601,10 +601,10 @@ optimize_mode_switching (void)
FOR_EACH_BB (bb)
{
if (info[bb->index].seginfo->mode == m)
SET_BIT (antic[bb->index], j);
bitmap_set_bit (antic[bb->index], j);
if (info[bb->index].computing == m)
SET_BIT (comp[bb->index], j);
bitmap_set_bit (comp[bb->index], j);
}
}
@ -638,7 +638,7 @@ optimize_mode_switching (void)
eg->aux = 0;
if (! TEST_BIT (insert[e], j))
if (! bitmap_bit_p (insert[e], j))
continue;
eg->aux = (void *)1;
@ -665,7 +665,7 @@ optimize_mode_switching (void)
}
FOR_EACH_BB_REVERSE (bb)
if (TEST_BIT (del[bb->index], j))
if (bitmap_bit_p (del[bb->index], j))
{
make_preds_opaque (bb, j);
/* Cancel the 'deleted' mode set. */


@ -619,7 +619,7 @@ schedule_reg_move (partial_schedule_ptr ps, int i_reg_move,
{
this_insn = ps_rtl_insn (ps, u);
this_latency = insn_latency (move->insn, this_insn);
if (distance1_uses && !TEST_BIT (distance1_uses, u))
if (distance1_uses && !bitmap_bit_p (distance1_uses, u))
this_distance = -1;
else
this_distance = 0;
@ -644,7 +644,7 @@ schedule_reg_move (partial_schedule_ptr ps, int i_reg_move,
}
bitmap_clear (must_follow);
SET_BIT (must_follow, move->def);
bitmap_set_bit (must_follow, move->def);
start = MAX (start, end - (ii - 1));
for (c = end; c >= start; c--)
@ -796,9 +796,9 @@ schedule_reg_moves (partial_schedule_ptr ps)
ps_reg_move_info *move;
move = ps_reg_move (ps, first_move + dest_copy - 1);
SET_BIT (move->uses, e->dest->cuid);
bitmap_set_bit (move->uses, e->dest->cuid);
if (e->distance == 1)
SET_BIT (distance1_uses, e->dest->cuid);
bitmap_set_bit (distance1_uses, e->dest->cuid);
}
}
@ -1911,7 +1911,7 @@ get_sched_window (partial_schedule_ptr ps, ddg_node_ptr u_node,
{
int v = e->src->cuid;
if (TEST_BIT (sched_nodes, v))
if (bitmap_bit_p (sched_nodes, v))
{
int p_st = SCHED_TIME (v);
int earliest = p_st + e->latency - (e->distance * ii);
@ -1939,7 +1939,7 @@ get_sched_window (partial_schedule_ptr ps, ddg_node_ptr u_node,
{
int v = e->dest->cuid;
if (TEST_BIT (sched_nodes, v))
if (bitmap_bit_p (sched_nodes, v))
{
int s_st = SCHED_TIME (v);
int earliest = (e->data_type == MEM_DEP ? s_st - ii + 1 : INT_MIN);
@ -2068,14 +2068,14 @@ calculate_must_precede_follow (ddg_node_ptr u_node, int start, int end,
and check only if
SCHED_TIME (e->src) - (e->distance * ii) == first_cycle_in_window */
for (e = u_node->in; e != 0; e = e->next_in)
if (TEST_BIT (sched_nodes, e->src->cuid)
if (bitmap_bit_p (sched_nodes, e->src->cuid)
&& ((SCHED_TIME (e->src->cuid) - (e->distance * ii)) ==
first_cycle_in_window))
{
if (dump_file)
fprintf (dump_file, "%d ", e->src->cuid);
SET_BIT (must_precede, e->src->cuid);
bitmap_set_bit (must_precede, e->src->cuid);
}
if (dump_file)
@ -2093,14 +2093,14 @@ calculate_must_precede_follow (ddg_node_ptr u_node, int start, int end,
and check only if
SCHED_TIME (e->dest) + (e->distance * ii) == last_cycle_in_window */
for (e = u_node->out; e != 0; e = e->next_out)
if (TEST_BIT (sched_nodes, e->dest->cuid)
if (bitmap_bit_p (sched_nodes, e->dest->cuid)
&& ((SCHED_TIME (e->dest->cuid) + (e->distance * ii)) ==
last_cycle_in_window))
{
if (dump_file)
fprintf (dump_file, "%d ", e->dest->cuid);
SET_BIT (must_follow, e->dest->cuid);
bitmap_set_bit (must_follow, e->dest->cuid);
}
if (dump_file)
@ -2131,7 +2131,7 @@ try_scheduling_node_in_cycle (partial_schedule_ptr ps,
if (psi)
{
SCHED_TIME (u) = cycle;
SET_BIT (sched_nodes, u);
bitmap_set_bit (sched_nodes, u);
success = 1;
*num_splits = 0;
if (dump_file)
@ -2178,11 +2178,11 @@ sms_schedule_by_order (ddg_ptr g, int mii, int maxii, int *nodes_order)
if (!NONDEBUG_INSN_P (insn))
{
RESET_BIT (tobe_scheduled, u);
bitmap_clear_bit (tobe_scheduled, u);
continue;
}
if (TEST_BIT (sched_nodes, u))
if (bitmap_bit_p (sched_nodes, u))
continue;
/* Try to get non-empty scheduling window. */
@ -2379,7 +2379,7 @@ compute_split_row (sbitmap sched_nodes, int low, int up, int ii,
{
int v = e->src->cuid;
if (TEST_BIT (sched_nodes, v)
if (bitmap_bit_p (sched_nodes, v)
&& (low == SCHED_TIME (v) + e->latency - (e->distance * ii)))
if (SCHED_TIME (v) > lower)
{
@ -2398,7 +2398,7 @@ compute_split_row (sbitmap sched_nodes, int low, int up, int ii,
{
int v = e->dest->cuid;
if (TEST_BIT (sched_nodes, v)
if (bitmap_bit_p (sched_nodes, v)
&& (up == SCHED_TIME (v) - e->latency + (e->distance * ii)))
if (SCHED_TIME (v) < upper)
{
@ -2434,7 +2434,7 @@ verify_partial_schedule (partial_schedule_ptr ps, sbitmap sched_nodes)
int u = crr_insn->id;
length++;
gcc_assert (TEST_BIT (sched_nodes, u));
gcc_assert (bitmap_bit_p (sched_nodes, u));
/* ??? Test also that all nodes of sched_nodes are in ps, perhaps by
popcount (sched_nodes) == number of insns in ps. */
gcc_assert (SCHED_TIME (u) >= ps->min_cycle);
@ -2493,9 +2493,9 @@ check_nodes_order (int *node_order, int num_nodes)
if (dump_file)
fprintf (dump_file, "%d ", u);
gcc_assert (u < num_nodes && u >= 0 && !TEST_BIT (tmp, u));
gcc_assert (u < num_nodes && u >= 0 && !bitmap_bit_p (tmp, u));
SET_BIT (tmp, u);
bitmap_set_bit (tmp, u);
}
if (dump_file)
@ -2774,7 +2774,7 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
bitmap_clear (workset);
if ((u = find_max_asap (g, scc)) >= 0)
SET_BIT (workset, u);
bitmap_set_bit (workset, u);
dir = BOTTOMUP;
}
@ -2799,8 +2799,8 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
/* Don't consider the already ordered successors again. */
bitmap_and_compl (tmp, tmp, nodes_ordered);
bitmap_ior (workset, workset, tmp);
RESET_BIT (workset, v);
SET_BIT (nodes_ordered, v);
bitmap_clear_bit (workset, v);
bitmap_set_bit (nodes_ordered, v);
}
dir = BOTTOMUP;
bitmap_clear (predecessors);
@ -2820,8 +2820,8 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
/* Don't consider the already ordered predecessors again. */
bitmap_and_compl (tmp, tmp, nodes_ordered);
bitmap_ior (workset, workset, tmp);
RESET_BIT (workset, v);
SET_BIT (nodes_ordered, v);
bitmap_clear_bit (workset, v);
bitmap_set_bit (nodes_ordered, v);
}
dir = TOPDOWN;
bitmap_clear (successors);
@ -3019,10 +3019,10 @@ ps_insn_find_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,
next_ps_i = next_ps_i->next_in_row)
{
if (must_follow
&& TEST_BIT (must_follow, next_ps_i->id)
&& bitmap_bit_p (must_follow, next_ps_i->id)
&& ! first_must_follow)
first_must_follow = next_ps_i;
if (must_precede && TEST_BIT (must_precede, next_ps_i->id))
if (must_precede && bitmap_bit_p (must_precede, next_ps_i->id))
{
/* If we have already met a node that must follow, then
there is no possible column. */
@ -3033,7 +3033,7 @@ ps_insn_find_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,
}
/* The closing branch must be the last in the row. */
if (must_precede
&& TEST_BIT (must_precede, next_ps_i->id)
&& bitmap_bit_p (must_precede, next_ps_i->id)
&& JUMP_P (ps_rtl_insn (ps, next_ps_i->id)))
return false;
@ -3105,7 +3105,7 @@ ps_insn_advance_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,
/* Check if next_in_row is dependent on ps_i, both having same sched
times (typically ANTI_DEP). If so, ps_i cannot skip over it. */
if (must_follow && TEST_BIT (must_follow, ps_i->next_in_row->id))
if (must_follow && bitmap_bit_p (must_follow, ps_i->next_in_row->id))
return false;
/* Advance PS_I over its next_in_row in the doubly linked list. */


@ -2910,7 +2910,7 @@ split_all_insns (void)
{
if (split_insn (insn))
{
SET_BIT (blocks, bb->index);
bitmap_set_bit (blocks, bb->index);
changed = true;
}
}


@ -1054,14 +1054,14 @@ copyprop_hardreg_forward (void)
FOR_EACH_BB (bb)
{
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
/* If a block has a single predecessor, that we've already
processed, begin with the value data that was live at
the end of the predecessor block. */
/* ??? Ought to use more intelligent queuing of blocks. */
if (single_pred_p (bb)
&& TEST_BIT (visited, single_pred (bb)->index)
&& bitmap_bit_p (visited, single_pred (bb)->index)
&& ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
{
all_vd[bb->index] = all_vd[single_pred (bb)->index];
@ -1089,7 +1089,7 @@ copyprop_hardreg_forward (void)
if (MAY_HAVE_DEBUG_INSNS)
{
FOR_EACH_BB (bb)
if (TEST_BIT (visited, bb->index)
if (bitmap_bit_p (visited, bb->index)
&& all_vd[bb->index].n_debug_insn_changes)
{
unsigned int regno;


@ -722,7 +722,7 @@ dump_bitmap_file (FILE *file, const_sbitmap bmap)
fprintf (file, "n_bits = %d, set = {", bmap->n_bits);
for (pos = 30, i = 0; i < bmap->n_bits; i++)
if (TEST_BIT (bmap, i))
if (bitmap_bit_p (bmap, i))
{
if (pos > 70)
{


@ -34,9 +34,9 @@ along with GCC; see the file COPYING3. If not see
The following operations can be performed in O(1) time:
* set_size : SBITMAP_SIZE
* member_p : TEST_BIT
* add_member : SET_BIT
* remove_member : RESET_BIT
* member_p : bitmap_bit_p
* add_member : bitmap_set_bit
* remove_member : bitmap_clear_bit
Most other operations on this set representation are O(U) where U is
the size of the set universe:
@ -100,7 +100,7 @@ struct simple_bitmap_def
/* Test if bit number bitno in the bitmap is set. */
static inline SBITMAP_ELT_TYPE
TEST_BIT (const_sbitmap map, unsigned int bitno)
bitmap_bit_p (const_sbitmap map, int bitno)
{
size_t i = bitno / SBITMAP_ELT_BITS;
unsigned int s = bitno % SBITMAP_ELT_BITS;
@ -110,21 +110,21 @@ TEST_BIT (const_sbitmap map, unsigned int bitno)
/* Set bit number BITNO in the sbitmap MAP. */
static inline void
SET_BIT (sbitmap map, unsigned int bitno)
bitmap_set_bit (sbitmap map, int bitno)
{
gcc_checking_assert (! map->popcount);
map->elms[bitno / SBITMAP_ELT_BITS]
|= (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
}
/* Like SET_BIT, but updates population count. */
/* Like bitmap_set_bit, but updates population count. */
static inline void
SET_BIT_WITH_POPCOUNT (sbitmap map, unsigned int bitno)
bitmap_set_bit_with_popcount (sbitmap map, int bitno)
{
bool oldbit;
gcc_checking_assert (map->popcount);
oldbit = TEST_BIT (map, bitno);
oldbit = bitmap_bit_p (map, bitno);
if (!oldbit)
map->popcount[bitno / SBITMAP_ELT_BITS]++;
map->elms[bitno / SBITMAP_ELT_BITS]
@ -134,21 +134,21 @@ SET_BIT_WITH_POPCOUNT (sbitmap map, unsigned int bitno)
/* Reset bit number BITNO in the sbitmap MAP. */
static inline void
RESET_BIT (sbitmap map, unsigned int bitno)
bitmap_clear_bit (sbitmap map, int bitno)
{
gcc_checking_assert (! map->popcount);
map->elms[bitno / SBITMAP_ELT_BITS]
&= ~((SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS);
}
/* Like RESET_BIT, but updates population count. */
/* Like bitmap_clear_bit, but updates population count. */
static inline void
RESET_BIT_WITH_POPCOUNT (sbitmap map, unsigned int bitno)
bitmap_clear_bit_with_popcount (sbitmap map, int bitno)
{
bool oldbit;
gcc_checking_assert (map->popcount);
oldbit = TEST_BIT (map, bitno);
oldbit = bitmap_bit_p (map, bitno);
if (oldbit)
map->popcount[bitno / SBITMAP_ELT_BITS]--;
map->elms[bitno / SBITMAP_ELT_BITS]
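
For reference, a minimal caller-side sketch of the renamed sbitmap interface
(illustration only, not part of the patch); it uses only functions that appear
elsewhere in this patch, with last_basic_block as a stand-in for the set size:

    /* Illustrative sketch, not patch content.  */
    sbitmap seen = sbitmap_alloc (last_basic_block);
    bitmap_clear (seen);
    bitmap_set_bit (seen, 0);        /* was SET_BIT (seen, 0) */
    if (bitmap_bit_p (seen, 0))      /* was TEST_BIT (seen, 0) */
      bitmap_clear_bit (seen, 0);    /* was RESET_BIT (seen, 0) */
    sbitmap_free (seen);

The _with_popcount variants take the same (sbitmap, int) arguments but, per the
asserts above, are valid only for maps allocated with a population count.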


@ -188,7 +188,7 @@ static sbitmap *dom;
/* Is bb_src dominated by bb_trg. */
#define IS_DOMINATED(bb_src, bb_trg) \
( TEST_BIT (dom[bb_src], bb_trg) )
( bitmap_bit_p (dom[bb_src], bb_trg) )
/* Probability: Prob[i] is an int in [0, REG_BR_PROB_BASE] which is
the probability of bb i relative to the region entry. */
@ -566,10 +566,10 @@ too_large (int block, int *num_bbs, int *num_insns)
if (max_hdr[blk] == -1) \
max_hdr[blk] = hdr; \
else if (dfs_nr[max_hdr[blk]] > dfs_nr[hdr]) \
RESET_BIT (inner, hdr); \
bitmap_clear_bit (inner, hdr); \
else if (dfs_nr[max_hdr[blk]] < dfs_nr[hdr]) \
{ \
RESET_BIT (inner,max_hdr[blk]); \
bitmap_clear_bit (inner,max_hdr[blk]); \
max_hdr[blk] = hdr; \
} \
}
@ -681,8 +681,8 @@ haifa_find_rgns (void)
gcc_assert (node != ENTRY_BLOCK);
child = ei_edge (current_edge)->dest->index;
gcc_assert (child != EXIT_BLOCK);
RESET_BIT (in_stack, child);
if (max_hdr[child] >= 0 && TEST_BIT (in_stack, max_hdr[child]))
bitmap_clear_bit (in_stack, child);
if (max_hdr[child] >= 0 && bitmap_bit_p (in_stack, max_hdr[child]))
UPDATE_LOOP_RELATIONS (node, max_hdr[child]);
ei_next (&current_edge);
}
@ -698,7 +698,7 @@ haifa_find_rgns (void)
/* Process a node. */
node = ei_edge (current_edge)->src->index;
gcc_assert (node != ENTRY_BLOCK);
SET_BIT (in_stack, node);
bitmap_set_bit (in_stack, node);
dfs_nr[node] = ++count;
/* We don't traverse to the exit block. */
@ -713,10 +713,10 @@ haifa_find_rgns (void)
/* If the successor is in the stack, then we've found a loop.
Mark the loop, if it is not a natural loop, then it will
be rejected during the second traversal. */
if (TEST_BIT (in_stack, child))
if (bitmap_bit_p (in_stack, child))
{
no_loops = 0;
SET_BIT (header, child);
bitmap_set_bit (header, child);
UPDATE_LOOP_RELATIONS (node, child);
SET_EDGE_PASSED (current_edge);
ei_next (&current_edge);
@ -728,7 +728,7 @@ haifa_find_rgns (void)
with a new edge. */
if (dfs_nr[child])
{
if (max_hdr[child] >= 0 && TEST_BIT (in_stack, max_hdr[child]))
if (max_hdr[child] >= 0 && bitmap_bit_p (in_stack, max_hdr[child]))
UPDATE_LOOP_RELATIONS (node, max_hdr[child]);
SET_EDGE_PASSED (current_edge);
ei_next (&current_edge);
@ -786,7 +786,7 @@ haifa_find_rgns (void)
bool extend_regions_p;
if (no_loops)
SET_BIT (header, 0);
bitmap_set_bit (header, 0);
/* Second traversal:find reducible inner loops and topologically sort
block of each region. */
@ -805,7 +805,7 @@ haifa_find_rgns (void)
loops to consider at this point. */
FOR_EACH_BB (bb)
{
if (TEST_BIT (header, bb->index) && TEST_BIT (inner, bb->index))
if (bitmap_bit_p (header, bb->index) && bitmap_bit_p (inner, bb->index))
{
edge e;
edge_iterator ei;
@ -876,7 +876,7 @@ haifa_find_rgns (void)
&& single_succ (jbb) == EXIT_BLOCK_PTR)
{
queue[++tail] = jbb->index;
SET_BIT (in_queue, jbb->index);
bitmap_set_bit (in_queue, jbb->index);
if (too_large (jbb->index, &num_bbs, &num_insns))
{
@ -900,7 +900,7 @@ haifa_find_rgns (void)
{
/* This is a loop latch. */
queue[++tail] = node;
SET_BIT (in_queue, node);
bitmap_set_bit (in_queue, node);
if (too_large (node, &num_bbs, &num_insns))
{
@ -958,10 +958,10 @@ haifa_find_rgns (void)
tail = -1;
break;
}
else if (!TEST_BIT (in_queue, node) && node != bb->index)
else if (!bitmap_bit_p (in_queue, node) && node != bb->index)
{
queue[++tail] = node;
SET_BIT (in_queue, node);
bitmap_set_bit (in_queue, node);
if (too_large (node, &num_bbs, &num_insns))
{
@ -1025,7 +1025,7 @@ haifa_find_rgns (void)
of one too_large region. */
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR)
SET_BIT (extended_rgn_header, e->dest->index);
bitmap_set_bit (extended_rgn_header, e->dest->index);
}
}
}
@ -1194,7 +1194,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
edge_iterator ei;
int bbn = order[i];
if (max_hdr[bbn] != -1 && !TEST_BIT (header, bbn))
if (max_hdr[bbn] != -1 && !bitmap_bit_p (header, bbn))
{
int hdr = -1;
@ -1233,7 +1233,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
{
/* If BB start its own region,
update set of headers with BB. */
SET_BIT (header, bbn);
bitmap_set_bit (header, bbn);
rescan = 1;
}
else
@ -1408,7 +1408,7 @@ compute_dom_prob_ps (int bb)
if (IS_RGN_ENTRY (bb))
{
SET_BIT (dom[bb], 0);
bitmap_set_bit (dom[bb], 0);
prob[bb] = REG_BR_PROB_BASE;
return;
}
@ -1432,17 +1432,17 @@ compute_dom_prob_ps (int bb)
bitmap_ior (ancestor_edges[bb],
ancestor_edges[bb], ancestor_edges[pred_bb]);
SET_BIT (ancestor_edges[bb], EDGE_TO_BIT (in_edge));
bitmap_set_bit (ancestor_edges[bb], EDGE_TO_BIT (in_edge));
bitmap_ior (pot_split[bb], pot_split[bb], pot_split[pred_bb]);
FOR_EACH_EDGE (out_edge, out_ei, in_edge->src->succs)
SET_BIT (pot_split[bb], EDGE_TO_BIT (out_edge));
bitmap_set_bit (pot_split[bb], EDGE_TO_BIT (out_edge));
prob[bb] += ((prob[pred_bb] * in_edge->probability) / REG_BR_PROB_BASE);
}
SET_BIT (dom[bb], bb);
bitmap_set_bit (dom[bb], bb);
bitmap_and_compl (pot_split[bb], pot_split[bb], ancestor_edges[bb]);
if (sched_verbose >= 2)
@ -1548,7 +1548,7 @@ compute_trg_info (int trg)
block = el.first_member[j]->src;
FOR_EACH_EDGE (e, ei, block->succs)
{
if (!TEST_BIT (visited, e->dest->index))
if (!bitmap_bit_p (visited, e->dest->index))
{
for (k = 0; k < el.nr_members; k++)
if (e == el.first_member[k])
@ -1557,7 +1557,7 @@ compute_trg_info (int trg)
if (k >= el.nr_members)
{
bblst_table[bblst_last++] = e->dest;
SET_BIT (visited, e->dest->index);
bitmap_set_bit (visited, e->dest->index);
update_idx++;
}
}
@ -1829,7 +1829,7 @@ update_live (rtx insn, int src)
#define IS_REACHABLE(bb_from, bb_to) \
(bb_from == bb_to \
|| IS_RGN_ENTRY (bb_from) \
|| (TEST_BIT (ancestor_edges[bb_to], \
|| (bitmap_bit_p (ancestor_edges[bb_to], \
EDGE_TO_BIT (single_pred_edge (BASIC_BLOCK (BB_TO_BLOCK (bb_from)))))))
/* Turns on the fed_by_spec_load flag for insns fed by load_insn. */
@ -2466,7 +2466,7 @@ add_branch_dependences (rtx head, rtx tail)
{
if (! sched_insns_conditions_mutex_p (last, insn))
add_dependence (last, insn, REG_DEP_ANTI);
SET_BIT (insn_referenced, INSN_LUID (insn));
bitmap_set_bit (insn_referenced, INSN_LUID (insn));
}
CANT_MOVE (insn) = 1;
@ -2490,7 +2490,7 @@ add_branch_dependences (rtx head, rtx tail)
{
insn = prev_nonnote_insn (insn);
if (TEST_BIT (insn_referenced, INSN_LUID (insn))
if (bitmap_bit_p (insn_referenced, INSN_LUID (insn))
|| DEBUG_INSN_P (insn))
continue;


@ -6007,7 +6007,7 @@ make_region_from_loop (struct loop *loop)
new_rgn_number = sel_create_new_region ();
sel_add_block_to_region (preheader_block, &bb_ord_index, new_rgn_number);
SET_BIT (bbs_in_loop_rgns, preheader_block->index);
bitmap_set_bit (bbs_in_loop_rgns, preheader_block->index);
for (i = 0; i < loop->num_nodes; i++)
{
@ -6018,11 +6018,11 @@ make_region_from_loop (struct loop *loop)
gcc_assert (new_rgn_number >= 0);
if (! TEST_BIT (bbs_in_loop_rgns, loop_blocks[i]->index))
if (! bitmap_bit_p (bbs_in_loop_rgns, loop_blocks[i]->index))
{
sel_add_block_to_region (loop_blocks[i], &bb_ord_index,
new_rgn_number);
SET_BIT (bbs_in_loop_rgns, loop_blocks[i]->index);
bitmap_set_bit (bbs_in_loop_rgns, loop_blocks[i]->index);
}
}
@ -6068,7 +6068,7 @@ make_regions_from_loop_nest (struct loop *loop)
/* Traverse all inner nodes of the loop. */
for (cur_loop = loop->inner; cur_loop; cur_loop = cur_loop->next)
if (! TEST_BIT (bbs_in_loop_rgns, cur_loop->header->index))
if (! bitmap_bit_p (bbs_in_loop_rgns, cur_loop->header->index))
return false;
/* At this moment all regular inner loops should have been pipelined.
@ -6182,10 +6182,10 @@ make_regions_from_the_rest (void)
{
degree[bb->index] = 0;
if (!TEST_BIT (bbs_in_loop_rgns, bb->index))
if (!bitmap_bit_p (bbs_in_loop_rgns, bb->index))
{
FOR_EACH_EDGE (e, ei, bb->preds)
if (!TEST_BIT (bbs_in_loop_rgns, e->src->index))
if (!bitmap_bit_p (bbs_in_loop_rgns, e->src->index))
degree[bb->index]++;
}
else


@ -6732,7 +6732,7 @@ init_seqno_1 (basic_block bb, sbitmap visited_bbs, bitmap blocks_to_reschedule)
insn_t succ_insn;
succ_iterator si;
SET_BIT (visited_bbs, bbi);
bitmap_set_bit (visited_bbs, bbi);
if (blocks_to_reschedule)
bitmap_clear_bit (blocks_to_reschedule, bb->index);
@ -6744,7 +6744,7 @@ init_seqno_1 (basic_block bb, sbitmap visited_bbs, bitmap blocks_to_reschedule)
gcc_assert (in_current_region_p (succ));
if (!TEST_BIT (visited_bbs, succ_bbi))
if (!bitmap_bit_p (visited_bbs, succ_bbi))
{
gcc_assert (succ_bbi > bbi);
@ -6779,7 +6779,7 @@ init_seqno (bitmap blocks_to_reschedule, basic_block from)
EXECUTE_IF_SET_IN_BITMAP (blocks_to_reschedule, 0, bbi, bi)
{
gcc_assert (BLOCK_TO_BB (bbi) < current_nr_blocks);
RESET_BIT (visited_bbs, BLOCK_TO_BB (bbi));
bitmap_clear_bit (visited_bbs, BLOCK_TO_BB (bbi));
}
}
else


@ -796,7 +796,7 @@ insert_store (struct st_expr * expr, edge e)
int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
gcc_assert (index != EDGE_INDEX_NO_EDGE);
if (! TEST_BIT (st_insert_map[index], expr->index))
if (! bitmap_bit_p (st_insert_map[index], expr->index))
break;
}
@ -807,7 +807,7 @@ insert_store (struct st_expr * expr, edge e)
FOR_EACH_EDGE (tmp, ei, e->dest->preds)
{
int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
RESET_BIT (st_insert_map[index], expr->index);
bitmap_clear_bit (st_insert_map[index], expr->index);
}
insert_insn_start_basic_block (insn, bb);
return 0;
@ -867,16 +867,16 @@ remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
bb = act->dest;
if (bb == EXIT_BLOCK_PTR
|| TEST_BIT (visited, bb->index))
|| bitmap_bit_p (visited, bb->index))
{
if (!ei_end_p (ei))
ei_next (&ei);
act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
continue;
}
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
if (TEST_BIT (st_antloc[bb->index], smexpr->index))
if (bitmap_bit_p (st_antloc[bb->index], smexpr->index))
{
for (last = smexpr->antic_stores;
BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
@ -1030,7 +1030,7 @@ build_store_vectors (void)
we can delete this one (It occurs earlier in the block). We'll
copy the SRC expression to an unused register in case there
are any side effects. */
if (TEST_BIT (st_avloc[bb->index], ptr->index))
if (bitmap_bit_p (st_avloc[bb->index], ptr->index))
{
rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
if (dump_file)
@ -1038,14 +1038,14 @@ build_store_vectors (void)
replace_store_insn (r, XEXP (st, 0), bb, ptr);
continue;
}
SET_BIT (st_avloc[bb->index], ptr->index);
bitmap_set_bit (st_avloc[bb->index], ptr->index);
}
for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
{
insn = XEXP (st, 0);
bb = BLOCK_FOR_INSN (insn);
SET_BIT (st_antloc[bb->index], ptr->index);
bitmap_set_bit (st_antloc[bb->index], ptr->index);
}
}
@ -1079,12 +1079,12 @@ build_store_vectors (void)
{
/* It should not be necessary to consider the expression
killed if it is both anticipatable and available. */
if (!TEST_BIT (st_antloc[bb->index], ptr->index)
|| !TEST_BIT (st_avloc[bb->index], ptr->index))
SET_BIT (st_kill[bb->index], ptr->index);
if (!bitmap_bit_p (st_antloc[bb->index], ptr->index)
|| !bitmap_bit_p (st_avloc[bb->index], ptr->index))
bitmap_set_bit (st_kill[bb->index], ptr->index);
}
else
SET_BIT (st_transp[bb->index], ptr->index);
bitmap_set_bit (st_transp[bb->index], ptr->index);
}
}
@ -1164,7 +1164,7 @@ one_store_motion_pass (void)
/* If any of the edges we have above are abnormal, we can't move this
store. */
for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
if (TEST_BIT (st_insert_map[x], ptr->index)
if (bitmap_bit_p (st_insert_map[x], ptr->index)
&& (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
break;
@ -1181,14 +1181,14 @@ one_store_motion_pass (void)
/* Now we want to insert the new stores which are going to be needed. */
FOR_EACH_BB (bb)
if (TEST_BIT (st_delete_map[bb->index], ptr->index))
if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
{
delete_store (ptr, bb);
n_stores_deleted++;
}
for (x = 0; x < NUM_EDGES (edge_list); x++)
if (TEST_BIT (st_insert_map[x], ptr->index))
if (bitmap_bit_p (st_insert_map[x], ptr->index))
{
did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
n_stores_created++;


@ -74,13 +74,13 @@ mark_bb_seen (basic_block bb)
if ((unsigned int)bb->index >= size)
bb_seen = sbitmap_resize (bb_seen, size * 2, 0);
SET_BIT (bb_seen, bb->index);
bitmap_set_bit (bb_seen, bb->index);
}
static inline bool
bb_seen_p (basic_block bb)
{
return TEST_BIT (bb_seen, bb->index);
return bitmap_bit_p (bb_seen, bb->index);
}
/* Return true if we should ignore the basic block for purposes of tracing. */


@ -7174,7 +7174,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
if (! blocks)
check_last_block = true;
else
check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
@ -7222,7 +7222,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
if (!bb)
continue;
if (blocks && !TEST_BIT (blocks, i))
if (blocks && !bitmap_bit_p (blocks, i))
continue;
gsi = gsi_last_nondebug_bb (bb);


@ -3551,25 +3551,25 @@ remove_unreachable_handlers (void)
/* Negative LP numbers are MUST_NOT_THROW regions which
are not considered BB enders. */
if (lp_nr < 0)
SET_BIT (r_reachable, -lp_nr);
bitmap_set_bit (r_reachable, -lp_nr);
/* Positive LP numbers are real landing pads, and are BB enders. */
else if (lp_nr > 0)
{
gcc_assert (gsi_one_before_end_p (gsi));
region = get_eh_region_from_lp_number (lp_nr);
SET_BIT (r_reachable, region->index);
SET_BIT (lp_reachable, lp_nr);
bitmap_set_bit (r_reachable, region->index);
bitmap_set_bit (lp_reachable, lp_nr);
}
/* Avoid removing regions referenced from RESX/EH_DISPATCH. */
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
SET_BIT (r_reachable, gimple_resx_region (stmt));
bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
break;
case GIMPLE_EH_DISPATCH:
SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
break;
default:
break;
@ -3589,7 +3589,7 @@ remove_unreachable_handlers (void)
for (r_nr = 1;
VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
if (region && !TEST_BIT (r_reachable, r_nr))
if (region && !bitmap_bit_p (r_reachable, r_nr))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
@ -3598,7 +3598,7 @@ remove_unreachable_handlers (void)
for (lp_nr = 1;
VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
if (lp && !TEST_BIT (lp_reachable, lp_nr))
if (lp && !bitmap_bit_p (lp_reachable, lp_nr))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
@ -3666,10 +3666,10 @@ remove_unreachable_handlers_no_lp (void)
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
SET_BIT (r_reachable, gimple_resx_region (stmt));
bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
break;
case GIMPLE_EH_DISPATCH:
SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
break;
default:
break;
@ -3678,7 +3678,7 @@ remove_unreachable_handlers_no_lp (void)
for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
&& !TEST_BIT (r_reachable, i))
&& !bitmap_bit_p (r_reachable, i))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable region %d\n", i);


@ -541,7 +541,7 @@ is_old_name (tree name)
if (!new_ssa_names)
return false;
return (ver < SBITMAP_SIZE (new_ssa_names)
&& TEST_BIT (old_ssa_names, ver));
&& bitmap_bit_p (old_ssa_names, ver));
}
@ -554,7 +554,7 @@ is_new_name (tree name)
if (!new_ssa_names)
return false;
return (ver < SBITMAP_SIZE (new_ssa_names)
&& TEST_BIT (new_ssa_names, ver));
&& bitmap_bit_p (new_ssa_names, ver));
}
@ -610,8 +610,8 @@ add_new_name_mapping (tree new_tree, tree old)
/* Register NEW_TREE and OLD in NEW_SSA_NAMES and OLD_SSA_NAMES,
respectively. */
SET_BIT (new_ssa_names, SSA_NAME_VERSION (new_tree));
SET_BIT (old_ssa_names, SSA_NAME_VERSION (old));
bitmap_set_bit (new_ssa_names, SSA_NAME_VERSION (new_tree));
bitmap_set_bit (old_ssa_names, SSA_NAME_VERSION (old));
}
@ -653,7 +653,7 @@ mark_def_sites (basic_block bb, gimple stmt, bitmap kills)
set_rewrite_uses (stmt, true);
}
if (rewrite_uses_p (stmt))
SET_BIT (interesting_blocks, bb->index);
bitmap_set_bit (interesting_blocks, bb->index);
return;
}
@ -681,7 +681,7 @@ mark_def_sites (basic_block bb, gimple stmt, bitmap kills)
/* If we found the statement interesting then also mark the block BB
as interesting. */
if (rewrite_uses_p (stmt) || register_defs_p (stmt))
SET_BIT (interesting_blocks, bb->index);
bitmap_set_bit (interesting_blocks, bb->index);
}
/* Structure used by prune_unused_phi_nodes to record bounds of the intervals
@ -1402,7 +1402,7 @@ rewrite_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Step 2. Rewrite every variable used in each statement in the block
with its immediate reaching definitions. Update the current definition
of a variable when a new real or virtual definition is found. */
if (TEST_BIT (interesting_blocks, bb->index))
if (bitmap_bit_p (interesting_blocks, bb->index))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
rewrite_stmt (&gsi);
@ -2114,7 +2114,7 @@ rewrite_update_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
/* Step 2. Rewrite every variable used in each statement in the block. */
if (TEST_BIT (interesting_blocks, bb->index))
if (bitmap_bit_p (interesting_blocks, bb->index))
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
@ -2668,7 +2668,7 @@ prepare_names_to_update (bool insert_phi_p)
want to replace existing instances. */
if (names_to_release)
EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
RESET_BIT (new_ssa_names, i);
bitmap_clear_bit (new_ssa_names, i);
/* First process names in NEW_SSA_NAMES. Otherwise, uses of old
names may be considered to be live-in on blocks that contain
@ -3276,7 +3276,7 @@ update_ssa (unsigned update_flags)
interesting_blocks = sbitmap_alloc (last_basic_block);
bitmap_clear (interesting_blocks);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_update, 0, i, bi)
SET_BIT (interesting_blocks, i);
bitmap_set_bit (interesting_blocks, i);
rewrite_blocks (start_bb, REWRITE_UPDATE);


@ -550,10 +550,10 @@ elim_forward (elim_graph g, int T)
int S;
source_location locus;
SET_BIT (g->visited, T);
bitmap_set_bit (g->visited, T);
FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
{
if (!TEST_BIT (g->visited, S))
if (!bitmap_bit_p (g->visited, S))
elim_forward (g, S);
});
VEC_safe_push (int, heap, g->stack, T);
@ -570,7 +570,7 @@ elim_unvisited_predecessor (elim_graph g, int T)
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
if (!TEST_BIT (g->visited, P))
if (!bitmap_bit_p (g->visited, P))
return 1;
});
return 0;
@ -584,10 +584,10 @@ elim_backward (elim_graph g, int T)
int P;
source_location locus;
SET_BIT (g->visited, T);
bitmap_set_bit (g->visited, T);
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
if (!TEST_BIT (g->visited, P))
if (!bitmap_bit_p (g->visited, P))
{
elim_backward (g, P);
insert_partition_copy_on_edge (g->e, P, T, locus);
@ -629,7 +629,7 @@ elim_create (elim_graph g, int T)
insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
if (!TEST_BIT (g->visited, P))
if (!bitmap_bit_p (g->visited, P))
{
elim_backward (g, P);
insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
@ -641,7 +641,7 @@ elim_create (elim_graph g, int T)
S = elim_graph_remove_succ_edge (g, T, &locus);
if (S != -1)
{
SET_BIT (g->visited, T);
bitmap_set_bit (g->visited, T);
insert_partition_copy_on_edge (g->e, T, S, locus);
}
}
@ -675,7 +675,7 @@ eliminate_phi (edge e, elim_graph g)
FOR_EACH_VEC_ELT (int, g->nodes, x, part)
{
if (!TEST_BIT (g->visited, part))
if (!bitmap_bit_p (g->visited, part))
elim_forward (g, part);
}
@ -683,7 +683,7 @@ eliminate_phi (edge e, elim_graph g)
while (VEC_length (int, g->stack) > 0)
{
x = VEC_pop (int, g->stack);
if (!TEST_BIT (g->visited, x))
if (!bitmap_bit_p (g->visited, x))
elim_create (g, x);
}
}


@ -214,7 +214,7 @@ mark_stmt_necessary (gimple stmt, bool add_to_worklist)
if (add_to_worklist)
VEC_safe_push (gimple, heap, worklist, stmt);
if (bb_contains_live_stmts && !is_gimple_debug (stmt))
SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
@ -229,14 +229,14 @@ mark_operand_necessary (tree op)
gcc_assert (op);
ver = SSA_NAME_VERSION (op);
if (TEST_BIT (processed, ver))
if (bitmap_bit_p (processed, ver))
{
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (gimple_nop_p (stmt)
|| gimple_plf (stmt, STMT_NECESSARY));
return;
}
SET_BIT (processed, ver);
bitmap_set_bit (processed, ver);
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (stmt);
@ -254,7 +254,7 @@ mark_operand_necessary (tree op)
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (bb_contains_live_stmts)
SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
VEC_safe_push (gimple, heap, worklist, stmt);
}
@ -386,8 +386,8 @@ mark_last_stmt_necessary (basic_block bb)
{
gimple stmt = last_stmt (bb);
SET_BIT (last_stmt_necessary, bb->index);
SET_BIT (bb_contains_live_stmts, bb->index);
bitmap_set_bit (last_stmt_necessary, bb->index);
bitmap_set_bit (bb_contains_live_stmts, bb->index);
/* We actually mark the statement only if it is a control statement. */
if (stmt && is_ctrl_stmt (stmt))
@ -423,12 +423,12 @@ mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
continue;
}
if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
mark_last_stmt_necessary (cd_bb);
}
if (!skipped)
SET_BIT (visited_control_parents, bb->index);
bitmap_set_bit (visited_control_parents, bb->index);
}
@ -617,7 +617,7 @@ mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
/* We have to skip already visited (and thus necessary) statements
to make the chaining work after we dropped back to simple mode. */
if (chain_ovfl
&& TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
&& bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
{
gcc_assert (gimple_nop_p (def_stmt)
|| gimple_plf (def_stmt, STMT_NECESSARY));
@ -713,7 +713,7 @@ propagate_necessity (struct edge_list *el)
already done so. */
basic_block bb = gimple_bb (stmt);
if (bb != ENTRY_BLOCK_PTR
&& !TEST_BIT (visited_control_parents, bb->index))
&& !bitmap_bit_p (visited_control_parents, bb->index))
mark_control_dependent_edges_necessary (bb, el, false);
}
@ -815,11 +815,11 @@ propagate_necessity (struct edge_list *el)
if (gimple_bb (stmt)
!= get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
{
if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
mark_last_stmt_necessary (arg_bb);
}
else if (arg_bb != ENTRY_BLOCK_PTR
&& !TEST_BIT (visited_control_parents,
&& !bitmap_bit_p (visited_control_parents,
arg_bb->index))
mark_control_dependent_edges_necessary (arg_bb, el, true);
}
@ -1342,7 +1342,7 @@ eliminate_unnecessary_stmts (void)
call (); saving one operand. */
if (name
&& TREE_CODE (name) == SSA_NAME
&& !TEST_BIT (processed, SSA_NAME_VERSION (name))
&& !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
/* Avoid doing so for allocation calls which we
did not mark as necessary, it will confuse the
special logic we apply to malloc/free pair removal. */
@ -1387,7 +1387,7 @@ eliminate_unnecessary_stmts (void)
{
prev_bb = bb->prev_bb;
if (!TEST_BIT (bb_contains_live_stmts, bb->index)
if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
|| !(bb->flags & BB_REACHABLE))
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))


@ -968,9 +968,9 @@ loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
edge_iterator ei;
basic_block pred_bb;
bitmap loe;
gcc_assert (!TEST_BIT (visited, bb->index));
gcc_assert (!bitmap_bit_p (visited, bb->index));
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
loe = live_on_entry (live, bb);
FOR_EACH_EDGE (e, ei, bb->preds)
@ -988,9 +988,9 @@ loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
changes, and pred_bb has been visited already, add it to the
revisit stack. */
change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
if (TEST_BIT (visited, pred_bb->index) && change)
if (bitmap_bit_p (visited, pred_bb->index) && change)
{
RESET_BIT (visited, pred_bb->index);
bitmap_clear_bit (visited, pred_bb->index);
*(live->stack_top)++ = pred_bb->index;
}
}


@ -2523,7 +2523,7 @@ fill_always_executed_in (struct loop *loop, sbitmap contains_call)
if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
last = bb;
if (TEST_BIT (contains_call, bb->index))
if (bitmap_bit_p (contains_call, bb->index))
break;
FOR_EACH_EDGE (e, ei, bb->succs)
@ -2588,7 +2588,7 @@ tree_ssa_lim_initialize (void)
}
if (!gsi_end_p (bsi))
SET_BIT (contains_call, bb->index);
bitmap_set_bit (contains_call, bb->index);
}
for (loop = current_loops->tree_root->inner; loop; loop = loop->next)


@ -521,7 +521,7 @@ try_unroll_loop_completely (struct loop *loop,
initialize_original_copy_tables ();
wont_exit = sbitmap_alloc (n_unroll + 1);
bitmap_ones (wont_exit);
RESET_BIT (wont_exit, 0);
bitmap_clear_bit (wont_exit, 0);
if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
n_unroll, wont_exit,


@ -3082,14 +3082,14 @@ multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
{
XEXP (addr, 1) = gen_int_mode (i, address_mode);
if (memory_address_addr_space_p (mode, addr, as))
SET_BIT (valid_mult, i + MAX_RATIO);
bitmap_set_bit (valid_mult, i + MAX_RATIO);
}
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " allowed multipliers:");
for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
if (TEST_BIT (valid_mult, i + MAX_RATIO))
if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
fprintf (dump_file, " %d", (int) i);
fprintf (dump_file, "\n");
fprintf (dump_file, "\n");
@ -3101,7 +3101,7 @@ multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
return false;
return TEST_BIT (valid_mult, ratio + MAX_RATIO);
return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
/* Returns cost of address in shape symbol + var + OFFSET + RATIO * index.


@ -1173,7 +1173,7 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
last one. */
wont_exit = sbitmap_alloc (factor);
bitmap_ones (wont_exit);
RESET_BIT (wont_exit, factor - 1);
bitmap_clear_bit (wont_exit, factor - 1);
ok = gimple_duplicate_loop_to_header_edge
(loop, loop_latch_edge (loop), factor - 1,


@ -479,8 +479,8 @@ blocks_in_phiopt_order (void)
unsigned np, i;
sbitmap visited = sbitmap_alloc (last_basic_block);
#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
bitmap_clear (visited);


@ -2119,7 +2119,7 @@ defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
{
if (!BB_VISITED (phiblock))
{
SET_BIT (changed_blocks, block->index);
bitmap_set_bit (changed_blocks, block->index);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 1;
return false;
@ -2215,7 +2215,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
/* Of multiple successors we have to have visited one already. */
if (!first)
{
SET_BIT (changed_blocks, block->index);
bitmap_set_bit (changed_blocks, block->index);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 1;
changed = true;
@ -2265,12 +2265,12 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
if (!bitmap_set_equal (old, ANTIC_IN (block)))
{
changed = true;
SET_BIT (changed_blocks, block->index);
bitmap_set_bit (changed_blocks, block->index);
FOR_EACH_EDGE (e, ei, block->preds)
SET_BIT (changed_blocks, e->src->index);
bitmap_set_bit (changed_blocks, e->src->index);
}
else
RESET_BIT (changed_blocks, block->index);
bitmap_clear_bit (changed_blocks, block->index);
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
@ -2422,12 +2422,12 @@ compute_partial_antic_aux (basic_block block,
if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
{
changed = true;
SET_BIT (changed_blocks, block->index);
bitmap_set_bit (changed_blocks, block->index);
FOR_EACH_EDGE (e, ei, block->preds)
SET_BIT (changed_blocks, e->src->index);
bitmap_set_bit (changed_blocks, e->src->index);
}
else
RESET_BIT (changed_blocks, block->index);
bitmap_clear_bit (changed_blocks, block->index);
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
@ -2469,7 +2469,7 @@ compute_antic (void)
e->flags &= ~EDGE_DFS_BACK;
if (e->flags & EDGE_ABNORMAL)
{
SET_BIT (has_abnormal_preds, block->index);
bitmap_set_bit (has_abnormal_preds, block->index);
break;
}
}
@ -2499,11 +2499,11 @@ compute_antic (void)
changed = false;
for (i = postorder_num - 1; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
if (bitmap_bit_p (changed_blocks, postorder[i]))
{
basic_block block = BASIC_BLOCK (postorder[i]);
changed |= compute_antic_aux (block,
TEST_BIT (has_abnormal_preds,
bitmap_bit_p (has_abnormal_preds,
block->index));
}
}
@ -2528,12 +2528,12 @@ compute_antic (void)
changed = false;
for (i = postorder_num - 1 ; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
if (bitmap_bit_p (changed_blocks, postorder[i]))
{
basic_block block = BASIC_BLOCK (postorder[i]);
changed
|= compute_partial_antic_aux (block,
TEST_BIT (has_abnormal_preds,
bitmap_bit_p (has_abnormal_preds,
block->index));
}
}


@ -177,7 +177,7 @@ cfg_blocks_add (basic_block bb)
bool head = false;
gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
gcc_assert (!TEST_BIT (bb_in_list, bb->index));
gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));
if (cfg_blocks_empty_p ())
{
@ -218,7 +218,7 @@ cfg_blocks_add (basic_block bb)
VEC_replace (basic_block, cfg_blocks,
head ? cfg_blocks_head : cfg_blocks_tail,
bb);
SET_BIT (bb_in_list, bb->index);
bitmap_set_bit (bb_in_list, bb->index);
}
@ -237,7 +237,7 @@ cfg_blocks_get (void)
cfg_blocks_head = ((cfg_blocks_head + 1)
% VEC_length (basic_block, cfg_blocks));
--cfg_blocks_num;
RESET_BIT (bb_in_list, bb->index);
bitmap_clear_bit (bb_in_list, bb->index);
return bb;
}
@ -286,7 +286,7 @@ add_control_edge (edge e)
e->flags |= EDGE_EXECUTABLE;
/* If the block is already in the list, we're done. */
if (TEST_BIT (bb_in_list, bb->index))
if (bitmap_bit_p (bb_in_list, bb->index))
return;
cfg_blocks_add (bb);
@ -390,7 +390,7 @@ process_ssa_edge_worklist (VEC(gimple,gc) **worklist)
the destination block is executable. Otherwise, visit the
statement only if its block is marked executable. */
if (gimple_code (stmt) == GIMPLE_PHI
|| TEST_BIT (executable_blocks, bb->index))
|| bitmap_bit_p (executable_blocks, bb->index))
simulate_stmt (stmt);
}
}
@ -418,7 +418,7 @@ simulate_block (basic_block block)
/* If this is the first time we've simulated this block, then we
must simulate each of its statements. */
if (!TEST_BIT (executable_blocks, block->index))
if (!bitmap_bit_p (executable_blocks, block->index))
{
gimple_stmt_iterator j;
unsigned int normal_edge_count;
@ -426,7 +426,7 @@ simulate_block (basic_block block)
edge_iterator ei;
/* Note that we have simulated this block. */
SET_BIT (executable_blocks, block->index);
bitmap_set_bit (executable_blocks, block->index);
for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
{


@ -1300,7 +1300,7 @@ undistribute_ops_list (enum tree_code opcode,
|| !is_reassociable_op (oe1def, dcode, loop))
continue;
SET_BIT (candidates, i);
bitmap_set_bit (candidates, i);
nr_candidates++;
}
@ -1410,7 +1410,7 @@ undistribute_ops_list (enum tree_code opcode,
{
if (oe1->op == c->op)
{
SET_BIT (candidates2, i);
bitmap_set_bit (candidates2, i);
++nr_candidates2;
break;
}


@ -1208,7 +1208,7 @@ build_pred_graph (void)
for (j = 0; j < FIRST_REF_NODE; j++)
{
if (!get_varinfo (j)->is_special_var)
SET_BIT (graph->direct_nodes, j);
bitmap_set_bit (graph->direct_nodes, j);
}
for (j = 0; j < graph->size; j++)
@ -1236,7 +1236,7 @@ build_pred_graph (void)
if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
else
RESET_BIT (graph->direct_nodes, lhsvar);
bitmap_clear_bit (graph->direct_nodes, lhsvar);
}
else if (rhs.type == ADDRESSOF)
{
@ -1255,14 +1255,14 @@ build_pred_graph (void)
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* All related variables are no longer direct nodes. */
RESET_BIT (graph->direct_nodes, rhsvar);
bitmap_clear_bit (graph->direct_nodes, rhsvar);
v = get_varinfo (rhsvar);
if (!v->is_full_var)
{
v = lookup_vi_for_tree (v->decl);
do
{
RESET_BIT (graph->direct_nodes, v->id);
bitmap_clear_bit (graph->direct_nodes, v->id);
v = v->next;
}
while (v != NULL);
@ -1281,9 +1281,9 @@ build_pred_graph (void)
else if (lhs.offset != 0 || rhs.offset != 0)
{
if (rhs.offset != 0)
RESET_BIT (graph->direct_nodes, lhs.var);
bitmap_clear_bit (graph->direct_nodes, lhs.var);
else if (lhs.offset != 0)
RESET_BIT (graph->direct_nodes, rhs.var);
bitmap_clear_bit (graph->direct_nodes, rhs.var);
}
}
}
@ -1339,7 +1339,7 @@ build_succ_graph (void)
t = find (storedanything_id);
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
{
if (!TEST_BIT (graph->direct_nodes, i)
if (!bitmap_bit_p (graph->direct_nodes, i)
&& get_varinfo (i)->may_have_pointers)
add_graph_edge (graph, find (i), t);
}
@ -1383,7 +1383,7 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
bitmap_iterator bi;
unsigned int my_dfs;
SET_BIT (si->visited, n);
bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
@ -1396,10 +1396,10 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
break;
w = find (i);
if (TEST_BIT (si->deleted, w))
if (bitmap_bit_p (si->deleted, w))
continue;
if (!TEST_BIT (si->visited, w))
if (!bitmap_bit_p (si->visited, w))
scc_visit (graph, si, w);
{
unsigned int t = find (w);
@ -1450,7 +1450,7 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
}
}
}
SET_BIT (si->deleted, n);
bitmap_set_bit (si->deleted, n);
}
else
VEC_safe_push (unsigned, heap, si->scc_stack, n);
@ -1559,12 +1559,12 @@ topo_visit (constraint_graph_t graph, struct topo_info *ti,
bitmap_iterator bi;
unsigned int j;
SET_BIT (ti->visited, n);
bitmap_set_bit (ti->visited, n);
if (graph->succs[n])
EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
{
if (!TEST_BIT (ti->visited, j))
if (!bitmap_bit_p (ti->visited, j))
topo_visit (graph, ti, j);
}
@ -1853,7 +1853,7 @@ find_indirect_cycles (constraint_graph_t graph)
struct scc_info *si = init_scc_info (size);
for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
if (!TEST_BIT (si->visited, i) && find (i) == i)
if (!bitmap_bit_p (si->visited, i) && find (i) == i)
scc_visit (graph, si, i);
free_scc_info (si);
@ -1870,7 +1870,7 @@ compute_topo_order (constraint_graph_t graph,
unsigned int size = graph->size;
for (i = 0; i != size; ++i)
if (!TEST_BIT (ti->visited, i) && find (i) == i)
if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
topo_visit (graph, ti, i);
}
@ -2014,7 +2014,7 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
unsigned int my_dfs;
gcc_assert (si->node_mapping[n] == n);
SET_BIT (si->visited, n);
bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
@ -2023,10 +2023,10 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
unsigned int w = si->node_mapping[i];
if (TEST_BIT (si->deleted, w))
if (bitmap_bit_p (si->deleted, w))
continue;
if (!TEST_BIT (si->visited, w))
if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
@ -2043,10 +2043,10 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
unsigned int w = si->node_mapping[i];
if (TEST_BIT (si->deleted, w))
if (bitmap_bit_p (si->deleted, w))
continue;
if (!TEST_BIT (si->visited, w))
if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
@ -2067,8 +2067,8 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
unsigned int w = VEC_pop (unsigned, si->scc_stack);
si->node_mapping[w] = n;
if (!TEST_BIT (graph->direct_nodes, w))
RESET_BIT (graph->direct_nodes, n);
if (!bitmap_bit_p (graph->direct_nodes, w))
bitmap_clear_bit (graph->direct_nodes, n);
/* Unify our nodes. */
if (graph->preds[w])
@ -2092,7 +2092,7 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
graph->points_to[w]);
}
}
SET_BIT (si->deleted, n);
bitmap_set_bit (si->deleted, n);
}
else
VEC_safe_push (unsigned, heap, si->scc_stack, n);
@ -2105,7 +2105,7 @@ label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
unsigned int i;
bitmap_iterator bi;
SET_BIT (si->visited, n);
bitmap_set_bit (si->visited, n);
if (!graph->points_to[n])
graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
@ -2114,7 +2114,7 @@ label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
{
unsigned int w = si->node_mapping[i];
if (!TEST_BIT (si->visited, w))
if (!bitmap_bit_p (si->visited, w))
label_visit (graph, si, w);
/* Skip unused edges */
@ -2125,7 +2125,7 @@ label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
}
/* Indirect nodes get fresh variables. */
if (!TEST_BIT (graph->direct_nodes, n))
if (!bitmap_bit_p (graph->direct_nodes, n))
bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
if (!bitmap_empty_p (graph->points_to[n]))
@ -2163,13 +2163,13 @@ perform_var_substitution (constraint_graph_t graph)
/* Condense the nodes, which means to find SCC's, count incoming
predecessors, and unite nodes in SCC's. */
for (i = 0; i < FIRST_REF_NODE; i++)
if (!TEST_BIT (si->visited, si->node_mapping[i]))
if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
condense_visit (graph, si, si->node_mapping[i]);
bitmap_clear (si->visited);
/* Actually the label the nodes for pointer equivalences */
for (i = 0; i < FIRST_REF_NODE; i++)
if (!TEST_BIT (si->visited, si->node_mapping[i]))
if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
label_visit (graph, si, si->node_mapping[i]);
/* Calculate location equivalence labels. */
@ -2218,7 +2218,7 @@ perform_var_substitution (constraint_graph_t graph)
if (dump_file && (dump_flags & TDF_DETAILS))
for (i = 0; i < FIRST_REF_NODE; i++)
{
bool direct_node = TEST_BIT (graph->direct_nodes, i);
bool direct_node = bitmap_bit_p (graph->direct_nodes, i);
fprintf (dump_file,
"Equivalence classes for %s node id %d:%s are pointer: %d"
", location:%d\n",

View File

@ -91,9 +91,9 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
gcc_assert (src != ENTRY_BLOCK_PTR);
if (! TEST_BIT (visited, src->index))
if (! bitmap_bit_p (visited, src->index))
{
SET_BIT (visited, src->index);
bitmap_set_bit (visited, src->index);
FOR_EACH_EDGE (e, ei, src->preds)
VEC_safe_push (edge, heap, stack, e);
}

View File

@ -1293,18 +1293,18 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
for (k = 0; k < group_size; k++)
{
first_group_load_index = VEC_index (int, load_permutation, k);
if (TEST_BIT (load_index, first_group_load_index))
if (bitmap_bit_p (load_index, first_group_load_index))
{
bad_permutation = true;
break;
}
SET_BIT (load_index, first_group_load_index);
bitmap_set_bit (load_index, first_group_load_index);
}
if (!bad_permutation)
for (k = 0; k < group_size; k++)
if (!TEST_BIT (load_index, k))
if (!bitmap_bit_p (load_index, k))
{
bad_permutation = true;
break;
@ -1423,17 +1423,17 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
prev = next;
}
if (TEST_BIT (load_index, prev))
if (bitmap_bit_p (load_index, prev))
{
supported = false;
break;
}
SET_BIT (load_index, prev);
bitmap_set_bit (load_index, prev);
}
for (j = 0; j < group_size; j++)
if (!TEST_BIT (load_index, j))
if (!bitmap_bit_p (load_index, j))
return false;
sbitmap_free (load_index);

View File

@ -86,7 +86,7 @@ static bool
live_on_edge (edge e, tree name)
{
return (live[e->dest->index]
&& TEST_BIT (live[e->dest->index], SSA_NAME_VERSION (name)));
&& bitmap_bit_p (live[e->dest->index], SSA_NAME_VERSION (name)));
}
/* Local functions. */
@ -5556,7 +5556,7 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
/* If op is not live beyond this stmt, do not bother to insert
asserts for it. */
if (!TEST_BIT (live, SSA_NAME_VERSION (op)))
if (!bitmap_bit_p (live, SSA_NAME_VERSION (op)))
continue;
/* If OP is used in such a way that we can infer a value
@ -5604,9 +5604,9 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
/* Update live. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
SET_BIT (live, SSA_NAME_VERSION (op));
bitmap_set_bit (live, SSA_NAME_VERSION (op));
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_DEF)
RESET_BIT (live, SSA_NAME_VERSION (op));
bitmap_clear_bit (live, SSA_NAME_VERSION (op));
}
/* Traverse all PHI nodes in BB, updating live. */
@ -5624,10 +5624,10 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
{
tree arg = USE_FROM_PTR (arg_p);
if (TREE_CODE (arg) == SSA_NAME)
SET_BIT (live, SSA_NAME_VERSION (arg));
bitmap_set_bit (live, SSA_NAME_VERSION (arg));
}
RESET_BIT (live, SSA_NAME_VERSION (res));
bitmap_clear_bit (live, SSA_NAME_VERSION (res));
}
return need_assert;

View File

@ -6768,15 +6768,15 @@ vt_find_locations (void)
while (!fibheap_empty (worklist))
{
bb = (basic_block) fibheap_extract_min (worklist);
RESET_BIT (in_worklist, bb->index);
gcc_assert (!TEST_BIT (visited, bb->index));
if (!TEST_BIT (visited, bb->index))
bitmap_clear_bit (in_worklist, bb->index);
gcc_assert (!bitmap_bit_p (visited, bb->index));
if (!bitmap_bit_p (visited, bb->index))
{
bool changed;
edge_iterator ei;
int oldinsz, oldoutsz;
SET_BIT (visited, bb->index);
bitmap_set_bit (visited, bb->index);
if (VTI (bb)->in.vars)
{
@ -6869,21 +6869,21 @@ vt_find_locations (void)
if (e->dest == EXIT_BLOCK_PTR)
continue;
if (TEST_BIT (visited, e->dest->index))
if (bitmap_bit_p (visited, e->dest->index))
{
if (!TEST_BIT (in_pending, e->dest->index))
if (!bitmap_bit_p (in_pending, e->dest->index))
{
/* Send E->DEST to next round. */
SET_BIT (in_pending, e->dest->index);
bitmap_set_bit (in_pending, e->dest->index);
fibheap_insert (pending,
bb_order[e->dest->index],
e->dest);
}
}
else if (!TEST_BIT (in_worklist, e->dest->index))
else if (!bitmap_bit_p (in_worklist, e->dest->index))
{
/* Add E->DEST to current round. */
SET_BIT (in_worklist, e->dest->index);
bitmap_set_bit (in_worklist, e->dest->index);
fibheap_insert (worklist, bb_order[e->dest->index],
e->dest);
}