Whitespace fixups

From-SVN: r113893
Mike Stump 2006-05-18 22:16:23 +00:00
parent e836620206
commit c22cacf346
54 changed files with 1496 additions and 1498 deletions


@ -49,7 +49,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
/* The aliasing API provided here solves related but different problems:
Say there exists (in C)
struct X {
struct Y y1;
@ -87,7 +87,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
this may be too conservative for some C++ types.
The pass ipa-type-escape does this analysis for the types whose
instances do not escape across the compilation boundary.
Historically in GCC, these two problems were combined and a single
data structure was used to represent the solution to these
@ -109,11 +109,11 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
`double'. (However, a store to an `int' cannot alias a `double'
and vice versa.) We indicate this via a tree structure that looks
like:
struct S
  /   \
 /     \
|/_   _\|
int    double
(The arrows are directed and point downwards.)
In this situation we say the alias set for `struct S' is the
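A hand-written C illustration of the lattice above (a sketch, not part of the patch): accesses through int * and double * may each alias a struct S object, but can never alias each other.

    struct S { int i; double d; };

    double
    f (struct S *s, int *ip, double *dp)
    {
      s->d = 1.0;   /* May alias *dp: double's alias set is below S's. */
      *ip = 2;      /* May alias s->i, but never *dp or s->d. */
      return s->d;  /* The compiler may reuse the 1.0 stored above, since
                       the intervening int store cannot alias a double. */
    }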
@ -730,7 +730,7 @@ record_component_aliases (tree type)
{
int i;
tree binfo, base_binfo;
for (binfo = TYPE_BINFO (type), i = 0;
BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
record_alias_subset (superset,
@ -1023,7 +1023,7 @@ record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
If neither case holds, reject the original base value as invalid.
Note that the following situation is not detected:
extern int x, y; int *p = &x; p += (&y-&x);
ANSI C does not allow computing the difference of addresses
of distinct top level objects. */
@ -1091,7 +1091,7 @@ clear_reg_alias_info (rtx reg)
/* If a value is known for REGNO, return it. */
rtx
get_reg_known_value (unsigned int regno)
{
if (regno >= FIRST_PSEUDO_REGISTER)
@ -1620,7 +1620,7 @@ addr_side_effect_eval (rtx addr, int size, int n_refs)
if (offset)
addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
GEN_INT (offset));
else
addr = XEXP (addr, 0);
addr = canon_rtx (addr);
@ -2001,14 +2001,14 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
/* Unless both have exprs, we can't tell anything. */
if (exprx == 0 || expry == 0)
return 0;
/* If both are field references, we may be able to determine something. */
if (TREE_CODE (exprx) == COMPONENT_REF
&& TREE_CODE (expry) == COMPONENT_REF
&& nonoverlapping_component_refs_p (exprx, expry))
return 1;
/* If the field reference test failed, look at the DECLs involved. */
moffsetx = MEM_OFFSET (x);
if (TREE_CODE (exprx) == COMPONENT_REF)
@ -2020,7 +2020,7 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
tree fieldcontext = DECL_FIELD_CONTEXT (field);
if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
TREE_TYPE (field)))
return 1;
}
{
tree t = decl_for_component_ref (exprx);
@ -2048,7 +2048,7 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
tree fieldcontext = DECL_FIELD_CONTEXT (field);
if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
TREE_TYPE (field)))
return 1;
}
{
tree t = decl_for_component_ref (expry);
@ -2506,8 +2506,8 @@ init_alias_analysis (void)
#endif
/* If this insn has a noalias note, process it. Otherwise,
scan for sets. A simple set will have no side effects
which could change the base value of any other register. */
if (GET_CODE (PATTERN (insn)) == SET
&& REG_NOTES (insn) != 0
@ -2564,7 +2564,7 @@ init_alias_analysis (void)
/* Now propagate values from new_reg_base_value to reg_base_value. */
gcc_assert (maxreg == (unsigned int) max_reg_num());
for (ui = 0; ui < maxreg; ui++)
{
if (new_reg_base_value[ui]


@ -103,7 +103,7 @@ alloc_pool_descriptor (const char *name)
slot = (struct alloc_pool_descriptor **)
htab_find_slot_with_hash (alloc_pool_hash, name,
htab_hash_pointer (name),
1);
if (*slot)
return *slot;
@ -248,12 +248,12 @@ pool_alloc (alloc_pool pool)
/* Mark the element to be free. */
((allocation_object *) block)->id = 0;
#endif
header = (alloc_pool_list) USER_PTR_FROM_ALLOCATION_OBJECT_PTR (block);
header->next = pool->free_list;
pool->free_list = header;
}
/* Also update the number of elements we have free/allocated, and
increment the allocated block count. */
pool->elts_allocated += pool->elts_per_block;
pool->elts_free += pool->elts_per_block;
pool->blocks_allocated += 1;
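The loop feeding the code above carves a fresh block into elements and pushes each onto a singly linked free list. A minimal, self-contained sketch of that pattern (names invented for illustration; assumes elt_size >= sizeof (struct free_node)):

    #include <stddef.h>

    struct free_node { struct free_node *next; };

    struct tiny_pool { struct free_node *free_list; };

    static void
    tiny_pool_add_block (struct tiny_pool *pool, char *block,
                         size_t elt_size, size_t elts)
    {
      size_t i;
      for (i = 0; i < elts; i++)
        {
          /* Reinterpret each element as a list node and push it. */
          struct free_node *header
            = (struct free_node *) (block + i * elt_size);
          header->next = pool->free_list;
          pool->free_list = header;
        }
    }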


@ -77,21 +77,21 @@ init_attributes (void)
/* The name must not begin and end with __. */
const char *name = attribute_tables[i][j].name;
int len = strlen (name);
gcc_assert (!(name[0] == '_' && name[1] == '_'
&& name[len - 1] == '_' && name[len - 2] == '_'));
/* The minimum and maximum lengths must be consistent. */
gcc_assert (attribute_tables[i][j].min_length >= 0);
gcc_assert (attribute_tables[i][j].max_length == -1
|| (attribute_tables[i][j].max_length
>= attribute_tables[i][j].min_length));
/* An attribute cannot require both a DECL and a TYPE. */
gcc_assert (!attribute_tables[i][j].decl_required
|| !attribute_tables[i][j].type_required);
/* If an attribute requires a function type, in particular
it requires a type. */
gcc_assert (!attribute_tables[i][j].function_type_required
@ -227,8 +227,8 @@ decl_attributes (tree *node, tree attributes, int flags)
pull out the target type now, frob it as appropriate, and
rebuild the pointer type later.
This would all be simpler if attributes were part of the
declarator, grumble grumble. */
fn_ptr_tmp = TREE_TYPE (*anode);
anode = &fn_ptr_tmp;
flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
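For reference, the table entries being validated by the asserts above look roughly like this (a reduced, hand-written sketch; the field meanings are inferred from the checks, not copied from any frontend's real table):

    struct demo_attribute_spec
    {
      const char *name;           /* Must not both begin and end with "__". */
      int min_length;             /* Must be >= 0. */
      int max_length;             /* Either -1 (unbounded) or >= min_length. */
      int decl_required;          /* At most one of decl/type may be required. */
      int type_required;
      int function_type_required; /* Implies type_required. */
    };

    static const struct demo_attribute_spec demo_attrs[] =
    {
      { "noreturn", 0, 0, 1, 0, 0 },
      { "format",   3, 3, 0, 1, 1 },
    };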


@ -516,7 +516,7 @@ extern void brief_dump_cfg (FILE *);
extern void clear_edges (void);
extern rtx first_insn_after_basic_block_note (basic_block);
extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
gcov_type);
/* Structure to group all of the information to process IF-THEN and
@ -765,7 +765,7 @@ ei_cond (edge_iterator ei, edge *p)
an element might be removed during the traversal, otherwise
elements will be missed. Instead, use a for-loop like that shown
in the following pseudo-code:
FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
IF (e != taken_edge)
@ -970,7 +970,7 @@ extern void calculate_dominance_info (enum cdi_direction);
extern void free_dominance_info (enum cdi_direction);
extern basic_block nearest_common_dominator (enum cdi_direction,
basic_block, basic_block);
extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
bitmap);
extern void set_immediate_dominator (enum cdi_direction, basic_block,
basic_block);


@ -95,7 +95,7 @@
#define N_ROUNDS 5
/* Stubs in case we don't have a return insn.
We have to check at runtime too, not only at compile time. */
#ifndef HAVE_return
#define HAVE_return 0
@ -178,7 +178,7 @@ static void connect_traces (int, struct trace *);
static bool copy_bb_p (basic_block, int);
static int get_uncond_jump_length (void);
static bool push_to_next_round_p (basic_block, int, int, int, gcov_type);
static void find_rarely_executed_basic_blocks_and_crossing_edges (edge *,
int *,
int *);
static void add_labels_and_missing_jumps (edge *, int);
@ -205,14 +205,14 @@ push_to_next_round_p (basic_block bb, int round, int number_of_rounds,
there_exists_another_round = round < number_of_rounds - 1;
block_not_hot_enough = (bb->frequency < exec_th
|| bb->count < count_th
|| probably_never_executed_bb_p (bb));
if (there_exists_another_round
&& block_not_hot_enough)
return true;
else
return false;
}
@ -387,7 +387,7 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
/* Duplicate HEADER if it is a small block containing cond jump
in the end. */
if (any_condjump_p (BB_END (header)) && copy_bb_p (header, 0)
&& !find_reg_note (BB_END (header), REG_CROSSING_JUMP,
NULL_RTX))
copy_bb (header, single_succ_edge (prev_bb), prev_bb, trace_n);
}
@ -449,11 +449,11 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
fprintf (dump_file, "Getting bb %d\n", bb->index);
/* If the BB's frequency is too low send BB to the next round. When
partitioning hot/cold blocks into separate sections, make sure all
the cold blocks (and ONLY the cold blocks) go into the (extra) final
round. */
if (push_to_next_round_p (bb, round, number_of_rounds, exec_th,
count_th))
{
int key = bb_to_key (bb);
@ -491,7 +491,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
fprintf (dump_file, "Basic block %d was visited in trace %d\n",
bb->index, *n_traces - 1);
ends_in_call = block_ends_with_call_p (bb);
/* Select the successor that will be placed after BB. */
FOR_EACH_EDGE (e, ei, bb->succs)
@ -593,7 +593,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
the cold blocks (and only the cold blocks) all get
pushed to the last round of trace collection. */
if (push_to_next_round_p (e->dest, round,
number_of_rounds,
exec_th, count_th))
which_heap = new_heap;
@ -637,8 +637,8 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
best_edge->dest->index, bb->index);
}
bb->aux = best_edge->dest;
bbd[best_edge->dest->index].in_trace =
(*n_traces) - 1;
bb = rotate_loop (best_edge, trace, *n_traces);
}
}
@ -882,8 +882,8 @@ better_edge_p (basic_block bb, edge e, int prob, int freq, int best_prob,
non-crossing edges over crossing edges. */
if (!is_better_edge
&& flag_reorder_blocks_and_partition
&& cur_best_edge
&& (cur_best_edge->flags & EDGE_CROSSING)
&& !(e->flags & EDGE_CROSSING))
is_better_edge = true;
@ -919,7 +919,7 @@ connect_traces (int n_traces, struct trace *traces)
if (flag_reorder_blocks_and_partition)
for (i = 0; i < n_traces && !two_passes; i++)
if (BB_PARTITION (traces[0].first)
!= BB_PARTITION (traces[i].first))
two_passes = true;
@ -941,11 +941,11 @@ connect_traces (int n_traces, struct trace *traces)
else
current_partition = BB_HOT_PARTITION;
}
if (connected[t])
continue;
if (two_passes
&& BB_PARTITION (traces[t].first) != current_partition)
continue;
@ -1217,8 +1217,8 @@ get_uncond_jump_length (void)
cache locality). */
static void
find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
int *n_crossing_edges,
int *max_idx)
{
basic_block bb;
@ -1228,7 +1228,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
edge_iterator ei;
/* Mark which partition (hot/cold) each basic block belongs in. */
FOR_EACH_BB (bb)
{
if (probably_never_executed_bb_p (bb))
@ -1269,7 +1269,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
Convert any fall-through crossing edges (for blocks that do not contain
a jump) to unconditional jumps. */
static void
add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
{
int i;
@ -1278,34 +1278,34 @@ add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
rtx label;
rtx barrier;
rtx new_jump;
for (i=0; i < n_crossing_edges; i++)
{
if (crossing_edges[i])
{
src = crossing_edges[i]->src;
dest = crossing_edges[i]->dest;
/* Make sure dest has a label. */
if (dest && (dest != EXIT_BLOCK_PTR))
{
label = block_label (dest);
/* Make sure source block ends with a jump. */
if (src && (src != ENTRY_BLOCK_PTR))
{
if (!JUMP_P (BB_END (src)))
/* bb just falls through. */
{
/* make sure there's only one successor */
gcc_assert (single_succ_p (src));
/* Find label in dest block. */
label = block_label (dest);
new_jump = emit_jump_insn_after (gen_jump (label),
BB_END (src));
barrier = emit_barrier_after (new_jump);
JUMP_LABEL (new_jump) = label;
@ -1313,10 +1313,10 @@ add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
src->il.rtl->footer = unlink_insn_chain (barrier, barrier);
/* Mark edge as non-fallthru. */
crossing_edges[i]->flags &= ~EDGE_FALLTHRU;
} /* end: 'if (GET_CODE ... ' */
} /* end: 'if (src && src->index...' */
} /* end: 'if (dest && dest->index...' */
} /* end: 'if (crossing_edges[i]...' */
} /* end for loop */
}
@ -1329,7 +1329,7 @@ add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
unconditional jump (crossing edge) to the original fall through
destination. */
static void
fix_up_fall_thru_edges (void)
{
basic_block cur_bb;
@ -1344,7 +1344,7 @@ fix_up_fall_thru_edges (void)
rtx old_jump;
rtx fall_thru_label;
rtx barrier;
FOR_EACH_BB (cur_bb)
{
fall_thru = NULL;
@ -1354,114 +1354,114 @@ fix_up_fall_thru_edges (void)
succ1 = NULL;
if (EDGE_COUNT (cur_bb->succs) > 1)
succ2 = EDGE_SUCC (cur_bb, 1);
else
succ2 = NULL;
/* Find the fall-through edge. */
if (succ1
&& (succ1->flags & EDGE_FALLTHRU))
{
fall_thru = succ1;
cond_jump = succ2;
}
else if (succ2
&& (succ2->flags & EDGE_FALLTHRU))
{
fall_thru = succ2;
cond_jump = succ1;
}
if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR))
{
/* Check to see if the fall-thru edge is a crossing edge. */
if (fall_thru->flags & EDGE_CROSSING)
{
/* The fall_thru edge crosses; now check the cond jump edge, if
it exists. */
cond_jump_crosses = true;
invert_worked = 0;
old_jump = BB_END (cur_bb);
/* Find the jump instruction, if there is one. */
if (cond_jump)
{
if (!(cond_jump->flags & EDGE_CROSSING))
cond_jump_crosses = false;
/* We know the fall-thru edge crosses; if the cond
jump edge does NOT cross, and its destination is the
next block in the bb order, invert the jump
(i.e. fix it so the fall thru does not cross and
the cond jump does). */
if (!cond_jump_crosses
&& cur_bb->aux == cond_jump->dest)
{
/* Find label in fall_thru block. We've already added
any missing labels, so there must be one. */
fall_thru_label = block_label (fall_thru->dest);
if (old_jump && fall_thru_label)
invert_worked = invert_jump (old_jump,
fall_thru_label,0);
if (invert_worked)
{
fall_thru->flags &= ~EDGE_FALLTHRU;
cond_jump->flags |= EDGE_FALLTHRU;
update_br_prob_note (cur_bb);
e = fall_thru;
fall_thru = cond_jump;
cond_jump = e;
cond_jump->flags |= EDGE_CROSSING;
fall_thru->flags &= ~EDGE_CROSSING;
}
}
}
if (cond_jump_crosses || !invert_worked)
{
/* This is the case where both edges out of the basic
block are crossing edges. Here we will fix up the
fall through edge. The jump edge will be taken care
of later. */
new_bb = force_nonfallthru (fall_thru);
if (new_bb)
{
new_bb->aux = cur_bb->aux;
cur_bb->aux = new_bb;
/* Make sure new fall-through bb is in same
partition as bb it's falling through from. */
BB_COPY_PARTITION (new_bb, cur_bb);
single_succ_edge (new_bb)->flags |= EDGE_CROSSING;
}
/* Add barrier after new jump */
if (new_bb)
{
barrier = emit_barrier_after (BB_END (new_bb));
new_bb->il.rtl->footer = unlink_insn_chain (barrier,
barrier);
}
else
{
barrier = emit_barrier_after (BB_END (cur_bb));
cur_bb->il.rtl->footer = unlink_insn_chain (barrier,
barrier);
}
}
}
}
}
}
@ -1472,9 +1472,9 @@ fix_up_fall_thru_edges (void)
contain unconditional jumps to the same destination). */
static basic_block
find_jump_block (basic_block jump_dest)
{
basic_block source_bb = NULL;
edge e;
rtx insn;
edge_iterator ei;
@ -1483,13 +1483,13 @@ find_jump_block (basic_block jump_dest)
if (e->flags & EDGE_CROSSING)
{
basic_block src = e->src;
/* Check each predecessor to see if it has a label, and contains
only one executable instruction, which is an unconditional jump.
If so, we can use it. */
if (LABEL_P (BB_HEAD (src)))
for (insn = BB_HEAD (src);
!INSN_P (insn) && insn != NEXT_INSN (BB_END (src));
insn = NEXT_INSN (insn))
{
@ -1502,7 +1502,7 @@ find_jump_block (basic_block jump_dest)
break;
}
}
if (source_bb)
break;
}
@ -1536,7 +1536,7 @@ fix_crossing_conditional_branches (void)
rtx barrier;
last_bb = EXIT_BLOCK_PTR->prev_bb;
FOR_EACH_BB (cur_bb)
{
crossing_edge = NULL;
@ -1544,27 +1544,27 @@ fix_crossing_conditional_branches (void)
succ1 = EDGE_SUCC (cur_bb, 0);
else
succ1 = NULL;
if (EDGE_COUNT (cur_bb->succs) > 1)
succ2 = EDGE_SUCC (cur_bb, 1);
else
succ2 = NULL;
/* We already took care of fall-through edges, so only one successor
can be a crossing edge. */
if (succ1 && (succ1->flags & EDGE_CROSSING))
crossing_edge = succ1;
else if (succ2 && (succ2->flags & EDGE_CROSSING))
crossing_edge = succ2;
if (crossing_edge)
{
old_jump = BB_END (cur_bb);
/* Check to make sure the jump instruction is a
conditional jump. */
set_src = NULL_RTX;
if (any_condjump_p (old_jump))
@ -1587,89 +1587,89 @@ fix_crossing_conditional_branches (void)
old_label = XEXP (set_src, 2);
else if (GET_CODE (XEXP (set_src, 2)) == PC)
old_label = XEXP (set_src, 1);
/* Check to see if new bb for jumping to that dest has
already been created; if so, use it; if not, create
a new one. */
new_bb = find_jump_block (crossing_edge->dest);
if (new_bb)
new_label = block_label (new_bb);
else
{
/* Create new basic block to be dest for
conditional jump. */
new_bb = create_basic_block (NULL, NULL, last_bb);
new_bb->aux = last_bb->aux;
last_bb->aux = new_bb;
prev_bb = last_bb;
last_bb = new_bb;
/* Update register liveness information. */
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
prev_bb->il.rtl->global_live_at_end);
COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
prev_bb->il.rtl->global_live_at_end);
/* Put appropriate instructions in new bb. */
new_label = gen_label_rtx ();
emit_label_before (new_label, BB_HEAD (new_bb));
BB_HEAD (new_bb) = new_label;
if (GET_CODE (old_label) == LABEL_REF)
{
old_label = JUMP_LABEL (old_jump);
new_jump = emit_jump_insn_after (gen_jump
(old_label),
BB_END (new_bb));
}
else
{
gcc_assert (HAVE_return
&& GET_CODE (old_label) == RETURN);
new_jump = emit_jump_insn_after (gen_return (),
BB_END (new_bb));
}
barrier = emit_barrier_after (new_jump);
JUMP_LABEL (new_jump) = old_label;
new_bb->il.rtl->footer = unlink_insn_chain (barrier,
barrier);
/* Make sure new bb is in same partition as source
of conditional branch. */
BB_COPY_PARTITION (new_bb, cur_bb);
}
/* Make old jump branch to new bb. */
redirect_jump (old_jump, new_label, 0);
/* Remove crossing_edge as predecessor of 'dest'. */
dest = crossing_edge->dest;
redirect_edge_succ (crossing_edge, new_bb);
/* Make a new edge from new_bb to old dest; new edge
will be a successor for new_bb and a predecessor
for 'dest'. */
if (EDGE_COUNT (new_bb->succs) == 0)
new_edge = make_edge (new_bb, dest, 0);
else
new_edge = EDGE_SUCC (new_bb, 0);
crossing_edge->flags &= ~EDGE_CROSSING;
new_edge->flags |= EDGE_CROSSING;
}
}
}
}
}
@ -1699,7 +1699,7 @@ fix_crossing_unconditional_branches (void)
succ = EDGE_SUCC (cur_bb, 0);
/* Check to see if bb ends in a crossing (unconditional) jump. At
this point, no crossing jumps should be conditional. */
if (JUMP_P (last_insn)
&& (succ->flags & EDGE_CROSSING))
@ -1716,26 +1716,26 @@ fix_crossing_unconditional_branches (void)
/* We have found a "crossing" unconditional branch. Now
we must convert it to an indirect jump. First create
reference of label, as target for jump. */
label = JUMP_LABEL (last_insn);
label_addr = gen_rtx_LABEL_REF (Pmode, label);
LABEL_NUSES (label) += 1;
/* Get a register to use for the indirect jump. */
new_reg = gen_reg_rtx (Pmode);
/* Generate the indirect jump sequence. */
start_sequence ();
emit_move_insn (new_reg, label_addr);
emit_indirect_jump (new_reg);
indirect_jump_sequence = get_insns ();
end_sequence ();
/* Make sure every instruction in the new jump sequence has
its basic block set to be cur_bb. */
for (cur_insn = indirect_jump_sequence; cur_insn;
cur_insn = NEXT_INSN (cur_insn))
{
@ -1744,16 +1744,16 @@ fix_crossing_unconditional_branches (void)
if (JUMP_P (cur_insn))
jump_insn = cur_insn;
}
/* Insert the new (indirect) jump sequence immediately before
the unconditional jump, then delete the unconditional jump. */
emit_insn_before (indirect_jump_sequence, last_insn);
delete_insn (last_insn);
/* Make BB_END for cur_bb be the jump instruction (NOT the
barrier instruction at the end of the sequence...). */
BB_END (cur_bb) = jump_insn;
}
}
@ -1773,9 +1773,9 @@ add_reg_crossing_jump_notes (void)
FOR_EACH_EDGE (e, ei, bb->succs)
if ((e->flags & EDGE_CROSSING)
&& JUMP_P (BB_END (e->src)))
REG_NOTES (BB_END (e->src)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
NULL_RTX,
REG_NOTES (BB_END
(e->src)));
}
@ -1793,16 +1793,16 @@ add_reg_crossing_jump_notes (void)
sections are converted to indirect jumps.
The code for fixing up fall_thru edges that cross between hot and
cold basic blocks does so by creating new basic blocks containing
unconditional branches to the appropriate label in the "other"
section. The new basic block is then put in the same (hot or cold)
section as the original conditional branch, and the fall_thru edge
is modified to fall into the new basic block instead. By adding
this level of indirection we end up with only unconditional branches
crossing between hot and cold sections.
Conditional branches are dealt with by adding a level of indirection.
A new basic block is added in the same (hot/cold) section as the
conditional branch, and the conditional branch is retargeted to the
new basic block. The new basic block contains an unconditional branch
to the original target of the conditional branch (in the other section).
@ -1810,40 +1810,40 @@ add_reg_crossing_jump_notes (void)
Unconditional branches are dealt with by converting them into
indirect jumps. */
static void
fix_edges_for_rarely_executed_code (edge *crossing_edges,
int n_crossing_edges)
{
/* Make sure the source of any crossing edge ends in a jump and the
destination of any crossing edge has a label. */
add_labels_and_missing_jumps (crossing_edges, n_crossing_edges);
/* Convert all crossing fall_thru edges to non-crossing fall
thrus to unconditional jumps (that jump to the original fall
thru dest). */
fix_up_fall_thru_edges ();
/* If the architecture does not have conditional branches that can
span all of memory, convert crossing conditional branches into
crossing unconditional branches. */
if (!HAS_LONG_COND_BRANCH)
fix_crossing_conditional_branches ();
/* If the architecture does not have unconditional branches that
can span all of memory, convert crossing unconditional branches
into indirect jumps. Since adding an indirect jump also adds
a new register usage, update the register usage information as
well. */
if (!HAS_LONG_UNCOND_BRANCH)
{
fix_crossing_unconditional_branches ();
reg_scan (get_insns(), max_reg_num ());
}
add_reg_crossing_jump_notes ();
}
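A source-level analogy for the unconditional-branch case above, using GCC's computed-goto extension (an illustrative sketch, not the patch's RTL transformation): instead of branching directly to a possibly distant label, the label's address is loaded into a variable and jumped through indirectly, which can reach any address.

    int
    demo (int x)
    {
      void *target = &&cold;  /* Load the address of the far label. */
      if (x)
        goto *target;         /* Indirect jump: no range limitation. */
      return 0;
     cold:
      return 1;
    }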
@ -1860,7 +1860,7 @@ verify_hot_cold_block_grouping (void)
int err = 0;
bool switched_sections = false;
int current_partition = 0;
FOR_EACH_BB (bb)
{
if (!current_partition)
@ -1880,7 +1880,7 @@ verify_hot_cold_block_grouping (void)
}
}
}
gcc_assert(!err);
}
@ -1951,7 +1951,7 @@ insert_section_boundary_note (void)
basic_block bb;
rtx new_note;
int first_partition = 0;
if (flag_reorder_blocks_and_partition)
FOR_EACH_BB (bb)
{
@ -2061,7 +2061,7 @@ duplicate_computed_gotos (void)
bb->il.rtl->visited = 1;
/* BB must have one outgoing edge. That edge must not lead to
the exit block or the next block.
The destination must have more than one predecessor. */
if (!single_succ_p (bb)
|| single_succ (bb) == EXIT_BLOCK_PTR
@ -2169,26 +2169,26 @@ partition_hot_cold_basic_blocks (void)
edge *crossing_edges;
int n_crossing_edges;
int max_edges = 2 * last_basic_block;
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return;
crossing_edges = XCNEWVEC (edge, max_edges);
cfg_layout_initialize (0);
FOR_EACH_BB (cur_bb)
if (cur_bb->index >= NUM_FIXED_BLOCKS
&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
cur_bb->aux = cur_bb->next_bb;
find_rarely_executed_basic_blocks_and_crossing_edges (crossing_edges,
&n_crossing_edges,
&max_edges);
if (n_crossing_edges > 0)
fix_edges_for_rarely_executed_code (crossing_edges, n_crossing_edges);
free (crossing_edges);
cfg_layout_finalize();
@ -2231,7 +2231,7 @@ rest_of_handle_reorder_blocks (void)
but should not be terribly bad. */
if (changed && HAVE_conditional_execution)
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES);
/* Add NOTE_INSN_SWITCH_TEXT_SECTIONS notes. */
insert_section_boundary_note ();
@ -2264,8 +2264,8 @@ gate_handle_partition_blocks (void)
arises. */
return (flag_reorder_blocks_and_partition
&& !DECL_ONE_ONLY (current_function_decl)
&& !user_defined_section_attribute);
}
/* Partition hot and cold basic blocks. */
@ -2276,7 +2276,7 @@ rest_of_handle_partition_blocks (void)
partition_hot_cold_basic_blocks ();
allocate_reg_life_data ();
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
no_new_pseudos = 1;
return 0;
}


@ -50,7 +50,7 @@ static inline void
bitmap_elem_to_freelist (bitmap head, bitmap_element *elt)
{
bitmap_obstack *bit_obstack = head->obstack;
elt->next = NULL;
if (bit_obstack)
{
@ -89,7 +89,7 @@ bitmap_element_free (bitmap head, bitmap_element *elt)
head->current = next != 0 ? next : prev;
if (head->current)
head->indx = head->current->indx;
else
head->indx = 0;
}
bitmap_elem_to_freelist (head, elt);
@ -102,11 +102,11 @@ bitmap_element_allocate (bitmap head)
{
bitmap_element *element;
bitmap_obstack *bit_obstack = head->obstack;
if (bit_obstack)
{
element = bit_obstack->elements;
if (element)
/* Use up the inner list first before looking at the next
element of the outer list. */
@ -163,7 +163,7 @@ bitmap_elt_clear_from (bitmap head, bitmap_element *elt)
head->current = prev;
head->indx = prev->indx;
}
}
else
{
head->first = NULL;
@ -171,10 +171,10 @@ bitmap_elt_clear_from (bitmap head, bitmap_element *elt)
head->indx = 0;
}
/* Put the entire list onto the free list in one operation. */
if (bit_obstack)
{
elt->prev = bit_obstack->elements;
bit_obstack->elements = elt;
}
else
@ -222,7 +222,7 @@ bitmap_obstack_release (bitmap_obstack *bit_obstack)
{
if (!bit_obstack)
bit_obstack = &bitmap_default_obstack;
bit_obstack->elements = NULL;
bit_obstack->heads = NULL;
obstack_free (&bit_obstack->obstack, NULL);
@ -529,7 +529,7 @@ bitmap_bit_p (bitmap head, int bit)
#if GCC_VERSION < 3400
/* Table of number of set bits in a character, indexed by value of char. */
static unsigned char popcount_table[] =
{
0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,
1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
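The table maps every byte value to its population count, so counting the bits of a word takes one lookup per byte. A freestanding sketch of how such a table can be built and used (illustrative, not the patch's code):

    static unsigned char pop8[256];

    static void
    init_pop8 (void)
    {
      int i;
      for (i = 1; i < 256; i++)
        pop8[i] = (i & 1) + pop8[i / 2];  /* pop8[i/2] is already filled in. */
    }

    static unsigned
    popcount_word (unsigned long w)
    {
      unsigned count = 0;
      for (; w; w >>= 8)
        count += pop8[w & 0xff];  /* One table lookup per byte. */
      return count;
    }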
@ -571,13 +571,13 @@ bitmap_count_bits (bitmap a)
of BITMAP_WORD is not material. */
count += __builtin_popcountl (elt->bits[ix]);
#else
count += bitmap_popcount (elt->bits[ix]);
#endif
}
}
return count;
}
/* Return the bit number of the first set bit in the bitmap. The
@ -590,7 +590,7 @@ bitmap_first_set_bit (bitmap a)
unsigned bit_no;
BITMAP_WORD word;
unsigned ix;
gcc_assert (elt);
bit_no = elt->indx * BITMAP_ELEMENT_ALL_BITS;
for (ix = 0; ix != BITMAP_ELEMENT_WORDS; ix++)
@ -625,7 +625,7 @@ bitmap_first_set_bit (bitmap a)
word >>= 2, bit_no += 2;
if (!(word & 0x1))
word >>= 1, bit_no += 1;
gcc_assert (word & 1);
#endif
return bit_no;
@ -664,7 +664,7 @@ bitmap_and (bitmap dst, bitmap a, bitmap b)
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
@ -697,7 +697,7 @@ bitmap_and_into (bitmap a, bitmap b)
bitmap_element *b_elt = b->first;
bitmap_element *next;
if (a == b)
return;
while (a_elt && b_elt)
@ -746,7 +746,7 @@ bitmap_and_compl (bitmap dst, bitmap a, bitmap b)
bitmap_element *dst_prev = NULL;
gcc_assert (dst != a && dst != b);
if (a == b)
{
bitmap_clear (dst);
@ -777,7 +777,7 @@ bitmap_and_compl (bitmap dst, bitmap a, bitmap b)
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
@ -869,7 +869,7 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
to the result. If the current is less than first index, find the
next one. Otherwise, just set elt to be current. */
if (!elt)
{
if (head->current)
{
if (head->indx < first_index)
@ -878,7 +878,7 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
if (!elt)
return;
}
else
elt = head->current;
}
else
@ -895,11 +895,11 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
if (elt_start_bit >= start && elt_end_bit_plus1 <= end_bit_plus1)
/* Get rid of the entire elt and go to the next one. */
bitmap_element_free (head, elt);
else
{
/* Going to have to knock out some bits in this elt. */
unsigned int first_word_to_mod;
BITMAP_WORD first_mask;
unsigned int last_word_to_mod;
BITMAP_WORD last_mask;
unsigned int i;
@ -912,7 +912,7 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
first_word_to_mod = (start - elt_start_bit) / BITMAP_WORD_BITS;
/* This mask should have 1s in all bits >= start position. */
first_mask =
(((BITMAP_WORD) 1) << ((start % BITMAP_WORD_BITS))) - 1;
first_mask = ~first_mask;
}
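A worked example of the mask computation above (illustrative): if start % BITMAP_WORD_BITS is 3, then ((BITMAP_WORD) 1 << 3) - 1 is binary 0111, and its complement ...11111000 has 1s in exactly the bit positions >= 3, i.e. the bits of this first word that lie inside the range being cleared.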
@ -922,8 +922,8 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
first_word_to_mod = 0;
first_mask = 0;
first_mask = ~first_mask;
}
if (elt_end_bit_plus1 <= end_bit_plus1)
{
/* The last bit to turn off is beyond this elt. */
@ -934,11 +934,11 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
else
{
/* The last bit to turn off is inside to this elt. */
last_word_to_mod =
(end_bit_plus1 - elt_start_bit) / BITMAP_WORD_BITS;
/* The last mask should have 1s below the end bit. */
last_mask =
(((BITMAP_WORD) 1) << (((end_bit_plus1) % BITMAP_WORD_BITS))) - 1;
}
@ -967,7 +967,7 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
}
elt = next_elt;
}
if (elt)
{
head->current = elt;
@ -1053,7 +1053,7 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
bitmap_element *a_elt = a->first;
bitmap_element *b_elt = b->first;
bitmap_element *dst_prev = NULL;
bool changed = false;
gcc_assert (dst != a && dst != b);
@ -1063,7 +1063,7 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
{
/* Matching elts, generate A | B. */
unsigned ix;
if (!changed && dst_elt && dst_elt->indx == a_elt->indx)
{
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
@ -1082,12 +1082,12 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
dst_elt->bits[ix] = r;
}
}
@ -1115,7 +1115,7 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
if (!changed && dst_elt && dst_elt->indx == src->indx)
{
unsigned ix;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
if (src->bits[ix] != dst_elt->bits[ix])
{
@ -1128,11 +1128,11 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, src->indx);
else
dst_elt->indx = src->indx;
memcpy (dst_elt->bits, src->bits, sizeof (dst_elt->bits));
}
dst_prev = dst_elt;
dst_elt = dst_elt->next;
}
@ -1187,7 +1187,7 @@ bitmap_ior_into (bitmap a, bitmap b)
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
a_elt->bits[ix] = r;
}
else
@ -1274,7 +1274,7 @@ bitmap_xor (bitmap dst, bitmap a, bitmap b)
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, src->indx);
else
dst_elt->indx = src->indx;
memcpy (dst_elt->bits, src->bits, sizeof (dst_elt->bits));
dst_prev = dst_elt;
@ -1354,7 +1354,7 @@ bitmap_equal_p (bitmap a, bitmap b)
bitmap_element *a_elt;
bitmap_element *b_elt;
unsigned ix;
for (a_elt = a->first, b_elt = b->first;
a_elt && b_elt;
a_elt = a_elt->next, b_elt = b_elt->next)
@ -1376,7 +1376,7 @@ bitmap_intersect_p (bitmap a, bitmap b)
bitmap_element *a_elt;
bitmap_element *b_elt;
unsigned ix;
for (a_elt = a->first, b_elt = b->first;
a_elt && b_elt;)
{
@ -1447,7 +1447,7 @@ bitmap_ior_and_compl_into (bitmap a, bitmap from1, bitmap from2)
{
bitmap_head tmp;
bool changed;
bitmap_initialize (&tmp, &bitmap_default_obstack);
bitmap_and_compl (&tmp, from1, from2);
changed = bitmap_ior_into (a, &tmp);


@ -49,7 +49,7 @@ typedef struct bitmap_obstack GTY (())
/* Bitmap set element. We use a linked list to hold only the bits that
are set. This allows us to grow the bitset dynamically without
having to realloc and copy a giant bit array.
The free list is implemented as a list of lists. There is one
outer list connected together by prev fields. Each element of that
@ -172,7 +172,7 @@ extern unsigned bitmap_first_set_bit (bitmap);
/* Do any cleanup needed on a bitmap when it is no longer used. */
#define BITMAP_FREE(BITMAP) \
((void)(bitmap_obstack_free (BITMAP), (BITMAP) = NULL))
/* Iterator for bitmaps. */
@ -180,13 +180,13 @@ typedef struct
{
/* Pointer to the current bitmap element. */
bitmap_element *elt1;
/* Pointer to 2nd bitmap element when two are involved. */
bitmap_element *elt2;
/* Word within the current element. */
unsigned word_no;
/* Contents of the actually processed word. When finding next bit
it is shifted right, so that the actual bit is always the least
significant bit of ACTUAL. */
@ -211,7 +211,7 @@ bmp_iter_set_init (bitmap_iterator *bi, bitmap map,
bi->elt1 = &bitmap_zero_bits;
break;
}
if (bi->elt1->indx >= start_bit / BITMAP_ELEMENT_ALL_BITS)
break;
bi->elt1 = bi->elt1->next;
@ -220,7 +220,7 @@ bmp_iter_set_init (bitmap_iterator *bi, bitmap map,
/* We might have gone past the start bit, so reinitialize it. */
if (bi->elt1->indx != start_bit / BITMAP_ELEMENT_ALL_BITS)
start_bit = bi->elt1->indx * BITMAP_ELEMENT_ALL_BITS;
/* Initialize for what is now start_bit. */
bi->word_no = start_bit / BITMAP_WORD_BITS % BITMAP_ELEMENT_WORDS;
bi->bits = bi->elt1->bits[bi->word_no];
@ -231,7 +231,7 @@ bmp_iter_set_init (bitmap_iterator *bi, bitmap map,
will fail. It won't matter if this increment moves us into the
next word. */
start_bit += !bi->bits;
*bit_no = start_bit;
}
@ -254,12 +254,12 @@ bmp_iter_and_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
bi->elt2 = NULL;
break;
}
if (bi->elt1->indx >= start_bit / BITMAP_ELEMENT_ALL_BITS)
break;
bi->elt1 = bi->elt1->next;
}
/* Advance elt2 until it is not before elt1. */
while (1)
{
@ -268,7 +268,7 @@ bmp_iter_and_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
bi->elt1 = bi->elt2 = &bitmap_zero_bits;
break;
}
if (bi->elt2->indx >= bi->elt1->indx)
break;
bi->elt2 = bi->elt2->next;
@ -278,10 +278,10 @@ bmp_iter_and_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
if (bi->elt1->indx == bi->elt2->indx)
{
/* We might have advanced beyond the start_bit, so reinitialize
for that. */
if (bi->elt1->indx != start_bit / BITMAP_ELEMENT_ALL_BITS)
start_bit = bi->elt1->indx * BITMAP_ELEMENT_ALL_BITS;
bi->word_no = start_bit / BITMAP_WORD_BITS % BITMAP_ELEMENT_WORDS;
bi->bits = bi->elt1->bits[bi->word_no] & bi->elt2->bits[bi->word_no];
bi->bits >>= start_bit % BITMAP_WORD_BITS;
@ -293,13 +293,13 @@ bmp_iter_and_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
bi->word_no = BITMAP_ELEMENT_WORDS - 1;
bi->bits = 0;
}
/* If this word is zero, we must make sure we're not pointing at the
first bit, otherwise our incrementing to the next word boundary
will fail. It won't matter if this increment moves us into the
next word. */
start_bit += !bi->bits;
*bit_no = start_bit;
}
@ -321,7 +321,7 @@ bmp_iter_and_compl_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
bi->elt1 = &bitmap_zero_bits;
break;
}
if (bi->elt1->indx >= start_bit / BITMAP_ELEMENT_ALL_BITS)
break;
bi->elt1 = bi->elt1->next;
@ -335,19 +335,19 @@ bmp_iter_and_compl_init (bitmap_iterator *bi, bitmap map1, bitmap map2,
that. */
if (bi->elt1->indx != start_bit / BITMAP_ELEMENT_ALL_BITS)
start_bit = bi->elt1->indx * BITMAP_ELEMENT_ALL_BITS;
bi->word_no = start_bit / BITMAP_WORD_BITS % BITMAP_ELEMENT_WORDS;
bi->bits = bi->elt1->bits[bi->word_no];
if (bi->elt2 && bi->elt1->indx == bi->elt2->indx)
bi->bits &= ~bi->elt2->bits[bi->word_no];
bi->bits >>= start_bit % BITMAP_WORD_BITS;
/* If this word is zero, we must make sure we're not pointing at the
first bit, otherwise our incrementing to the next word boundary
will fail. It won't matter if this increment moves us into the
next word. */
start_bit += !bi->bits;
*bit_no = start_bit;
}
@ -398,7 +398,7 @@ bmp_iter_set (bitmap_iterator *bi, unsigned *bit_no)
*bit_no += BITMAP_WORD_BITS;
bi->word_no++;
}
/* Advance to the next element. */
bi->elt1 = bi->elt1->next;
if (!bi->elt1)
@ -433,7 +433,7 @@ bmp_iter_and (bitmap_iterator *bi, unsigned *bit_no)
*bit_no = ((*bit_no + BITMAP_WORD_BITS - 1)
/ BITMAP_WORD_BITS * BITMAP_WORD_BITS);
bi->word_no++;
while (1)
{
/* Find the next nonzero word in this elt. */
@ -445,7 +445,7 @@ bmp_iter_and (bitmap_iterator *bi, unsigned *bit_no)
*bit_no += BITMAP_WORD_BITS;
bi->word_no++;
}
/* Advance to the next identical element. */
do
{
@ -458,7 +458,7 @@ bmp_iter_and (bitmap_iterator *bi, unsigned *bit_no)
return false;
}
while (bi->elt1->indx < bi->elt2->indx);
/* Advance elt2 to be no less than elt1. This might not
advance. */
while (bi->elt2->indx < bi->elt1->indx)
@ -469,7 +469,7 @@ bmp_iter_and (bitmap_iterator *bi, unsigned *bit_no)
}
}
while (bi->elt1->indx != bi->elt2->indx);
*bit_no = bi->elt1->indx * BITMAP_ELEMENT_ALL_BITS;
bi->word_no = 0;
}
@ -514,7 +514,7 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
*bit_no += BITMAP_WORD_BITS;
bi->word_no++;
}
/* Advance to the next element of elt1. */
bi->elt1 = bi->elt1->next;
if (!bi->elt1)
@ -523,7 +523,7 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
/* Advance elt2 until it is no less than elt1. */
while (bi->elt2 && bi->elt2->indx < bi->elt1->indx)
bi->elt2 = bi->elt2->next;
*bit_no = bi->elt1->indx * BITMAP_ELEMENT_ALL_BITS;
bi->word_no = 0;
}
@ -545,7 +545,7 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
loop state. */
#define EXECUTE_IF_AND_IN_BITMAP(BITMAP1, BITMAP2, MIN, BITNUM, ITER) \
for (bmp_iter_and_init (&(ITER), (BITMAP1), (BITMAP2), (MIN), \
&(BITNUM)); \
bmp_iter_and (&(ITER), &(BITNUM)); \
bmp_iter_next (&(ITER), &(BITNUM)))
@ -557,7 +557,7 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
#define EXECUTE_IF_AND_COMPL_IN_BITMAP(BITMAP1, BITMAP2, MIN, BITNUM, ITER) \
for (bmp_iter_and_compl_init (&(ITER), (BITMAP1), (BITMAP2), (MIN), \
&(BITNUM)); \
bmp_iter_and_compl (&(ITER), &(BITNUM)); \
bmp_iter_next (&(ITER), &(BITNUM)))
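A usage sketch for these iterators (assumes GCC's internal bitmap.h environment is in scope; not part of the patch):

    static unsigned
    count_common_bits (bitmap a, bitmap b)
    {
      bitmap_iterator bi;
      unsigned bit, n = 0;

      /* Visit every bit number set in both A and B. */
      EXECUTE_IF_AND_IN_BITMAP (a, b, 0, bit, bi)
        n++;
      return n;
    }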


@ -241,7 +241,7 @@ insn_sets_btr_p (rtx insn, int check_const, int *regno)
&& TEST_HARD_REG_BIT (all_btrs, REGNO (dest)))
{
gcc_assert (!btr_referenced_p (src, NULL));
if (!check_const || CONSTANT_P (src))
{
if (regno)
@ -924,12 +924,12 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
int new_block = new_bb->index;
gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
bitmap_set_bit (live_range, new_block);
/* A previous btr migration could have caused a register to be
live just at the end of new_block, which we need in full, so
use btrs_live_at_end even if full_range is set. */
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
if (full_range)
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
@ -1192,7 +1192,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
insp = BB_END (b);
for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
gcc_assert (insp != BB_HEAD (b));
if (JUMP_P (insp) || can_throw_internal (insp))
insp = PREV_INSN (insp);
}
@ -1513,7 +1513,7 @@ rest_of_handle_branch_target_load_optimize (void)
&& !warned)
{
warning (0, "branch target register load optimization is not intended "
"to be run twice");
"to be run twice");
warned = 1;
}
@ -1530,7 +1530,7 @@ struct tree_opt_pass pass_branch_target_load_optimize =
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */


@ -58,7 +58,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
DEF_FUNCTION_TYPE_VAR_1 (BT_INT_DOUBLE_VAR, BT_INT, BT_DOUBLE)
describes the type `int ()(double, ...)'.
DEF_POINTER_TYPE (ENUM, TYPE)
This macro describes a pointer type. ENUM is as above; TYPE is
@ -142,20 +142,20 @@ DEF_FUNCTION_TYPE_1 (BT_FN_LONGLONG_LONGLONG, BT_LONGLONG, BT_LONGLONG)
DEF_FUNCTION_TYPE_1 (BT_FN_INTMAX_INTMAX, BT_INTMAX, BT_INTMAX)
DEF_FUNCTION_TYPE_1 (BT_FN_FLOAT_FLOAT, BT_FLOAT, BT_FLOAT)
DEF_FUNCTION_TYPE_1 (BT_FN_DOUBLE_DOUBLE, BT_DOUBLE, BT_DOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_LONGDOUBLE_LONGDOUBLE,
BT_LONGDOUBLE, BT_LONGDOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_COMPLEX_FLOAT_COMPLEX_FLOAT,
BT_COMPLEX_FLOAT, BT_COMPLEX_FLOAT)
DEF_FUNCTION_TYPE_1 (BT_FN_COMPLEX_DOUBLE_COMPLEX_DOUBLE,
BT_COMPLEX_DOUBLE, BT_COMPLEX_DOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_COMPLEX_LONGDOUBLE_COMPLEX_LONGDOUBLE,
BT_COMPLEX_LONGDOUBLE, BT_COMPLEX_LONGDOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_FLOAT_COMPLEX_FLOAT,
BT_FLOAT, BT_COMPLEX_FLOAT)
DEF_FUNCTION_TYPE_1 (BT_FN_DOUBLE_COMPLEX_DOUBLE,
BT_DOUBLE, BT_COMPLEX_DOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_LONGDOUBLE_COMPLEX_LONGDOUBLE,
BT_LONGDOUBLE, BT_COMPLEX_LONGDOUBLE)
DEF_FUNCTION_TYPE_1 (BT_FN_PTR_UINT, BT_PTR, BT_UINT)
DEF_FUNCTION_TYPE_1 (BT_FN_PTR_SIZE, BT_PTR, BT_SIZE)
DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)
@ -207,25 +207,25 @@ DEF_FUNCTION_TYPE_1 (BT_FN_UINT_UINT, BT_UINT, BT_UINT)
DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR, BT_FN_VOID_PTR)
DEF_FUNCTION_TYPE_2 (BT_FN_VOID_PTR_INT, BT_VOID, BT_PTR, BT_INT)
DEF_FUNCTION_TYPE_2 (BT_FN_STRING_STRING_CONST_STRING,
BT_STRING, BT_STRING, BT_CONST_STRING)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_STRING_CONST_STRING,
BT_INT, BT_CONST_STRING, BT_CONST_STRING)
DEF_FUNCTION_TYPE_2 (BT_FN_STRING_CONST_STRING_CONST_STRING,
BT_STRING, BT_CONST_STRING, BT_CONST_STRING)
DEF_FUNCTION_TYPE_2 (BT_FN_SIZE_CONST_STRING_CONST_STRING,
BT_SIZE, BT_CONST_STRING, BT_CONST_STRING)
DEF_FUNCTION_TYPE_2 (BT_FN_STRING_CONST_STRING_INT,
BT_STRING, BT_CONST_STRING, BT_INT)
DEF_FUNCTION_TYPE_2 (BT_FN_STRING_CONST_STRING_SIZE,
BT_STRING, BT_CONST_STRING, BT_SIZE)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_STRING_FILEPTR,
BT_INT, BT_CONST_STRING, BT_FILEPTR)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_INT_FILEPTR,
BT_INT, BT_INT, BT_FILEPTR)
DEF_FUNCTION_TYPE_2 (BT_FN_VOID_PTRMODE_PTR,
BT_VOID, BT_PTRMODE, BT_PTR)
DEF_FUNCTION_TYPE_2 (BT_FN_VOID_VALIST_REF_VALIST_ARG,
BT_VOID, BT_VALIST_REF, BT_VALIST_ARG)
DEF_FUNCTION_TYPE_2 (BT_FN_LONG_LONG_LONG,
BT_LONG, BT_LONG, BT_LONG)
@ -277,12 +277,12 @@ DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_STRING_VALIST_ARG,
BT_INT, BT_CONST_STRING, BT_VALIST_ARG)
DEF_FUNCTION_TYPE_2 (BT_FN_PTR_SIZE_SIZE,
BT_PTR, BT_SIZE, BT_SIZE)
DEF_FUNCTION_TYPE_2 (BT_FN_COMPLEX_FLOAT_COMPLEX_FLOAT_COMPLEX_FLOAT,
BT_COMPLEX_FLOAT, BT_COMPLEX_FLOAT, BT_COMPLEX_FLOAT)
DEF_FUNCTION_TYPE_2 (BT_FN_COMPLEX_DOUBLE_COMPLEX_DOUBLE_COMPLEX_DOUBLE,
BT_COMPLEX_DOUBLE, BT_COMPLEX_DOUBLE, BT_COMPLEX_DOUBLE)
DEF_FUNCTION_TYPE_2 (BT_FN_COMPLEX_LONGDOUBLE_COMPLEX_LONGDOUBLE_COMPLEX_LONGDOUBLE,
BT_COMPLEX_LONGDOUBLE, BT_COMPLEX_LONGDOUBLE, BT_COMPLEX_LONGDOUBLE)
DEF_FUNCTION_TYPE_2 (BT_FN_VOID_PTR_PTR, BT_VOID, BT_PTR, BT_PTR)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_STRING_PTR_CONST_STRING,
BT_INT, BT_CONST_STRING, BT_PTR_CONST_STRING)
@ -302,9 +302,9 @@ DEF_FUNCTION_TYPE_3 (BT_FN_INT_CONST_STRING_CONST_STRING_SIZE,
DEF_FUNCTION_TYPE_3 (BT_FN_PTR_PTR_CONST_PTR_SIZE,
BT_PTR, BT_PTR, BT_CONST_PTR, BT_SIZE)
DEF_FUNCTION_TYPE_3 (BT_FN_INT_CONST_PTR_CONST_PTR_SIZE,
BT_INT, BT_CONST_PTR, BT_CONST_PTR, BT_SIZE)
DEF_FUNCTION_TYPE_3 (BT_FN_PTR_PTR_INT_SIZE,
BT_PTR, BT_PTR, BT_INT, BT_SIZE)
DEF_FUNCTION_TYPE_3 (BT_FN_VOID_PTR_INT_INT,
BT_VOID, BT_PTR, BT_INT, BT_INT)
DEF_FUNCTION_TYPE_3 (BT_FN_VOID_CONST_PTR_PTR_SIZE,
@ -401,15 +401,15 @@ DEF_FUNCTION_TYPE_VAR_0 (BT_FN_VOID_VAR, BT_VOID)
DEF_FUNCTION_TYPE_VAR_0 (BT_FN_INT_VAR, BT_INT)
DEF_FUNCTION_TYPE_VAR_0 (BT_FN_PTR_VAR, BT_PTR)
DEF_FUNCTION_TYPE_VAR_1 (BT_FN_VOID_VALIST_REF_VAR,
BT_VOID, BT_VALIST_REF)
DEF_FUNCTION_TYPE_VAR_1 (BT_FN_VOID_CONST_PTR_VAR,
BT_VOID, BT_CONST_PTR)
DEF_FUNCTION_TYPE_VAR_1 (BT_FN_INT_CONST_STRING_VAR,
BT_INT, BT_CONST_STRING)
DEF_FUNCTION_TYPE_VAR_2 (BT_FN_INT_FILEPTR_CONST_STRING_VAR,
BT_INT, BT_FILEPTR, BT_CONST_STRING)
DEF_FUNCTION_TYPE_VAR_2 (BT_FN_INT_STRING_CONST_STRING_VAR,
BT_INT, BT_STRING, BT_CONST_STRING)
DEF_FUNCTION_TYPE_VAR_2 (BT_FN_INT_CONST_STRING_CONST_STRING_VAR,


@ -279,7 +279,7 @@ get_pointer_alignment (tree exp, unsigned int max_align)
while (handled_component_p (exp))
{
/* Fields in a structure can be packed, honor DECL_ALIGN
of the FIELD_DECL. For all other references the conservative
alignment is the element type alignment. */
if (TREE_CODE (exp) == COMPONENT_REF)
inner = MIN (inner, DECL_ALIGN (TREE_OPERAND (exp, 1)));
@ -517,7 +517,7 @@ expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
we must disable frame pointer elimination. */
if (count == 0)
tem = frame_pointer_rtx;
else
{
tem = hard_frame_pointer_rtx;
@ -2470,7 +2470,7 @@ expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
if (n < POWI_TABLE_SIZE)
{
if (cache[n])
return cache[n];
target = gen_reg_rtx (mode);
cache[n] = target;
@ -2864,7 +2864,7 @@ expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
return 0;
/* If either SRC is not a pointer type, don't do this
operation in-line. */
if (src_align == 0)
return 0;
@ -2947,9 +2947,9 @@ expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode m
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
/* If either SRC or DEST is not a pointer type, don't do this
operation in-line. */
if (dest_align == 0 || src_align == 0)
return 0;
@ -3027,14 +3027,14 @@ expand_builtin_memmove (tree arglist, tree type, rtx target,
return 0;
/* If either SRC is not a pointer type, don't do this
operation in-line. */
if (src_align == 0)
return 0;
/* If src is categorized for a readonly section we can use
normal memcpy. */
if (readonly_data_expr (src))
{
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return 0;
@ -3047,12 +3047,12 @@ expand_builtin_memmove (tree arglist, tree type, rtx target,
/* If length is 1 and we can expand memcpy call inline,
it is ok to use memcpy as well. */
if (integer_onep (len))
{
rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
/*endp=*/0);
if (ret)
return ret;
}
/* Otherwise, call the normal function. */
return 0;
@ -3206,9 +3206,9 @@ expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
rtx ret;
/* Ensure we get an actual string whose length can be evaluated at
compile-time, not an expression containing a string. This is
because the latter will potentially produce pessimized code
when used to produce the return value. */
src = TREE_VALUE (TREE_CHAIN (arglist));
if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
return expand_movstr (TREE_VALUE (arglist),
@ -3232,7 +3232,7 @@ expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
if (GET_CODE (len_rtx) == CONST_INT)
{
ret = expand_builtin_strcpy (get_callee_fndecl (exp),
arglist, target, mode);
if (ret)
@ -3292,7 +3292,7 @@ expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
tree result = fold_builtin_strncpy (fndecl, arglist, slen);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
@ -3303,7 +3303,7 @@ expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
/* We're required to pad with trailing zeros if the requested
len is greater than strlen(s2)+1. In that case try to
use store_by_pieces, if it fails, punt. */
if (tree_int_cst_lt (slen, len))
{
@ -3436,12 +3436,12 @@ expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
&& can_store_by_pieces (tree_low_cst (len, 1),
builtin_memset_read_str, &c, dest_align))
{
val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
val_rtx);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_memset_gen_str, val_rtx, dest_align, 0);
}
else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
dest_align))
goto do_libcall;
@ -3660,7 +3660,7 @@ expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
rtx arg1_rtx, arg2_rtx;
rtx result, insn = NULL_RTX;
tree fndecl, fn;
tree arg1 = TREE_VALUE (arglist);
tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
int arg1_align
@ -3683,7 +3683,7 @@ expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
/* Try to call cmpstrsi. */
if (HAVE_cmpstrsi)
{
enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
/* Make a place to write the result of the instruction. */
@ -3699,12 +3699,12 @@ expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
#endif
#ifdef HAVE_cmpstrnsi
/* Try to determine at least one length and call cmpstrnsi. */
if (!insn && HAVE_cmpstrnsi)
{
tree len;
rtx arg3_rtx;
enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
tree len1 = c_strlen (arg1, 1);
tree len2 = c_strlen (arg2, 1);
@ -3933,9 +3933,9 @@ expand_builtin_strcat (tree fndecl, tree arglist, rtx target, enum machine_mode
const char *p = c_getstr (src);
/* If the string length is zero, return the dst parameter. */
if (p && *p == '\0')
return expand_expr (dst, target, mode, EXPAND_NORMAL);
if (!optimize_size)
{
/* See if we can store by pieces into (dst + strlen(dst)). */
@ -3947,7 +3947,7 @@ expand_builtin_strcat (tree fndecl, tree arglist, rtx target, enum machine_mode
newsrc = builtin_save_expr (src);
if (newsrc != src)
arglist = build_tree_list (NULL_TREE, newsrc);
else
arglist = TREE_CHAIN (arglist); /* Reusing arglist if safe. */
dst = builtin_save_expr (dst);
@ -3970,12 +3970,12 @@ expand_builtin_strcat (tree fndecl, tree arglist, rtx target, enum machine_mode
end_sequence (); /* Stop sequence. */
return 0;
}
/* Output the entire sequence. */
insns = get_insns ();
end_sequence ();
emit_insn (insns);
return expand_expr (dst, target, mode, EXPAND_NORMAL);
}
@ -4344,7 +4344,7 @@ gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
{
/* If va_list is an array type, the argument may have decayed
to a pointer type, e.g. by being passed to another function.
In that case, unwrap both types so that we can compare the
In that case, unwrap both types so that we can compare the
underlying records. */
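[Editor's sketch, not part of this patch: the decay described in the comment above, at the source level. On targets where va_list is an array type, handing it to a helper silently turns it into a pointer, which is why both types must be unwrapped before comparison.]

   #include <stdarg.h>

   static int first_int (va_list ap)  /* may receive the decayed pointer type */
   {
     return va_arg (ap, int);
   }

   int f (int n, ...)
   {
     va_list ap;
     int v;
     va_start (ap, n);
     v = first_int (ap);             /* an array-typed va_list decays here */
     va_end (ap);
     return v;
   }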
if (TREE_CODE (have_va_type) == ARRAY_TYPE
|| POINTER_TYPE_P (have_va_type))
@ -4394,7 +4394,7 @@ gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
else
{
/* Make it easier for the backends by protecting the valist argument
from multiple evaluations. */
from multiple evaluations. */
if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
{
/* For this case, the backends will be expecting a pointer to
@ -4750,7 +4750,7 @@ expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
probabilities. */
if (integer_zerop (arg1))
taken = 1 - taken;
predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
}
}
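[Editor's sketch: a minimal use of the builtin whose expansion appears above; passing 0 as the expected value is exactly what flips `taken' in this hunk.]

   #define unlikely(x) __builtin_expect ((x) != 0, 0)

   int process (int fd)
   {
     if (unlikely (fd < 0))   /* arg1 == 0, so the branch is predicted not taken */
       return -1;             /* laid out as the cold path */
     return 0;                /* laid out as the hot path */
   }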
@ -4881,12 +4881,12 @@ expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
if (!init_target_chars())
return 0;
/* If the format specifier was "%s\n", call __builtin_puts(arg). */
if (strcmp (fmt_str, target_percent_s_newline) == 0)
{
if (! arglist
|| ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| TREE_CHAIN (arglist))
return 0;
fn = fn_puts;
@ -4904,7 +4904,7 @@ expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
{
/* We can't handle anything else with % args or %% ... yet. */
if (strchr (fmt_str, target_percent))
return 0;
return 0;
if (arglist)
return 0;
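[Editor's sketch: at the source level, the fold above means a call like the following may be emitted as __builtin_puts, which supplies the newline itself.]

   #include <stdio.h>

   void greet (const char *msg)
   {
     printf ("%s\n", msg);   /* matches target_percent_s_newline: becomes puts (msg) */
   }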
@ -4960,7 +4960,7 @@ expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
call. */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
bool unlocked)
bool unlocked)
{
tree arglist = TREE_OPERAND (exp, 1);
/* If we're using an unlocked function, assume the other unlocked
@ -4997,12 +4997,12 @@ expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
if (!init_target_chars())
return 0;
/* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
if (strcmp (fmt_str, target_percent_s) == 0)
{
if (! arglist
|| ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| TREE_CHAIN (arglist))
return 0;
arg = TREE_VALUE (arglist);
@ -5026,7 +5026,7 @@ expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
{
/* We can't handle anything else with % args or %% ... yet. */
if (strchr (fmt_str, target_percent))
return 0;
return 0;
if (arglist)
return 0;
@ -5296,9 +5296,9 @@ expand_builtin_signbit (tree exp, rtx target)
imode = word_mode;
/* Handle targets with different FP word orders. */
if (FLOAT_WORDS_BIG_ENDIAN)
word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
else
word = bitpos / BITS_PER_WORD;
word = bitpos / BITS_PER_WORD;
temp = operand_subword_force (temp, word, fmode);
bitpos = bitpos % BITS_PER_WORD;
}
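[Editor's note: a worked instance of the word-index arithmetic above, assuming a 64-bit double (sign at bitpos 63) and 32-bit words; either way bitpos % 32 leaves 31, the top bit of the chosen word.]

   int sign_word_index (int bitpos, int float_words_big_endian)
   {
     const int mode_bits = 64, word_bits = 32;
     return float_words_big_endian
            ? (mode_bits - bitpos) / word_bits   /* (64 - 63) / 32 == 0 */
            : bitpos / word_bits;                /* 63 / 32        == 1 */
   }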
@ -5334,7 +5334,7 @@ expand_builtin_signbit (tree exp, rtx target)
else
{
/* Perform a logical right shift to place the signbit in the least
significant bit, then truncate the result to the desired mode
significant bit, then truncate the result to the desired mode
and mask just this bit. */
temp = expand_shift (RSHIFT_EXPR, imode, temp,
build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
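[Editor's sketch: the RTL above implements the usual shift-and-mask idiom. In C, for a 32-bit float whose bits are already in an integer register:]

   #include <stdint.h>

   int signbit_from_bits (uint32_t bits)
   {
     return (bits >> 31) & 1;   /* shift the sign into bit 0, then mask it */
   }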
@ -5451,7 +5451,7 @@ get_builtin_sync_mem (tree loc, enum machine_mode mode)
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
ARGLIST is the operands list to the function. CODE is the rtx code
ARGLIST is the operands list to the function. CODE is the rtx code
that corresponds to the arithmetic or logical operation from the name;
an exception here is that NOT actually means NAND. TARGET is an optional
place for us to store the results; AFTER is true if this is the
@ -5650,7 +5650,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
CASE_FLT_FN (BUILT_IN_FABS):
target = expand_builtin_fabs (arglist, target, subtarget);
if (target)
return target;
return target;
break;
CASE_FLT_FN (BUILT_IN_COPYSIGN):
@ -5794,7 +5794,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
/* Return the address of the first anonymous stack arg. */
case BUILT_IN_NEXT_ARG:
if (fold_builtin_next_arg (arglist))
return const0_rtx;
return const0_rtx;
return expand_builtin_next_arg ();
case BUILT_IN_CLASSIFY_TYPE:
@ -6660,7 +6660,7 @@ integer_valued_real_p (tree t)
case REAL_CST:
if (! TREE_CONSTANT_OVERFLOW (t))
{
REAL_VALUE_TYPE c, cint;
REAL_VALUE_TYPE c, cint;
c = TREE_REAL_CST (t);
real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
@ -6958,7 +6958,7 @@ fold_builtin_cbrt (tree arglist, tree type)
{
/* Optimize cbrt(expN(x)) -> expN(x/3). */
if (BUILTIN_EXPONENT_P (fcode))
{
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
const REAL_VALUE_TYPE third_trunc =
real_value_truncate (TYPE_MODE (type), dconstthird);
@ -6971,7 +6971,7 @@ fold_builtin_cbrt (tree arglist, tree type)
/* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
if (BUILTIN_SQRT_P (fcode))
{
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
if (powfn)
@ -6991,17 +6991,17 @@ fold_builtin_cbrt (tree arglist, tree type)
/* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
if (BUILTIN_CBRT_P (fcode))
{
{
tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
if (tree_expr_nonnegative_p (arg0))
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
if (powfn)
{
{
tree tree_root;
REAL_VALUE_TYPE dconstroot;
real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
@ -7011,11 +7011,11 @@ fold_builtin_cbrt (tree arglist, tree type)
}
}
}
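[Editor's note: the three folds above rest on exact exponent algebra (GCC still guards some of them with unsafe-math flags); a fourth, for cbrt(pow(x,y)), follows below. The rewritten forms, as plain C:]

   #include <math.h>

   double cbrt_of_exp (double x)  { return exp (x / 3.0); }       /* cbrt (exp (x))          */
   double cbrt_of_sqrt (double x) { return pow (x, 1.0 / 6.0); }  /* cbrt (sqrt (x)), x >= 0 */
   double cbrt_of_cbrt (double x) { return pow (x, 1.0 / 9.0); }  /* cbrt (cbrt (x)), x >= 0 */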
/* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
|| fcode == BUILT_IN_POWL)
{
{
tree arg00 = TREE_VALUE (TREE_OPERAND (arg, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
if (tree_expr_nonnegative_p (arg00))
@ -7408,11 +7408,11 @@ real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
&& ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_dconstp (TREE_REALPART (expr), value)
&& real_zerop (TREE_IMAGPART (expr))));
&& ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_dconstp (TREE_REALPART (expr), value)
&& real_zerop (TREE_IMAGPART (expr))));
}
/* A subroutine of fold_builtin to fold the various logarithmic
@ -7434,10 +7434,10 @@ fold_builtin_logarithm (tree fndecl, tree arglist,
return build_real (type, dconst0);
/* Optimize logN(N) = 1.0. If N can't be truncated to MODE
exactly, then only do this if flag_unsafe_math_optimizations. */
exactly, then only do this if flag_unsafe_math_optimizations. */
if (exact_real_truncate (TYPE_MODE (type), value)
|| flag_unsafe_math_optimizations)
{
{
const REAL_VALUE_TYPE value_truncate =
real_value_truncate (TYPE_MODE (type), *value);
if (real_dconstp (arg, &value_truncate))
@ -7458,10 +7458,10 @@ fold_builtin_logarithm (tree fndecl, tree arglist,
return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
/* Optimize logN(func()) for various exponential functions. We
want to determine the value "x" and the power "exponent" in
order to transform logN(x**exponent) into exponent*logN(x). */
want to determine the value "x" and the power "exponent" in
order to transform logN(x**exponent) into exponent*logN(x). */
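[Editor's sketch of the rewritten form: the identity is logN(x**e) == e * logN(x) for x > 0, and GCC applies it only under -funsafe-math-optimizations.]

   #include <math.h>

   double log_of_pow (double x, double e)   /* log (pow (x, e)), x > 0 */
   {
     return e * log (x);
   }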
if (flag_unsafe_math_optimizations)
{
{
tree exponent = 0, x = 0;
switch (fcode)
@ -7606,7 +7606,7 @@ fold_builtin_pow (tree fndecl, tree arglist, tree type)
/* Optimize pow(expN(x),y) = expN(x*y). */
if (BUILTIN_EXPONENT_P (fcode))
{
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
arg = fold_build2 (MULT_EXPR, type, arg, arg1);
@ -7616,7 +7616,7 @@ fold_builtin_pow (tree fndecl, tree arglist, tree type)
/* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
if (BUILTIN_SQRT_P (fcode))
{
{
tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
@ -7628,7 +7628,7 @@ fold_builtin_pow (tree fndecl, tree arglist, tree type)
/* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
if (BUILTIN_CBRT_P (fcode))
{
{
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
if (tree_expr_nonnegative_p (arg))
{
@ -7641,11 +7641,11 @@ fold_builtin_pow (tree fndecl, tree arglist, tree type)
return build_function_call_expr (fndecl, arglist);
}
}
/* Optimize pow(pow(x,y),z) = pow(x,y*z). */
if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
|| fcode == BUILT_IN_POWL)
{
{
tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
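[Editor's note: the pow folds in this hunk all follow from (x**a)**b == x**(a*b); their rewritten forms, as ordinary C:]

   #include <math.h>

   double pow_of_exp (double x, double y)  { return exp (x * y); }       /* pow (exp (x), y)  */
   double pow_of_sqrt (double x, double y) { return pow (x, y * 0.5); }  /* pow (sqrt (x), y) */
   double pow_of_pow (double x, double y, double z)                      /* x >= 0            */
   {
     return pow (x, y * z);
   }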
@ -7724,7 +7724,7 @@ fold_builtin_exponent (tree fndecl, tree arglist,
/* Optimize expN(1.0) = N. */
if (real_onep (arg))
{
{
REAL_VALUE_TYPE cst;
real_convert (&cst, TYPE_MODE (type), value);
@ -7735,7 +7735,7 @@ fold_builtin_exponent (tree fndecl, tree arglist,
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg) == REAL_CST
&& ! TREE_CONSTANT_OVERFLOW (arg))
{
{
REAL_VALUE_TYPE cint;
REAL_VALUE_TYPE c;
HOST_WIDE_INT n;
@ -7755,7 +7755,7 @@ fold_builtin_exponent (tree fndecl, tree arglist,
/* Optimize expN(logN(x)) = x. */
if (flag_unsafe_math_optimizations)
{
{
const enum built_in_function fcode = builtin_mathfn_code (arg);
if ((value == &dconste
@ -7823,14 +7823,14 @@ fold_builtin_mempcpy (tree arglist, tree type, int endp)
/* If SRC and DEST are the same (and not volatile), return DEST+LEN. */
if (operand_equal_p (src, dest, 0))
{
{
if (endp == 0)
return omit_one_operand (type, dest, len);
if (endp == 2)
len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
len = fold_convert (TREE_TYPE (dest), len);
len = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
return fold_convert (type, len);
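[Editor's sketch: in source terms, the src == dest fold reduces a self-copy to pointer arithmetic (mempcpy is a GNU extension; the fold removes the copy entirely).]

   #define _GNU_SOURCE
   #include <string.h>

   void *self_mempcpy (char *p, size_t len)
   {
     return mempcpy (p, p, len);   /* may fold to (void *) (p + len) */
   }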
@ -8289,9 +8289,9 @@ fold_builtin_isascii (tree arglist)
arg, integer_zero_node);
if (in_gimple_form && !TREE_CONSTANT (arg))
return NULL_TREE;
return NULL_TREE;
else
return arg;
return arg;
}
}
@ -8337,9 +8337,9 @@ fold_builtin_isdigit (tree arglist)
arg = fold_build2 (LE_EXPR, integer_type_node, arg,
build_int_cst (unsigned_type_node, 9));
if (in_gimple_form && !TREE_CONSTANT (arg))
return NULL_TREE;
return NULL_TREE;
else
return arg;
return arg;
}
}
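[Editor's note: the expression built above is the classic range-check rewrite; digits are guaranteed contiguous in C, so one unsigned compare covers '0'..'9'.]

   int my_isdigit (int c)
   {
     return (unsigned) (c - '0') <= 9u;
   }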
@ -8415,7 +8415,7 @@ fold_builtin_classify (tree fndecl, tree arglist, int builtin_index)
{
case BUILT_IN_ISINF:
if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
@ -8431,8 +8431,8 @@ fold_builtin_classify (tree fndecl, tree arglist, int builtin_index)
case BUILT_IN_FINITE:
if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
&& !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
&& !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
@ -8445,7 +8445,7 @@ fold_builtin_classify (tree fndecl, tree arglist, int builtin_index)
case BUILT_IN_ISNAN:
if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
@ -8500,13 +8500,13 @@ fold_builtin_unordered_cmp (tree fndecl, tree arglist,
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
type0 = TREE_TYPE (arg0);
type1 = TREE_TYPE (arg1);
code0 = TREE_CODE (type0);
code1 = TREE_CODE (type1);
if (code0 == REAL_TYPE && code1 == REAL_TYPE)
/* Choose the wider of two real types. */
cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
@ -8521,7 +8521,7 @@ fold_builtin_unordered_cmp (tree fndecl, tree arglist,
IDENTIFIER_POINTER (DECL_NAME (fndecl)));
return error_mark_node;
}
arg0 = fold_convert (cmp_type, arg0);
arg1 = fold_convert (cmp_type, arg1);
@ -8645,13 +8645,13 @@ fold_builtin_1 (tree fndecl, tree arglist, bool ignore)
CASE_FLT_FN (BUILT_IN_CREAL):
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
return non_lvalue (fold_build1 (REALPART_EXPR, type,
return non_lvalue (fold_build1 (REALPART_EXPR, type,
TREE_VALUE (arglist)));
break;
CASE_FLT_FN (BUILT_IN_CIMAG):
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
return non_lvalue (fold_build1 (IMAGPART_EXPR, type,
return non_lvalue (fold_build1 (IMAGPART_EXPR, type,
TREE_VALUE (arglist)));
break;
@ -8739,7 +8739,7 @@ fold_builtin_1 (tree fndecl, tree arglist, bool ignore)
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
return fold_builtin_int_roundingfn (fndecl, arglist);
@ -9320,10 +9320,10 @@ fold_builtin_strncat (tree arglist)
/* If the requested length is zero, or the src parameter string
length is zero, return the dst parameter. */
if (integer_zerop (len) || (p && *p == '\0'))
return omit_two_operands (TREE_TYPE (dst), dst, src, len);
return omit_two_operands (TREE_TYPE (dst), dst, src, len);
/* If the requested len is greater than or equal to the string
length, call strcat. */
length, call strcat. */
if (TREE_CODE (len) == INTEGER_CST && p
&& compare_tree_int (len, strlen (p)) >= 0)
{
@ -9561,18 +9561,18 @@ fold_builtin_next_arg (tree arglist)
/* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
when we checked the arguments and if needed issued a warning. */
else if (!TREE_CHAIN (arglist)
|| !integer_zerop (TREE_VALUE (arglist))
|| !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
|| TREE_CHAIN (TREE_CHAIN (arglist)))
|| !integer_zerop (TREE_VALUE (arglist))
|| !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
|| TREE_CHAIN (TREE_CHAIN (arglist)))
{
tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
tree arg = TREE_VALUE (arglist);
if (TREE_CHAIN (arglist))
{
error ("%<va_start%> used with too many arguments");
return true;
}
{
error ("%<va_start%> used with too many arguments");
return true;
}
/* Strip off all nops for the sake of the comparison. This
is not quite the same as STRIP_NOPS. It does more.
@ -9584,7 +9584,7 @@ fold_builtin_next_arg (tree arglist)
|| TREE_CODE (arg) == INDIRECT_REF)
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
{
{
	  /* FIXME: Sometimes with the tree optimizers we can end up with
	     an argument that is not the last one even though the user used
	     the last argument.  We just warn and set the arg to be the last
@ -9593,10 +9593,10 @@ fold_builtin_next_arg (tree arglist)
warning (0, "second parameter of %<va_start%> not last named argument");
}
/* We want to verify the second parameter just once before the tree
optimizers are run and then avoid keeping it in the tree,
as otherwise we could warn even for correct code like:
void foo (int i, ...)
{ va_list ap; i++; va_start (ap, i); va_end (ap); } */
optimizers are run and then avoid keeping it in the tree,
as otherwise we could warn even for correct code like:
void foo (int i, ...)
{ va_list ap; i++; va_start (ap, i); va_end (ap); } */
TREE_VALUE (arglist) = integer_zero_node;
TREE_CHAIN (arglist) = build_tree_list (NULL, integer_zero_node);
}
@ -10167,7 +10167,7 @@ fold_builtin_stxcpy_chk (tree fndecl, tree arglist, tree maxlen, bool ignore,
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
if (! host_integerp (size, 1))
return 0;
@ -10628,7 +10628,7 @@ fold_builtin_printf (tree fndecl, tree arglist, bool ignore,
if (!init_target_chars())
return 0;
if (strcmp (fmt_str, target_percent_s) == 0 || strchr (fmt_str, target_percent) == NULL)
{
const char *str;
@ -10789,7 +10789,7 @@ fold_builtin_fprintf (tree fndecl, tree arglist, bool ignore,
if (!init_target_chars())
return 0;
/* If the format doesn't contain % args or %%, use strcpy. */
if (strchr (fmt_str, target_percent) == NULL)
{
@ -10881,7 +10881,7 @@ init_target_chars (void)
target_percent_s_newline[1] = target_s;
target_percent_s_newline[2] = target_newline;
target_percent_s_newline[3] = '\0';
init = true;
}
return true;
View File
@ -54,7 +54,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
exist when compiling in ANSI conformant mode.
ATTRs is an attribute list as defined in builtin-attrs.def that
describes the attributes of this builtin function.
describes the attributes of this builtin function.
IMPLICIT specifies condition when the builtin can be produced by
compiler. For instance C90 reserves floorf function, but does not
@ -71,13 +71,13 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#undef DEF_GCC_BUILTIN
#define DEF_GCC_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, BT_LAST, \
false, false, false, ATTRS, true, true)
false, false, false, ATTRS, true, true)
/* Like DEF_GCC_BUILTIN, except we don't prepend "__builtin_". */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, NAME, BUILT_IN_NORMAL, TYPE, BT_LAST, \
false, false, false, ATTRS, true, true)
false, false, false, ATTRS, true, true)
/* A library builtin (like __builtin_strchr) is a builtin equivalent
of an ANSI/ISO standard library function. In addition to the
@ -85,7 +85,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
`strchr') as well. If we cannot compute the answer using the
builtin function, we will fall back to the standard library
version. */
#undef DEF_LIB_BUILTIN
#undef DEF_LIB_BUILTIN
#define DEF_LIB_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, false, ATTRS, true, true)
@ -94,39 +94,39 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
specified by ANSI/ISO C. So, when we're being fully conformant we
ignore the version of these builtins that does not begin with
__builtin. */
#undef DEF_EXT_LIB_BUILTIN
#undef DEF_EXT_LIB_BUILTIN
#define DEF_EXT_LIB_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, true, ATTRS, false, true)
true, true, true, ATTRS, false, true)
/* Like DEF_LIB_BUILTIN, except that the function is only a part of
the standard in C94 or above. */
#undef DEF_C94_BUILTIN
#undef DEF_C94_BUILTIN
#define DEF_C94_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, !flag_isoc94, ATTRS, TARGET_C99_FUNCTIONS, true)
true, true, !flag_isoc94, ATTRS, TARGET_C99_FUNCTIONS, true)
/* Like DEF_LIB_BUILTIN, except that the function is only a part of
the standard in C99 or above. */
#undef DEF_C99_BUILTIN
#undef DEF_C99_BUILTIN
#define DEF_C99_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, !flag_isoc99, ATTRS, TARGET_C99_FUNCTIONS, true)
true, true, !flag_isoc99, ATTRS, TARGET_C99_FUNCTIONS, true)
/* Builtin that is specified by C99 and whose name C90 reserves for future
   use.  We can still recognize the builtin in C90 mode but we can't produce
   it implicitly.  */
#undef DEF_C99_C90RES_BUILTIN
#undef DEF_C99_C90RES_BUILTIN
#define DEF_C99_C90RES_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, !flag_isoc99, ATTRS, TARGET_C99_FUNCTIONS, true)
true, true, !flag_isoc99, ATTRS, TARGET_C99_FUNCTIONS, true)
/* Builtin whose name C99 reserves for future use.  We can still recognize
   the builtin in C99 mode but we can't produce it implicitly.  */
#undef DEF_EXT_C99RES_BUILTIN
#define DEF_EXT_C99RES_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_NORMAL, TYPE, TYPE, \
true, true, true, ATTRS, false, true)
true, true, true, ATTRS, false, true)
/* Allocate the enum and the name for a builtin, but do not actually
define it here at all. */
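[Editor's sketch of how one of these macros is invoked later in builtins.def; the type and attribute macro names below are indicative, not quoted from this revision.]

   /* Declares both strlen and __builtin_strlen with the same signature.  */
   DEF_LIB_BUILTIN (BUILT_IN_STRLEN, "strlen",
                    BT_FN_SIZE_CONST_STRING, ATTR_PURE_NOTHROW_NONNULL)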
View File
@ -75,15 +75,15 @@ affix_data_type (const char *param)
for (;;)
{
if (!strncmp (p, "volatile ", 9))
{
p += 9;
continue;
}
{
p += 9;
continue;
}
if (!strncmp (p, "const ", 6))
{
p += 6;
continue;
}
{
p += 6;
continue;
}
break;
}
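[Editor's sketch: the qualifier-skipping loop above, extracted as a standalone helper; the real function goes on to rewrite the type string.]

   #include <string.h>

   static const char *skip_quals (const char *p)
   {
     for (;;)
       {
         if (!strncmp (p, "volatile ", 9))
           { p += 9; continue; }
         if (!strncmp (p, "const ", 6))
           { p += 6; continue; }
         return p;   /* first token that is not a qualifier */
       }
   }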
@ -123,7 +123,7 @@ gen_formal_list_for_type (tree fntype, formals_style style)
const char *this_type;
if (*formal_list)
formal_list = concat (formal_list, ", ", NULL);
formal_list = concat (formal_list, ", ", NULL);
this_type = gen_type ("", TREE_VALUE (formal_type), ansi);
formal_list
@ -167,18 +167,18 @@ gen_formal_list_for_type (tree fntype, formals_style style)
if (!*formal_list)
{
if (TYPE_ARG_TYPES (fntype))
/* assert (TREE_VALUE (TYPE_ARG_TYPES (fntype)) == void_type_node); */
formal_list = "void";
/* assert (TREE_VALUE (TYPE_ARG_TYPES (fntype)) == void_type_node); */
formal_list = "void";
else
formal_list = "/* ??? */";
formal_list = "/* ??? */";
}
else
{
/* If there were at least some parameters, and if the formals-types-list
petered out to a NULL (i.e. without being terminated by a
void_type_node) then we need to tack on an ellipsis. */
petered out to a NULL (i.e. without being terminated by a
void_type_node) then we need to tack on an ellipsis. */
if (!formal_type)
formal_list = concat (formal_list, ", ...", NULL);
formal_list = concat (formal_list, ", ...", NULL);
}
return concat (" (", formal_list, ")", NULL);
@ -237,20 +237,20 @@ gen_formal_list_for_func_def (tree fndecl, formals_style style)
const char *this_formal;
if (*formal_list && ((style == ansi) || (style == k_and_r_names)))
formal_list = concat (formal_list, ", ", NULL);
formal_list = concat (formal_list, ", ", NULL);
this_formal = gen_decl (formal_decl, 0, style);
if (style == k_and_r_decls)
formal_list = concat (formal_list, this_formal, "; ", NULL);
formal_list = concat (formal_list, this_formal, "; ", NULL);
else
formal_list = concat (formal_list, this_formal, NULL);
formal_list = concat (formal_list, this_formal, NULL);
formal_decl = TREE_CHAIN (formal_decl);
}
if (style == ansi)
{
if (!DECL_ARGUMENTS (fndecl))
formal_list = concat (formal_list, "void", NULL);
formal_list = concat (formal_list, "void", NULL);
if (deserves_ellipsis (TREE_TYPE (fndecl)))
formal_list = concat (formal_list, ", ...", NULL);
formal_list = concat (formal_list, ", ...", NULL);
}
if ((style == ansi) || (style == k_and_r_names))
formal_list = concat (" (", formal_list, ")", NULL);
@ -309,23 +309,23 @@ gen_type (const char *ret_val, tree t, formals_style style)
else
{
switch (TREE_CODE (t))
{
case POINTER_TYPE:
if (TYPE_READONLY (t))
ret_val = concat ("const ", ret_val, NULL);
if (TYPE_VOLATILE (t))
ret_val = concat ("volatile ", ret_val, NULL);
{
case POINTER_TYPE:
if (TYPE_READONLY (t))
ret_val = concat ("const ", ret_val, NULL);
if (TYPE_VOLATILE (t))
ret_val = concat ("volatile ", ret_val, NULL);
ret_val = concat ("*", ret_val, NULL);
ret_val = concat ("*", ret_val, NULL);
if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE || TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE)
ret_val = concat ("(", ret_val, ")", NULL);
ret_val = gen_type (ret_val, TREE_TYPE (t), style);
ret_val = gen_type (ret_val, TREE_TYPE (t), style);
return ret_val;
return ret_val;
case ARRAY_TYPE:
case ARRAY_TYPE:
if (!COMPLETE_TYPE_P (t) || TREE_CODE (TYPE_SIZE (t)) != INTEGER_CST)
ret_val = gen_type (concat (ret_val, "[]", NULL),
TREE_TYPE (t), style);
@ -340,23 +340,23 @@ gen_type (const char *ret_val, tree t, formals_style style)
ret_val = gen_type (concat (ret_val, buff, NULL),
TREE_TYPE (t), style);
}
break;
break;
case FUNCTION_TYPE:
ret_val = gen_type (concat (ret_val,
case FUNCTION_TYPE:
ret_val = gen_type (concat (ret_val,
gen_formal_list_for_type (t, style),
NULL),
TREE_TYPE (t), style);
break;
break;
case IDENTIFIER_NODE:
data_type = IDENTIFIER_POINTER (t);
break;
case IDENTIFIER_NODE:
data_type = IDENTIFIER_POINTER (t);
break;
/* The following three cases are complicated by the fact that a
user may do something really stupid, like creating a brand new
"anonymous" type specification in a formal argument list (or as
part of a function return type specification). For example:
user may do something really stupid, like creating a brand new
"anonymous" type specification in a formal argument list (or as
part of a function return type specification). For example:
int f (enum { red, green, blue } color);
@ -364,7 +364,7 @@ gen_type (const char *ret_val, tree t, formals_style style)
to represent the (anonymous) type. Thus, we have to generate the
whole darn type specification. Yuck! */
case RECORD_TYPE:
case RECORD_TYPE:
if (TYPE_NAME (t))
data_type = IDENTIFIER_POINTER (TYPE_NAME (t));
else
@ -383,7 +383,7 @@ gen_type (const char *ret_val, tree t, formals_style style)
data_type = concat ("struct ", data_type, NULL);
break;
case UNION_TYPE:
case UNION_TYPE:
if (TYPE_NAME (t))
data_type = IDENTIFIER_POINTER (TYPE_NAME (t));
else
@ -402,7 +402,7 @@ gen_type (const char *ret_val, tree t, formals_style style)
data_type = concat ("union ", data_type, NULL);
break;
case ENUMERAL_TYPE:
case ENUMERAL_TYPE:
if (TYPE_NAME (t))
data_type = IDENTIFIER_POINTER (TYPE_NAME (t));
else
@ -422,33 +422,33 @@ gen_type (const char *ret_val, tree t, formals_style style)
data_type = concat ("enum ", data_type, NULL);
break;
case TYPE_DECL:
data_type = IDENTIFIER_POINTER (DECL_NAME (t));
break;
case TYPE_DECL:
data_type = IDENTIFIER_POINTER (DECL_NAME (t));
break;
case INTEGER_TYPE:
data_type = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t)));
/* Normally, `unsigned' is part of the deal. Not so if it comes
case INTEGER_TYPE:
data_type = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t)));
/* Normally, `unsigned' is part of the deal. Not so if it comes
with a type qualifier. */
if (TYPE_UNSIGNED (t) && TYPE_QUALS (t))
if (TYPE_UNSIGNED (t) && TYPE_QUALS (t))
data_type = concat ("unsigned ", data_type, NULL);
break;
case REAL_TYPE:
data_type = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t)));
break;
case REAL_TYPE:
data_type = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t)));
break;
case VOID_TYPE:
data_type = "void";
break;
case VOID_TYPE:
data_type = "void";
break;
case ERROR_MARK:
data_type = "[ERROR]";
break;
default:
gcc_unreachable ();
}
default:
gcc_unreachable ();
}
}
if (TYPE_READONLY (t))
ret_val = concat ("const ", ret_val, NULL);
@ -518,11 +518,11 @@ gen_decl (tree decl, int is_func_definition, formals_style style)
NULL);
/* Since we have already added in the formals list stuff, here we don't
add the whole "type" of the function we are considering (which
would include its parameter-list info), rather, we only add in
the "type" of the "type" of the function, which is really just
the return-type of the function (and does not include the parameter
list info). */
add the whole "type" of the function we are considering (which
would include its parameter-list info), rather, we only add in
the "type" of the "type" of the function, which is really just
the return-type of the function (and does not include the parameter
list info). */
ret_val = gen_type (ret_val, TREE_TYPE (TREE_TYPE (decl)), style);
}
View File
@ -574,7 +574,7 @@ const struct attribute_spec c_common_attribute_table[] =
{ "always_inline", 0, 0, true, false, false,
handle_always_inline_attribute },
{ "flatten", 0, 0, true, false, false,
handle_flatten_attribute },
handle_flatten_attribute },
{ "used", 0, 0, true, false, false,
handle_used_attribute },
{ "unused", 0, 0, false, false, false,
@ -773,10 +773,10 @@ c_expand_decl (tree decl)
{
/* Let the back-end know about this variable. */
if (!anon_aggr_type_p (TREE_TYPE (decl)))
emit_local_var (decl);
emit_local_var (decl);
else
expand_anon_union_decl (decl, NULL_TREE,
DECL_ANON_UNION_ELEMS (decl));
expand_anon_union_decl (decl, NULL_TREE,
DECL_ANON_UNION_ELEMS (decl));
}
else
return 0;
@ -1060,8 +1060,8 @@ vector_types_convertible_p (tree t1, tree t2)
{
return targetm.vector_opaque_p (t1)
|| targetm.vector_opaque_p (t2)
|| (tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))
&& (TREE_CODE (TREE_TYPE (t1)) != REAL_TYPE ||
|| (tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))
&& (TREE_CODE (TREE_TYPE (t1)) != REAL_TYPE ||
TYPE_PRECISION (t1) == TYPE_PRECISION (t2))
&& INTEGRAL_TYPE_P (TREE_TYPE (t1))
== INTEGRAL_TYPE_P (TREE_TYPE (t2)));
@ -1445,7 +1445,7 @@ verify_tree (tree x, struct tlist **pbefore_sp, struct tlist **pno_sp,
default:
/* For other expressions, simply recurse on their operands.
Manual tail recursion for unary expressions.
Manual tail recursion for unary expressions.
Other non-expressions need not be processed. */
if (cl == tcc_unary)
{
@ -2798,9 +2798,9 @@ c_common_get_alias_set (tree t)
But, the standard is wrong. In particular, this code is
legal C++:
int *ip;
int **ipp = &ip;
const int* const* cipp = ipp;
int *ip;
int **ipp = &ip;
const int* const* cipp = ipp;
And, it doesn't make sense for that to be legal unless you
can dereference IPP and CIPP. So, we ignore cv-qualifiers on
@ -3357,10 +3357,10 @@ c_common_nodes_and_builtins (void)
NONANSI_P, ATTRS, IMPLICIT, COND) \
if (NAME && COND) \
def_builtin_1 (ENUM, NAME, CLASS, \
builtin_types[(int) TYPE], \
builtin_types[(int) LIBTYPE], \
BOTH_P, FALLBACK_P, NONANSI_P, \
built_in_attributes[(int) ATTRS], IMPLICIT);
builtin_types[(int) TYPE], \
builtin_types[(int) LIBTYPE], \
BOTH_P, FALLBACK_P, NONANSI_P, \
built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
@ -3659,8 +3659,8 @@ c_add_case_label (splay_tree cases, tree cond, tree orig_type,
if (low_value && high_value)
{
/* If the LOW_VALUE and HIGH_VALUE are the same, then this isn't
really a case range, even though it was written that way.
Remove the HIGH_VALUE to simplify later processing. */
really a case range, even though it was written that way.
Remove the HIGH_VALUE to simplify later processing. */
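[Editor's note: this concerns the GNU case-range extension; a degenerate range like the one below is what has its HIGH_VALUE dropped by the check that follows.]

   int classify (int n)
   {
     switch (n)
       {
       case 4 ... 4:   /* low == high: treated exactly like `case 4:' */
         return 1;
       default:
         return 0;
       }
   }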
if (tree_int_cst_equal (low_value, high_value))
high_value = NULL_TREE;
else if (!tree_int_cst_lt (low_value, high_value))
@ -3877,9 +3877,9 @@ c_do_switch_warnings (splay_tree cases, location_t switch_location,
(splay_tree_key) low_value);
high_bound = splay_tree_successor (cases,
(splay_tree_key) low_value);
/* It is smaller than the LOW_VALUE, so there is no need to check
unless the LOW_BOUND is in fact itself a case range. */
unless the LOW_BOUND is in fact itself a case range. */
if (low_bound
&& CASE_HIGH ((tree) low_bound->value)
&& tree_int_cst_compare (CASE_HIGH ((tree) low_bound->value),
@ -4079,7 +4079,7 @@ handle_packed_attribute (tree *node, tree name, tree ARG_UNUSED (args),
if (TYPE_MAIN_VARIANT (*node) == *node)
{
/* If it is the main variant, then pack the other variants
too. This happens in,
too. This happens in,
struct Foo {
struct Foo const *ptr; // creates a variant w/o packed flag
@ -4224,8 +4224,8 @@ handle_always_inline_attribute (tree *node, tree name,
static tree
handle_flatten_attribute (tree *node, tree name,
tree args ATTRIBUTE_UNUSED,
int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
tree args ATTRIBUTE_UNUSED,
int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
if (TREE_CODE (*node) == FUNCTION_DECL)
/* Do nothing else, just set the attribute. We'll get at
@ -4567,14 +4567,14 @@ handle_mode_attribute (tree *node, tree name, tree args,
return NULL_TREE;
}
if (TREE_CODE (type) == POINTER_TYPE)
if (TREE_CODE (type) == POINTER_TYPE)
fn = build_pointer_type_for_mode;
else
fn = build_reference_type_for_mode;
typefm = fn (TREE_TYPE (type), mode, false);
}
else
typefm = lang_hooks.types.type_for_mode (mode, TYPE_UNSIGNED (type));
typefm = lang_hooks.types.type_for_mode (mode, TYPE_UNSIGNED (type));
if (typefm == NULL_TREE)
{
@ -4881,9 +4881,9 @@ handle_visibility_attribute (tree *node, tree name, tree args,
{
if (TREE_CODE (*node) != RECORD_TYPE && TREE_CODE (*node) != UNION_TYPE)
{
warning (OPT_Wattributes, "%qE attribute ignored on non-class types",
warning (OPT_Wattributes, "%qE attribute ignored on non-class types",
name);
return NULL_TREE;
return NULL_TREE;
}
}
else if (decl_function_context (decl) != 0 || !TREE_PUBLIC (decl))
@ -4903,7 +4903,7 @@ handle_visibility_attribute (tree *node, tree name, tree args,
{
decl = TYPE_NAME (decl);
if (!decl)
return NULL_TREE;
return NULL_TREE;
if (TREE_CODE (decl) == IDENTIFIER_NODE)
{
warning (OPT_Wattributes, "%qE attribute ignored on types",
@ -5387,15 +5387,15 @@ check_function_sentinel (tree attrs, tree params, tree typelist)
typelist = TREE_CHAIN (typelist);
params = TREE_CHAIN (params);
}
if (typelist || !params)
warning (OPT_Wformat,
"not enough variable arguments to fit a sentinel");
else
{
{
tree sentinel, end;
unsigned pos = 0;
if (TREE_VALUE (attr))
{
tree p = TREE_VALUE (TREE_VALUE (attr));
@ -5432,7 +5432,7 @@ check_function_sentinel (tree attrs, tree params, tree typelist)
as wide as a pointer, and we don't want to force
users to cast the NULL they have written there.
We warn with -Wstrict-null-sentinel, though. */
&& (warn_strict_null_sentinel
&& (warn_strict_null_sentinel
|| null_node != TREE_VALUE (sentinel)))
warning (OPT_Wformat, "missing sentinel in function call");
}
@ -5596,24 +5596,24 @@ handle_sentinel_attribute (tree *node, tree name, tree args,
params = TREE_CHAIN (params);
if (VOID_TYPE_P (TREE_VALUE (params)))
{
{
warning (OPT_Wattributes,
"%qE attribute only applies to variadic functions", name);
*no_add_attrs = true;
}
}
if (args)
{
tree position = TREE_VALUE (args);
if (TREE_CODE (position) != INTEGER_CST)
{
{
warning (0, "requested position is not an integer constant");
*no_add_attrs = true;
}
else
{
{
if (tree_int_cst_lt (position, integer_zero_node))
{
warning (0, "requested position is less than zero");
@ -5621,7 +5621,7 @@ handle_sentinel_attribute (tree *node, tree name, tree args,
}
}
}
return NULL_TREE;
}
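[Editor's sketch of using the attribute validated above, with a hypothetical function: the rule is that the last variadic argument must be a null pointer.]

   #include <stddef.h>

   extern void join (char *dst, ...) __attribute__ ((sentinel));

   void demo (char *buf)
   {
     join (buf, "a", "b", NULL);   /* OK: NULL sentinel present        */
     /* join (buf, "a", "b");        warns: missing sentinel in call   */
   }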
@ -5826,9 +5826,9 @@ c_parse_error (const char *gmsgid, enum cpp_ttype token, tree value)
unsigned int val = TREE_INT_CST_LOW (value);
const char *const ell = (token == CPP_CHAR) ? "" : "L";
if (val <= UCHAR_MAX && ISGRAPH (val))
message = catenate_messages (gmsgid, " before %s'%c'");
message = catenate_messages (gmsgid, " before %s'%c'");
else
message = catenate_messages (gmsgid, " before %s'\\x%x'");
message = catenate_messages (gmsgid, " before %s'\\x%x'");
error (message, ell, val);
free (message);
@ -5864,7 +5864,7 @@ c_parse_error (const char *gmsgid, enum cpp_ttype token, tree value)
error (message);
free (message);
}
#undef catenate_messages
#undef catenate_messages
}
/* Walk a gimplified function and warn for functions whose return value is
@ -6196,7 +6196,7 @@ sync_resolve_size (tree function, tree params)
return 0;
}
/* A helper function for resolve_overloaded_builtin. Adds casts to
/* A helper function for resolve_overloaded_builtin. Adds casts to
PARAMS to make arguments match up with those of FUNCTION. Drops
the variadic arguments at the end. Returns false if some error
was encountered; true on success. */
@ -6249,7 +6249,7 @@ sync_resolve_params (tree orig_function, tree function, tree params)
return true;
}
/* A helper function for resolve_overloaded_builtin. Adds a cast to
/* A helper function for resolve_overloaded_builtin. Adds a cast to
RESULT to make it match the type of the first pointer argument in
PARAMS. */
@ -6280,13 +6280,13 @@ resolve_overloaded_builtin (tree function, tree params)
break;
case BUILT_IN_MD:
if (targetm.resolve_overloaded_builtin)
return targetm.resolve_overloaded_builtin (function, params);
return targetm.resolve_overloaded_builtin (function, params);
else
return NULL_TREE;
return NULL_TREE;
default:
return NULL_TREE;
}
/* Handle BUILT_IN_NORMAL here. */
switch (orig_code)
{
View File
@ -82,9 +82,9 @@ enum rid
RID_PUBLIC, RID_PRIVATE, RID_PROTECTED,
RID_TEMPLATE, RID_NULL, RID_CATCH,
RID_DELETE, RID_FALSE, RID_NAMESPACE,
RID_NEW, RID_OFFSETOF, RID_OPERATOR,
RID_THIS, RID_THROW, RID_TRUE,
RID_TRY, RID_TYPENAME, RID_TYPEID,
RID_NEW, RID_OFFSETOF, RID_OPERATOR,
RID_THIS, RID_THROW, RID_TRUE,
RID_TRY, RID_TYPENAME, RID_TYPEID,
RID_USING,
/* casts */
@ -94,7 +94,7 @@ enum rid
RID_AT_ENCODE, RID_AT_END,
RID_AT_CLASS, RID_AT_ALIAS, RID_AT_DEFS,
RID_AT_PRIVATE, RID_AT_PROTECTED, RID_AT_PUBLIC,
RID_AT_PROTOCOL, RID_AT_SELECTOR,
RID_AT_PROTOCOL, RID_AT_SELECTOR,
RID_AT_THROW, RID_AT_TRY, RID_AT_CATCH,
RID_AT_FINALLY, RID_AT_SYNCHRONIZED,
RID_AT_INTERFACE,
@ -156,7 +156,7 @@ enum c_tree_index
CTI_PRETTY_FUNCTION_NAME_DECL,
CTI_C99_FUNCTION_NAME_DECL,
CTI_SAVED_FUNCTION_NAME_DECLS,
CTI_VOID_ZERO,
CTI_NULL,
@ -304,8 +304,8 @@ extern tree build_indirect_ref (tree, const char *);
extern int c_expand_decl (tree);
extern int field_decl_cmp (const void *, const void *);
extern void resort_sorted_fields (void *, void *, gt_pointer_operator,
void *);
extern void resort_sorted_fields (void *, void *, gt_pointer_operator,
void *);
extern bool has_c_linkage (tree decl);
/* Switches common to the C front ends. */
@ -406,9 +406,9 @@ extern int flag_const_strings;
extern int flag_signed_bitfields;
/* Warn about #pragma directives that are not recognized. */
/* Warn about #pragma directives that are not recognized. */
extern int warn_unknown_pragmas; /* Tri state variable. */
extern int warn_unknown_pragmas; /* Tri state variable. */
/* Warn about format/argument anomalies in calls to formatted I/O functions
(*printf, *scanf, strftime, strfmon, etc.). */
View File
@ -231,7 +231,7 @@ builtin_define_float_constants (const char *name_prefix,
/* This is an IBM extended double format, so 1.0 + any double is
representable precisely. */
sprintf (buf, "0x1p%d", (fmt->emin - fmt->p) * fmt->log2_b);
else
else
sprintf (buf, "0x1p%d", (1 - fmt->p) * fmt->log2_b);
builtin_define_with_hex_fp_value (name, type, decimal_dig, buf, fp_suffix, fp_cast);
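[Editor's note: a worked instance of the "0x1p%d" strings built above. For IEEE double, p == 53 and log2_b == 1, so the non-IBM branch produces 0x1p-52, which is exactly DBL_EPSILON.]

   #include <float.h>
   #include <stdio.h>

   int main (void)
   {
     printf ("%a\n", DBL_EPSILON);   /* prints 0x1p-52 == 2**(1 - 53) */
     return 0;
   }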
@ -351,7 +351,7 @@ define__GNUC__ (void)
builtin_define_with_value_n ("__GNUG__", q, v - q);
gcc_assert (*v == '.' && ISDIGIT (v[1]));
q = ++v;
while (ISDIGIT (*v))
v++;
@ -403,7 +403,7 @@ c_cpp_builtins (cpp_reader *pfile)
if (c_dialect_cxx ())
{
if (flag_weak && SUPPORTS_ONE_ONLY)
if (flag_weak && SUPPORTS_ONE_ONLY)
cpp_define (pfile, "__GXX_WEAK__=1");
else
cpp_define (pfile, "__GXX_WEAK__=0");
@ -420,7 +420,7 @@ c_cpp_builtins (cpp_reader *pfile)
if (flag_abi_version == 0)
/* Use a very large value so that:
#if __GXX_ABI_VERSION >= <value for version X>
#if __GXX_ABI_VERSION >= <value for version X>
will work whether the user explicitly says "-fabi-version=x" or
"-fabi-version=0". Do not use INT_MAX because that will be
@ -432,7 +432,7 @@ c_cpp_builtins (cpp_reader *pfile)
builtin_define_with_int_value ("__GXX_ABI_VERSION", 102);
else
/* Newer versions have values 1002, 1003, .... */
builtin_define_with_int_value ("__GXX_ABI_VERSION",
builtin_define_with_int_value ("__GXX_ABI_VERSION",
1000 + flag_abi_version);
/* libgcc needs to know this. */
View File
@ -770,8 +770,8 @@ pop_scope (void)
TREE_CHAIN (p) = BLOCK_VARS (block);
BLOCK_VARS (block) = p;
gcc_assert (I_LABEL_BINDING (b->id) == b);
I_LABEL_BINDING (b->id) = b->shadowed;
break;
I_LABEL_BINDING (b->id) = b->shadowed;
break;
case ENUMERAL_TYPE:
case UNION_TYPE:
@ -785,7 +785,7 @@ pop_scope (void)
gcc_assert (I_TAG_BINDING (b->id) == b);
I_TAG_BINDING (b->id) = b->shadowed;
}
break;
break;
case FUNCTION_DECL:
/* Propagate TREE_ADDRESSABLE from nested functions to their
@ -812,7 +812,7 @@ pop_scope (void)
&& DECL_NAME (p)
&& !DECL_ARTIFICIAL (p)
&& scope != file_scope
&& scope != external_scope)
&& scope != external_scope)
warning (OPT_Wunused_variable, "unused variable %q+D", p);
if (b->inner_comp)
@ -1484,7 +1484,7 @@ diagnose_mismatched_decls (tree newdecl, tree olddecl,
/* warnings */
/* All decls must agree on a visibility. */
if (CODE_CONTAINS_STRUCT (TREE_CODE (newdecl), TS_DECL_WITH_VIS)
if (CODE_CONTAINS_STRUCT (TREE_CODE (newdecl), TS_DECL_WITH_VIS)
&& DECL_VISIBILITY_SPECIFIED (newdecl) && DECL_VISIBILITY_SPECIFIED (olddecl)
&& DECL_VISIBILITY (newdecl) != DECL_VISIBILITY (olddecl))
{
@ -1704,7 +1704,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
are assigned. */
if (DECL_SECTION_NAME (newdecl) == NULL_TREE)
DECL_SECTION_NAME (newdecl) = DECL_SECTION_NAME (olddecl);
/* Copy the assembler name.
Currently, it can only be defined in the prototype. */
COPY_DECL_ASSEMBLER_NAME (olddecl, newdecl);
@ -1715,7 +1715,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
DECL_VISIBILITY (newdecl) = DECL_VISIBILITY (olddecl);
DECL_VISIBILITY_SPECIFIED (newdecl) = 1;
}
if (TREE_CODE (newdecl) == FUNCTION_DECL)
{
DECL_STATIC_CONSTRUCTOR(newdecl) |= DECL_STATIC_CONSTRUCTOR(olddecl);
@ -1729,7 +1729,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
DECL_IS_PURE (newdecl) |= DECL_IS_PURE (olddecl);
DECL_IS_NOVOPS (newdecl) |= DECL_IS_NOVOPS (olddecl);
}
/* Merge the storage class information. */
merge_weak (newdecl, olddecl);
@ -1745,7 +1745,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
TREE_PUBLIC (DECL_NAME (olddecl)) = 0;
}
}
if (DECL_EXTERNAL (newdecl))
{
TREE_STATIC (newdecl) = TREE_STATIC (olddecl);
@ -1764,8 +1764,8 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
TREE_STATIC (olddecl) = TREE_STATIC (newdecl);
TREE_PUBLIC (olddecl) = TREE_PUBLIC (newdecl);
}
if (TREE_CODE (newdecl) == FUNCTION_DECL)
if (TREE_CODE (newdecl) == FUNCTION_DECL)
{
/* If we're redefining a function previously defined as extern
inline, make sure we emit debug info for the inline before we
@ -1837,13 +1837,13 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
DECL_INLINE (newdecl) = 1;
}
}
/* Copy most of the decl-specific fields of NEWDECL into OLDDECL.
But preserve OLDDECL's DECL_UID and DECL_CONTEXT. */
{
unsigned olddecl_uid = DECL_UID (olddecl);
tree olddecl_context = DECL_CONTEXT (olddecl);
memcpy ((char *) olddecl + sizeof (struct tree_common),
(char *) newdecl + sizeof (struct tree_common),
sizeof (struct tree_decl_common) - sizeof (struct tree_common));
@ -1863,7 +1863,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
break;
default:
memcpy ((char *) olddecl + sizeof (struct tree_decl_common),
(char *) newdecl + sizeof (struct tree_decl_common),
sizeof (struct tree_decl_non_common) - sizeof (struct tree_decl_common));
@ -1972,7 +1972,7 @@ warn_if_shadowing (tree new_decl)
translation and get back the corresponding typedef name. For
example, given:
typedef struct S MY_TYPE;
typedef struct S MY_TYPE;
MY_TYPE object;
Later parts of the compiler might only know that `object' was of
@ -2456,7 +2456,7 @@ undeclared_variable (tree id, location_t loc)
}
/* If we are parsing old-style parameter decls, current_function_decl
will be nonnull but current_function_scope will be null. */
will be nonnull but current_function_scope will be null. */
scope = current_function_scope ? current_function_scope : current_scope;
}
bind (id, error_mark_node, scope, /*invisible=*/false, /*nested=*/false);
@ -2595,7 +2595,7 @@ define_label (location_t location, tree name)
if (!in_system_header && lookup_name (name))
warning (OPT_Wtraditional, "%Htraditional C lacks a separate namespace "
"for labels, identifier %qE conflicts", &location, name);
"for labels, identifier %qE conflicts", &location, name);
nlist_se = XOBNEW (&parser_obstack, struct c_label_list);
nlist_se->next = label_context_stack_se->labels_def;
@ -2670,7 +2670,7 @@ pending_xref_error (void)
{
if (pending_invalid_xref != 0)
error ("%H%qE defined as wrong kind of tag",
&pending_invalid_xref_location, pending_invalid_xref);
&pending_invalid_xref_location, pending_invalid_xref);
pending_invalid_xref = 0;
}
@ -3460,8 +3460,8 @@ finish_decl (tree decl, tree init, tree asmspec_tree)
maybe_apply_pragma_weak (decl);
/* If this is a variable definition, determine its ELF visibility. */
if (TREE_CODE (decl) == VAR_DECL
&& TREE_STATIC (decl)
if (TREE_CODE (decl) == VAR_DECL
&& TREE_STATIC (decl)
&& !DECL_EXTERNAL (decl))
c_determine_visibility (decl);
@ -3475,7 +3475,7 @@ finish_decl (tree decl, tree init, tree asmspec_tree)
if (c_dialect_objc ())
objc_check_decl (decl);
if (asmspec)
if (asmspec)
{
/* If this is not a static variable, issue a warning.
It doesn't make any sense to give an ASMSPEC for an
@ -3491,7 +3491,7 @@ finish_decl (tree decl, tree init, tree asmspec_tree)
else
set_user_assembler_name (decl, asmspec);
}
if (DECL_FILE_SCOPE_P (decl))
{
if (DECL_INITIAL (decl) == NULL_TREE
@ -3537,7 +3537,7 @@ finish_decl (tree decl, tree init, tree asmspec_tree)
add_stmt (build_stmt (DECL_EXPR, decl));
}
}
if (!DECL_FILE_SCOPE_P (decl))
{
@ -3821,7 +3821,7 @@ check_bitfield_type_and_width (tree *type, tree *width, const char *orig_name)
{
struct lang_type *lt = TYPE_LANG_SPECIFIC (*type);
if (!lt
|| w < min_precision (lt->enum_min, TYPE_UNSIGNED (*type))
|| w < min_precision (lt->enum_min, TYPE_UNSIGNED (*type))
|| w < min_precision (lt->enum_max, TYPE_UNSIGNED (*type)))
warning (0, "%qs is narrower than values of its type", name);
}
@ -4156,25 +4156,25 @@ grokdeclarator (const struct c_declarator *declarator,
declarator = declarator->declarator;
/* Check for some types that there cannot be arrays of. */
if (VOID_TYPE_P (type))
{
error ("declaration of %qs as array of voids", name);
type = error_mark_node;
}
if (TREE_CODE (type) == FUNCTION_TYPE)
{
error ("declaration of %qs as array of functions", name);
type = error_mark_node;
}
if (pedantic && !in_system_header && flexible_array_type_p (type))
pedwarn ("invalid use of structure with flexible array member");
if (size == error_mark_node)
type = error_mark_node;
if (type == error_mark_node)
continue;
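[Editor's note: source-level examples of the declarations these checks reject, kept in a comment since none of them compile.]

   /* void v[3];        -> "declaration of 'v' as array of voids"
      int  f[3](void);  -> "declaration of 'f' as array of functions"
      int  z[0];        -> "ISO C forbids zero-size array" (with -pedantic,
                           checked a few lines further down)              */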
@ -4187,16 +4187,16 @@ grokdeclarator (const struct c_declarator *declarator,
/* Strip NON_LVALUE_EXPRs since we aren't using as an
lvalue. */
STRIP_TYPE_NOPS (size);
if (!INTEGRAL_TYPE_P (TREE_TYPE (size)))
{
error ("size of array %qs has non-integer type", name);
size = integer_one_node;
}
if (pedantic && integer_zerop (size))
pedwarn ("ISO C forbids zero-size array %qs", name);
if (TREE_CODE (size) == INTEGER_CST)
{
constant_expression_warning (size);
@ -4212,7 +4212,7 @@ grokdeclarator (const struct c_declarator *declarator,
nonconstant even if it is (eg) a const variable
with known value. */
size_varies = 1;
if (!flag_isoc99 && pedantic)
{
if (TREE_CONSTANT (size))
@ -4227,10 +4227,10 @@ grokdeclarator (const struct c_declarator *declarator,
if (integer_zerop (size))
{
/* A zero-length array cannot be represented with
an unsigned index type, which is what we'll
get with build_index_type. Create an
open-ended range instead. */
/* A zero-length array cannot be represented with
an unsigned index type, which is what we'll
get with build_index_type. Create an
open-ended range instead. */
itype = build_range_type (sizetype, size, NULL_TREE);
}
else
@ -4250,7 +4250,7 @@ grokdeclarator (const struct c_declarator *declarator,
convert (index_type,
size_one_node));
/* If that overflowed, the array is too big. ???
/* If that overflowed, the array is too big. ???
While a size of INT_MAX+1 technically shouldn't
cause an overflow (because we subtract 1), the
overflow is recorded during the conversion to
@ -4264,7 +4264,7 @@ grokdeclarator (const struct c_declarator *declarator,
type = error_mark_node;
continue;
}
itype = build_index_type (itype);
}
}
@ -4296,7 +4296,7 @@ grokdeclarator (const struct c_declarator *declarator,
if (!COMPLETE_TYPE_P (type))
{
error ("array type has incomplete element type");
type = error_mark_node;
type = error_mark_node;
}
else
type = build_array_type (type, itype);
@ -4353,7 +4353,7 @@ grokdeclarator (const struct c_declarator *declarator,
type for the function to return. */
if (type == error_mark_node)
continue;
size_varies = 0;
/* Warn about some types functions can't return. */
@ -4377,7 +4377,7 @@ grokdeclarator (const struct c_declarator *declarator,
qualify the return type, not the function type. */
if (type_quals)
{
/* Type qualifiers on a function return type are
/* Type qualifiers on a function return type are
normally permitted by the standard but have no
effect, so give a warning at -Wreturn-type.
Qualifiers on a void return type are banned on
@ -4388,20 +4388,20 @@ grokdeclarator (const struct c_declarator *declarator,
else
warning (OPT_Wreturn_type,
"type qualifiers ignored on function return type");
type = c_build_qualified_type (type, type_quals);
}
type_quals = TYPE_UNQUALIFIED;
type = build_function_type (type, arg_types);
declarator = declarator->declarator;
/* Set the TYPE_CONTEXTs for each tagged type which is local to
the formal parameter list of this FUNCTION_TYPE to point to
the FUNCTION_TYPE node itself. */
{
tree link;
for (link = arg_info->tags;
link;
link = TREE_CHAIN (link))
@ -4422,7 +4422,7 @@ grokdeclarator (const struct c_declarator *declarator,
size_varies = 0;
type = build_pointer_type (type);
/* Process type qualifiers (such as const or volatile)
that were given inside the `*'. */
type_quals = declarator->u.pointer_quals;
@ -4608,7 +4608,7 @@ grokdeclarator (const struct c_declarator *declarator,
type = build_pointer_type (type);
}
else if (TREE_CODE (type) != ERROR_MARK
&& !COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (type))
&& !COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (type))
{
error ("field %qs has incomplete type", name);
type = error_mark_node;
@ -4640,8 +4640,8 @@ grokdeclarator (const struct c_declarator *declarator,
}
else if (storage_class == csc_static)
{
error ("invalid storage class for function %qs", name);
if (funcdef_flag)
error ("invalid storage class for function %qs", name);
if (funcdef_flag)
storage_class = declspecs->storage_class = csc_none;
else
return 0;
@ -4963,7 +4963,7 @@ get_parm_info (bool ellipsis)
(by 'const' or 'volatile'), or has a storage class specifier
('register'), then the behavior is undefined; issue an error.
Typedefs for 'void' are OK (see DR#157). */
if (b->prev == 0 /* one binding */
if (b->prev == 0 /* one binding */
&& TREE_CODE (b->decl) == PARM_DECL /* which is a parameter */
&& !DECL_NAME (b->decl) /* anonymous */
&& VOID_TYPE_P (TREE_TYPE (b->decl))) /* of void type */
@ -5188,17 +5188,17 @@ start_struct (enum tree_code code, tree name)
if (ref && TREE_CODE (ref) == code)
{
if (TYPE_SIZE (ref))
{
{
if (code == UNION_TYPE)
error ("redefinition of %<union %E%>", name);
else
else
error ("redefinition of %<struct %E%>", name);
}
else if (C_TYPE_BEING_DEFINED (ref))
{
if (code == UNION_TYPE)
error ("nested redefinition of %<union %E%>", name);
else
else
error ("nested redefinition of %<struct %E%>", name);
}
}
@ -5507,46 +5507,46 @@ finish_struct (tree t, tree fieldlist, tree attributes)
for (x = fieldlist; x; x = TREE_CHAIN (x))
{
if (len > 15 || DECL_NAME (x) == NULL)
break;
len += 1;
if (len > 15 || DECL_NAME (x) == NULL)
break;
len += 1;
}
if (len > 15)
{
tree *field_array;
struct lang_type *space;
struct sorted_fields_type *space2;
tree *field_array;
struct lang_type *space;
struct sorted_fields_type *space2;
len += list_length (x);
len += list_length (x);
/* Use the same allocation policy here that make_node uses, to
ensure that this lives as long as the rest of the struct decl.
All decls in an inline function need to be saved. */
/* Use the same allocation policy here that make_node uses, to
ensure that this lives as long as the rest of the struct decl.
All decls in an inline function need to be saved. */
space = GGC_CNEW (struct lang_type);
space2 = GGC_NEWVAR (struct sorted_fields_type,
space = GGC_CNEW (struct lang_type);
space2 = GGC_NEWVAR (struct sorted_fields_type,
sizeof (struct sorted_fields_type) + len * sizeof (tree));
len = 0;
len = 0;
space->s = space2;
field_array = &space2->elts[0];
for (x = fieldlist; x; x = TREE_CHAIN (x))
{
field_array[len++] = x;
for (x = fieldlist; x; x = TREE_CHAIN (x))
{
field_array[len++] = x;
/* If there is anonymous struct or union, break out of the loop. */
if (DECL_NAME (x) == NULL)
break;
}
/* Found no anonymous struct/union. Add the TYPE_LANG_SPECIFIC. */
if (x == NULL)
{
TYPE_LANG_SPECIFIC (t) = space;
TYPE_LANG_SPECIFIC (t)->s->len = len;
field_array = TYPE_LANG_SPECIFIC (t)->s->elts;
qsort (field_array, len, sizeof (tree), field_decl_cmp);
}
/* If there is anonymous struct or union, break out of the loop. */
if (DECL_NAME (x) == NULL)
break;
}
/* Found no anonymous struct/union. Add the TYPE_LANG_SPECIFIC. */
if (x == NULL)
{
TYPE_LANG_SPECIFIC (t) = space;
TYPE_LANG_SPECIFIC (t)->s->len = len;
field_array = TYPE_LANG_SPECIFIC (t)->s->elts;
qsort (field_array, len, sizeof (tree), field_decl_cmp);
}
}
}
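[Editor's sketch: outside of GCC's tree machinery, the speed-up above reads roughly like this — past 15 named members, keep a name-sorted index so member lookup can binary-search instead of walking a list.]

   #include <stdlib.h>
   #include <string.h>

   struct field { const char *name; /* ... */ };

   static int field_cmp (const void *a, const void *b)
   {
     const struct field *const *fa = a, *const *fb = b;
     return strcmp ((*fa)->name, (*fb)->name);
   }

   static void sort_fields (struct field **arr, size_t n)
   {
     qsort (arr, n, sizeof *arr, field_cmp);   /* mirrors field_decl_cmp + qsort */
   }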
@ -6100,7 +6100,7 @@ start_function (struct c_declspecs *declspecs, struct c_declarator *declarator,
|| (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (type)))
!= char_type_node))
pedwarn ("second argument of %q+D should be %<char **%>",
decl1);
decl1);
break;
case 3:
@ -6109,7 +6109,7 @@ start_function (struct c_declspecs *declspecs, struct c_declarator *declarator,
|| (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (type)))
!= char_type_node))
pedwarn ("third argument of %q+D should probably be "
"%<char **%>", decl1);
"%<char **%>", decl1);
break;
}
}
@ -6573,12 +6573,12 @@ finish_function (void)
{
tree args = DECL_ARGUMENTS (fndecl);
for (; args; args = TREE_CHAIN (args))
{
tree type = TREE_TYPE (args);
if (INTEGRAL_TYPE_P (type)
&& TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
DECL_ARG_TYPE (args) = integer_type_node;
}
{
tree type = TREE_TYPE (args);
if (INTEGRAL_TYPE_P (type)
&& TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
DECL_ARG_TYPE (args) = integer_type_node;
}
}
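[Editor's sketch: the loop above restores default argument promotions for old-style definitions; the C rule it reflects, as source.]

   int old_style (c)
        char c;      /* the caller passed an int; DECL_ARG_TYPE must say so */
   {
     return c;
   }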
if (DECL_INITIAL (fndecl) && DECL_INITIAL (fndecl) != error_mark_node)
@ -6677,9 +6677,9 @@ finish_function (void)
&& !undef_nested_function)
{
if (!decl_function_context (fndecl))
{
c_genericize (fndecl);
c_gimple_diagnostics_recursively (fndecl);
{
c_genericize (fndecl);
c_gimple_diagnostics_recursively (fndecl);
/* ??? Objc emits functions after finalizing the compilation unit.
This should be cleaned up later and this conditional removed. */
@ -6690,14 +6690,14 @@ finish_function (void)
}
cgraph_finalize_function (fndecl, false);
}
}
else
{
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. Handy, since the
C front end doesn't have such a list. */
(void) cgraph_node (fndecl);
}
{
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. Handy, since the
C front end doesn't have such a list. */
(void) cgraph_node (fndecl);
}
}
if (!decl_function_context (fndecl))
@ -6725,11 +6725,11 @@ c_expand_body (tree fndecl)
if (DECL_STATIC_CONSTRUCTOR (fndecl)
&& targetm.have_ctors_dtors)
targetm.asm_out.constructor (XEXP (DECL_RTL (fndecl), 0),
DEFAULT_INIT_PRIORITY);
DEFAULT_INIT_PRIORITY);
if (DECL_STATIC_DESTRUCTOR (fndecl)
&& targetm.have_ctors_dtors)
targetm.asm_out.destructor (XEXP (DECL_RTL (fndecl), 0),
DEFAULT_INIT_PRIORITY);
DEFAULT_INIT_PRIORITY);
}
/* Check the declarations given in a for-loop for satisfying the C99
@ -7815,10 +7815,10 @@ c_write_global_declarations (void)
int flags;
FILE * stream = dump_begin (TDI_tu, &flags);
if (stream && tmp)
{
dump_node (tmp, flags & ~TDF_SLIM, stream);
dump_end (TDI_tu, stream);
}
{
dump_node (tmp, flags & ~TDF_SLIM, stream);
dump_end (TDI_tu, stream);
}
}
/* Process all file scopes in this compilation, and the external_scope,
View File
@ -36,10 +36,10 @@ pedwarn_c99 (const char *gmsgid, ...)
{
diagnostic_info diagnostic;
va_list ap;
va_start (ap, gmsgid);
diagnostic_set_info (&diagnostic, gmsgid, &ap, input_location,
flag_isoc99 ? pedantic_error_kind () : DK_WARNING);
flag_isoc99 ? pedantic_error_kind () : DK_WARNING);
report_diagnostic (&diagnostic);
va_end (ap);
}
@ -57,7 +57,7 @@ pedwarn_c90 (const char *gmsgid, ...)
va_start (ap, gmsgid);
diagnostic_set_info (&diagnostic, gmsgid, &ap, input_location,
flag_isoc99 ? DK_WARNING : pedantic_error_kind ());
flag_isoc99 ? DK_WARNING : pedantic_error_kind ());
report_diagnostic (&diagnostic);
va_end (ap);
}
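[Editor's sketch: the forwarding pattern shared by both wrappers, reduced to a self-contained form with a hypothetical sink standing in for report_diagnostic.]

   #include <stdarg.h>
   #include <stdio.h>

   static void report (int is_error, const char *fmt, va_list ap)
   {
     fputs (is_error ? "error: " : "warning: ", stderr);
     vfprintf (stderr, fmt, ap);
     fputc ('\n', stderr);
   }

   void pedwarn_like (int treat_as_error, const char *fmt, ...)
   {
     va_list ap;
     va_start (ap, fmt);
     report (treat_as_error, fmt, ap);   /* severity picked from a flag */
     va_end (ap);
   }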
View File
@ -670,20 +670,20 @@ static const format_char_info time_char_table[] =
{
/* C89 conversion specifiers. */
{ "ABZab", 0, STD_C89, NOLENGTHS, "^#", "", NULL },
{ "cx", 0, STD_C89, NOLENGTHS, "E", "3", NULL },
{ "cx", 0, STD_C89, NOLENGTHS, "E", "3", NULL },
{ "HIMSUWdmw", 0, STD_C89, NOLENGTHS, "-_0Ow", "", NULL },
{ "j", 0, STD_C89, NOLENGTHS, "-_0Ow", "o", NULL },
{ "p", 0, STD_C89, NOLENGTHS, "#", "", NULL },
{ "X", 0, STD_C89, NOLENGTHS, "E", "", NULL },
{ "y", 0, STD_C89, NOLENGTHS, "EO-_0w", "4", NULL },
{ "y", 0, STD_C89, NOLENGTHS, "EO-_0w", "4", NULL },
{ "Y", 0, STD_C89, NOLENGTHS, "-_0EOw", "o", NULL },
{ "%", 0, STD_C89, NOLENGTHS, "", "", NULL },
/* C99 conversion specifiers. */
{ "C", 0, STD_C99, NOLENGTHS, "-_0EOw", "o", NULL },
{ "D", 0, STD_C99, NOLENGTHS, "", "2", NULL },
{ "D", 0, STD_C99, NOLENGTHS, "", "2", NULL },
{ "eVu", 0, STD_C99, NOLENGTHS, "-_0Ow", "", NULL },
{ "FRTnrt", 0, STD_C99, NOLENGTHS, "", "", NULL },
{ "g", 0, STD_C99, NOLENGTHS, "O-_0w", "2o", NULL },
{ "g", 0, STD_C99, NOLENGTHS, "O-_0w", "2o", NULL },
{ "G", 0, STD_C99, NOLENGTHS, "-_0Ow", "o", NULL },
{ "h", 0, STD_C99, NOLENGTHS, "^#", "", NULL },
{ "z", 0, STD_C99, NOLENGTHS, "O", "o", NULL },
@ -702,49 +702,49 @@ static const format_char_info monetary_char_table[] =
/* This must be in the same order as enum format_type. */
static const format_kind_info format_types_orig[] =
{
{ "printf", printf_length_specs, print_char_table, " +#0-'I", NULL,
{ "printf", printf_length_specs, print_char_table, " +#0-'I", NULL,
printf_flag_specs, printf_flag_pairs,
FMT_FLAG_ARG_CONVERT|FMT_FLAG_DOLLAR_MULTIPLE|FMT_FLAG_USE_DOLLAR|FMT_FLAG_EMPTY_PREC_OK,
'w', 0, 'p', 0, 'L',
&integer_type_node, &integer_type_node
},
{ "asm_fprintf", asm_fprintf_length_specs, asm_fprintf_char_table, " +#0-", NULL,
{ "asm_fprintf", asm_fprintf_length_specs, asm_fprintf_char_table, " +#0-", NULL,
asm_fprintf_flag_specs, asm_fprintf_flag_pairs,
FMT_FLAG_ARG_CONVERT|FMT_FLAG_EMPTY_PREC_OK,
'w', 0, 'p', 0, 'L',
NULL, NULL
},
{ "gcc_diag", gcc_diag_length_specs, gcc_diag_char_table, "q+", NULL,
{ "gcc_diag", gcc_diag_length_specs, gcc_diag_char_table, "q+", NULL,
gcc_diag_flag_specs, gcc_diag_flag_pairs,
FMT_FLAG_ARG_CONVERT,
0, 0, 'p', 0, 'L',
NULL, &integer_type_node
},
{ "gcc_tdiag", gcc_tdiag_length_specs, gcc_tdiag_char_table, "q+", NULL,
{ "gcc_tdiag", gcc_tdiag_length_specs, gcc_tdiag_char_table, "q+", NULL,
gcc_tdiag_flag_specs, gcc_tdiag_flag_pairs,
FMT_FLAG_ARG_CONVERT,
0, 0, 'p', 0, 'L',
NULL, &integer_type_node
},
{ "gcc_cdiag", gcc_cdiag_length_specs, gcc_cdiag_char_table, "q+", NULL,
{ "gcc_cdiag", gcc_cdiag_length_specs, gcc_cdiag_char_table, "q+", NULL,
gcc_cdiag_flag_specs, gcc_cdiag_flag_pairs,
FMT_FLAG_ARG_CONVERT,
0, 0, 'p', 0, 'L',
NULL, &integer_type_node
},
{ "gcc_cxxdiag", gcc_cxxdiag_length_specs, gcc_cxxdiag_char_table, "q+#", NULL,
{ "gcc_cxxdiag", gcc_cxxdiag_length_specs, gcc_cxxdiag_char_table, "q+#", NULL,
gcc_cxxdiag_flag_specs, gcc_cxxdiag_flag_pairs,
FMT_FLAG_ARG_CONVERT,
0, 0, 'p', 0, 'L',
NULL, &integer_type_node
},
{ "gcc_gfc", NULL, gcc_gfc_char_table, "", NULL,
{ "gcc_gfc", NULL, gcc_gfc_char_table, "", NULL,
NULL, gcc_gfc_flag_pairs,
FMT_FLAG_ARG_CONVERT,
0, 0, 0, 0, 0,
NULL, NULL
},
{ "scanf", scanf_length_specs, scan_char_table, "*'I", NULL,
{ "scanf", scanf_length_specs, scan_char_table, "*'I", NULL,
scanf_flag_specs, scanf_flag_pairs,
FMT_FLAG_ARG_CONVERT|FMT_FLAG_SCANF_A_KLUDGE|FMT_FLAG_USE_DOLLAR|FMT_FLAG_ZERO_WIDTH_BAD|FMT_FLAG_DOLLAR_GAP_POINTER_OK,
'w', 0, 0, '*', 'L',
@ -755,7 +755,7 @@ static const format_kind_info format_types_orig[] =
FMT_FLAG_FANCY_PERCENT_OK, 'w', 0, 0, 0, 0,
NULL, NULL
},
{ "strfmon", strfmon_length_specs, monetary_char_table, "=^+(!-", NULL,
{ "strfmon", strfmon_length_specs, monetary_char_table, "=^+(!-", NULL,
strfmon_flag_specs, strfmon_flag_pairs,
FMT_FLAG_ARG_CONVERT, 'w', '#', 'p', 0, 'L',
NULL, NULL
@ -1366,7 +1366,7 @@ check_format_arg (void *ctx, tree format_tree,
{
/* Variable length arrays can't be initialized. */
gcc_assert (TREE_CODE (array_size) == INTEGER_CST);
if (host_integerp (array_size, 0))
{
HOST_WIDE_INT array_size_value = TREE_INT_CST_LOW (array_size);
@ -1820,7 +1820,7 @@ check_format_info_main (format_check_results *res,
++fci;
if (fci->format_chars == 0)
{
if (ISGRAPH (format_char))
if (ISGRAPH (format_char))
warning (OPT_Wformat, "unknown conversion type character %qc in format",
format_char);
else
@ -2337,7 +2337,7 @@ find_char_info_specifier_index (const format_char_info *fci, int c)
for (i = 0; fci->format_chars; i++, fci++)
if (strchr (fci->format_chars, c))
return i;
/* We shouldn't be looking for a non-existent specifier. */
gcc_unreachable ();
}
@ -2353,7 +2353,7 @@ find_length_info_modifier_index (const format_length_info *fli, int c)
for (i = 0; fli->name; i++, fli++)
if (strchr (fli->name, c))
return i;
/* We shouldn't be looking for a non-existent modifier. */
gcc_unreachable ();
}
@ -2370,7 +2370,7 @@ init_dynamic_asm_fprintf_info (void)
{
format_length_info *new_asm_fprintf_length_specs;
unsigned int i;
/* Find the underlying type for HOST_WIDE_INT. For the %w
length modifier to work, one must have issued: "typedef
HOST_WIDE_INT __gcc_host_wide_int__;" in one's source code
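A minimal sketch of what the comment above asks of user code — the typedef is quoted from the comment itself; the function name and its use of the GCC-internal __asm_fprintf__ attribute are hypothetical:

typedef HOST_WIDE_INT __gcc_host_wide_int__;  /* must name GCC's HOST_WIDE_INT type */

extern void my_asm_printf (const char *, ...)
  __attribute__ ((__format__ (__asm_fprintf__, 1, 2)));  /* lets %wd be checked */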
@ -2424,7 +2424,7 @@ static void
init_dynamic_gfc_info (void)
{
static tree locus;
if (!locus)
{
static format_char_info *gfc_fci;
@ -2458,7 +2458,7 @@ init_dynamic_gfc_info (void)
sizeof (gcc_gfc_char_table),
sizeof (gcc_gfc_char_table));
if (locus)
{
{
const unsigned i = find_char_info_specifier_index (gfc_fci, 'L');
gfc_fci[i].types[0].type = &locus;
gfc_fci[i].pointer_count = 1;
@ -2522,7 +2522,7 @@ init_dynamic_diag_info (void)
t = TREE_TYPE (TREE_TYPE (t));
}
}
/* Find the underlying type for HOST_WIDE_INT. For the %w
length modifier to work, one must have issued: "typedef
HOST_WIDE_INT __gcc_host_wide_int__;" in one's source code
@ -2551,7 +2551,7 @@ init_dynamic_diag_info (void)
}
}
}
/* Assign the new data for use. */
/* All the GCC diag formats use the same length specs. */
@ -2563,9 +2563,9 @@ init_dynamic_diag_info (void)
diag_ls = (format_length_info *)
xmemdup (gcc_diag_length_specs,
sizeof (gcc_diag_length_specs),
sizeof (gcc_diag_length_specs));
sizeof (gcc_diag_length_specs));
if (hwi)
{
{
/* HOST_WIDE_INT must be one of 'long' or 'long long'. */
i = find_length_info_modifier_index (diag_ls, 'w');
if (hwi == long_integer_type_node)
@ -2584,13 +2584,13 @@ init_dynamic_diag_info (void)
sizeof (gcc_diag_char_table),
sizeof (gcc_diag_char_table));
if (loc)
{
{
i = find_char_info_specifier_index (diag_fci, 'H');
diag_fci[i].types[0].type = &loc;
diag_fci[i].pointer_count = 1;
}
if (t)
{
{
i = find_char_info_specifier_index (diag_fci, 'J');
diag_fci[i].types[0].type = &t;
diag_fci[i].pointer_count = 1;
@ -2604,13 +2604,13 @@ init_dynamic_diag_info (void)
sizeof (gcc_tdiag_char_table),
sizeof (gcc_tdiag_char_table));
if (loc)
{
{
i = find_char_info_specifier_index (tdiag_fci, 'H');
tdiag_fci[i].types[0].type = &loc;
tdiag_fci[i].pointer_count = 1;
}
if (t)
{
{
/* All specifiers taking a tree share the same struct. */
i = find_char_info_specifier_index (tdiag_fci, 'D');
tdiag_fci[i].types[0].type = &t;
@ -2628,13 +2628,13 @@ init_dynamic_diag_info (void)
sizeof (gcc_cdiag_char_table),
sizeof (gcc_cdiag_char_table));
if (loc)
{
{
i = find_char_info_specifier_index (cdiag_fci, 'H');
cdiag_fci[i].types[0].type = &loc;
cdiag_fci[i].pointer_count = 1;
}
if (t)
{
{
/* All specifiers taking a tree share the same struct. */
i = find_char_info_specifier_index (cdiag_fci, 'D');
cdiag_fci[i].types[0].type = &t;
@ -2652,13 +2652,13 @@ init_dynamic_diag_info (void)
sizeof (gcc_cxxdiag_char_table),
sizeof (gcc_cxxdiag_char_table));
if (loc)
{
{
i = find_char_info_specifier_index (cxxdiag_fci, 'H');
cxxdiag_fci[i].types[0].type = &loc;
cxxdiag_fci[i].pointer_count = 1;
}
if (t)
{
{
/* All specifiers taking a tree share the same struct. */
i = find_char_info_specifier_index (cxxdiag_fci, 'D');
cxxdiag_fci[i].types[0].type = &t;
@ -2750,14 +2750,14 @@ handle_format_attribute (tree *node, tree ARG_UNUSED (name), tree args,
|| info.format_type == gcc_cxxdiag_format_type)
{
/* Our first time through, we have to make sure that our
format_type data is allocated dynamically and is modifiable. */
format_type data is allocated dynamically and is modifiable. */
if (!dynamic_format_types)
format_types = dynamic_format_types = (format_kind_info *)
xmemdup (format_types_orig, sizeof (format_types_orig),
sizeof (format_types_orig));
/* If this is format __asm_fprintf__, we have to initialize
GCC's notion of HOST_WIDE_INT for checking %wd. */
GCC's notion of HOST_WIDE_INT for checking %wd. */
if (info.format_type == asm_fprintf_format_type)
init_dynamic_asm_fprintf_info ();
/* If this is format __gcc_gfc__, we have to initialize GCC's
@ -2765,7 +2765,7 @@ handle_format_attribute (tree *node, tree ARG_UNUSED (name), tree args,
else if (info.format_type == gcc_gfc_format_type)
init_dynamic_gfc_info ();
/* If this is one of the diagnostic attributes, then we have to
initialize 'location_t' and 'tree' at runtime. */
initialize 'location_t' and 'tree' at runtime. */
else if (info.format_type == gcc_diag_format_type
|| info.format_type == gcc_tdiag_format_type
|| info.format_type == gcc_cdiag_format_type
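For context, a sketch of the sort of declaration that reaches this handler — standard GCC attribute syntax, hypothetical function name:

extern void note (const char *fmt, ...)
  __attribute__ ((format (printf, 1, 2)));  /* argument 1 is the format string; variadic arguments start at 2 */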
@ -216,7 +216,7 @@ c_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
&& !warn_init_self)
TREE_NO_WARNING (DECL_EXPR_DECL (*expr_p)) = 1;
return GS_UNHANDLED;
case COMPOUND_LITERAL_EXPR:
return gimplify_compound_literal_expr (expr_p, pre_p);
@ -220,8 +220,8 @@ remove_duplicates (cpp_reader *pfile, struct cpp_dir *head,
/* Remove this one if it is in the system chain. */
reason = REASON_DUP_SYS;
for (tmp = system; tmp; tmp = tmp->next)
if (INO_T_EQ (tmp->ino, cur->ino) && tmp->dev == cur->dev
&& cur->construct == tmp->construct)
if (INO_T_EQ (tmp->ino, cur->ino) && tmp->dev == cur->dev
&& cur->construct == tmp->construct)
break;
if (!tmp)
@ -229,16 +229,16 @@ remove_duplicates (cpp_reader *pfile, struct cpp_dir *head,
/* Duplicate of something earlier in the same chain? */
reason = REASON_DUP;
for (tmp = head; tmp != cur; tmp = tmp->next)
if (INO_T_EQ (cur->ino, tmp->ino) && cur->dev == tmp->dev
&& cur->construct == tmp->construct)
if (INO_T_EQ (cur->ino, tmp->ino) && cur->dev == tmp->dev
&& cur->construct == tmp->construct)
break;
if (tmp == cur
/* Last in the chain and duplicate of JOIN? */
&& !(cur->next == NULL && join
&& INO_T_EQ (cur->ino, join->ino)
&& cur->dev == join->dev
&& cur->construct == join->construct))
&& cur->dev == join->dev
&& cur->construct == join->construct))
{
/* Unique, so keep this directory. */
pcur = &cur->next;
@ -384,7 +384,7 @@ register_include_chains (cpp_reader *pfile, const char *sysroot,
include chain. */
add_env_var_paths ("CPATH", BRACKET);
add_env_var_paths (lang_env_vars[idx], SYSTEM);
target_c_incpath.extra_pre_includes (sysroot, iprefix, stdinc);
/* Finally chain on the standard directories. */
@ -102,7 +102,7 @@ init_c_lex (void)
/* Set the debug callbacks if we can use them. */
if (debug_info_level == DINFO_LEVEL_VERBOSE
&& (write_symbols == DWARF2_DEBUG
|| write_symbols == VMS_AND_DWARF2_DEBUG))
|| write_symbols == VMS_AND_DWARF2_DEBUG))
{
cb->define = cb_define;
cb->undef = cb_undef;
@ -223,12 +223,12 @@ fe_file_change (const struct line_map *new_map)
if (!MAIN_FILE_P (new_map))
{
#ifdef USE_MAPPED_LOCATION
int included_at = LAST_SOURCE_LINE_LOCATION (new_map - 1);
int included_at = LAST_SOURCE_LINE_LOCATION (new_map - 1);
input_location = included_at;
push_srcloc (new_map->start_location);
#else
int included_at = LAST_SOURCE_LINE (new_map - 1);
int included_at = LAST_SOURCE_LINE (new_map - 1);
input_line = included_at;
push_srcloc (new_map->to_file, 1);
@ -339,7 +339,7 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
retry:
tok = cpp_get_token (parse_in);
type = tok->type;
retry_after_at:
#ifdef USE_MAPPED_LOCATION
*loc = tok->src_loc;
@ -350,7 +350,7 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
{
case CPP_PADDING:
goto retry;
case CPP_NAME:
*value = HT_IDENT_TO_GCC_IDENT (HT_NODE (tok->val.node));
break;
@ -389,7 +389,7 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
if (c_dialect_objc ())
{
location_t atloc = input_location;
retry_at:
tok = cpp_get_token (parse_in);
type = tok->type;
@ -397,7 +397,7 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
{
case CPP_PADDING:
goto retry_at;
case CPP_STRING:
case CPP_WSTRING:
type = lex_string (tok, value, true);
@ -425,12 +425,12 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
case CPP_PASTE:
{
unsigned char name[4];
*cpp_spell_token (parse_in, tok, name, true) = 0;
error ("stray %qs in program", name);
}
goto retry;
case CPP_OTHER:
@ -484,9 +484,9 @@ c_lex_with_flags (tree *value, location_t *loc, unsigned char *cpp_flags)
no_more_pch = true;
c_common_no_more_pch ();
}
timevar_pop (TV_CPP);
return type;
}
@ -539,7 +539,7 @@ narrowest_signed_type (unsigned HOST_WIDE_INT low,
for (; itk < itk_none; itk += 2 /* skip signed types */)
{
tree upper = TYPE_MAX_VALUE (integer_types[itk]);
if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (upper) > high
|| ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (upper) == high
&& TREE_INT_CST_LOW (upper) >= low))
@ -740,21 +740,21 @@ lex_string (const cpp_token *tok, tree *valp, bool objc_string)
goto retry;
}
/* FALLTHROUGH */
default:
break;
case CPP_WSTRING:
wide = true;
/* FALLTHROUGH */
case CPP_STRING:
if (!concats)
{
gcc_obstack_init (&str_ob);
obstack_grow (&str_ob, &str, sizeof (cpp_string));
}
concats++;
obstack_grow (&str_ob, &tok->val.str, sizeof (cpp_string));
goto retry;
@ -784,7 +784,7 @@ lex_string (const cpp_token *tok, tree *valp, bool objc_string)
/* Assume that, if we managed to translate the string above,
then the untranslated parsing will always succeed. */
gcc_assert (xlated);
if (TREE_STRING_LENGTH (value) != (int) istr.len
|| 0 != strncmp (TREE_STRING_POINTER (value), (char *) istr.text,
istr.len))
@ -198,7 +198,7 @@ c_tree_printer (pretty_printer *pp, text_info *text, const char *spec,
case 'T':
gcc_assert (TYPE_P (t));
name = TYPE_NAME (t);
if (name && TREE_CODE (name) == TYPE_DECL)
{
if (DECL_NAME (name))
@ -741,7 +741,7 @@ c_common_handle_option (size_t scode, const char *arg, int value)
case OPT_freplace_objc_classes:
flag_replace_objc_classes = value;
break;
case OPT_frepo:
flag_use_repository = value;
if (value)
@ -789,7 +789,7 @@ c_common_handle_option (size_t scode, const char *arg, int value)
case OPT_fuse_cxa_get_exception_ptr:
flag_use_cxa_get_exception_ptr = value;
break;
case OPT_fvisibility_inlines_hidden:
visibility_options.inlines_hidden = value;
break;
@ -1173,7 +1173,7 @@ c_common_parse_file (int set_yydebug)
this_input_filename
= cpp_read_main_file (parse_in, in_fnames[i]);
/* If an input file is missing, abandon further compilation.
cpplib has issued a diagnostic. */
cpplib has issued a diagnostic. */
if (!this_input_filename)
break;
}
@ -2229,7 +2229,7 @@ c_parser_typeof_specifier (c_parser *parser)
parameter-type-list[opt] )
direct-abstract-declarator:
direct-abstract-declarator[opt] ( parameter-forward-declarations
direct-abstract-declarator[opt] ( parameter-forward-declarations
parameter-type-list[opt] )
parameter-forward-declarations:
@ -39,7 +39,7 @@ Boston, MA 02110-1301, USA. */
names for the error message. The possible values for *flag_var must
fit in a 'signed char'. */
static const struct c_pch_matching
static const struct c_pch_matching
{
int *flag_var;
const char *flag_name;
@ -69,7 +69,7 @@ struct c_pch_validity
size_t target_data_length;
};
struct c_pch_header
struct c_pch_header
{
unsigned long asm_size;
};
@ -95,7 +95,7 @@ get_ident (void)
static char result[IDENT_LENGTH];
static const char template[IDENT_LENGTH] = "gpch.013";
static const char c_language_chars[] = "Co+O";
memcpy (result, template, IDENT_LENGTH);
result[4] = c_language_chars[c_language];
@ -103,7 +103,7 @@ get_ident (void)
}
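A worked example of the ident built above, assuming the conventional clk_c, clk_objc, clk_cxx, clk_objcxx ordering of c_language (standalone sketch; the real buffer is not NUL-terminated):

char result[9] = "gpch.013";
const char c_language_chars[] = "Co+O";
result[4] = c_language_chars[0];  /* 0 == C, giving "gpchC013"; ObjC gives "gpcho013",
                                     C++ "gpch+013", ObjC++ "gpchO013" */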
/* Prepare to write a PCH file, if one is being written. This is
called at the start of compilation.
called at the start of compilation.
Also, print out the executable checksum if -fverbose-asm is in effect. */
@ -114,7 +114,7 @@ pch_init (void)
struct c_pch_validity v;
void *target_validity;
static const char partial_pch[IDENT_LENGTH] = "gpcWrite";
#ifdef ASM_COMMENT_START
if (flag_verbose_asm)
{
@ -123,17 +123,17 @@ pch_init (void)
fputc ('\n', asm_out_file);
}
#endif
if (!pch_file)
return;
f = fopen (pch_file, "w+b");
if (f == NULL)
fatal_error ("can%'t create precompiled header %s: %m", pch_file);
pch_outfile = f;
gcc_assert (memcmp (executable_checksum, no_checksum, 16) != 0);
v.debug_info_type = write_symbols;
{
size_t i;
@ -145,7 +145,7 @@ pch_init (void)
}
v.pch_init = &pch_init;
target_validity = targetm.get_pch_validity (&v.target_data_length);
if (fwrite (partial_pch, IDENT_LENGTH, 1, f) != 1
|| fwrite (executable_checksum, 16, 1, f) != 1
|| fwrite (&v, sizeof (v), 1, f) != 1
@ -157,12 +157,12 @@ pch_init (void)
if (asm_file_name == NULL
|| strcmp (asm_file_name, "-") == 0)
fatal_error ("%qs is not a valid output file", asm_file_name);
asm_file_startpos = ftell (asm_out_file);
/* Let the debugging format deal with the PCHness. */
(*debug_hooks->handle_pch) (0);
cpp_save_state (parse_in, f);
}
@ -183,10 +183,10 @@ c_common_write_pch (void)
asm_file_end = ftell (asm_out_file);
h.asm_size = asm_file_end - asm_file_startpos;
if (fwrite (&h, sizeof (h), 1, pch_outfile) != 1)
fatal_error ("can%'t write %s: %m", pch_file);
buf = XNEWVEC (char, 16384);
if (fseek (asm_out_file, asm_file_startpos, SEEK_SET) != 0)
@ -247,7 +247,7 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
name);
return 2;
}
pch_ident = get_ident();
if (memcmp (ident, pch_ident, IDENT_LENGTH) != 0)
{
@ -256,13 +256,13 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
if (memcmp (ident, pch_ident, 5) == 0)
/* It's a PCH, for the right language, but has the wrong version.
*/
cpp_error (pfile, CPP_DL_WARNING,
cpp_error (pfile, CPP_DL_WARNING,
"%s: not compatible with this GCC version", name);
else if (memcmp (ident, pch_ident, 4) == 0)
/* It's a PCH for the wrong language. */
cpp_error (pfile, CPP_DL_WARNING, "%s: not for %s", name,
lang_hooks.name);
else
else
/* Not any kind of PCH. */
cpp_error (pfile, CPP_DL_WARNING, "%s: not a PCH file", name);
}
@ -289,7 +289,7 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
&& write_symbols != NO_DEBUG)
{
if (cpp_get_options (pfile)->warn_invalid_pch)
cpp_error (pfile, CPP_DL_WARNING,
cpp_error (pfile, CPP_DL_WARNING,
"%s: created with -g%s, but used with -g%s", name,
debug_type_names[v.debug_info_type],
debug_type_names[write_symbols]);
@ -303,7 +303,7 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
if (*pch_matching[i].flag_var != v.match[i])
{
if (cpp_get_options (pfile)->warn_invalid_pch)
cpp_error (pfile, CPP_DL_WARNING,
cpp_error (pfile, CPP_DL_WARNING,
"%s: settings for %s do not match", name,
pch_matching[i].flag_name);
return 2;
@ -313,13 +313,13 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
/* If the text segment was not loaded at the same address as it was
when the PCH file was created, function pointers loaded from the
PCH will not be valid. We could in theory remap all the function
pointers, but no support for that exists at present.
pointers, but no support for that exists at present.
Since we have the same executable, it should only be necessary to
check one function. */
if (v.pch_init != &pch_init)
{
if (cpp_get_options (pfile)->warn_invalid_pch)
cpp_error (pfile, CPP_DL_WARNING,
cpp_error (pfile, CPP_DL_WARNING,
"%s: had text segment at different address", name);
return 2;
}
@ -328,7 +328,7 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
{
void *this_file_data = xmalloc (v.target_data_length);
const char *msg;
if ((size_t) read (fd, this_file_data, v.target_data_length)
!= v.target_data_length)
fatal_error ("can%'t read %s: %m", name);
@ -344,7 +344,7 @@ c_common_valid_pch (cpp_reader *pfile, const char *name, int fd)
/* Check the preprocessor macros are the same as when the PCH was
generated. */
result = cpp_valid_state (pfile, name, fd);
if (result == -1)
return 2;
@ -366,7 +366,7 @@ c_common_read_pch (cpp_reader *pfile, const char *name,
FILE *f;
struct c_pch_header h;
struct save_macro_data *smd;
f = fdopen (fd, "rb");
if (f == NULL)
{
@ -415,7 +415,7 @@ c_common_read_pch (cpp_reader *pfile, const char *name,
return;
fclose (f);
/* Give the front end a chance to take action after a PCH file has
been loaded. */
if (lang_post_pch_load)
@ -455,16 +455,16 @@ c_common_pch_pragma (cpp_reader *pfile, const char *name)
fd = open (name, O_RDONLY | O_BINARY, 0666);
if (fd == -1)
fatal_error ("%s: couldn%'t open PCH file: %m", name);
if (c_common_valid_pch (pfile, name, fd) != 1)
{
if (!cpp_get_options (pfile)->warn_invalid_pch)
inform ("use -Winvalid-pch for more information");
fatal_error ("%s: PCH file was invalid", name);
}
c_common_read_pch (pfile, name, fd, name);
close (fd);
}
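For reference, the pragma consumed above is the one -fpch-preprocess writes into preprocessed output (see cb_read_pch below); for example, with a hypothetical file name:

#pragma GCC pch_preprocess "all.h.gch"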
@ -451,7 +451,7 @@ cb_read_pch (cpp_reader *pfile, const char *name,
int fd, const char *orig_name ATTRIBUTE_UNUSED)
{
c_common_read_pch (pfile, name, fd, orig_name);
fprintf (print.outf, "#pragma GCC pch_preprocess \"%s\"\n", name);
print.src_line++;
}
@ -46,8 +46,8 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
typedef struct align_stack GTY(())
{
int alignment;
tree id;
int alignment;
tree id;
struct align_stack * prev;
} align_stack;
@ -58,8 +58,8 @@ static void handle_pragma_pack (cpp_reader *);
#ifdef HANDLE_PRAGMA_PACK_PUSH_POP
/* If we have a "global" #pragma pack(<n>) in effect when the first
#pragma pack(push,<n>) is encountered, this stores the value of
maximum_field_alignment in effect. When the final pop_alignment()
#pragma pack(push,<n>) is encountered, this stores the value of
maximum_field_alignment in effect. When the final pop_alignment()
happens, we restore the value to this, not to a value of 0 for
maximum_field_alignment. Value is in bits. */
static int default_alignment;
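An illustration of the interaction described above (assumed user code):

#pragma pack(2)               /* "global" pack value in effect                    */
#pragma pack(push, 1)         /* default_alignment remembers the 2 from above     */
struct s { char c; int i; };  /* laid out with 1-byte packing                     */
#pragma pack(pop)             /* restores packing to 2, not to the target default */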
@ -79,15 +79,15 @@ push_alignment (int alignment, tree id)
entry = GGC_NEW (align_stack);
entry->alignment = alignment;
entry->id = id;
entry->prev = alignment_stack;
/* The current value of maximum_field_alignment is not necessarily
0 since there may be a #pragma pack(<n>) in effect; remember it
entry->id = id;
entry->prev = alignment_stack;
/* The current value of maximum_field_alignment is not necessarily
0 since there may be a #pragma pack(<n>) in effect; remember it
so that we can restore it after the final #pragma pop(). */
if (alignment_stack == NULL)
default_alignment = maximum_field_alignment;
alignment_stack = entry;
maximum_field_alignment = alignment;
@ -98,7 +98,7 @@ static void
pop_alignment (tree id)
{
align_stack * entry;
if (alignment_stack == NULL)
GCC_BAD ("#pragma pack (pop) encountered without matching #pragma pack (push)");
@ -134,7 +134,7 @@ pop_alignment (tree id)
/* #pragma pack ()
#pragma pack (N)
#pragma pack (push)
#pragma pack (push, N)
#pragma pack (push, ID)
@ -236,7 +236,7 @@ handle_pragma_pack (cpp_reader * ARG_UNUSED (dummy))
{
case set: SET_GLOBAL_ALIGNMENT (align); break;
case push: push_alignment (align, id); break;
case pop: pop_alignment (id); break;
case pop: pop_alignment (id); break;
}
}
#endif /* HANDLE_PRAGMA_PACK */
@ -263,7 +263,7 @@ apply_pragma_weak (tree decl, tree value)
&& !DECL_WEAK (decl) /* Don't complain about a redundant #pragma. */
&& TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
warning (OPT_Wpragmas, "applying #pragma weak %q+D after first use "
"results in unspecified behavior", decl);
"results in unspecified behavior", decl);
declare_weak (decl);
}
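The ordering the warning above guards against (assumed user code):

extern void f (void);
void g (void) { f (); }  /* first use of f */
#pragma weak f           /* weak applied after first use: unspecified behavior, hence the warning */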
@ -468,7 +468,7 @@ add_to_renaming_pragma_list (tree oldname, tree newname)
"conflict with previous #pragma redefine_extname");
return;
}
pending_redefine_extname
= tree_cons (oldname, newname, pending_redefine_extname);
}
@ -545,7 +545,7 @@ maybe_apply_renaming_pragma (tree decl, tree asmname)
*p = TREE_CHAIN (t);
/* If we already have an asmname, #pragma redefine_extname is
ignored (with a warning if it conflicts). */
ignored (with a warning if it conflicts). */
if (asmname)
{
if (strcmp (TREE_STRING_POINTER (asmname),
@ -573,7 +573,7 @@ maybe_apply_renaming_pragma (tree decl, tree asmname)
const char *id = IDENTIFIER_POINTER (DECL_NAME (decl));
size_t ilen = IDENTIFIER_LENGTH (DECL_NAME (decl));
char *newname = (char *) alloca (plen + ilen + 1);
memcpy (newname, prefix, plen);
@ -608,7 +608,7 @@ push_visibility (const char *str)
else if (!strcmp (str, "internal"))
default_visibility = VISIBILITY_INTERNAL;
else if (!strcmp (str, "hidden"))
default_visibility = VISIBILITY_HIDDEN;
default_visibility = VISIBILITY_HIDDEN;
else if (!strcmp (str, "protected"))
default_visibility = VISIBILITY_PROTECTED;
else
@ -624,7 +624,7 @@ pop_visibility (void)
default_visibility = VEC_pop (visibility, visstack);
visibility_options.inpragma
= VEC_length (visibility, visstack) != 0;
}
}
/* Sets the default visibility for symbols to something other than that
specified on the command line. */
@ -636,39 +636,39 @@ handle_pragma_visibility (cpp_reader *dummy ATTRIBUTE_UNUSED)
tree x;
enum cpp_ttype token;
enum { bad, push, pop } action = bad;
token = pragma_lex (&x);
if (token == CPP_NAME)
{
const char *op = IDENTIFIER_POINTER (x);
if (!strcmp (op, "push"))
action = push;
action = push;
else if (!strcmp (op, "pop"))
action = pop;
action = pop;
}
if (bad == action)
GCC_BAD ("#pragma GCC visibility must be followed by push or pop");
else
{
if (pop == action)
{
if (!VEC_length (visibility, visstack))
{
if (!VEC_length (visibility, visstack))
GCC_BAD ("no matching push for %<#pragma GCC visibility pop%>");
else
else
pop_visibility ();
}
}
else
{
if (pragma_lex (&x) != CPP_OPEN_PAREN)
GCC_BAD ("missing %<(%> after %<#pragma GCC visibility push%> - ignored");
token = pragma_lex (&x);
if (token != CPP_NAME)
{
if (pragma_lex (&x) != CPP_OPEN_PAREN)
GCC_BAD ("missing %<(%> after %<#pragma GCC visibility push%> - ignored");
token = pragma_lex (&x);
if (token != CPP_NAME)
GCC_BAD ("malformed #pragma GCC visibility push");
else
else
push_visibility (IDENTIFIER_POINTER (x));
if (pragma_lex (&x) != CPP_CLOSE_PAREN)
GCC_BAD ("missing %<(%> after %<#pragma GCC visibility push%> - ignored");
}
if (pragma_lex (&x) != CPP_CLOSE_PAREN)
GCC_BAD ("missing %<(%> after %<#pragma GCC visibility push%> - ignored");
}
}
if (pragma_lex (&x) != CPP_EOF)
warning (OPT_Wpragmas, "junk at end of %<#pragma GCC visibility%>");
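Usage sketch of the pragma parsed above (standard GCC syntax):

#pragma GCC visibility push(hidden)
void helper (void);  /* declared with hidden visibility by default */
#pragma GCC visibility pop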
@ -99,7 +99,7 @@ extern enum cpp_ttype pragma_lex (tree *);
having enum cpp_ttype declared. */
extern enum cpp_ttype c_lex_with_flags (tree *, location_t *, unsigned char *);
/* If 1, then lex strings into the execution character set.
/* If 1, then lex strings into the execution character set.
If 0, lex strings into the host character set.
If -1, lex both, and chain them together, such that the former
is the TREE_CHAIN of the latter. */
@ -199,8 +199,8 @@ pp_c_space_for_pointer_operator (c_pretty_printer *pp, tree t)
{
tree pointee = strip_pointer_operator (TREE_TYPE (t));
if (TREE_CODE (pointee) != ARRAY_TYPE
&& TREE_CODE (pointee) != FUNCTION_TYPE)
pp_c_whitespace (pp);
&& TREE_CODE (pointee) != FUNCTION_TYPE)
pp_c_whitespace (pp);
}
}
@ -253,11 +253,11 @@ pp_c_pointer (c_pretty_printer *pp, tree t)
/* It is easier to handle C++ reference types here. */
case REFERENCE_TYPE:
if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
pp_c_pointer (pp, TREE_TYPE (t));
pp_c_pointer (pp, TREE_TYPE (t));
if (TREE_CODE (t) == POINTER_TYPE)
pp_c_star (pp);
pp_c_star (pp);
else
pp_c_ampersand (pp);
pp_c_ampersand (pp);
pp_c_type_qualifier_list (pp, t);
break;
@ -406,18 +406,18 @@ pp_c_specifier_qualifier_list (c_pretty_printer *pp, tree t)
case REFERENCE_TYPE:
case POINTER_TYPE:
{
/* Get the types-specifier of this type. */
tree pointee = strip_pointer_operator (TREE_TYPE (t));
pp_c_specifier_qualifier_list (pp, pointee);
if (TREE_CODE (pointee) == ARRAY_TYPE
|| TREE_CODE (pointee) == FUNCTION_TYPE)
{
pp_c_whitespace (pp);
pp_c_left_paren (pp);
}
/* Get the types-specifier of this type. */
tree pointee = strip_pointer_operator (TREE_TYPE (t));
pp_c_specifier_qualifier_list (pp, pointee);
if (TREE_CODE (pointee) == ARRAY_TYPE
|| TREE_CODE (pointee) == FUNCTION_TYPE)
{
pp_c_whitespace (pp);
pp_c_left_paren (pp);
}
else if (!c_dialect_cxx ())
pp_c_whitespace (pp);
pp_ptr_operator (pp, t);
pp_ptr_operator (pp, t);
}
break;
@ -430,9 +430,9 @@ pp_c_specifier_qualifier_list (c_pretty_printer *pp, tree t)
case COMPLEX_TYPE:
pp_c_specifier_qualifier_list (pp, TREE_TYPE (t));
if (code == COMPLEX_TYPE)
pp_c_identifier (pp, flag_isoc99 ? "_Complex" : "__complex__");
pp_c_identifier (pp, flag_isoc99 ? "_Complex" : "__complex__");
else if (code == VECTOR_TYPE)
pp_c_identifier (pp, "__vector__");
pp_c_identifier (pp, "__vector__");
break;
default:
@ -465,17 +465,17 @@ pp_c_parameter_type_list (c_pretty_printer *pp, tree t)
{
bool first = true;
for ( ; parms && parms != void_list_node; parms = TREE_CHAIN (parms))
{
if (!first)
pp_separate_with (pp, ',');
first = false;
pp_declaration_specifiers
(pp, want_parm_decl ? parms : TREE_VALUE (parms));
if (want_parm_decl)
pp_declarator (pp, parms);
else
pp_abstract_declarator (pp, TREE_VALUE (parms));
}
{
if (!first)
pp_separate_with (pp, ',');
first = false;
pp_declaration_specifiers
(pp, want_parm_decl ? parms : TREE_VALUE (parms));
if (want_parm_decl)
pp_declarator (pp, parms);
else
pp_abstract_declarator (pp, TREE_VALUE (parms));
}
}
pp_c_right_paren (pp);
}
@ -490,8 +490,8 @@ pp_c_abstract_declarator (c_pretty_printer *pp, tree t)
if (TREE_CODE (t) == POINTER_TYPE)
{
if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE)
pp_c_right_paren (pp);
|| TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE)
pp_c_right_paren (pp);
t = TREE_TYPE (t);
}
@ -579,9 +579,9 @@ pp_c_storage_class_specifier (c_pretty_printer *pp, tree t)
else if (DECL_P (t))
{
if (DECL_REGISTER (t))
pp_c_identifier (pp, "register");
pp_c_identifier (pp, "register");
else if (TREE_STATIC (t) && TREE_CODE (t) == VAR_DECL)
pp_c_identifier (pp, "static");
pp_c_identifier (pp, "static");
}
}
@ -647,12 +647,12 @@ pp_c_direct_declarator (c_pretty_printer *pp, tree t)
pp_c_space_for_pointer_operator (pp, TREE_TYPE (TREE_TYPE (t)));
pp_c_tree_decl_identifier (pp, t);
if (pp_c_base (pp)->flags & pp_c_flag_abstract)
pp_abstract_declarator (pp, TREE_TYPE (t));
pp_abstract_declarator (pp, TREE_TYPE (t));
else
{
pp_parameter_list (pp, t);
pp_abstract_declarator (pp, TREE_TYPE (TREE_TYPE (t)));
}
{
pp_parameter_list (pp, t);
pp_abstract_declarator (pp, TREE_TYPE (TREE_TYPE (t)));
}
break;
case INTEGER_TYPE:
@ -726,7 +726,7 @@ pp_c_attributes (c_pretty_printer *pp, tree attributes)
{
pp_tree_identifier (pp, TREE_PURPOSE (attributes));
if (TREE_VALUE (attributes))
pp_c_call_argument_list (pp, TREE_VALUE (attributes));
pp_c_call_argument_list (pp, TREE_VALUE (attributes));
if (TREE_CHAIN (attributes))
pp_separate_with (pp, ',');
@ -811,16 +811,16 @@ pp_c_integer_constant (c_pretty_printer *pp, tree i)
else
{
if (tree_int_cst_sgn (i) < 0)
{
pp_character (pp, '-');
i = build_int_cst_wide (NULL_TREE,
{
pp_character (pp, '-');
i = build_int_cst_wide (NULL_TREE,
-TREE_INT_CST_LOW (i),
~TREE_INT_CST_HIGH (i)
+ !TREE_INT_CST_LOW (i));
}
}
sprintf (pp_buffer (pp)->digit_buffer,
HOST_WIDE_INT_PRINT_DOUBLE_HEX,
TREE_INT_CST_HIGH (i), TREE_INT_CST_LOW (i));
HOST_WIDE_INT_PRINT_DOUBLE_HEX,
TREE_INT_CST_HIGH (i), TREE_INT_CST_LOW (i));
pp_string (pp, pp_buffer (pp)->digit_buffer);
}
if (TYPE_UNSIGNED (type))
@ -828,7 +828,7 @@ pp_c_integer_constant (c_pretty_printer *pp, tree i)
if (type == long_integer_type_node || type == long_unsigned_type_node)
pp_character (pp, 'l');
else if (type == long_long_integer_type_node
|| type == long_long_unsigned_type_node)
|| type == long_long_unsigned_type_node)
pp_string (pp, "ll");
}
@ -967,16 +967,16 @@ pp_c_constant (c_pretty_printer *pp, tree e)
{
case INTEGER_CST:
{
tree type = TREE_TYPE (e);
if (type == boolean_type_node)
pp_c_bool_constant (pp, e);
else if (type == char_type_node)
pp_c_character_constant (pp, e);
else if (TREE_CODE (type) == ENUMERAL_TYPE
&& pp_c_enumeration_constant (pp, e))
;
else
pp_c_integer_constant (pp, e);
tree type = TREE_TYPE (e);
if (type == boolean_type_node)
pp_c_bool_constant (pp, e);
else if (type == char_type_node)
pp_c_character_constant (pp, e);
else if (TREE_CODE (type) == ENUMERAL_TYPE
&& pp_c_enumeration_constant (pp, e))
;
else
pp_c_integer_constant (pp, e);
}
break;
@ -1097,22 +1097,22 @@ pp_c_init_declarator (c_pretty_printer *pp, tree t)
{
tree init = DECL_INITIAL (t);
/* This C++ bit is handled here because it is easier to do so.
In templates, the C++ parser builds a TREE_LIST for a
direct-initialization; the TREE_PURPOSE is the variable to
initialize and the TREE_VALUE is the initializer. */
In templates, the C++ parser builds a TREE_LIST for a
direct-initialization; the TREE_PURPOSE is the variable to
initialize and the TREE_VALUE is the initializer. */
if (TREE_CODE (init) == TREE_LIST)
{
pp_c_left_paren (pp);
pp_expression (pp, TREE_VALUE (init));
pp_right_paren (pp);
}
{
pp_c_left_paren (pp);
pp_expression (pp, TREE_VALUE (init));
pp_right_paren (pp);
}
else
{
pp_space (pp);
pp_equal (pp);
pp_space (pp);
pp_c_initializer (pp, init);
}
{
pp_space (pp);
pp_equal (pp);
pp_space (pp);
pp_c_initializer (pp, init);
}
}
}
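The two initializer shapes distinguished above, as they would appear in source for some type T (illustration):

T i (42);  /* C++ direct-initialization: a TREE_LIST, printed as "i(42)" */
T j = 42;  /* ordinary initializer, printed as "j = 42" */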
@ -1143,38 +1143,38 @@ pp_c_initializer_list (c_pretty_printer *pp, tree e)
case UNION_TYPE:
case ARRAY_TYPE:
{
tree init = TREE_OPERAND (e, 0);
for (; init != NULL_TREE; init = TREE_CHAIN (init))
{
if (code == RECORD_TYPE || code == UNION_TYPE)
{
pp_c_dot (pp);
pp_c_primary_expression (pp, TREE_PURPOSE (init));
}
else
{
pp_c_left_bracket (pp);
if (TREE_PURPOSE (init))
pp_c_constant (pp, TREE_PURPOSE (init));
pp_c_right_bracket (pp);
}
pp_c_whitespace (pp);
pp_equal (pp);
pp_c_whitespace (pp);
pp_initializer (pp, TREE_VALUE (init));
if (TREE_CHAIN (init))
pp_separate_with (pp, ',');
}
tree init = TREE_OPERAND (e, 0);
for (; init != NULL_TREE; init = TREE_CHAIN (init))
{
if (code == RECORD_TYPE || code == UNION_TYPE)
{
pp_c_dot (pp);
pp_c_primary_expression (pp, TREE_PURPOSE (init));
}
else
{
pp_c_left_bracket (pp);
if (TREE_PURPOSE (init))
pp_c_constant (pp, TREE_PURPOSE (init));
pp_c_right_bracket (pp);
}
pp_c_whitespace (pp);
pp_equal (pp);
pp_c_whitespace (pp);
pp_initializer (pp, TREE_VALUE (init));
if (TREE_CHAIN (init))
pp_separate_with (pp, ',');
}
}
return;
case VECTOR_TYPE:
if (TREE_CODE (e) == VECTOR_CST)
pp_c_expression_list (pp, TREE_VECTOR_CST_ELTS (e));
pp_c_expression_list (pp, TREE_VECTOR_CST_ELTS (e));
else if (TREE_CODE (e) == CONSTRUCTOR)
pp_c_constructor_elts (pp, CONSTRUCTOR_ELTS (e));
pp_c_constructor_elts (pp, CONSTRUCTOR_ELTS (e));
else
break;
break;
return;
case COMPLEX_TYPE:
@ -1379,10 +1379,10 @@ pp_c_postfix_expression (c_pretty_printer *pp, tree e)
case ADDR_EXPR:
if (TREE_CODE (TREE_OPERAND (e, 0)) == FUNCTION_DECL)
{
pp_c_id_expression (pp, TREE_OPERAND (e, 0));
break;
}
{
pp_c_id_expression (pp, TREE_OPERAND (e, 0));
break;
}
/* else fall through. */
default:
@ -30,7 +30,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
typedef enum
{
pp_c_flag_abstract = 1 << 1,
pp_c_flag_last_bit = 2
pp_c_flag_last_bit = 2
} pp_c_pretty_print_flags;
@ -60,7 +60,7 @@ struct c_pretty_print_info
int *offset_list;
pp_flags flags;
/* These must be overridden by each of the C and C++ front-end to
reflect their understanding of syntactic productions when they differ. */
c_pretty_print_fn declaration;
@ -79,7 +79,7 @@ pop_stmt_list (tree t)
cur_stmt_list = chain;
/* If the statement list is completely empty, just return it. This is
just as good small as build_empty_stmt, with the advantage that
just as good small as build_empty_stmt, with the advantage that
statement lists are merged when they appended to one another. So
using the STATEMENT_LIST avoids pathological buildup of EMPTY_STMT_P
statements. */
@ -132,7 +132,7 @@ build_stmt (enum tree_code code, ...)
{
tree t = va_arg (p, tree);
if (t && !TYPE_P (t))
side_effects |= TREE_SIDE_EFFECTS (t);
side_effects |= TREE_SIDE_EFFECTS (t);
TREE_OPERAND (ret, i) = t;
}
@ -1,6 +1,6 @@
/* Build expressions with type checking for C compiler.
Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
This file is part of GCC.
@ -236,8 +236,8 @@ c_type_promotes_to (tree type)
{
/* Preserve unsignedness if not really getting any wider. */
if (TYPE_UNSIGNED (type)
&& (TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node)))
return unsigned_type_node;
&& (TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node)))
return unsigned_type_node;
return integer_type_node;
}
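An example of the rule above, assuming a target where short and int are both 16 bits wide:

unsigned short us = 0xFFFFu;
unsigned sum = us + 0;  /* us promotes to unsigned int, not int: a 16-bit int
                           cannot represent every unsigned short value */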
@ -331,7 +331,7 @@ composite_type (tree t1, tree t2)
/* We should not have any type quals on arrays at all. */
gcc_assert (!TYPE_QUALS (t1) && !TYPE_QUALS (t2));
d1_zero = d1 == 0 || !TYPE_MAX_VALUE (d1);
d2_zero = d2 == 0 || !TYPE_MAX_VALUE (d2);
@ -351,12 +351,12 @@ composite_type (tree t1, tree t2)
if (elt == TREE_TYPE (t2) && TYPE_DOMAIN (t2)
&& (d1_variable || d1_zero || !d2_variable))
return build_type_attribute_variant (t2, attributes);
if (elt == TREE_TYPE (t1) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1))
return build_type_attribute_variant (t1, attributes);
if (elt == TREE_TYPE (t2) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1))
return build_type_attribute_variant (t2, attributes);
/* Merge the element types, and have a size if either arg has
one. We may have qualifiers on the element types. To set
up TYPE_MAIN_VARIANT correctly, we need to form the
@ -530,7 +530,7 @@ common_pointer_type (tree t1, tree t2)
return t1;
gcc_assert (TREE_CODE (t1) == POINTER_TYPE
&& TREE_CODE (t2) == POINTER_TYPE);
&& TREE_CODE (t2) == POINTER_TYPE);
/* Merge the attributes. */
attributes = targetm.merge_type_attributes (t1, t2);
@ -667,7 +667,7 @@ c_common_type (tree t1, tree t2)
if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2))
return long_long_unsigned_type_node;
else
return long_long_integer_type_node;
return long_long_integer_type_node;
}
if (TYPE_MAIN_VARIANT (t1) == long_unsigned_type_node
@ -737,9 +737,10 @@ comptypes (tree type1, tree type2)
val = comptypes_internal (type1, type2);
free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1);
return val;
}
}
/* Return 1 if TYPE1 and TYPE2 are compatible types for assignment
or various other operations. Return 2 if they are compatible
but a warning may be needed if you use them together. This
@ -859,14 +860,14 @@ comptypes_internal (tree type1, tree type2)
|| !tree_int_cst_equal (TYPE_MAX_VALUE (d1), TYPE_MAX_VALUE (d2)))
val = 0;
break;
break;
}
case ENUMERAL_TYPE:
case RECORD_TYPE:
case UNION_TYPE:
if (val != 1 && !same_translation_unit_p (t1, t2))
{
{
if (attrval != 2)
return tagged_types_tu_compatible_p (t1, t2);
val = tagged_types_tu_compatible_p (t1, t2);
@ -953,9 +954,9 @@ alloc_tagged_tu_seen_cache (tree t1, tree t2)
tu->next = tagged_tu_seen_base;
tu->t1 = t1;
tu->t2 = t2;
tagged_tu_seen_base = tu;
/* The C standard says that two structures in different translation
units are compatible with each other only if the types of their
fields are compatible (among other things). We assume that they
@ -1038,31 +1039,31 @@ tagged_types_tu_compatible_p (tree t1, tree t2)
case ENUMERAL_TYPE:
{
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
/* Speed up the case where the type values are in the same order. */
tree tv1 = TYPE_VALUES (t1);
tree tv2 = TYPE_VALUES (t2);
/* Speed up the case where the type values are in the same order. */
tree tv1 = TYPE_VALUES (t1);
tree tv2 = TYPE_VALUES (t2);
if (tv1 == tv2)
if (tv1 == tv2)
{
return 1;
}
for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
{
if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
break;
if (simple_cst_equal (TREE_VALUE (tv1), TREE_VALUE (tv2)) != 1)
for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
{
if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
break;
if (simple_cst_equal (TREE_VALUE (tv1), TREE_VALUE (tv2)) != 1)
{
tu->val = 0;
tu->val = 0;
return 0;
}
}
}
if (tv1 == NULL_TREE && tv2 == NULL_TREE)
if (tv1 == NULL_TREE && tv2 == NULL_TREE)
{
return 1;
}
if (tv1 == NULL_TREE || tv2 == NULL_TREE)
if (tv1 == NULL_TREE || tv2 == NULL_TREE)
{
tu->val = 0;
return 0;
@ -1095,16 +1096,16 @@ tagged_types_tu_compatible_p (tree t1, tree t2)
tu->val = 0;
return 0;
}
/* Speed up the common case where the fields are in the same order. */
for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2); s1 && s2;
s1 = TREE_CHAIN (s1), s2 = TREE_CHAIN (s2))
{
int result;
if (DECL_NAME (s1) == NULL
|| DECL_NAME (s1) != DECL_NAME (s2))
|| DECL_NAME (s1) != DECL_NAME (s2))
break;
result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2));
if (result == 0)
@ -1167,7 +1168,7 @@ tagged_types_tu_compatible_p (tree t1, tree t2)
case RECORD_TYPE:
{
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2);
s1 && s2;
@ -1962,7 +1963,7 @@ build_array_ref (tree array, tree index)
type = TYPE_MAIN_VARIANT (type);
rval = build4 (ARRAY_REF, type, array, index, NULL_TREE, NULL_TREE);
/* Array ref is const/volatile if the array elements are
or if the array is. */
or if the array is. */
TREE_READONLY (rval)
|= (TYPE_READONLY (TREE_TYPE (TREE_TYPE (array)))
| TREE_READONLY (array));
@ -2268,7 +2269,7 @@ build_function_call (tree function, tree params)
if (require_constant_value)
{
result = fold_build3_initializer (CALL_EXPR, TREE_TYPE (fntype),
function, coerced_params, NULL_TREE);
function, coerced_params, NULL_TREE);
if (TREE_CONSTANT (result)
&& (name == NULL_TREE
@ -2277,7 +2278,7 @@ build_function_call (tree function, tree params)
}
else
result = fold_build3 (CALL_EXPR, TREE_TYPE (fntype),
function, coerced_params, NULL_TREE);
function, coerced_params, NULL_TREE);
if (VOID_TYPE_P (TREE_TYPE (result)))
return result;
@ -2419,16 +2420,16 @@ convert_arguments (tree typelist, tree values, tree function, tree fundecl)
else if (type != TREE_TYPE (val)
&& (type == dfloat32_type_node
|| type == dfloat64_type_node
|| type == dfloat128_type_node
|| type == dfloat128_type_node
|| TREE_TYPE (val) == dfloat32_type_node
|| TREE_TYPE (val) == dfloat64_type_node
|| TREE_TYPE (val) == dfloat128_type_node)
&& (formal_prec
&& (formal_prec
<= TYPE_PRECISION (TREE_TYPE (val))
|| (type == dfloat128_type_node
&& (TREE_TYPE (val)
!= dfloat64_type_node
&& (TREE_TYPE (val)
!= dfloat64_type_node
&& (TREE_TYPE (val)
!= dfloat32_type_node)))
|| (type == dfloat64_type_node
&& (TREE_TYPE (val)
@ -2498,16 +2499,16 @@ convert_arguments (tree typelist, tree values, tree function, tree fundecl)
result = tree_cons (NULL_TREE, parmval, result);
}
else if (TREE_CODE (TREE_TYPE (val)) == REAL_TYPE
&& (TYPE_PRECISION (TREE_TYPE (val))
< TYPE_PRECISION (double_type_node))
&& (TYPE_PRECISION (TREE_TYPE (val))
< TYPE_PRECISION (double_type_node))
&& !DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (val))))
/* Convert `float' to `double'. */
result = tree_cons (NULL_TREE, convert (double_type_node, val), result);
else if ((invalid_func_diag =
targetm.calls.invalid_arg_for_unprototyped_fn (typelist, fundecl, val)))
else if ((invalid_func_diag =
targetm.calls.invalid_arg_for_unprototyped_fn (typelist, fundecl, val)))
{
error (invalid_func_diag);
return error_mark_node;
return error_mark_node;
}
else
/* Convert `short' and `char' to full-size `int'. */
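Seen from the caller's side, the default argument promotions applied in this hunk (illustration; old_style is a hypothetical unprototyped function):

int old_style ();  /* no prototype */

void caller (void)
{
  float f = 1.5f;
  short s = 2;
  old_style (f, s);  /* f is passed as double, s as int */
}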
@ -2895,9 +2896,9 @@ build_unary_op (enum tree_code code, tree xarg, int flag)
&& typecode != INTEGER_TYPE && typecode != REAL_TYPE)
{
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
error ("wrong type argument to increment");
else
error ("wrong type argument to decrement");
error ("wrong type argument to increment");
else
error ("wrong type argument to decrement");
return error_mark_node;
}
@ -2925,7 +2926,7 @@ build_unary_op (enum tree_code code, tree xarg, int flag)
else if ((pedantic || warn_pointer_arith)
&& (TREE_CODE (TREE_TYPE (result_type)) == FUNCTION_TYPE
|| TREE_CODE (TREE_TYPE (result_type)) == VOID_TYPE))
{
{
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
pedwarn ("wrong type argument to increment");
else
@ -3002,7 +3003,7 @@ build_unary_op (enum tree_code code, tree xarg, int flag)
argtype = TREE_TYPE (arg);
/* If the lvalue is const or volatile, merge that into the type
to which the address will point. Note that you can't get a
to which the address will point. Note that you can't get a
restricted pointer by taking the address of something, so we
only have to deal with `const' and `volatile' here. */
if ((DECL_P (arg) || REFERENCE_CLASS_P (arg))
@ -3042,7 +3043,7 @@ build_unary_op (enum tree_code code, tree xarg, int flag)
if (argtype == 0)
argtype = TREE_TYPE (arg);
return require_constant_value ? fold_build1_initializer (code, argtype, arg)
: fold_build1 (code, argtype, arg);
: fold_build1 (code, argtype, arg);
}
/* Return nonzero if REF is an lvalue valid for this language.
@ -3093,7 +3094,7 @@ readonly_error (tree arg, enum lvalue_use use)
ensures that all the format strings are checked at compile
time. */
#define READONLY_MSG(A, I, D, AS) (use == lv_assign ? (A) \
: (use == lv_increment ? (I) \
: (use == lv_increment ? (I) \
: (use == lv_decrement ? (D) : (AS))))
if (TREE_CODE (arg) == COMPONENT_REF)
{
@ -3249,9 +3250,9 @@ build_conditional_expr (tree ifexp, tree op1, tree op2)
result_type = TYPE_MAIN_VARIANT (type1);
}
else if ((code1 == INTEGER_TYPE || code1 == REAL_TYPE
|| code1 == COMPLEX_TYPE)
&& (code2 == INTEGER_TYPE || code2 == REAL_TYPE
|| code2 == COMPLEX_TYPE))
|| code1 == COMPLEX_TYPE)
&& (code2 == INTEGER_TYPE || code2 == REAL_TYPE
|| code2 == COMPLEX_TYPE))
{
result_type = c_common_type (type1, type2);
@ -3374,7 +3375,7 @@ build_compound_expr (tree expr1, tree expr2)
if (!TREE_SIDE_EFFECTS (expr1))
{
/* The left-hand operand of a comma expression is like an expression
statement: with -Wextra or -Wunused, we should warn if it doesn't have
statement: with -Wextra or -Wunused, we should warn if it doesn't have
any side-effects, unless it was explicitly cast to (void). */
if (warn_unused_value)
{
@ -3535,10 +3536,10 @@ build_c_cast (tree type, tree expr)
if (TREE_CODE (type) == INTEGER_TYPE
&& TREE_CODE (otype) == POINTER_TYPE
&& TYPE_PRECISION (type) != TYPE_PRECISION (otype))
/* Unlike conversion of integers to pointers, where the
warning is disabled for converting constants because
of cases such as SIG_*, warn about converting constant
pointers to integers. In some cases it may cause unwanted
/* Unlike conversion of integers to pointers, where the
warning is disabled for converting constants because
of cases such as SIG_*, warn about converting constant
pointers to integers. In some cases it may cause unwanted
sign extension, and a warning is appropriate. */
warning (OPT_Wpointer_to_int_cast,
"cast from pointer to integer of different size");
@ -3621,7 +3622,6 @@ c_cast_expr (struct c_type_name *type_name, tree expr)
return build_c_cast (type, expr);
}
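The contrast drawn in the cast warnings above, assuming pointers wider than int as on LP64 targets (illustration):

void demo (void)
{
  void (*h) (int) = (void (*) (int)) 1;  /* constant integer -> pointer (the SIG_* pattern): not warned */
  int a = (int) h;                       /* pointer -> narrower integer: -Wpointer-to-int-cast warns    */
}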
/* Build an assignment expression of lvalue LHS from value RHS.
MODIFYCODE is the code for a binary operator that we use
@ -3871,7 +3871,7 @@ convert_for_assignment (tree type, tree rhs, enum impl_conv errtype,
}
/* Some types can interconvert without explicit casts. */
else if (codel == VECTOR_TYPE && coder == VECTOR_TYPE
&& vector_types_convertible_p (type, TREE_TYPE (rhs)))
&& vector_types_convertible_p (type, TREE_TYPE (rhs)))
return convert (type, rhs);
/* Arithmetic types all interconvert, and enum is treated like int. */
else if ((codel == INTEGER_TYPE || codel == REAL_TYPE
@ -4005,26 +4005,26 @@ convert_for_assignment (tree type, tree rhs, enum impl_conv errtype,
mvr = TYPE_MAIN_VARIANT (mvr);
/* Opaque pointers are treated like void pointers. */
is_opaque_pointer = (targetm.vector_opaque_p (type)
|| targetm.vector_opaque_p (rhstype))
&& TREE_CODE (ttl) == VECTOR_TYPE
&& TREE_CODE (ttr) == VECTOR_TYPE;
|| targetm.vector_opaque_p (rhstype))
&& TREE_CODE (ttl) == VECTOR_TYPE
&& TREE_CODE (ttr) == VECTOR_TYPE;
/* C++ does not allow the implicit conversion void* -> T*. However,
for the purpose of reducing the number of false positives, we
tolerate the special case of
for the purpose of reducing the number of false positives, we
tolerate the special case of
int *p = NULL;
int *p = NULL;
where NULL is typically defined in C to be '(void *) 0'. */
where NULL is typically defined in C to be '(void *) 0'. */
if (VOID_TYPE_P (ttr) && rhs != null_pointer_node && !VOID_TYPE_P (ttl))
warning (OPT_Wc___compat, "request for implicit conversion from "
"%qT to %qT not permitted in C++", rhstype, type);
warning (OPT_Wc___compat, "request for implicit conversion from "
"%qT to %qT not permitted in C++", rhstype, type);
/* Check if the right-hand side has a format attribute but the
left-hand side doesn't. */
if (warn_missing_format_attribute
&& check_missing_format_attribute (type, rhstype))
{
{
switch (errtype)
{
case ic_argpass:
@ -4053,7 +4053,7 @@ convert_for_assignment (tree type, tree rhs, enum impl_conv errtype,
gcc_unreachable ();
}
}
/* Any non-function converts to a [const][volatile] void *
and vice versa; otherwise, targets must be the same.
Meanwhile, the lhs target must have all the qualifiers of the rhs. */
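What the -Wc++-compat check in this hunk flags, and the special case it tolerates (illustration):

void f (void *vp)
{
  int *ip = vp;         /* implicit void * -> int *: invalid in C++, warned */
  int *q = (void *) 0;  /* the tolerated case: NULL is commonly (void *) 0  */
}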
@ -5359,7 +5359,7 @@ pop_init_level (int implicit)
else
{
gcc_assert (!TYPE_SIZE (constructor_type));
if (constructor_depth > 2)
error_init ("initialization of flexible array member in a nested context");
else if (pedantic)
@ -5969,7 +5969,7 @@ set_nonincremental_init_from_string (tree str)
else if (bitpos == HOST_BITS_PER_WIDE_INT)
{
if (val[1] < 0)
val[0] = -1;
val[0] = -1;
}
else if (val[0] & (((HOST_WIDE_INT) 1)
<< (bitpos - 1 - HOST_BITS_PER_WIDE_INT)))
@ -6375,7 +6375,7 @@ process_init_element (struct c_expr value)
&& integer_zerop (constructor_unfilled_index))
{
if (constructor_stack->replacement_value.value)
error_init ("excess elements in char array initializer");
error_init ("excess elements in char array initializer");
constructor_stack->replacement_value = value;
return;
}
@ -6478,10 +6478,10 @@ process_init_element (struct c_expr value)
{
/* For a record, keep track of end position of last field. */
if (DECL_SIZE (constructor_fields))
constructor_bit_index
constructor_bit_index
= size_binop (PLUS_EXPR,
bit_position (constructor_fields),
DECL_SIZE (constructor_fields));
bit_position (constructor_fields),
DECL_SIZE (constructor_fields));
/* If the current field was the first one not yet written out,
it isn't now, so update. */
@ -6624,8 +6624,8 @@ process_init_element (struct c_expr value)
{
tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type));
/* Do a basic check of initializer size. Note that vectors
always have a fixed size derived from their type. */
/* Do a basic check of initializer size. Note that vectors
always have a fixed size derived from their type. */
if (tree_int_cst_lt (constructor_max_index, constructor_index))
{
pedwarn_init ("excess elements in vector initializer");
@ -6788,7 +6788,7 @@ build_asm_expr (tree string, tree outputs, tree inputs, tree clobbers,
output = error_mark_node;
}
else
output = error_mark_node;
output = error_mark_node;
TREE_VALUE (tail) = output;
}
@ -6966,7 +6966,7 @@ c_finish_return (tree retval)
inner = TREE_OPERAND (inner, 0);
while (REFERENCE_CLASS_P (inner)
&& TREE_CODE (inner) != INDIRECT_REF)
&& TREE_CODE (inner) != INDIRECT_REF)
inner = TREE_OPERAND (inner, 0);
if (DECL_P (inner)
@ -7229,41 +7229,41 @@ c_finish_loop (location_t start_locus, tree cond, tree incr, tree body,
else
{
tree top = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
/* If we have an exit condition, then we build an IF with gotos either
out of the loop, or to the top of it. If there's no exit condition,
then we just build a jump back to the top. */
out of the loop, or to the top of it. If there's no exit condition,
then we just build a jump back to the top. */
exit = build_and_jump (&LABEL_EXPR_LABEL (top));
if (cond && !integer_nonzerop (cond))
{
/* Canonicalize the loop condition to the end. This means
generating a branch to the loop condition. Reuse the
continue label, if possible. */
if (cond_is_first)
{
if (incr || !clab)
{
entry = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
t = build_and_jump (&LABEL_EXPR_LABEL (entry));
}
else
t = build1 (GOTO_EXPR, void_type_node, clab);
SET_EXPR_LOCATION (t, start_locus);
add_stmt (t);
}
t = build_and_jump (&blab);
exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
{
/* Canonicalize the loop condition to the end. This means
generating a branch to the loop condition. Reuse the
continue label, if possible. */
if (cond_is_first)
SET_EXPR_LOCATION (exit, start_locus);
{
if (incr || !clab)
{
entry = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
t = build_and_jump (&LABEL_EXPR_LABEL (entry));
}
else
t = build1 (GOTO_EXPR, void_type_node, clab);
SET_EXPR_LOCATION (t, start_locus);
add_stmt (t);
}
t = build_and_jump (&blab);
exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
if (cond_is_first)
SET_EXPR_LOCATION (exit, start_locus);
else
SET_EXPR_LOCATION (exit, input_location);
}
SET_EXPR_LOCATION (exit, input_location);
}
add_stmt (top);
}
if (body)
add_stmt (body);
if (clab)
@ -7306,7 +7306,7 @@ c_finish_bc_stmt (tree *label_p, bool is_break)
if (is_break)
error ("break statement not within loop or switch");
else
error ("continue statement not within a loop");
error ("continue statement not within a loop");
return NULL_TREE;
case 1:
@ -7994,7 +7994,7 @@ build_binary_op (enum tree_code code, tree orig_op0, tree orig_op1,
else if (code0 == POINTER_TYPE && null_pointer_constant_p (orig_op1))
{
if (TREE_CODE (op0) == ADDR_EXPR
&& DECL_P (TREE_OPERAND (op0, 0))
&& DECL_P (TREE_OPERAND (op0, 0))
&& !DECL_WEAK (TREE_OPERAND (op0, 0)))
warning (OPT_Walways_true, "the address of %qD will never be NULL",
TREE_OPERAND (op0, 0));
@ -8002,7 +8002,7 @@ build_binary_op (enum tree_code code, tree orig_op0, tree orig_op1,
}
else if (code1 == POINTER_TYPE && null_pointer_constant_p (orig_op0))
{
if (TREE_CODE (op1) == ADDR_EXPR
if (TREE_CODE (op1) == ADDR_EXPR
&& DECL_P (TREE_OPERAND (op1, 0))
&& !DECL_WEAK (TREE_OPERAND (op1, 0)))
warning (OPT_Walways_true, "the address of %qD will never be NULL",
@ -8273,9 +8273,9 @@ build_binary_op (enum tree_code code, tree orig_op0, tree orig_op1,
all the values of the unsigned type. */
if (!TYPE_UNSIGNED (result_type))
/* OK */;
/* Do not warn if both operands are the same signedness. */
else if (op0_signed == op1_signed)
/* OK */;
/* Do not warn if both operands are the same signedness. */
else if (op0_signed == op1_signed)
/* OK */;
else
{
tree sop, uop;
@ -8465,7 +8465,6 @@ c_expr_to_decl (tree expr, bool *tc ATTRIBUTE_UNUSED,
else
return expr;
}
/* Like c_begin_compound_stmt, except force the retention of the BLOCK. */
@ -7,12 +7,12 @@
; the terms of the GNU General Public License as published by the Free
; Software Foundation; either version 2, or (at your option) any later
; version.
;
;
; GCC is distributed in the hope that it will be useful, but WITHOUT ANY
; WARRANTY; without even the implied warranty of MERCHANTABILITY or
; FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
; for more details.
;
;
; You should have received a copy of the GNU General Public License
; along with GCC; see the file COPYING. If not, write to the Free
; Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
@ -691,7 +691,7 @@ C ObjC C++ ObjC++ Joined RejectNegative UInteger
-ftabstop=<number> Distance between tab stops for column reporting
ftemplate-depth-
C++ ObjC++ Joined RejectNegative UInteger
C++ ObjC++ Joined RejectNegative UInteger
-ftemplate-depth-<number> Specify maximum template instantiation depth
fthis-is-variable
@ -195,7 +195,7 @@ init_caller_save (void)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
for (mode = 0 ; mode < MAX_MACHINE_MODE; mode++)
if (HARD_REGNO_MODE_OK (i, mode))
{
{
int ok;
/* Update the register number and modes of the register
@ -212,7 +212,7 @@ init_caller_save (void)
reg_restore_code[i][mode] = recog_memoized (restinsn);
/* Now extract both insns and see if we can meet their
constraints. */
constraints. */
ok = (reg_save_code[i][mode] != -1
&& reg_restore_code[i][mode] != -1);
if (ok)
@ -228,7 +228,7 @@ init_caller_save (void)
reg_save_code[i][mode] = -1;
reg_restore_code[i][mode] = -1;
}
}
}
else
{
reg_save_code[i][mode] = -1;
@ -847,7 +847,7 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
registers from the live sets, and observe REG_UNUSED notes. */
COPY_REG_SET (&new->live_throughout, &chain->live_throughout);
/* Registers that are set in CHAIN->INSN live in the new insn.
(Unless there is a REG_UNUSED note for them, but we don't
(Unless there is a REG_UNUSED note for them, but we don't
look for them here.) */
note_stores (PATTERN (chain->insn), add_stored_regs,
&new->live_throughout);
View File
@ -399,7 +399,7 @@ emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
if (ecf_flags & ECF_RETURNS_TWICE)
{
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
REG_NOTES (call_insn));
REG_NOTES (call_insn));
current_function_calls_setjmp = 1;
}
@ -476,10 +476,10 @@ special_function_p (tree fndecl, int flags)
/* Exclude functions not at the file scope, or not `extern',
since they are not the magic functions we would otherwise
think they are.
FIXME: this should be handled with attributes, not with this
hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
because you can declare fork() inside a function if you
wish. */
FIXME: this should be handled with attributes, not with this
hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
because you can declare fork() inside a function if you
wish. */
&& (DECL_CONTEXT (fndecl) == NULL_TREE
|| TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
&& TREE_PUBLIC (fndecl))
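A sketch of what the FIXME above complains about, assuming the usual reading of the DECL_CONTEXT test (names illustrative only):

    int fork (void);           /* file scope, extern, TREE_PUBLIC: the
                                  name-based heuristic applies          */

    void
    f (void)
    {
      extern int fork (void);  /* block scope: DECL_CONTEXT is 'f', so
                                  the heuristic skips it, although it
                                  names the very same function          */
    }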
@ -2169,12 +2169,12 @@ expand_call (tree exp, rtx target, int ignore)
into a sibcall. */
|| !targetm.function_ok_for_sibcall (fndecl, exp)
/* Functions that do not return exactly once may not be sibcall
optimized. */
optimized. */
|| (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
|| TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
/* If the called function is nested in the current one, it might access
some of the caller's arguments, but could clobber them beforehand if
the argument areas are shared. */
some of the caller's arguments, but could clobber them beforehand if
the argument areas are shared. */
|| (fndecl && decl_function_context (fndecl) == current_function_decl)
/* If this function requires more stack slots than the current
function, we cannot change it into a sibling call.
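As a concrete instance of the nested-function disqualifier in the list above (hypothetical GNU C code):

    int
    outer (int x)
    {
      int nested (int y) { return y + x; }   /* GNU C nested function */

      /* decl_function_context (nested) == outer, so this call is not
         sibcall-optimized: nested may reach outer's frame, and the
         argument areas could be shared.  */
      return nested (x);
    }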
@ -2287,7 +2287,7 @@ expand_call (tree exp, rtx target, int ignore)
old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
/* The argument block when performing a sibling call is the
incoming argument block. */
incoming argument block. */
if (pass == 0)
{
argblock = virtual_incoming_args_rtx;
@ -2696,7 +2696,7 @@ expand_call (tree exp, rtx target, int ignore)
rtx insn;
bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
insns = get_insns ();
insns = get_insns ();
/* Expansion of block moves possibly introduced a loop that may
not appear inside libcall block. */
@ -2922,11 +2922,11 @@ expand_call (tree exp, rtx target, int ignore)
int unsignedp = TYPE_UNSIGNED (type);
int offset = 0;
enum machine_mode pmode;
pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
/* If we don't promote as expected, something is wrong. */
gcc_assert (GET_MODE (target) == pmode);
if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
&& (GET_MODE_SIZE (GET_MODE (target))
> GET_MODE_SIZE (TYPE_MODE (type))))
@ -3122,7 +3122,7 @@ split_complex_values (tree values)
tree type = TREE_TYPE (TREE_VALUE (p));
if (type && TREE_CODE (type) == COMPLEX_TYPE
&& targetm.calls.split_complex_arg (type))
goto found;
goto found;
}
return values;
@ -3176,7 +3176,7 @@ split_complex_types (tree types)
tree type = TREE_VALUE (p);
if (TREE_CODE (type) == COMPLEX_TYPE
&& targetm.calls.split_complex_arg (type))
goto found;
goto found;
}
return types;
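Both loops above only scan for a COMPLEX_TYPE entry before rewriting anything. In source terms, the rewrite they guard amounts to roughly this (illustrative only; the real transformation is on type/value lists, and only on targets whose split_complex_arg hook returns true):

    /* A signature such as */
    double f (double _Complex z);
    /* is handled for argument passing as if it were (hypothetical name) */
    double f_split (double z_re, double z_im);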
@ -3367,7 +3367,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
if (mem_value && struct_value == 0 && ! pcc_struct_value)
{
rtx addr = XEXP (mem_value, 0);
nargs++;
/* Make sure it is a reasonable operand for a move or push insn. */
@ -3385,7 +3385,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
1,
#else
argvec[count].reg != 0,
#endif
@ -3552,7 +3552,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
needed = 0;
/* We must be careful to use virtual regs before they're instantiated,
and real regs afterwards. Loop optimization, for example, can create
and real regs afterwards. Loop optimization, for example, can create
new libcalls after we've instantiated the virtual regs, and if we
use virtuals anyway, they won't match the rtl patterns. */
@ -3649,11 +3649,11 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
{
argvec[argnum].save_area
= assign_stack_temp (BLKmode,
argvec[argnum].locate.size.constant,
argvec[argnum].locate.size.constant,
0);
emit_block_move (validize_mem (argvec[argnum].save_area),
stack_area,
stack_area,
GEN_INT (argvec[argnum].locate.size.constant),
BLOCK_OP_CALL_PARM);
}
@ -3694,7 +3694,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
auto-increment causes confusion. So we merely indicate
that we access something with a known mode somewhere on
the stack. */
use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
gen_rtx_SCRATCH (Pmode));
use = gen_rtx_MEM (argvec[argnum].mode, use);
use = gen_rtx_USE (VOIDmode, use);
@ -3905,7 +3905,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
if (save_mode == BLKmode)
emit_block_move (stack_area,
validize_mem (argvec[count].save_area),
validize_mem (argvec[count].save_area),
GEN_INT (argvec[count].locate.size.constant),
BLOCK_OP_CALL_PARM);
else
@ -4083,7 +4083,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags,
/* Being passed entirely in a register. We shouldn't be called in
this case. */
gcc_assert (reg == 0 || partial != 0);
/* If this arg needs special alignment, don't load the registers
here. */
if (arg->n_aligned_regs != 0)
View File
@ -683,7 +683,7 @@ alloc_aux_for_blocks (int size)
else
/* Check whether AUX data are still allocated. */
gcc_assert (!first_block_aux_obj);
first_block_aux_obj = obstack_alloc (&block_aux_obstack, 0);
if (size)
{
@ -861,7 +861,7 @@ brief_dump_cfg (FILE *file)
/* An edge originally destined for BB, with FREQUENCY and COUNT, has been proved to
leave the block by TAKEN_EDGE. Update profile of BB such that edge E can be
redirected to destination of TAKEN_EDGE.
redirected to destination of TAKEN_EDGE.
This function may leave the profile inconsistent in the case TAKEN_EDGE
frequency or count is believed to be lower than FREQUENCY or COUNT
@ -972,8 +972,8 @@ scale_bbs_frequencies_int (basic_block *bbs, int nbbs, int num, int den)
by NUM/DEN, in gcov_type arithmetic. More accurate than previous
function but considerably slower. */
void
scale_bbs_frequencies_gcov_type (basic_block *bbs, int nbbs, gcov_type num,
gcov_type den)
scale_bbs_frequencies_gcov_type (basic_block *bbs, int nbbs, gcov_type num,
gcov_type den)
{
int i;
edge e;
View File
@ -981,23 +981,23 @@ dfs_enumerate_from (basic_block bb, int reverse,
edge_iterator ei;
lbb = st[--sp];
if (reverse)
{
{
FOR_EACH_EDGE (e, ei, lbb->preds)
if (!VISITED_P (e->src) && predicate (e->src, data))
{
gcc_assert (tv != rslt_max);
rslt[tv++] = st[sp++] = e->src;
MARK_VISITED (e->src);
gcc_assert (tv != rslt_max);
rslt[tv++] = st[sp++] = e->src;
MARK_VISITED (e->src);
}
}
}
else
{
{
FOR_EACH_EDGE (e, ei, lbb->succs)
if (!VISITED_P (e->dest) && predicate (e->dest, data))
{
gcc_assert (tv != rslt_max);
rslt[tv++] = st[sp++] = e->dest;
MARK_VISITED (e->dest);
gcc_assert (tv != rslt_max);
rslt[tv++] = st[sp++] = e->dest;
MARK_VISITED (e->dest);
}
}
}
@ -1012,24 +1012,24 @@ dfs_enumerate_from (basic_block bb, int reverse,
/* Compute dominance frontiers, ala Harvey, Ferrante, et al.
This algorithm can be found in Timothy Harvey's PhD thesis, at
http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
dominance algorithms.
First, we identify each join point, j (any node with more than one
incoming edge is a join point).
incoming edge is a join point).
We then examine each predecessor, p, of j and walk up the dominator tree
starting at p.
starting at p.
We stop the walk when we reach j's immediate dominator - j is in the
dominance frontier of each of the nodes in the walk, except for j's
immediate dominator. Intuitively, all of the rest of j's dominators are
shared by j's predecessors as well.
Since they dominate j, they will not have j in their dominance frontiers.
The number of nodes touched by this algorithm is equal to the size
The number of nodes touched by this algorithm is equal to the size
of the dominance frontiers, no more, no less.
*/
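A small worked example of the walk just described (not from the patch):

            A
           / \
          B   C
           \ /
            D

    D is the only join point; its predecessors are B and C, and
    idom (D) = A.  Walking up from B stops at A immediately, adding D to
    DF (B); likewise for C.  So DF (B) = DF (C) = {D} and
    DF (A) = DF (D) = {} -- two nodes touched in total, matching the
    size of the frontiers as the comment promises.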
@ -1050,11 +1050,11 @@ compute_dominance_frontiers_1 (bitmap *frontiers)
basic_block domsb;
if (runner == ENTRY_BLOCK_PTR)
continue;
domsb = get_immediate_dominator (CDI_DOMINATORS, b);
while (runner != domsb)
{
bitmap_set_bit (frontiers[runner->index],
bitmap_set_bit (frontiers[runner->index],
b->index);
runner = get_immediate_dominator (CDI_DOMINATORS,
runner);
@ -1062,8 +1062,8 @@ compute_dominance_frontiers_1 (bitmap *frontiers)
}
}
}
}
}
void
compute_dominance_frontiers (bitmap *frontiers)
View File
@ -28,7 +28,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
Available functionality:
- CFG construction
find_basic_blocks */
find_basic_blocks */
#include "config.h"
#include "system.h"
@ -124,8 +124,8 @@ control_flow_insn_p (rtx insn)
case BARRIER:
/* It is nonsense to reach a barrier when looking for the
end of basic block, but before dead code is eliminated
this may happen. */
end of basic block, but before dead code is eliminated
this may happen. */
return false;
default:
@ -145,7 +145,7 @@ count_basic_blocks (rtx f)
for (insn = f; insn; insn = NEXT_INSN (insn))
{
/* Code labels and barriers cause the current basic block to be
terminated at the previous real insn. */
terminated at the previous real insn. */
if ((LABEL_P (insn) || BARRIER_P (insn))
&& saw_insn)
count++, saw_insn = false;
@ -612,13 +612,13 @@ purge_dead_tablejump_edges (basic_block bb, rtx table)
for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
SET_STATE (e->dest, FULL_STATE (e->dest)
& ~(size_t) BLOCK_USED_BY_TABLEJUMP);
SET_STATE (e->dest, FULL_STATE (e->dest)
& ~(size_t) BLOCK_USED_BY_TABLEJUMP);
else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
{
remove_edge (e);
continue;
}
{
remove_edge (e);
continue;
}
ei_next (&ei);
}
}
View File
@ -55,7 +55,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "expr.h"
#define FORWARDER_BLOCK_P(BB) ((BB)->flags & BB_FORWARDER_BLOCK)
/* Set to true when we are running first pass of try_optimize_cfg loop. */
static bool first_pass;
static bool try_crossjump_to_edge (int, edge, edge);
@ -134,12 +134,12 @@ try_simplify_condjump (basic_block cbranch_block)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
@ -192,7 +192,7 @@ mark_effect (rtx exp, regset nonequal)
switch (GET_CODE (exp))
{
/* In case we do clobber the register, mark it as equal, as we know the
value is dead so it doesn't have to match. */
value is dead so it doesn't have to match. */
case CLOBBER:
if (REG_P (XEXP (exp, 0)))
{
@ -413,12 +413,12 @@ try_forward_edges (int mode, basic_block b)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX))
@ -434,9 +434,9 @@ try_forward_edges (int mode, basic_block b)
/* Skip complex edges because we don't know how to update them.
Still handle fallthru edges, as we can succeed in forwarding the fallthru
edge to the same place as the branch edge of a conditional branch,
turning the conditional branch into an unconditional one. */
Still handle fallthru edges, as we can succeed in forwarding the fallthru
edge to the same place as the branch edge of a conditional branch,
turning the conditional branch into an unconditional one. */
if (e->flags & EDGE_COMPLEX)
{
ei_next (&ei);
@ -450,8 +450,8 @@ try_forward_edges (int mode, basic_block b)
up jumps that cross between hot/cold sections.
Basic block partitioning may result in some jumps that appear
to be optimizable (or blocks that appear to be mergeable), but which
really must be left untouched (they are required to make it safely
to be optimizable (or blocks that appear to be mergeable), but which
really must be left untouched (they are required to make it safely
across partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete
details. */
@ -467,7 +467,7 @@ try_forward_edges (int mode, basic_block b)
may_thread |= target->flags & BB_DIRTY;
if (FORWARDER_BLOCK_P (target)
&& !(single_succ_edge (target)->flags & EDGE_CROSSING)
&& !(single_succ_edge (target)->flags & EDGE_CROSSING)
&& single_succ (target) != EXIT_BLOCK_PTR)
{
/* Bypass trivial infinite loops. */
@ -627,11 +627,11 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (a) != BB_PARTITION (b))
@ -682,12 +682,12 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (a) != BB_PARTITION (b))
@ -717,7 +717,7 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
necessary. */
only_notes = squeeze_notes (&BB_HEAD (b), &BB_END (b));
gcc_assert (!only_notes);
/* Scramble the insn chain. */
reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));
@ -752,18 +752,18 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (b) != BB_PARTITION (c))
return NULL;
/* If B has a fallthru edge to C, no need to move anything. */
if (e->flags & EDGE_FALLTHRU)
@ -789,7 +789,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
edge_iterator ei;
/* Avoid overactive code motion, as the forwarder blocks should be
eliminated by edge redirection instead. One exception might have
eliminated by edge redirection instead. One exception might have
been if B is a forwarder block and C has no fallthru edge, but
that should be cleaned up by bb-reorder instead. */
if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
@ -821,7 +821,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
if (! c_has_outgoing_fallthru)
{
merge_blocks_move_successor_nojumps (b, c);
return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
}
/* If B does not have an incoming fallthru, then it can be moved
@ -878,7 +878,7 @@ merge_memattrs (rtx x, rtx y)
MEM_ATTRS (y) = 0;
else if (! MEM_ATTRS (y))
MEM_ATTRS (x) = 0;
else
else
{
rtx mem_size;
@ -887,7 +887,7 @@ merge_memattrs (rtx x, rtx y)
set_mem_alias_set (x, 0);
set_mem_alias_set (y, 0);
}
if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
{
set_mem_expr (x, 0);
@ -900,7 +900,7 @@ merge_memattrs (rtx x, rtx y)
set_mem_offset (x, 0);
set_mem_offset (y, 0);
}
if (!MEM_SIZE (x))
mem_size = NULL_RTX;
else if (!MEM_SIZE (y))
@ -915,7 +915,7 @@ merge_memattrs (rtx x, rtx y)
set_mem_align (y, MEM_ALIGN (x));
}
}
fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
@ -968,7 +968,7 @@ old_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
if (CALL_P (i1)
&& (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
CALL_INSN_FUNCTION_USAGE (i2))
CALL_INSN_FUNCTION_USAGE (i2))
|| SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)))
return false;
@ -980,8 +980,8 @@ old_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
{
/* If register stack conversion has already been done, then
death notes must also be compared before it is certain that
the two instruction streams match. */
death notes must also be compared before it is certain that
the two instruction streams match. */
rtx note;
HARD_REG_SET i1_regset, i2_regset;
@ -1334,7 +1334,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
f2 = FALLTHRU_EDGE (bb2);
/* Get around possible forwarders on fallthru edges. Other cases
should be optimized out already. */
should be optimized out already. */
if (FORWARDER_BLOCK_P (f1->dest))
f1 = single_succ_edge (f1->dest);
@ -1513,7 +1513,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
FOR_EACH_EDGE (e1, ei, bb1->succs)
{
e2 = EDGE_SUCC (bb2, ei.index);
if (e1->flags & EDGE_EH)
nehedges1++;
@ -1609,12 +1609,12 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
newpos1 = newpos2 = NULL_RTX;
/* If we have partitioned hot/cold basic blocks, it is a bad idea
to try this optimization.
to try this optimization.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (flag_reorder_blocks_and_partition && no_new_pseudos)
@ -1758,8 +1758,8 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
s->count += s2->count;
/* Take care to update possible forwarder blocks. We verified
that there is no more than one in the chain, so we can't run
into an infinite loop. */
that there is no more than one in the chain, so we can't run
into an infinite loop. */
if (FORWARDER_BLOCK_P (s->dest))
{
single_succ_edge (s->dest)->count += s2->count;
@ -1839,16 +1839,16 @@ try_crossjump_bb (int mode, basic_block bb)
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections.
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (EDGE_PRED (bb, 0)->src) !=
BB_PARTITION (EDGE_PRED (bb, 1)->src)
if (BB_PARTITION (EDGE_PRED (bb, 0)->src) !=
BB_PARTITION (EDGE_PRED (bb, 1)->src)
|| (EDGE_PRED (bb, 0)->flags & EDGE_CROSSING))
return false;
@ -1864,7 +1864,7 @@ try_crossjump_bb (int mode, basic_block bb)
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (e->flags & EDGE_FALLTHRU)
fallthru = e;
fallthru = e;
}
changed = false;
@ -2075,7 +2075,7 @@ try_optimize_cfg (int mode)
does not fit the merge_blocks interface and is kept here in
the hope that it will become useless once more of the compiler
is transformed to use cfg_layout mode. */
if ((mode & CLEANUP_CFGLAYOUT)
&& can_merge_blocks_p (b, c))
{
@ -2247,7 +2247,7 @@ cleanup_cfg (int mode)
PROP_DEATH_NOTES
| PROP_SCAN_DEAD_CODE
| PROP_KILL_DEAD_CODE
| ((mode & CLEANUP_LOG_LINKS)
| ((mode & CLEANUP_LOG_LINKS)
? PROP_LOG_LINKS : 0)))
break;
}
@ -2281,8 +2281,8 @@ rest_of_handle_jump (void)
struct tree_opt_pass pass_jump =
{
"sibling", /* name */
NULL, /* gate */
rest_of_handle_jump, /* execute */
NULL, /* gate */
rest_of_handle_jump, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
@ -2310,7 +2310,7 @@ rest_of_handle_jump2 (void)
if (dump_file)
dump_flow_info (dump_file, dump_flags);
cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0)
| (flag_thread_jumps ? CLEANUP_THREADING : 0));
| (flag_thread_jumps ? CLEANUP_THREADING : 0));
purge_line_number_notes ();
@ -2330,8 +2330,8 @@ rest_of_handle_jump2 (void)
struct tree_opt_pass pass_jump2 =
{
"jump", /* name */
NULL, /* gate */
rest_of_handle_jump2, /* execute */
NULL, /* gate */
rest_of_handle_jump2, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
View File
@ -513,7 +513,7 @@ expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
HOST_WIDE_INT align;
rtx x;
/* If this fails, we've overflowed the stack frame. Error nicely? */
gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
@ -557,7 +557,7 @@ expand_stack_vars (bool (*pred) (tree))
if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
continue;
/* Check the predicate to see whether this variable should be
/* Check the predicate to see whether this variable should be
allocated in this pass. */
if (pred && !pred (stack_vars[i].decl))
continue;
@ -674,7 +674,7 @@ expand_one_error_var (tree var)
SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var. VAR is a variable that will be
/* A subroutine of expand_one_var. VAR is a variable that will be
allocated to the local stack frame. Return true if we wish to
add VAR to STACK_VARS so that it will be coalesced with other
variables. Return false to allocate VAR immediately.
@ -699,7 +699,7 @@ defer_stack_allocation (tree var, bool toplevel)
/* Without optimization, *most* variables are allocated from the
stack, which makes the quadratic problem large exactly when we
want compilation to proceed as quickly as possible. On the
want compilation to proceed as quickly as possible. On the
other hand, we don't want the function's stack frame size to
get completely out of hand. So we avoid adding scalars and
"small" aggregates to the list at all. */
@ -1001,16 +1001,16 @@ expand_used_vars (void)
if (stack_vars_num > 0)
{
/* Due to the way alias sets work, no variables with non-conflicting
alias sets may be assigned the same address. Add conflicts to
alias sets may be assigned the same address. Add conflicts to
reflect this. */
add_alias_set_conflicts ();
/* If stack protection is enabled, we don't share space between
/* If stack protection is enabled, we don't share space between
vulnerable data and non-vulnerable data. */
if (flag_stack_protect)
add_stack_protection_conflicts ();
/* Now that we have collected all stack variables, and have computed a
/* Now that we have collected all stack variables, and have computed a
minimal interference graph, attempt to save some stack space. */
partition_stack_vars ();
if (dump_file)
@ -1029,7 +1029,7 @@ expand_used_vars (void)
{
/* Reorder decls to be protected by iterating over the variables
array multiple times, and allocating out of each phase in turn. */
/* ??? We could probably integrate this into the qsort we did
/* ??? We could probably integrate this into the qsort we did
earlier, such that we naturally see these variables first,
and thus naturally allocate things in the right order. */
if (has_protected_decls)
@ -1158,7 +1158,7 @@ expand_gimple_cond_expr (basic_block bb, tree stmt)
update_bb_for_insn (new_bb);
maybe_dump_rtl_for_tree_stmt (stmt, last2);
if (EXPR_LOCUS (else_exp))
emit_line_note (*(EXPR_LOCUS (else_exp)));
@ -1221,9 +1221,9 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
e->dest->count -= e->count;
e->dest->frequency -= EDGE_FREQUENCY (e);
if (e->dest->count < 0)
e->dest->count = 0;
e->dest->count = 0;
if (e->dest->frequency < 0)
e->dest->frequency = 0;
e->dest->frequency = 0;
}
count += e->count;
probability += e->probability;
@ -1303,7 +1303,7 @@ expand_gimple_basic_block (basic_block bb)
expand_expr_stmt (stmt);
/* Java emits line number notes at the top of labels.
??? Make this go away once line number notes are obsoleted. */
??? Make this go away once line number notes are obsoleted. */
BB_HEAD (bb) = NEXT_INSN (last);
if (NOTE_P (BB_HEAD (bb)))
BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
@ -1323,8 +1323,8 @@ expand_gimple_basic_block (basic_block bb)
e->flags &= ~EDGE_EXECUTABLE;
/* At the moment not all abnormal edges match the RTL representation.
It is safe to remove them here as find_many_sub_basic_blocks will
rediscover them. In the future we should get this fixed properly. */
It is safe to remove them here as find_many_sub_basic_blocks will
rediscover them. In the future we should get this fixed properly. */
if (e->flags & EDGE_ABNORMAL)
remove_edge (e);
else
@ -1492,7 +1492,7 @@ construct_exit_block (void)
FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
if (e2 != e)
{
e->count -= e2->count;
e->count -= e2->count;
exit_block->count -= e2->count;
exit_block->frequency -= EDGE_FREQUENCY (e2);
}
@ -1505,7 +1505,7 @@ construct_exit_block (void)
update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
/* Helper function for discover_nonconstant_array_refs.
Look for ARRAY_REF nodes with non-constant indexes and mark them
addressable. */
@ -1668,15 +1668,15 @@ tree_expand_cfg (void)
/* If we're emitting a nested function, make sure its parent gets
emitted as well. Doing otherwise confuses debug info. */
{
{
tree parent;
for (parent = DECL_CONTEXT (current_function_decl);
parent != NULL_TREE;
parent = get_containing_scope (parent))
parent != NULL_TREE;
parent = get_containing_scope (parent))
if (TREE_CODE (parent) == FUNCTION_DECL)
TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
}
/* We are now committed to emitting code for this function. Do any
preparation, such as emitting abstract debug info for the inline
before it gets mangled by optimization. */
@ -1693,13 +1693,13 @@ tree_expand_cfg (void)
struct tree_opt_pass pass_expand =
{
"expand", /* name */
"expand", /* name */
NULL, /* gate */
tree_expand_cfg, /* execute */
tree_expand_cfg, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
TV_EXPAND, /* tv_id */
TV_EXPAND, /* tv_id */
/* ??? If TER is enabled, we actually receive GENERIC. */
PROP_gimple_leh | PROP_cfg, /* properties_required */
PROP_rtl, /* properties_provided */
View File
@ -111,13 +111,13 @@ verify_flow_info (void)
if (bb->count < 0)
{
error ("verify_flow_info: Wrong count of block %i %i",
bb->index, (int)bb->count);
bb->index, (int)bb->count);
err = 1;
}
if (bb->frequency < 0)
{
error ("verify_flow_info: Wrong frequency of block %i %i",
bb->index, bb->frequency);
bb->index, bb->frequency);
err = 1;
}
FOR_EACH_EDGE (e, ei, bb->succs)
@ -238,7 +238,7 @@ dump_bb (basic_block bb, FILE *outf, int indent)
edge e;
edge_iterator ei;
char *s_indent;
s_indent = alloca ((size_t) indent + 1);
memset (s_indent, ' ', (size_t) indent);
s_indent[indent] = '\0';
@ -767,7 +767,7 @@ duplicate_block (basic_block bb, edge e, basic_block after)
/* Return 1 if BB ends with a call, possibly followed by some
instructions that must stay with the call, 0 otherwise. */
bool
bool
block_ends_with_call_p (basic_block bb)
{
if (!cfg_hooks->block_ends_with_call_p)
@ -778,7 +778,7 @@ block_ends_with_call_p (basic_block bb)
/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
bool
bool
block_ends_with_condjump_p (basic_block bb)
{
if (!cfg_hooks->block_ends_with_condjump_p)
@ -800,7 +800,7 @@ int
flow_call_edges_add (sbitmap blocks)
{
if (!cfg_hooks->flow_call_edges_add)
internal_error ("%s does not support flow_call_edges_add",
internal_error ("%s does not support flow_call_edges_add",
cfg_hooks->name);
return (cfg_hooks->flow_call_edges_add) (blocks);
@ -826,8 +826,8 @@ execute_on_shrinking_pred (edge e)
cfg_hooks->execute_on_shrinking_pred (e);
}
/* This is used inside loop versioning when we want to insert
stmts/insns on the edges, which have a different behavior
/* This is used inside loop versioning when we want to insert
stmts/insns on the edges, which have a different behavior
in trees and in RTL, so we made a CFG hook. */
void
lv_flush_pending_stmts (edge e)
@ -851,7 +851,7 @@ cfg_hook_duplicate_loop_to_header_edge (struct loop *loop, edge e,
unsigned int *n_to_remove, int flags)
{
gcc_assert (cfg_hooks->cfg_hook_duplicate_loop_to_header_edge);
return cfg_hooks->cfg_hook_duplicate_loop_to_header_edge (loop, e, loops,
return cfg_hooks->cfg_hook_duplicate_loop_to_header_edge (loop, e, loops,
ndupl, wont_exit,
orig, to_remove,
n_to_remove, flags);
@ -887,4 +887,4 @@ lv_add_condition_to_bb (basic_block first, basic_block second,
{
gcc_assert (cfg_hooks->lv_add_condition_to_bb);
cfg_hooks->lv_add_condition_to_bb (first, second, new, cond);
}
}
View File
@ -122,16 +122,16 @@ struct cfg_hooks
/* Add condition to new basic block and update CFG used in loop
versioning. */
void (*lv_add_condition_to_bb) (basic_block, basic_block, basic_block,
void *);
void *);
/* Update the PHI nodes in case of loop versioning. */
void (*lv_adjust_loop_header_phi) (basic_block, basic_block,
basic_block, edge);
/* Given a condition BB extract the true/false taken/not taken edges
(depending on whether we are on trees or RTL).
void (*extract_cond_bb_edges) (basic_block, edge *, edge *);
/* Add PHI arguments queued in the PENDING_STMT list on edge E to edge
E->dest (only in tree-ssa loop versioning). */
void (*flush_pending_stmts) (edge);
View File
@ -117,7 +117,7 @@ skip_insns_after_block (basic_block bb)
if (NEXT_INSN (insn)
&& JUMP_P (NEXT_INSN (insn))
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
{
insn = NEXT_INSN (insn);
last_insn = insn;
@ -196,7 +196,7 @@ record_effective_endpoints (void)
continue;
/* No basic blocks at all? */
gcc_assert (insn);
if (PREV_INSN (insn))
cfg_layout_function_header =
unlink_insn_chain (get_insns (), PREV_INSN (insn));
@ -264,7 +264,7 @@ insn_locators_initialize (void)
for (insn = get_insns (); insn; insn = next)
{
int active = 0;
next = NEXT_INSN (insn);
if (NOTE_P (insn))
@ -283,7 +283,7 @@ insn_locators_initialize (void)
active = (active_insn_p (insn)
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
check_block_change (insn, &block);
if (active
@ -331,8 +331,8 @@ insn_locators_initialize (void)
struct tree_opt_pass pass_insn_locators_initialize =
{
"locators", /* name */
NULL, /* gate */
insn_locators_initialize, /* execute */
NULL, /* gate */
insn_locators_initialize, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
@ -549,7 +549,7 @@ reemit_insn_block_notes (void)
this_block = insn_scope (insn);
/* For sequences compute scope resulting from merging all scopes
of instructions nested inside. */
of instructions nested inside. */
if (GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
@ -673,7 +673,7 @@ fixup_reorder_chain (void)
{
/* If the old fallthru is still next, nothing to do. */
if (bb->aux == e_fall->dest
|| e_fall->dest == EXIT_BLOCK_PTR)
|| e_fall->dest == EXIT_BLOCK_PTR)
continue;
/* The degenerate case of a conditional jump jumping to the next
@ -767,7 +767,7 @@ fixup_reorder_chain (void)
bb->aux = nb;
/* Don't process this new block. */
bb = nb;
/* Make sure new bb is tagged for correct section (same as
fall-thru source, since you cannot fall through across
section boundaries). */
@ -979,7 +979,7 @@ duplicate_insn_chain (rtx from, rtx to)
switch (NOTE_LINE_NUMBER (insn))
{
/* In case the prologue is empty and the function contains a label
in first BB, we may want to copy the block. */
in first BB, we may want to copy the block. */
case NOTE_INSN_PROLOGUE_END:
case NOTE_INSN_DELETED:
@ -988,9 +988,9 @@ duplicate_insn_chain (rtx from, rtx to)
case NOTE_INSN_EPILOGUE_BEG:
case NOTE_INSN_FUNCTION_END:
/* Debug code expects these notes to exist just once.
Keep them in the master copy.
??? It probably makes more sense to duplicate them for each
epilogue copy. */
Keep them in the master copy.
??? It probably makes more sense to duplicate them for each
epilogue copy. */
case NOTE_INSN_FUNCTION_BEG:
/* There is always just a single entry to a function. */
case NOTE_INSN_BASIC_BLOCK:
@ -1005,9 +1005,9 @@ duplicate_insn_chain (rtx from, rtx to)
/* All other notes should have already been eliminated.
*/
gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
/* It is possible that no_line_number is set and the note
won't be emitted. */
won't be emitted. */
emit_note_copy (insn);
}
break;
@ -1209,7 +1209,7 @@ end:
Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
also in the same order.
Newly created basic blocks are put after the basic block AFTER in the
instruction stream, and the order of the blocks in BBS array is preserved. */
View File
@ -885,31 +885,31 @@ get_loop_body_in_bfs_order (const struct loop *loop)
{
edge e;
edge_iterator ei;
if (!bitmap_bit_p (visited, bb->index))
{
/* This basic block is now visited */
bitmap_set_bit (visited, bb->index);
blocks[i++] = bb;
}
{
/* This basic block is now visited */
bitmap_set_bit (visited, bb->index);
blocks[i++] = bb;
}
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (flow_bb_inside_loop_p (loop, e->dest))
{
if (!bitmap_bit_p (visited, e->dest->index))
{
bitmap_set_bit (visited, e->dest->index);
blocks[i++] = e->dest;
}
}
}
{
if (flow_bb_inside_loop_p (loop, e->dest))
{
if (!bitmap_bit_p (visited, e->dest->index))
{
bitmap_set_bit (visited, e->dest->index);
blocks[i++] = e->dest;
}
}
}
gcc_assert (i >= vc);
bb = blocks[vc++];
}
BITMAP_FREE (visited);
return blocks;
}
@ -1072,7 +1072,7 @@ verify_loop_structure (struct loops *loops)
for (i = 0; i < loops->num; i++)
{
if (!loops->parray[i])
continue;
continue;
if (loops->parray[i]->num_nodes != sizes[i])
{
View File
@ -270,7 +270,7 @@ extern bool remove_path (struct loops *, edge);
If first_special is true, the value in the first iteration is
delta + mult * base
If extend = UNKNOWN, first_special must be false, delta 0, mult 1 and value is
subreg_{mode} (base + i * step)
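Reading the fragment above together with its stated special cases, the general value in iteration i appears to be (a reconstruction from the surrounding comment, hedged):

    value_i = delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step))

so that first_special reduces the first iteration to delta + mult * base, and extend = UNKNOWN (with delta = 0, mult = 1) leaves just subreg_{mode} (base + i * step).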
View File
@ -178,7 +178,7 @@ dfs (struct graph *g, int *qs, int nq, int *qt, bool forward)
{
if (qt)
qt[tick] = v;
g->vertices[v].post = tick++;
g->vertices[v].post = tick++;
if (!top)
break;
@ -257,7 +257,7 @@ free_graph (struct graph *g)
for parts of cycles that only "pass" through some loop -- i.e. for
each cycle, we want to mark blocks that belong directly to innermost
loop containing the whole cycle.
LOOPS is the loop tree. */
#define LOOP_REPR(LOOP) ((LOOP)->num + last_basic_block)
@ -290,8 +290,8 @@ mark_irreducible_loops (struct loops *loops)
FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
FOR_EACH_EDGE (e, ei, act->succs)
{
/* Ignore edges to exit. */
if (e->dest == EXIT_BLOCK_PTR)
/* Ignore edges to exit. */
if (e->dest == EXIT_BLOCK_PTR)
continue;
/* And latch edges. */
@ -433,9 +433,9 @@ expected_loop_iterations (const struct loop *loop)
count_in += e->count;
if (count_in == 0)
expected = count_latch * 2;
expected = count_latch * 2;
else
expected = (count_latch + count_in - 1) / count_in;
expected = (count_latch + count_in - 1) / count_in;
/* Avoid overflows. */
return (expected > REG_BR_PROB_BASE ? REG_BR_PROB_BASE : expected);
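Illustrative numbers (not from the patch): with count_latch = 900 and count_in = 100, the rounding-up division above yields expected = (900 + 100 - 1) / 100 = 9; with count_in = 0 the fallback is count_latch * 2; either way the result is then clamped to REG_BR_PROB_BASE.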
@ -526,7 +526,7 @@ init_set_costs (void)
target_res_regs = 3;
/* These are really just heuristic values. */
start_sequence ();
emit_move_insn (reg1, reg2);
seq = get_insns ();
@ -572,7 +572,7 @@ mark_loop_exit_edges (struct loops *loops)
{
basic_block bb;
edge e;
if (loops->num <= 1)
return;
View File
@ -455,7 +455,7 @@ scale_loop_frequencies (struct loop *loop, int num, int den)
Returns newly created loop. */
struct loop *
loopify (struct loops *loops, edge latch_edge, edge header_edge,
loopify (struct loops *loops, edge latch_edge, edge header_edge,
basic_block switch_bb, edge true_edge, edge false_edge,
bool redirect_all_edges)
{
@ -490,8 +490,8 @@ loopify (struct loops *loops, edge latch_edge, edge header_edge,
if (redirect_all_edges)
{
loop_redirect_edge (header_edge, switch_bb);
loop_redirect_edge (false_edge, loop->header);
loop_redirect_edge (false_edge, loop->header);
/* Update dominators. */
set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
@ -652,7 +652,7 @@ fix_loop_placements (struct loops *loops, struct loop *loop)
{
outer = loop->outer;
if (!fix_loop_placement (loop))
break;
break;
/* Changing the placement of a loop in the loop tree may alter the
validity of condition 2) of the description of fix_bb_placement
@ -746,7 +746,7 @@ loop_delete_branch_edge (edge e, int really_delete)
edge snd;
gcc_assert (EDGE_COUNT (src->succs) > 1);
/* Cannot handle more than two exit edges. */
if (EDGE_COUNT (src->succs) > 2)
return false;
@ -770,7 +770,7 @@ loop_delete_branch_edge (edge e, int really_delete)
return false;
single_succ_edge (src)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
single_succ_edge (src)->flags |= irr;
return true;
}
@ -783,7 +783,7 @@ can_duplicate_loop_p (struct loop *loop)
ret = can_copy_bbs_p (bbs, loop->num_nodes);
free (bbs);
return ret;
}
@ -902,7 +902,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
: prob_pass_thru;
/* Complete peeling is special as the probability of exit in last
copy becomes 1. */
copy becomes 1. */
if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
{
int wanted_freq = EDGE_FREQUENCY (e);
@ -919,7 +919,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
/* Now simulate the duplication adjustments and compute header
frequency of the last copy. */
for (i = 0; i < ndupl; i++)
wanted_freq = RDIV (wanted_freq * scale_step[i], REG_BR_PROB_BASE);
wanted_freq = RDIV (wanted_freq * scale_step[i], REG_BR_PROB_BASE);
scale_main = RDIV (wanted_freq * REG_BR_PROB_BASE, freq_in);
}
else if (is_latch)
@ -1061,7 +1061,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
}
free (new_bbs);
free (orig_loops);
/* Update the original loop. */
if (!is_latch)
set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
@ -1088,7 +1088,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
continue;
dom_bb = nearest_common_dominator (
CDI_DOMINATORS, first_active[i], first_active_latch);
set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
}
free (dom_bbs);
}
@ -1286,8 +1286,8 @@ loop_split_edge_with (edge e, rtx insns)
Split it and insert new conditional expression and adjust edges.
--- edge e ---> [cond expr] ---> [first_head]
|
+---------> [second_head]
|
+---------> [second_head]
*/
static basic_block
@ -1321,7 +1321,7 @@ lv_adjust_loop_entry_edge (basic_block first_head,
}
/* Main entry point for Loop Versioning transformation.
This transformation, given a condition and a loop, creates
if (condition) { loop_copy1 } else { loop_copy2 },
where loop_copy1 is the loop transformed in one way, and loop_copy2
@ -1333,7 +1333,7 @@ lv_adjust_loop_entry_edge (basic_block first_head,
instruction stream, otherwise it is placed before LOOP. */
struct loop *
loop_version (struct loops *loops, struct loop * loop,
loop_version (struct loops *loops, struct loop * loop,
void *cond_expr, basic_block *condition_bb,
bool place_after)
{
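A source-level sketch of the shape loop_version produces (hypothetical loop; the transformation itself works on the CFG, not on source):

    /* before */
    for (i = 0; i < n; i++)
      a[i] = b[i] + c[i];

    /* after versioning on cond (e.g. a runtime alias or alignment test) */
    if (cond)
      for (i = 0; i < n; i++)   /* loop_copy1: to be transformed further */
        a[i] = b[i] + c[i];
    else
      for (i = 0; i < n; i++)   /* loop_copy2: left unchanged */
        a[i] = b[i] + c[i];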
@ -1351,13 +1351,13 @@ loop_version (struct loops *loops, struct loop * loop,
entry = loop_preheader_edge (loop);
irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
/* Note down head of loop as first_head. */
first_head = entry->dest;
/* Duplicate loop. */
if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, loops, 1,
NULL, NULL, NULL, NULL, 0))
NULL, NULL, NULL, NULL, 0))
return NULL;
/* After duplication, the entry edge now points to the new loop head block.
@ -1377,7 +1377,7 @@ loop_version (struct loops *loops, struct loop * loop,
}
latch_edge = single_succ_edge (get_bb_copy (loop->latch));
extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
nloop = loopify (loops,
latch_edge,
@ -1389,10 +1389,10 @@ loop_version (struct loops *loops, struct loop * loop,
if (exit)
nloop->single_exit = find_edge (get_bb_copy (exit->src), exit->dest);
/* loopify redirected latch_edge. Update its PENDING_STMTS. */
/* loopify redirected latch_edge. Update its PENDING_STMTS. */
lv_flush_pending_stmts (latch_edge);
/* loopify redirected condition_bb's succ edge. Update its PENDING_STMTS. */
/* loopify redirected condition_bb's succ edge. Update its PENDING_STMTS. */
extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
lv_flush_pending_stmts (false_edge);
/* Adjust irreducible flag. */
@ -1419,8 +1419,8 @@ loop_version (struct loops *loops, struct loop * loop,
free (bbs);
}
/* At this point condition_bb is the loop preheader with two successors,
first_head and second_head. Make sure that the loop preheader has only
/* At this point condition_bb is the loop preheader with two successors,
first_head and second_head. Make sure that the loop preheader has only
one successor. */
loop_split_edge_with (loop_preheader_edge (loop), NULL);
loop_split_edge_with (loop_preheader_edge (nloop), NULL);
@ -1435,7 +1435,7 @@ loop_version (struct loops *loops, struct loop * loop,
to be correct). But still for the remaining loops the header dominates
the latch, and loops did not get new subloops (new loops might possibly
get created, but we are not interested in them). Fix up the mess.
If CHANGED_BBS is not NULL, basic blocks whose loop has changed are
marked in it. */
@ -1454,7 +1454,7 @@ fix_loop_structure (struct loops *loops, bitmap changed_bbs)
}
/* Remove the dead loops from structures. */
loops->tree_root->num_nodes = n_basic_blocks;
loops->tree_root->num_nodes = n_basic_blocks;
for (i = 1; i < loops->num; i++)
{
loop = loops->parray[i];
View File
@ -112,8 +112,8 @@ delete_insn (rtx insn)
if (LABEL_P (insn))
{
/* Some labels can't be directly removed from the INSN chain, as they
might be referenced via variables, constant pool etc.
Convert them to the special NOTE_INSN_DELETED_LABEL note. */
might be referenced via variables, constant pool etc.
Convert them to the special NOTE_INSN_DELETED_LABEL note. */
if (! can_delete_label_p (insn))
{
const char *name = LABEL_NAME (insn);
@ -442,7 +442,7 @@ struct tree_opt_pass pass_free_cfg =
rtx
entry_of_function (void)
{
return (n_basic_blocks > NUM_FIXED_BLOCKS ?
return (n_basic_blocks > NUM_FIXED_BLOCKS ?
BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}
@ -545,7 +545,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
/* This might have been an EH label that no longer has incoming
EH edges. Update data structures to match. */
maybe_remove_eh_handler (b_head);
/* Detect basic blocks with nothing but a label. This can happen
in particular at the end of a function. */
if (b_head == b_end)
@ -631,9 +631,9 @@ rtl_can_merge_blocks (basic_block a,basic_block b)
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (a) != BB_PARTITION (b))
@ -690,11 +690,11 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
|| BB_PARTITION (src) != BB_PARTITION (target))
return NULL;
@ -739,7 +739,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
rtx insn = src->il.rtl->footer;
delete_insn_chain (kill_from, BB_END (src));
delete_insn_chain (kill_from, BB_END (src));
/* Remove barriers but keep jumptables. */
while (insn)
@ -759,7 +759,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
}
}
else
delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
}
/* If this is already a simplejump, redirect it. */
@ -1002,7 +1002,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
gcc_assert (redirected);
note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
if (note)
{
@ -1041,9 +1041,9 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
edge tmp;
edge_iterator ei;
bool found = false;
basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
/* Change the existing edge's source to be the new block, and add
a new edge from the entry block to the new block. */
e->src = bb;
@ -1058,9 +1058,9 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
else
ei_next (&ei);
}
gcc_assert (found);
VEC_safe_push (edge, gc, bb->succs, e);
make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
}
@ -1104,8 +1104,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
NULL_RTX,
REG_NOTES
(BB_END
(jump_block)));
(jump_block)));
/* Wire edge in. */
new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
new_edge->probability = e->probability;
@ -1370,7 +1370,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
if (!before && !after)
{
/* Figure out where to put these things. If the destination has
one predecessor, insert there. Except for the exit block. */
one predecessor, insert there. Except for the exit block. */
if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
{
bb = e->dest;
@ -1391,7 +1391,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
}
/* If the source has one successor and the edge is not abnormal,
insert there. Except for the entry block. */
insert there. Except for the entry block. */
else if ((e->flags & EDGE_ABNORMAL) == 0
&& single_succ_p (e->src)
&& e->src != ENTRY_BLOCK_PTR)
@ -1441,8 +1441,8 @@ commit_one_edge_insertion (edge e, int watch_calls)
if (JUMP_P (BB_END (bb))
&& !any_condjump_p (BB_END (bb))
&& (single_succ_edge (bb)->flags & EDGE_CROSSING))
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
&& (single_succ_edge (bb)->flags & EDGE_CROSSING))
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
(REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
}
}
@ -1461,9 +1461,9 @@ commit_one_edge_insertion (edge e, int watch_calls)
if (returnjump_p (last))
{
/* ??? Remove all outgoing edges from BB and add one for EXIT.
This is not currently a problem because this only happens
for the (single) epilogue, which already has a fallthru edge
to EXIT. */
This is not currently a problem because this only happens
for the (single) epilogue, which already has a fallthru edge
to EXIT. */
e = single_succ_edge (bb);
gcc_assert (e->dest == EXIT_BLOCK_PTR
@ -1516,7 +1516,7 @@ commit_edge_insertions (void)
FOR_EACH_BB (bb)
if (bb->aux)
{
SET_BIT (blocks, bb->index);
SET_BIT (blocks, bb->index);
/* Check for forgotten bb->aux values before commit_edge_insertions
call. */
gcc_assert (bb->aux == &bb->aux);
@ -1561,7 +1561,7 @@ commit_edge_insertions_watch_calls (void)
FOR_EACH_BB (bb)
if (bb->aux)
{
SET_BIT (blocks, bb->index);
SET_BIT (blocks, bb->index);
/* Check for forgotten bb->aux values before commit_edge_insertions
call. */
gcc_assert (bb->aux == &bb->aux);
@ -1806,7 +1806,7 @@ rtl_verify_flow_info_1 (void)
|| (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
&& e->src != ENTRY_BLOCK_PTR
&& e->dest != EXIT_BLOCK_PTR))
{
{
error ("fallthru edge crosses section boundary (bb %i)",
e->src->index);
err = 1;
@ -1894,7 +1894,7 @@ rtl_verify_flow_info_1 (void)
}
/* OK pointers are correct. Now check the header of basic
block. It ought to contain optional CODE_LABEL followed
block. It ought to contain optional CODE_LABEL followed
by NOTE_BASIC_BLOCK. */
x = BB_HEAD (bb);
if (LABEL_P (x))
@ -1996,7 +1996,7 @@ rtl_verify_flow_info (void)
}
else if (e->src != ENTRY_BLOCK_PTR
&& e->dest != EXIT_BLOCK_PTR)
{
{
rtx insn;
if (e->src->next_bb != e->dest)
@ -2016,7 +2016,7 @@ rtl_verify_flow_info (void)
fatal_insn ("wrong insn in the fallthru edge", insn);
err = 1;
}
}
}
}
num_bb_notes = 0;
@ -2371,7 +2371,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
BB_END (src)))
{
edge redirected;
if (dump_file)
fprintf (dump_file, "Fallthru edge unified with branch "
"%i->%i redirected to %i\n",
@ -2380,11 +2380,11 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
redirected = redirect_branch_edge (e, dest);
gcc_assert (redirected);
e->flags |= EDGE_FALLTHRU;
e->src->flags |= BB_DIRTY;
e->src->flags |= BB_DIRTY;
return e;
}
/* In case we are redirecting fallthru edge to the branch edge
of conditional jump, remove it. */
of conditional jump, remove it. */
if (EDGE_COUNT (src->succs) == 2)
{
/* Find the edge that is different from E. */
@ -2512,9 +2512,9 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
and cold sections.
Basic block partitioning may result in some jumps that appear to
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
be optimizable (or blocks that appear to be mergeable), but which really
must be left untouched (they are required to make it safely across
partition boundaries). See the comments at the top of
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
if (BB_PARTITION (a) != BB_PARTITION (b))
@ -2550,7 +2550,7 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
/* This might have been an EH label that no longer has incoming
EH edges. Update data structures to match. */
maybe_remove_eh_handler (BB_HEAD (b));
delete_insn (BB_HEAD (b));
}
@ -2790,14 +2790,14 @@ rtl_flow_call_edges_add (sbitmap blocks)
rtx split_at_insn = insn;
/* Don't split the block between a call and an insn that should
remain in the same block as the call. */
remain in the same block as the call. */
if (CALL_P (insn))
while (split_at_insn != BB_END (bb)
&& keep_with_call_p (NEXT_INSN (split_at_insn)))
split_at_insn = NEXT_INSN (split_at_insn);
/* The handling above of the final block before the epilogue
should be enough to verify that there is no edge to the exit
should be enough to verify that there is no edge to the exit
block in CFG already. Calling make_edge in such case would
cause us to mark that edge as fake and remove it later. */
@ -2839,8 +2839,8 @@ rtl_flow_call_edges_add (sbitmap blocks)
in trees, and this should be of the same type since it is a hook. */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
basic_block second_head ATTRIBUTE_UNUSED,
basic_block cond_bb, void *comp_rtx)
basic_block second_head ATTRIBUTE_UNUSED,
basic_block cond_bb, void *comp_rtx)
{
rtx label, seq, jump;
rtx op0 = XEXP ((rtx)comp_rtx, 0);
@ -3015,7 +3015,7 @@ struct cfg_hooks rtl_cfg_hooks = {
NULL, /* lv_add_condition_to_bb */
NULL, /* lv_adjust_loop_header_phi*/
NULL, /* extract_cond_bb_edges */
NULL /* flush_pending_stmts */
NULL /* flush_pending_stmts */
};
/* Implementation of CFG manipulation for cfg layout RTL, where
@ -3058,5 +3058,5 @@ struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
NULL, /* lv_adjust_loop_header_phi*/
rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
NULL /* flush_pending_stmts */
NULL /* flush_pending_stmts */
};
View File
@ -20,7 +20,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
/* This file contains basic routines manipulating the call graph and variable pool.
The callgraph:
The call-graph is a data structure designed for intra-procedural optimization
@ -37,7 +37,7 @@ The callgraph:
not change once the declaration is inserted into the call-graph.
The call-graph nodes are created lazily using the cgraph_node function when
called for an unknown declaration.
When built, there is one edge for each direct call. It is possible that
the reference will be later optimized out. The call-graph is built
conservatively in order to make conservative data flow analysis possible.
@ -65,7 +65,7 @@ The callgraph:
Each inlined call gets a unique corresponding clone node of the callee
and the data structure is updated while inlining is performed, so
the clones are eliminated and their callee edges redirected to the
caller.
caller.
Each edge has "inline_failed" field. When the field is set to NULL,
the call will be inlined. When it is non-NULL it contains a reason
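A minimal translation unit and the callgraph it induces under the description above (hypothetical code):

    static int add (int a, int b) { return a + b; }

    int
    caller (void)
    {
      return add (1, 2) + add (3, 4);
    }

    /* Nodes: caller, add.  Edges: two direct-call edges caller -> add,
       one per call site.  If both calls are inlined, inline_failed on
       those edges is NULL and add gets a clone node per inlined call.  */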
@ -477,15 +477,15 @@ cgraph_remove_node (struct cgraph_node *node)
struct cgraph_node *n;
/* Make the next clone be the master clone */
for (n = new_node; n; n = n->next_clone)
for (n = new_node; n; n = n->next_clone)
n->master_clone = new_node;
*slot = new_node;
node->next_clone->prev_clone = NULL;
}
else
{
htab_clear_slot (cgraph_hash, slot);
htab_clear_slot (cgraph_hash, slot);
kill_body = true;
}
}
@ -493,10 +493,10 @@ cgraph_remove_node (struct cgraph_node *node)
{
node->prev_clone->next_clone = node->next_clone;
if (node->next_clone)
node->next_clone->prev_clone = node->prev_clone;
node->next_clone->prev_clone = node->prev_clone;
}
/* While all the clones are removed after being processed, the function
/* While all the clones are removed after being processed, the function
itself is kept in the cgraph even after it is compiled. Check whether
we are done with this body and reclaim it proactively if this is the case.
*/
@ -551,7 +551,7 @@ struct cgraph_local_info *
cgraph_local_info (tree decl)
{
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
return &node->local;
@ -563,7 +563,7 @@ struct cgraph_global_info *
cgraph_global_info (tree decl)
{
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
node = cgraph_node (decl);
return &node->global;
@ -575,7 +575,7 @@ struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
if (decl != current_function_decl
@@ -599,7 +599,7 @@ cgraph_varpool_node_name (struct cgraph_varpool_node *node)
}
/* Names used to print out the availability enum. */
static const char * const availability_names[] =
{"unset", "not_available", "overwrittable", "available", "local"};
/* Dump given cgraph node. */
@@ -613,7 +613,7 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
cgraph_node_name (node->global.inlined_to),
node->global.inlined_to->uid);
if (cgraph_function_flags_ready)
fprintf (f, " availability:%s",
availability_names [cgraph_function_body_availability (node)]);
if (node->master_clone && node->master_clone->uid != node->uid)
fprintf (f, "(%i)", node->master_clone->uid);
@@ -753,7 +753,7 @@ cgraph_varpool_node (tree decl)
if (!cgraph_varpool_hash)
cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
eq_varpool_node, NULL);
key.decl = decl;
slot = (struct cgraph_varpool_node **)
htab_find_slot (cgraph_varpool_hash, &key, INSERT);
@@ -879,7 +879,7 @@ void
cgraph_varpool_finalize_decl (tree decl)
{
struct cgraph_varpool_node *node = cgraph_varpool_node (decl);
/* The first declaration of a variable that comes through this function
decides whether it is global (in C, has external linkage)
or local (in C, has internal linkage). So do nothing more
@@ -942,8 +942,8 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
struct cgraph_edge *new;
new = cgraph_create_edge (n, e->callee, call_stmt,
e->count * count_scale / REG_BR_PROB_BASE,
e->loop_nest + loop_nest);
new->inline_failed = e->inline_failed;
if (update_original)
@ -956,7 +956,7 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
}
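
The scaled-count expression above (e->count * count_scale / REG_BR_PROB_BASE) is plain integer fixed-point arithmetic: count_scale is a ratio expressed in units of REG_BR_PROB_BASE. A standalone sketch of the same computation; the base of 10000 and the sample numbers are illustrative assumptions, not values read from a real profile.

/* Fixed-point scaling of a gcov execution count, as done when an
   edge is cloned: the clone receives count * scale / base and the
   original can be decremented by the same amount.  */
#include <stdio.h>

#define REG_BR_PROB_BASE 10000   /* assumed base; illustrative only */

typedef long long gcov_type;

int
main (void)
{
  gcov_type edge_count = 600;   /* executions seen in the profile */
  int count_scale = 2500;       /* clone gets 25% of the profile */

  gcov_type clone_count = edge_count * count_scale / REG_BR_PROB_BASE;
  printf ("clone: %lld, remaining in original: %lld\n",
          clone_count, edge_count - clone_count);   /* 150 and 450 */
  return 0;
}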
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from the original node too.
When UPDATE_ORIGINAL is true, the counts are subtracted from the original
function's profile to reflect the fact that part of execution is handled
@@ -1018,13 +1018,13 @@ struct cgraph_node *
cgraph_master_clone (struct cgraph_node *n)
{
enum availability avail = cgraph_function_body_availability (n);
if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
return NULL;
if (!n->master_clone)
n->master_clone = cgraph_node (n->decl);
return n->master_clone;
}
@@ -1069,7 +1069,7 @@ cgraph_function_body_availability (struct cgraph_node *node)
document the requirement of both versions of function (extern
inline and offline) having same side effect characteristics as
good optimization is what this optimization is about. */
else if (!(*targetm.binds_local_p) (node->decl)
&& !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
avail = AVAIL_OVERWRITABLE;
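
The AVAIL_* values used above answer "how much may an optimizer assume about this function body", and availability_names from the earlier hunk indexes them for dumping. A self-contained sketch of that lookup; the enum is re-declared locally so the example compiles on its own, and its members are inferred from the names in the table rather than copied from GCC's headers.

/* Availability lattice and its printable names, mirroring the dump
   format "availability:%s" used above.  */
#include <stdio.h>

enum availability
{
  AVAIL_UNSET, AVAIL_NOT_AVAILABLE, AVAIL_OVERWRITABLE,
  AVAIL_AVAILABLE, AVAIL_LOCAL
};

static const char * const availability_names[] =
  { "unset", "not_available", "overwrittable", "available", "local" };

int
main (void)
{
  /* An overwritable body (e.g. one that does not bind locally) may be
     replaced at link time, so its side effects cannot be trusted.  */
  enum availability avail = AVAIL_OVERWRITABLE;

  printf ("availability:%s\n", availability_names[avail]);
  return 0;
}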

View File

@@ -133,13 +133,13 @@ struct cgraph_node GTY((chain_next ("%h.next"), chain_prev ("%h.previous")))
/* Pointer to a single unique cgraph node for this function. If the
function is to be output, this is the copy that will survive. */
struct cgraph_node *master_clone;
PTR GTY ((skip)) aux;
struct cgraph_local_info local;
struct cgraph_global_info global;
struct cgraph_rtl_info rtl;
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
/* Unique id of the node. */
@@ -262,7 +262,7 @@ void cgraph_remove_node (struct cgraph_node *);
void cgraph_node_remove_callees (struct cgraph_node *node);
struct cgraph_edge *cgraph_create_edge (struct cgraph_node *,
struct cgraph_node *,
tree, gcov_type, int);
struct cgraph_node *cgraph_node (tree);
struct cgraph_node *cgraph_node_for_asm (tree asmname);
struct cgraph_edge *cgraph_edge (struct cgraph_node *, tree);
@@ -271,8 +271,8 @@ struct cgraph_global_info *cgraph_global_info (tree);
struct cgraph_rtl_info *cgraph_rtl_info (tree);
const char * cgraph_node_name (struct cgraph_node *);
struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *,
struct cgraph_node *,
tree, gcov_type, int, bool);
struct cgraph_node * cgraph_clone_node (struct cgraph_node *, gcov_type,
int, bool);
@@ -312,7 +312,7 @@ void cgraph_build_static_cdtor (char which, tree body, int priority);
void cgraph_reset_static_var_maps (void);
void init_cgraph (void);
struct cgraph_node *cgraph_function_versioning (struct cgraph_node *,
VEC(cgraph_edge_p,heap)*,
varray_type);
void cgraph_analyze_function (struct cgraph_node *);
struct cgraph_node *save_inline_function_body (struct cgraph_node *);

View File

@@ -216,7 +216,7 @@ decide_is_function_needed (struct cgraph_node *node, tree decl)
return true;
/* Externally visible functions must be output. The exception is
COMDAT functions that must be output only when they are needed.
When not optimizing, also output the static functions. (see
PR25962), but don't do so for always_inline functions.
@@ -254,7 +254,7 @@ decide_is_function_needed (struct cgraph_node *node, tree decl)
|| (!node->local.disregard_inline_limits
/* When declared inline, defer even the uninlinable functions.
This allows them to be eliminated when unused. */
&& !DECL_DECLARED_INLINE_P (decl)
&& (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
return true;
@@ -280,7 +280,7 @@ cgraph_varpool_analyze_pending_decls (void)
if (DECL_INITIAL (decl))
{
visited_nodes = pointer_set_create ();
walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
@@ -318,7 +318,7 @@ cgraph_varpool_remove_unreferenced_decls (void)
&& TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
|| node->force_output
|| decide_is_variable_needed (node, decl)
/* ??? Cgraph does not yet rule the world with an iron hand,
and does not control the emission of debug information.
After a variable has its DECL_RTL set, we must assume that
it may be referenced by the debug information, and we can
@@ -394,7 +394,7 @@ cgraph_reset_node (struct cgraph_node *node)
/* If node->output is set, then this is a unit-at-a-time compilation
and we have already begun whole-unit analysis. This is *not*
testing for whether we've already emitted the function. That
case can be sort-of legitimately seen with real function
redefinition errors. I would argue that the front end should
never present us with such a case, but don't enforce that for now. */
gcc_assert (!node->output);
@@ -512,7 +512,7 @@ record_reference (tree *tp, int *walk_subtrees, void *data)
{
cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
if (lang_hooks.callgraph.analyze_expr)
return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
data);
}
break;
@@ -558,7 +558,7 @@ cgraph_create_edges (struct cgraph_node *node, tree body)
tree step;
visited_nodes = pointer_set_create ();
/* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (bb, this_cfun)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
@@ -578,7 +578,7 @@ cgraph_create_edges (struct cgraph_node *node, tree body)
walk_tree (&TREE_OPERAND (stmt, 0),
record_reference, node, visited_nodes);
}
else
walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
}
@@ -595,7 +595,7 @@ cgraph_create_edges (struct cgraph_node *node, tree body)
else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
}
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
@@ -742,7 +742,7 @@ verify_cgraph_node (struct cgraph_node *node)
error ("node not found in cgraph_hash");
error_found = true;
}
if (node->analyzed
&& DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
&& (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
@@ -844,7 +844,7 @@ cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
assemble_variable (decl, 0, 1, 0);
/* Local static variables are never seen by check_global_declarations
so we need to output debug info by hand. */
if (DECL_CONTEXT (decl)
&& (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
|| TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
&& errorcount == 0 && sorrycount == 0)
@@ -867,7 +867,7 @@ cgraph_varpool_assemble_pending_decls (void)
if (errorcount || sorrycount)
return false;
/* EH might mark decls as needed during expansion. This should be safe since
we don't create references to a new function, but it should not be used
elsewhere. */
@@ -1019,7 +1019,7 @@ cgraph_finalize_compilation_unit (void)
tree decl = node->decl;
if (node->local.finalized && !DECL_SAVED_TREE (decl))
cgraph_reset_node (node);
if (!node->reachable && DECL_SAVED_TREE (decl))
{
@@ -1053,7 +1053,7 @@ cgraph_mark_functions_to_output (void)
{
tree decl = node->decl;
struct cgraph_edge *e;
gcc_assert (!node->output);
for (e = node->callers; e; e = e->next_caller)
@@ -1085,7 +1085,7 @@ cgraph_mark_functions_to_output (void)
|| DECL_EXTERNAL (decl));
}
}
}
@@ -1283,7 +1283,7 @@ cgraph_output_in_order (void)
}
/* Mark visibility of all functions.
A local function is one whose calls can occur only in the current
compilation unit and all its calls are explicit, so we can change
its calling convention. We simply mark all static functions whose
@@ -1395,7 +1395,7 @@ cgraph_optimize (void)
}
process_pending_assemble_externals ();
/* The front end may output common variables after the unit has been finalized.
It is safe to deal with them here as they are always zero initialized. */
cgraph_varpool_analyze_pending_decls ();
@@ -1463,11 +1463,11 @@ cgraph_optimize (void)
for (node = cgraph_nodes; node; node = node->next)
if (node->analyzed
&& (node->global.inlined_to
|| DECL_SAVED_TREE (node->decl)))
{
error_found = true;
dump_cgraph_node (stderr, node);
}
if (error_found)
internal_error ("nodes with no released memory found");
}
@@ -1475,7 +1475,7 @@ cgraph_optimize (void)
}
/* Generate and emit a static constructor or destructor. WHICH must be
one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
GENERIC statements. */
void
@@ -1536,7 +1536,7 @@ cgraph_build_static_cdtor (char which, tree body, int priority)
}
else
cgraph_finalize_function (decl, 0);
if (targetm.have_ctors_dtors)
{
void (*fn) (rtx, int);
@@ -1555,7 +1555,7 @@ init_cgraph (void)
cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
/* The edges representing the callers of the NEW_VERSION node were
fixed by cgraph_function_versioning (); now the call_expr in their
respective tree code should be updated to call the NEW_VERSION.
@@ -1590,7 +1590,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
unsigned i;
gcc_assert (old_version);
new_version = cgraph_node (new_decl);
new_version->analyzed = true;
@@ -1617,7 +1617,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
next_callee = e->next_callee;
if (e->callee == old_version)
cgraph_redirect_edge_callee (e, new_version);
if (!next_callee)
break;
}
@@ -1632,7 +1632,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
}
/* Perform function versioning.
Function versioning includes copying of the tree and
a callgraph update (creating a new cgraph node and updating
its callees and callers).
@@ -1671,9 +1671,9 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
/* Update the new version's properties.
Make the new version visible only within this translation unit.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
DECL_EXTERNAL (new_version_node->decl) = 0;
DECL_ONE_ONLY (new_version_node->decl) = 0;

View File

@@ -161,7 +161,7 @@ struct head
enum pass {
PASS_FIRST, /* without constructors */
PASS_OBJ, /* individual objects */
PASS_LIB, /* looking for shared libraries */
PASS_SECOND /* with constructors linked in */
};
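
collect2 scans its inputs once before the constructor list exists and once after it has been linked in; enum pass above names those scans. A stand-alone sketch of how such a driver loop reads; scan_prog_file here is a tracing stub and the file name is made up, so this shows only the control shape, not collect2's real logic.

/* Two-pass scan driver: same file, different pass.  */
#include <stdio.h>

enum pass
{
  PASS_FIRST,   /* without constructors */
  PASS_OBJ,     /* individual objects */
  PASS_LIB,     /* looking for shared libraries */
  PASS_SECOND   /* with constructors linked in */
};

static void
scan_prog_file (const char *name, enum pass which_pass)
{
  /* The real routine reads the symbol table of NAME; we just trace.  */
  printf ("pass %d: scanning %s\n", (int) which_pass, name);
}

int
main (void)
{
  scan_prog_file ("a.out", PASS_FIRST);    /* find ctor/dtor symbols */
  scan_prog_file ("a.out", PASS_SECOND);   /* verify the final link */
  return 0;
}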
@@ -177,12 +177,12 @@ static int aixrtl_flag; /* true if -brtl */
int debug; /* true if -debug */
static int shared_obj; /* true if -shared */
static const char *c_file; /* <xxx>.c for constructor/destructor list. */
static const char *o_file; /* <xxx>.o for constructor/destructor list. */
#ifdef COLLECT_EXPORT_LIST
static const char *export_file; /* <xxx>.x for AIX export list. */
#endif
const char *ldout; /* File for ld stdout. */
const char *lderrout; /* File for ld stderr. */
@@ -192,7 +192,7 @@ static const char *nm_file_name; /* pathname of nm */
static const char *ldd_file_name; /* pathname of ldd (or equivalent) */
#endif
static const char *strip_file_name; /* pathname of strip */
const char *c_file_name; /* pathname of gcc */
static char *initname, *fininame; /* names of init and fini funcs */
static struct head constructors; /* list of constructors found */
@@ -1062,7 +1062,7 @@ main (int argc, char **argv)
explicitly puts an export list in command line */
case 'b':
if (arg[2] == 'E' || strncmp (&arg[2], "export", 6) == 0)
export_flag = 1;
else if (arg[2] == '6' && arg[3] == '4')
aix64_flag = 1;
else if (arg[2] == 'r' && arg[3] == 't' && arg[4] == 'l')
@@ -1078,7 +1078,7 @@ main (int argc, char **argv)
ld2--;
}
if (!strcmp (arg, "-dynamic-linker") && argv[1])
{
++argv;
*ld1++ = *ld2++ = *argv;
}
@@ -1095,7 +1095,7 @@ main (int argc, char **argv)
}
#ifdef COLLECT_EXPORT_LIST
{
/* Resolving full library name. */
const char *s = resolve_lib_name (arg+2);
/* Saving a full library name. */
@@ -1205,8 +1205,8 @@ main (int argc, char **argv)
else
{
/* Saving a full library name. */
add_to_list (&libs, arg);
}
#endif
}
}
@@ -1670,7 +1670,7 @@ sort_ids (struct head *head_ptr)
|| id->sequence > (*id_ptr)->sequence
/* Hack: do lexical compare, too.
|| (id->sequence == (*id_ptr)->sequence
&& strcmp (id->name, (*id_ptr)->name) > 0) */
)
{
id->next = *id_ptr;
@@ -2246,7 +2246,7 @@ scan_libraries (const char *prog_name)
*end = '\0';
if (access (name, R_OK) == 0)
add_to_list (&libraries, name);
else
fatal ("unable to open dynamic dependency '%s'", buf);
@@ -2296,20 +2296,20 @@ scan_libraries (const char *prog_name)
# if defined (C_WEAKEXT)
# define GCC_OK_SYMBOL(X) \
(((X).n_sclass == C_EXT || (X).n_sclass == C_WEAKEXT) && \
((X).n_scnum > N_UNDEF) && \
(aix64_flag \
|| (((X).n_type & N_TMASK) == (DT_NON << N_BTSHFT) \
|| ((X).n_type & N_TMASK) == (DT_FCN << N_BTSHFT))))
# define GCC_UNDEF_SYMBOL(X) \
(((X).n_sclass == C_EXT || (X).n_sclass == C_WEAKEXT) && \
((X).n_scnum == N_UNDEF))
# else
# define GCC_OK_SYMBOL(X) \
(((X).n_sclass == C_EXT) && \
((X).n_scnum > N_UNDEF) && \
(aix64_flag \
|| (((X).n_type & N_TMASK) == (DT_NON << N_BTSHFT) \
|| ((X).n_type & N_TMASK) == (DT_FCN << N_BTSHFT))))
# define GCC_UNDEF_SYMBOL(X) \
(((X).n_sclass == C_EXT) && ((X).n_scnum == N_UNDEF))
# endif
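
The GCC_OK_SYMBOL / GCC_UNDEF_SYMBOL macros above classify COFF symbol-table entries by storage class and section number. A mock demonstration of the same predicate style: the struct imitates the two syment fields the simplest variant touches, and the constants are invented stand-ins, not the real COFF values.

/* Mock COFF-style symbol classification.  */
#include <stdio.h>

#define C_EXT    2   /* external storage class (stand-in value) */
#define N_UNDEF  0   /* section number meaning "undefined" */

struct mock_syment
{
  int n_sclass;      /* storage class */
  int n_scnum;       /* section number */
};

#define MOCK_OK_SYMBOL(X)    ((X).n_sclass == C_EXT && (X).n_scnum > N_UNDEF)
#define MOCK_UNDEF_SYMBOL(X) ((X).n_sclass == C_EXT && (X).n_scnum == N_UNDEF)

int
main (void)
{
  struct mock_syment defined = { C_EXT, 1 };
  struct mock_syment undef   = { C_EXT, N_UNDEF };

  printf ("defined: ok=%d undef=%d\n",
          MOCK_OK_SYMBOL (defined), MOCK_UNDEF_SYMBOL (defined));
  printf ("undef:   ok=%d undef=%d\n",
          MOCK_OK_SYMBOL (undef), MOCK_UNDEF_SYMBOL (undef));
  return 0;
}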
@@ -2398,9 +2398,9 @@ scan_prog_file (const char *prog_name, enum pass which_pass)
{
#endif
/* Some platforms (e.g. OSF4) declare ldopen as taking a
non-const char * filename parameter, even though it will not
modify that string. So we must cast away const-ness here,
which will cause -Wcast-qual to burp. */
if ((ldptr = ldopen ((char *)prog_name, ldptr)) != NULL)
{
if (! MY_ISCOFF (HEADER (ldptr).f_magic))

View File

@@ -161,7 +161,7 @@ struct reg_stat {
(1) We do not want to reinitialize at each label.
(2) It is useful, but not critical, to know the actual value assigned
to a register. Often just its form is helpful.
Therefore, we maintain the following fields:
@@ -249,7 +249,7 @@ struct reg_stat {
truncation if we know that value already contains a truncated
value. */
ENUM_BITFIELD(machine_mode) truncated_to_mode : 8;
};
static struct reg_stat *reg_stat;
@@ -791,7 +791,7 @@ combine_instructions (rtx f, unsigned int nregs)
FOR_EACH_BB (this_basic_block)
{
for (insn = BB_HEAD (this_basic_block);
insn != NEXT_INSN (BB_END (this_basic_block));
insn = next ? next : NEXT_INSN (insn))
{
next = 0;
@@ -1040,7 +1040,7 @@ set_nonzero_bits_and_sign_copies (rtx x, rtx set,
/* If this register is undefined at the start of the file, we can't
say what its contents were. */
&& ! REGNO_REG_SET_P
(ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
&& GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
{
if (set == 0 || GET_CODE (set) == CLOBBER)
@@ -1327,7 +1327,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
{
/* Don't substitute for a register intended as a clobberable
operand. */
rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
if (rtx_equal_p (reg, dest))
@@ -1354,10 +1354,10 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
{
/* Make sure succ doesn't contain a volatile reference. */
if (succ != 0 && volatile_refs_p (PATTERN (succ)))
return 0;
for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
return 0;
}
@@ -1428,12 +1428,12 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
Consider:
(set (reg:DI 101) (reg:DI 100))
(set (subreg:SI (reg:DI 101) 0) <foo>)
This is NOT equivalent to:
(parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
(set (reg:DI 101) (reg:DI 100))])
Not only does this modify 100 (in which case it might still be valid
@@ -2073,7 +2073,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
make up a dummy I1 that is
(set Y OP)
and change I2 to be
(set (reg:CC X) (compare:CC Y (const_int 0)))
(We can ignore any trailing CLOBBERs.)
@@ -3013,12 +3013,12 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
rtx ni2dest;
/* I3 now uses what used to be its destination and which is now
I2's destination. This requires us to do a few adjustments. */
PATTERN (i3) = newpat;
adjust_for_new_dest (i3);
/* We need a LOG_LINK from I3 to I2. But we used to have one,
so we still will.
However, some later insn might be using I2's dest and have
a LOG_LINK pointing at I3. We must remove this link.
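
Removing such a stale link is the classic delete-from-a-singly-linked-list walk. A generic sketch with toy types; combine's real LOG_LINKs are rtx list nodes, so nothing below is GCC code, only the shape of the operation.

/* Unlink the first list entry whose target matches.  */
#include <stdio.h>
#include <stdlib.h>

struct link { int target; struct link *next; };

static void
remove_link (struct link **head, int target)
{
  struct link **p;
  for (p = head; *p; p = &(*p)->next)
    if ((*p)->target == target)
      {
        struct link *dead = *p;
        *p = dead->next;   /* splice the entry out */
        free (dead);
        return;
      }
}

int
main (void)
{
  struct link *head = NULL, *l;
  int i;

  for (i = 1; i <= 3; i++)
    {
      l = malloc (sizeof *l);
      l->target = i;
      l->next = head;
      head = l;
    }
  remove_link (&head, 2);
  for (l = head; l; l = l->next)
    printf ("link -> %d\n", l->target);   /* prints 3, then 1 */
  return 0;
}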
@@ -3431,7 +3431,6 @@ undo_commit (void)
}
undobuf.undos = 0;
}
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
where we have an arithmetic expression and return that point. LOC will
@@ -3604,7 +3603,7 @@ find_split_point (rtx *loc, rtx insn)
rtx negmask = gen_int_mode (~(mask << pos), mode);
SUBST (SET_SRC (x),
simplify_gen_binary (IOR, mode,
simplify_gen_binary (AND, mode,
dest, negmask),
or_mask));
}
@@ -4157,7 +4156,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
&& ! (GET_CODE (XEXP (x, 1)) == SUBREG
&& OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
|| (UNARY_P (x)
&& (!OBJECT_P (XEXP (x, 0))
&& ! (GET_CODE (XEXP (x, 0)) == SUBREG
&& OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
{
@@ -4201,7 +4200,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
&& ((reversed = reversed_comparison_code_parts
(cond_code, cond, cop1, NULL))
!= UNKNOWN))
x = simplify_gen_relational (reversed, mode, VOIDmode,
cond, cop1);
@@ -4220,7 +4219,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
&& true_rtx == const0_rtx
&& ((reversed = reversed_comparison_code_parts
(cond_code, cond, cop1, NULL))
!= UNKNOWN))
x = simplify_gen_unary (NEG, mode,
simplify_gen_relational (reversed,
mode, VOIDmode,
@@ -4364,7 +4363,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
return gen_lowpart (mode, SUBREG_REG (x));
if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
break;
{
rtx temp;
temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
@@ -5186,13 +5185,13 @@ simplify_set (rtx x)
tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
op0, op1);
if (!tmp)
new_code = old_code;
else if (!CONSTANT_P (tmp))
{
new_code = GET_CODE (tmp);
op0 = XEXP (tmp, 0);
op1 = XEXP (tmp, 1);
}
else
{
rtx pat = PATTERN (other_insn);
@@ -5317,7 +5316,7 @@ simplify_set (rtx x)
{
SUBST(SET_SRC (x), op0);
src = SET_SRC (x);
}
else
{
/* Otherwise, update the COMPARE if needed. */
@@ -5353,7 +5352,7 @@ simplify_set (rtx x)
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
&& (GET_MODE_SIZE (GET_MODE (src))
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
&& ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
@@ -5682,8 +5681,8 @@ expand_compound_operation (rtx x)
if (GET_CODE (x) == ZERO_EXTEND)
{
/* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
know that the last value didn't have any inappropriate bits
set. */
if (GET_CODE (XEXP (x, 0)) == TRUNCATE
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
&& GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
@@ -5701,9 +5700,9 @@ expand_compound_operation (rtx x)
return SUBREG_REG (XEXP (x, 0));
/* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
is a comparison and STORE_FLAG_VALUE permits. This is like
the first case, but it works even when GET_MODE (x) is larger
than HOST_WIDE_INT. */
if (GET_CODE (XEXP (x, 0)) == TRUNCATE
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
&& COMPARISON_P (XEXP (XEXP (x, 0), 0))
@@ -6168,7 +6167,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
else
{
/* Be careful not to go beyond the extracted object and maintain the
natural alignment of the memory. */
wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
> GET_MODE_BITSIZE (wanted_inner_mode))
@@ -6599,12 +6598,12 @@ make_compound_operation (rtx x, enum rtx_code in_code)
{
rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
0);
/* If we have something other than a SUBREG, we might have
done an expansion, so rerun ourselves. */
if (GET_CODE (newer) != SUBREG)
newer = make_compound_operation (newer, in_code);
return newer;
}
@@ -6733,9 +6732,9 @@ canon_reg_for_combine (rtx x, rtx reg)
fmt = GET_RTX_FORMAT (code);
copied = false;
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
if (fmt[i] == 'e')
{
rtx op = canon_reg_for_combine (XEXP (x, i), reg);
if (op != XEXP (x, i))
{
if (!copied)
@@ -6744,15 +6743,15 @@ canon_reg_for_combine (rtx x, rtx reg)
x = copy_rtx (x);
}
XEXP (x, i) = op;
}
}
else if (fmt[i] == 'E')
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
{
rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
if (op != XVECEXP (x, i, j))
{
if (!copied)
{
@@ -6760,7 +6759,7 @@ canon_reg_for_combine (rtx x, rtx reg)
x = copy_rtx (x);
}
XVECEXP (x, i, j) = op;
}
}
}
@@ -6856,7 +6855,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
if (GET_CODE (x) == CONST_INT)
{
if (SCALAR_INT_MODE_P (mode))
return gen_int_mode (INTVAL (x) & mask, mode);
else
{
x = GEN_INT (INTVAL (x) & mask);
@@ -7391,7 +7390,7 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
*ptrue = simplify_gen_relational (code, mode, VOIDmode,
true0, true1);
*pfalse = simplify_gen_relational (code, mode, VOIDmode,
false0, false1);
}
else
{
@@ -7668,7 +7667,7 @@ known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
if (XEXP (x, 0) != r)
{
/* We must simplify the zero_extend here, before we lose
track of the original inner_mode. */
new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
r, inner_mode);
if (new)
@@ -8033,7 +8032,7 @@ apply_distributive_law (rtx x)
expanding a bit field assignment. When we apply the distributive
law to this, we get (ior (and (A (not B))) (and (B (not B)))),
which then simplifies to (and (A (not B))).
Note that no checks happen on the validity of applying the inverse
distributive law. This is pointless since we can do it in the
few places where this routine is called.
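
The simplification quoted above hinges on (B AND (NOT B)) being identically zero, so (A AND (NOT B)) OR (B AND (NOT B)) folds back to (A AND (NOT B)). A quick standalone check of that identity, exhaustively over 8-bit values:

/* Exhaustive check: (a & ~b) | (b & ~b) == (a & ~b).  */
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  unsigned a, b;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      assert (((a & ~b) | (b & ~b)) == (a & ~b));
  printf ("identity holds for all 8-bit operand pairs\n");
  return 0;
}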
@@ -8247,13 +8246,13 @@ reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
if (reg_stat[REGNO (x)].last_set_value != 0
&& (reg_stat[REGNO (x)].last_set_mode == mode
|| (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
&& GET_MODE_CLASS (mode) == MODE_INT))
&& (reg_stat[REGNO (x)].last_set_label == label_tick
|| (REGNO (x) >= FIRST_PSEUDO_REGISTER
&& REG_N_SETS (REGNO (x)) == 1
&& ! REGNO_REG_SET_P
(ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
REGNO (x))))
&& INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
{
@@ -8267,13 +8266,13 @@ reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
{
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
/* If X is narrower than MODE and TEM is a non-negative
constant that would appear negative in the mode of X,
sign-extend it for use in reg_nonzero_bits because some
machines (maybe most) will actually do the sign-extension
and this is the conservative approach.
??? For 2.5, try to tighten up the MD files in this regard
instead of this kludge. */
if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
&& GET_CODE (tem) == CONST_INT
@@ -8292,8 +8291,8 @@ reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;
if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
/* We don't know anything about the upper bits. */
mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
*nonzero &= mask;
}
@@ -8318,10 +8317,10 @@ reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
if (reg_stat[REGNO (x)].last_set_value != 0
&& reg_stat[REGNO (x)].last_set_mode == mode
&& (reg_stat[REGNO (x)].last_set_label == label_tick
|| (REGNO (x) >= FIRST_PSEUDO_REGISTER
&& REG_N_SETS (REGNO (x)) == 1
&& ! REGNO_REG_SET_P
(ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
REGNO (x))))
&& INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
{
@@ -8336,7 +8335,7 @@ reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
&& GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
*result = reg_stat[REGNO (x)].sign_bit_copies;
return NULL;
}
@@ -8940,8 +8939,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
shift_mode))
&& (new = simplify_const_binary_operation (code, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
INTVAL (new), result_mode, &complement_p))
@@ -8956,7 +8955,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
for some (ashiftrt (xor)). */
if (GET_CODE (XEXP (varop, 1)) == CONST_INT
&& !(code == ASHIFTRT && GET_CODE (varop) == XOR
&& 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
shift_mode)))
{
rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
@ -8969,7 +8968,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
varop = apply_distributive_law (varop);
count = 0;
continue;
}
break;
@@ -9068,8 +9067,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if (code == ASHIFT
&& GET_CODE (XEXP (varop, 1)) == CONST_INT
&& (new = simplify_const_binary_operation (ASHIFT, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, PLUS,
INTVAL (new), result_mode, &complement_p))
@@ -9087,8 +9086,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& GET_CODE (XEXP (varop, 1)) == CONST_INT
&& mode_signbit_p (result_mode, XEXP (varop, 1))
&& (new = simplify_const_binary_operation (code, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, XOR,
INTVAL (new), result_mode, &complement_p))
@@ -10025,7 +10024,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
&& ! unsigned_comparison_p
&& (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
&& ((unsigned HOST_WIDE_INT) const_op
< (((unsigned HOST_WIDE_INT) 1
<< (GET_MODE_BITSIZE (mode) - 1))))
&& cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
@@ -10066,7 +10065,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
if ((c1 > 0
&& (unsigned HOST_WIDE_INT) c1
< (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
&& (equality_comparison_p || unsigned_comparison_p)
/* (A - C1) zero-extends if it is positive and sign-extends
@@ -10089,7 +10088,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
op0 = SUBREG_REG (op0);
continue;
}
}
/* If the inner mode is narrower and we are extracting the low part,
@@ -10197,7 +10196,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
case UNEQ: case LTGT:
case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
case UNORDERED: case ORDERED:
/* We can't do anything if OP0 is a condition code value, rather
than an actual data value. */
if (const_op != 0
@@ -10259,8 +10258,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
op0 = simplify_and_const_int
(NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
XEXP (op0, 1),
XEXP (XEXP (op0, 0), 1)),
(HOST_WIDE_INT) 1);
continue;
}
@@ -10606,7 +10605,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
/* For paradoxical subregs, allow case 1 as above. Case 3 isn't
implemented. */
if (REG_P (SUBREG_REG (op0)))
{
op0 = SUBREG_REG (op0);
op1 = gen_lowpart (GET_MODE (op0), op1);
@@ -11111,7 +11110,7 @@ static void
record_truncated_value (rtx x)
{
enum machine_mode truncated_mode;
if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
{
enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
@@ -12017,7 +12016,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
if (place && JUMP_P (place))
{
rtx label = JUMP_LABEL (place);
if (!label)
JUMP_LABEL (place) = XEXP (note, 0);
else
@@ -12031,7 +12030,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
if (place2 && JUMP_P (place2))
{
rtx label = JUMP_LABEL (place2);
if (!label)
JUMP_LABEL (place2) = XEXP (note, 0);
else

View File

@@ -8,12 +8,12 @@
; the terms of the GNU General Public License as published by the Free
; Software Foundation; either version 2, or (at your option) any later
; version.
;
; GCC is distributed in the hope that it will be useful, but WITHOUT ANY
; WARRANTY; without even the implied warranty of MERCHANTABILITY or
; FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
; for more details.
;
; You should have received a copy of the GNU General Public License
; along with GCC; see the file COPYING. If not, write to the Free
; Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
@@ -786,7 +786,7 @@ Common Report Var(flag_sched_stalled_insns)
Allow premature scheduling of queued insns
fsched-stalled-insns=
Common RejectNegative Joined UInteger
-fsched-stalled-insns=<number> Set number of queued insns that can be prematurely scheduled
; sched_stalled_insns_dep controls how many recently scheduled cycles will
@@ -830,7 +830,7 @@ Common Report Var(flag_split_ivs_in_unroller) Init(1)
Split lifetimes of induction variables when loops are unrolled
fvariable-expansion-in-unroller
Common Report Var(flag_variable_expansion_in_unroller)
Apply variable expansion when loops are unrolled
; Emit code to probe the stack, to help detect stack overflow; also