alias.c: Fix comment typos.
* alias.c: Fix comment typos. * builtins.c: Likewise. * cfg.c: Likewise. * df.c: Likewise. * dominance.c: Likewise. * dwarf2out.c: Likewise. * emit-rtl.c: Likewise. * expr.c: Likewise. * final.c: Likewise. * fold-const.c: Likewise. * gcse.c: Likewise. * genattrtab.c: Likewise. * genrecog.c: Likewise. * gensupport.c: Likewise. * ggc-zone.c: Likewise. * integrate.c: Likewise. * local-alloc.c: Likewise. * loop.c: Likewise. * recog.c: Likewise. * regmove.c: Likewise. * reg-stack.c: Likewise. * reorg.c: Likewise. * rtlanal.c: Likewise. * rtl.h: Likewise. * sched-ebb.c: Likewise. * simplify-rtx.c: Likewise. * toplev.c: Likewise. * varasm.c: Likewise. From-SVN: r75475
This commit is contained in:
parent
95ea367d2d
commit
5d3cc25206
|
@@ -1,3 +1,34 @@
|
|||
2004-01-06 Kazu Hirata <kazu@cs.umass.edu>
|
||||
|
||||
* alias.c: Fix comment typos.
|
||||
* builtins.c: Likewise.
|
||||
* cfg.c: Likewise.
|
||||
* df.c: Likewise.
|
||||
* dominance.c: Likewise.
|
||||
* dwarf2out.c: Likewise.
|
||||
* emit-rtl.c: Likewise.
|
||||
* expr.c: Likewise.
|
||||
* final.c: Likewise.
|
||||
* fold-const.c: Likewise.
|
||||
* gcse.c: Likewise.
|
||||
* genattrtab.c: Likewise.
|
||||
* genrecog.c: Likewise.
|
||||
* gensupport.c: Likewise.
|
||||
* ggc-zone.c: Likewise.
|
||||
* integrate.c: Likewise.
|
||||
* local-alloc.c: Likewise.
|
||||
* loop.c: Likewise.
|
||||
* recog.c: Likewise.
|
||||
* regmove.c: Likewise.
|
||||
* reg-stack.c: Likewise.
|
||||
* reorg.c: Likewise.
|
||||
* rtlanal.c: Likewise.
|
||||
* rtl.h: Likewise.
|
||||
* sched-ebb.c: Likewise.
|
||||
* simplify-rtx.c: Likewise.
|
||||
* toplev.c: Likewise.
|
||||
* varasm.c: Likewise.
|
||||
|
||||
2004-01-06 Kazu Hirata <kazu@cs.umass.edu>
|
||||
|
||||
* doc/install.texi: Fix typos.
|
||||
|
|
|
@@ -2386,7 +2386,7 @@ nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
@@ -2480,7 +2480,7 @@ nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
@@ -2556,7 +2556,7 @@ nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
|
|
@@ -4354,7 +4354,7 @@ expand_builtin_fputs (tree arglist, rtx target, bool unlocked)
|
|||
break;
|
||||
}
|
||||
}
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
case 1: /* length is greater than 1, call fwrite. */
|
||||
{
|
||||
tree string_arg;
|
||||
|
|
|
@@ -334,7 +334,7 @@ cached_make_edge (sbitmap *edge_cache, basic_block src, basic_block dst, int fla
|
|||
if (flags == 0)
|
||||
return NULL;
|
||||
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case 0:
|
||||
for (e = src->succ; e; e = e->succ_next)
|
||||
if (e->dest == dst)
|
||||
|
|
2
gcc/df.c
2
gcc/df.c
|
@@ -1006,7 +1006,7 @@ df_uses_record (struct df *df, rtx *loc, enum df_ref_type ref_type,
|
|||
insn, DF_REF_READ_WRITE);
|
||||
break;
|
||||
}
|
||||
/* ... FALLTHRU ... */
|
||||
/* Fall through. */
|
||||
case REG:
|
||||
case PARALLEL:
|
||||
case PC:
|
||||
|
|
|
@@ -537,7 +537,7 @@ assign_dfs_numbers (struct et_node *node, int *num)
|
|||
node->dfs_num_out = (*num)++;
|
||||
}
|
||||
|
||||
/* Compute the data neccesary for fast resolving of dominator queries in a
|
||||
/* Compute the data necessary for fast resolving of dominator queries in a
|
||||
static dominator tree. */
|
||||
|
||||
static void
|
||||
|
@@ -659,7 +659,7 @@ set_immediate_dominator (enum cdi_direction dir, basic_block bb,
|
|||
dom_computed[dir] = DOM_NO_FAST_QUERY;
|
||||
}
|
||||
|
||||
/* Store all basic blocks immediatelly dominated by BB into BBS and return
|
||||
/* Store all basic blocks immediately dominated by BB into BBS and return
|
||||
their number. */
|
||||
int
|
||||
get_dominated_by (enum cdi_direction dir, basic_block bb, basic_block **bbs)
|
||||
|
|
|
@@ -8503,7 +8503,7 @@ loc_descriptor_from_tree (tree loc, int addressp)
|
|||
indirect_p = 1;
|
||||
break;
|
||||
}
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case PARM_DECL:
|
||||
{
|
||||
|
|
|
@@ -2876,7 +2876,7 @@ repeat:
|
|||
if (copied && len > 0)
|
||||
XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
|
||||
|
||||
/* Call recsusively on all inside the vector. */
|
||||
/* Call recursively on all inside the vector. */
|
||||
for (j = 0; j < len; j++)
|
||||
{
|
||||
if (last_ptr)
|
||||
|
|
|
@@ -5829,7 +5829,7 @@ safe_from_p (rtx x, tree exp, int top_p)
|
|||
case '<':
|
||||
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
|
||||
return 0;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case '1':
|
||||
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
|
||||
|
|
|
@@ -3779,7 +3779,7 @@ leaf_renumber_regs_insn (rtx in_rtx)
|
|||
calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
|
||||
Those routines may also be called from a higher level intercepted routine. So
|
||||
to prevent recording data for an inner call to one of these for an intercept,
|
||||
we maintain a intercept nesting counter (debug_nesting). We only save the
|
||||
we maintain an intercept nesting counter (debug_nesting). We only save the
|
||||
intercepted arguments if the nesting is 1. */
|
||||
int debug_nesting = 0;
|
||||
|
||||
|
|
|
@@ -8425,7 +8425,7 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
|
|||
case WITH_CLEANUP_EXPR: len = 2; break;
|
||||
default: break;
|
||||
}
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
case 'r':
|
||||
case '<':
|
||||
case '1':
|
||||
|
|
|
@@ -817,7 +817,7 @@ gcse_main (rtx f, FILE *file)
|
|||
partial redundancy elimination. */
|
||||
free_gcse_mem ();
|
||||
|
||||
/* It does not make sense to run code hoisting unless we optimizing
|
||||
/* It does not make sense to run code hoisting unless we are optimizing
|
||||
for code size -- it rarely makes programs faster, and can make
|
||||
them bigger if we did partial redundancy elimination (when optimizing
|
||||
for space, we use a classic gcse algorithm instead of partial
|
||||
|
|
|
@@ -1155,7 +1155,7 @@ check_attr_value (rtx exp, struct attr_desc *attr)
|
|||
have_error = 1;
|
||||
break;
|
||||
}
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case IOR:
|
||||
case AND:
|
||||
|
|
|
@@ -378,7 +378,7 @@ find_operand (rtx pattern, int n)
|
|||
case 'V':
|
||||
if (! XVEC (pattern, i))
|
||||
break;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case 'E':
|
||||
for (j = 0; j < XVECLEN (pattern, i); j++)
|
||||
|
@@ -429,7 +429,7 @@ find_matching_operand (rtx pattern, int n)
|
|||
case 'V':
|
||||
if (! XVEC (pattern, i))
|
||||
break;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case 'E':
|
||||
for (j = 0; j < XVECLEN (pattern, i); j++)
|
||||
|
@@ -812,7 +812,7 @@ add_to_sequence (rtx pattern, struct decision_head *last, const char *position,
|
|||
beyond the end of the vector. */
|
||||
test = new_decision_test (DT_veclen_ge, &place);
|
||||
test->u.veclen = XVECLEN (pattern, 2);
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case MATCH_OPERAND:
|
||||
case MATCH_SCRATCH:
|
||||
|
|
|
@@ -512,7 +512,7 @@ collect_insn_data (rtx pattern, int *palt, int *pmax)
|
|||
case MATCH_OPERAND:
|
||||
i = n_alternatives (XSTR (pattern, 2));
|
||||
*palt = (i > *palt ? i : *palt);
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case MATCH_OPERATOR:
|
||||
case MATCH_SCRATCH:
|
||||
|
@@ -540,7 +540,7 @@ collect_insn_data (rtx pattern, int *palt, int *pmax)
|
|||
case 'V':
|
||||
if (XVEC (pattern, i) == NULL)
|
||||
break;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case 'E':
|
||||
for (j = XVECLEN (pattern, i) - 1; j >= 0; --j)
|
||||
collect_insn_data (XVECEXP (pattern, i, j), palt, pmax);
|
||||
|
@@ -595,7 +595,7 @@ alter_predicate_for_insn (rtx pattern, int alt, int max_op, int lineno)
|
|||
XSTR (pattern, 2) = new_c;
|
||||
}
|
||||
}
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case MATCH_OPERATOR:
|
||||
case MATCH_SCRATCH:
|
||||
|
|
|
@@ -172,7 +172,7 @@ struct alloc_chunk {
|
|||
Similar with increasing max_free_bin_size without increasing num_free_bins.
|
||||
|
||||
After much histogramming of allocation sizes and time spent on gc,
|
||||
on a powerpc G4 7450 - 667 mhz, and an pentium 4 - 2.8ghz,
|
||||
on a PowerPC G4 7450 - 667 mhz, and a Pentium 4 - 2.8ghz,
|
||||
these were determined to be the optimal values. */
|
||||
#define NUM_FREE_BINS 64
|
||||
#define MAX_FREE_BIN_SIZE 256
|
||||
|
|
|
@@ -2090,7 +2090,7 @@ copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
|
|||
if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
|
||||
break;
|
||||
|
||||
/* ... FALLTHRU ... */
|
||||
/* Fall through. */
|
||||
case CODE_LABEL:
|
||||
LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
|
||||
= LABEL_PRESERVE_P (orig);
|
||||
|
|
|
@@ -538,7 +538,7 @@ equiv_init_varies_p (rtx x)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
@@ -603,7 +603,7 @@ equiv_init_movable_p (rtx x, int regno)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 0;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
@@ -2412,7 +2412,7 @@ requires_inout (const char *p)
|
|||
if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
|
||||
&& !EXTRA_ADDRESS_CONSTRAINT (c, p))
|
||||
break;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case 'p':
|
||||
case 'g': case 'r':
|
||||
reg_allowed = 1;
|
||||
|
|
|
@@ -2643,7 +2643,7 @@ prescan_loop (struct loop *loop)
|
|||
loop_info->has_multiple_exit_targets = 1;
|
||||
}
|
||||
}
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case INSN:
|
||||
if (volatile_refs_p (PATTERN (insn)))
|
||||
|
|
|
@@ -1707,7 +1707,7 @@ asm_operand_ok (rtx op, const char *constraint)
|
|||
|| (GET_CODE (op) == CONST_DOUBLE
|
||||
&& GET_MODE (op) == VOIDmode))
|
||||
break;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case 'i':
|
||||
if (CONSTANT_P (op)
|
||||
|
|
|
@@ -1769,7 +1769,7 @@ subst_stack_regs_pat (rtx insn, stack regstack, rtx pat)
|
|||
if (GET_CODE (pat_src) != UNSPEC
|
||||
|| XINT (pat_src, 1) != UNSPEC_FNSTSW)
|
||||
abort ();
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case UNSPEC_FNSTSW:
|
||||
/* Combined fcomp+fnstsw generated for doing well with
|
||||
|
|
|
@@ -375,7 +375,7 @@ static int perhaps_ends_bb_p (rtx insn)
|
|||
very conservative. */
|
||||
if (nonlocal_goto_handler_labels)
|
||||
return 1;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
default:
|
||||
return can_throw_internal (insn);
|
||||
}
|
||||
|
|
|
@@ -1621,7 +1621,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
|
|||
If we are not careful, this routine can take up a significant fraction
|
||||
of the total compilation time (4%), but only wins rarely. Hence we
|
||||
speed this routine up by making two passes. The first pass goes back
|
||||
until it hits a label and sees if it find an insn with an identical
|
||||
until it hits a label and sees if it finds an insn with an identical
|
||||
pattern. Only in this (relatively rare) event does it check for
|
||||
data conflicts.
|
||||
|
||||
|
|
|
@@ -1986,7 +1986,7 @@ struct cse_basic_block_data;
|
|||
N times that of a fast register-to-register instruction. */
|
||||
#define COSTS_N_INSNS(N) ((N) * 4)
|
||||
|
||||
/* Maximum cost of a rtl expression. This value has the special meaning
|
||||
/* Maximum cost of an rtl expression. This value has the special meaning
|
||||
not to use an rtx with this cost under any circumstances. */
|
||||
#define MAX_COST INT_MAX
|
||||
|
||||
|
|
|
@@ -99,7 +99,7 @@ rtx_unstable_p (rtx x)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
@@ -189,7 +189,7 @@ rtx_varies_p (rtx x, int for_alias)
|
|||
if (MEM_VOLATILE_P (x))
|
||||
return 1;
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
|
||||
default:
|
||||
break;
|
||||
|
|
|
@@ -445,7 +445,7 @@ add_deps_for_risky_insns (rtx head, rtx tail)
|
|||
prev = BB_END (bb);
|
||||
}
|
||||
}
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case TRAP_RISKY:
|
||||
case IRISKY:
|
||||
case PRISKY_CANDIDATE:
|
||||
|
|
|
@@ -3148,7 +3148,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
|
|||
pretend this is actually an integer. */
|
||||
innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
|
||||
|
||||
/* FALLTHROUGH */
|
||||
/* Fall through. */
|
||||
case CONST_INT:
|
||||
if (GET_CODE (op) == CONST_INT)
|
||||
val = INTVAL (op);
|
||||
|
|
|
@@ -2100,7 +2100,7 @@ rest_of_handle_stack_regs (tree decl, rtx insns)
|
|||
#if defined (HAVE_ATTR_length)
|
||||
/* If flow2 creates new instructions which need splitting
|
||||
and scheduling after reload is not done, they might not be
|
||||
splitten until final which doesn't allow splitting
|
||||
split until final which doesn't allow splitting
|
||||
if HAVE_ATTR_length. */
|
||||
#ifdef INSN_SCHEDULING
|
||||
if (optimize && !flag_schedule_insns_after_reload)
|
||||
|
|
|
@@ -2704,7 +2704,7 @@ decode_rtx_const (enum machine_mode mode, rtx x, struct rtx_const *value)
|
|||
break;
|
||||
case rvc_normal:
|
||||
value->un.du.exp = r->exp;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case rvc_nan:
|
||||
memcpy (value->un.du.sig, r->sig, sizeof (r->sig));
|
||||
break;
|
||||
|
@@ -2764,7 +2764,7 @@ decode_rtx_const (enum machine_mode mode, rtx x, struct rtx_const *value)
|
|||
break;
|
||||
case rvc_normal:
|
||||
d->exp = r->exp;
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
case rvc_nan:
|
||||
memcpy (d->sig, r->sig, sizeof (r->sig));
|
||||
break;
|
||||
|
@@ -3132,7 +3132,7 @@ output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
|
|||
|| GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
|
||||
break;
|
||||
tmp = XEXP (XEXP (x, 0), 0);
|
||||
/* FALLTHRU */
|
||||
/* Fall through. */
|
||||
|
||||
case LABEL_REF:
|
||||
tmp = XEXP (x, 0);
|
||||
|
|
Loading…
Reference in New Issue