calls.c: Fix comment formatting.

	* calls.c: Fix comment formatting.
	* cfgloopanal.c: Likewise.
	* cfgloopmanip.c: Likewise.
	* combine.c: Likewise.
	* dwarf2out.c: Likewise.
	* ggc-common.c: Likewise.
	* langhooks.c: Likewise.
	* loop-unroll.c: Likewise.
	* loop.c: Likewise.
	* ra-build.c: Likewise.
	* sbitmap.c: Likewise.
	* toplev.c: Likewise.

From-SVN: r63966
Commit 3dc575ffdb (parent 89d11511b0)
Authored and committed by Kazu Hirata, 2003-03-08 01:38:27 +00:00
13 changed files with 38 additions and 23 deletions
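
Each code hunk below replaces a single comment line with a re-formatted version. As a rough sketch of the convention involved (assuming the GNU-style rule GCC follows: a comment's last sentence ends with a period and two spaces before the closing */, and continuation lines are indented to sit under the comment text), here is a hypothetical before/after fragment; none of the names in it come from this commit.

#include <stdio.h>

/* Before the fix: one space before the close, and a continuation line
that does not line up with the text above it. */
static int old_style;

/* After the fix: the sentence ends with a period and two spaces, and the
   continuation line is indented under the comment text.  */
static int new_style;

int
main (void)
{
  /* Use both variables so the example compiles quietly.  */
  printf ("%d %d\n", old_style, new_style);
  return 0;
}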

ChangeLog

@@ -1,3 +1,18 @@
+2003-03-07  Kazu Hirata  <kazu@cs.umass.edu>
+
+	* calls.c: Fix comment formatting.
+	* cfgloopanal.c: Likewise.
+	* cfgloopmanip.c: Likewise.
+	* combine.c: Likewise.
+	* dwarf2out.c: Likewise.
+	* ggc-common.c: Likewise.
+	* langhooks.c: Likewise.
+	* loop-unroll.c: Likewise.
+	* loop.c: Likewise.
+	* ra-build.c: Likewise.
+	* sbitmap.c: Likewise.
+	* toplev.c: Likewise.
+
2003-03-07  James E Wilson  <wilson@tuliptree.org>

	* config/sh/sh.h (HARD_REGNO_NREGS): Round up the XD register count.

calls.c

@@ -2572,7 +2572,7 @@ expand_call (exp, target, ignore)
if (try_tail_recursion)
actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
}
-/* Do the same for the function address if it is an expression. */
+/* Do the same for the function address if it is an expression. */
if (!fndecl)
addr = fix_unsafe_tree (addr);
/* Expanding one of those dangerous arguments could have added

cfgloopanal.c

@@ -214,7 +214,7 @@ invariant_rtx_wrto_regs_p_helper (expr, invariant_regs)
}
}
-/* Checks that EXPR is invariant provided that INVARIANT_REGS are invariant. */
+/* Checks that EXPR is invariant provided that INVARIANT_REGS are invariant. */
static bool
invariant_rtx_wrto_regs_p (expr, invariant_regs)
rtx expr;
@@ -278,7 +278,7 @@ simple_condition_p (loop, condition, invariant_regs, desc)
return true;
}
-/* Check the other operand. */
+/* Check the other operand. */
if (!invariant_rtx_wrto_regs_p (op1, invariant_regs))
return false;
if (!REG_P (op0))
@@ -494,7 +494,7 @@ constant_iterations (desc, niter, may_be_zero)
These cases needs to be either cared by copying the loop test in the front
of loop or keeping the test in first iteration of loop.
-When INIT/LIM are set, they are used instead of var/lim of DESC. */
+When INIT/LIM are set, they are used instead of var/lim of DESC. */
rtx
count_loop_iterations (desc, init, lim)
struct loop_desc *desc;
@@ -578,7 +578,7 @@ count_loop_iterations (desc, init, lim)
if (stride != const1_rtx)
{
/* Number of iterations is now (EXP + STRIDE - 1 / STRIDE),
-but we need to take care for overflows. */
+but we need to take care for overflows. */
mod = simplify_gen_binary (UMOD, GET_MODE (desc->var), exp, stride);
@@ -719,7 +719,7 @@ simple_loop_exit_p (loops, loop, exit_edge, invariant_regs, single_set_regs, des
desc->var_alts = variable_initial_values (e, desc->var);
desc->lim_alts = variable_initial_values (e, desc->lim);
-/* Number of iterations. */
+/* Number of iterations. */
if (!count_loop_iterations (desc, NULL, NULL))
return false;
desc->const_iter =

cfgloopmanip.c

@@ -875,7 +875,7 @@ loop_delete_branch_edge (e, really_delete)
else
{
/* Cannot happen -- we are using this only to remove an edge
-from branch. */
+from branch. */
abort ();
}
@@ -1411,7 +1411,7 @@ create_preheader (loop, dom, flags)
add_to_dominance_info (dom, fallthru->dest);
-/* Redirect edges. */
+/* Redirect edges. */
for (e = dummy->pred; e; e = e->pred_next)
{
src = e->src;

combine.c

@@ -4193,7 +4193,7 @@ combine_simplify_rtx (x, op0_mode, last, in_dest)
= (float_truncate:SF foo:DF).
(float_truncate:DF (float_extend:XF foo:SF))
-= (float_extend:SF foo:DF). */
+= (float_extend:SF foo:DF). */
if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
&& flag_unsafe_math_optimizations)
|| GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)

dwarf2out.c

@@ -12755,7 +12755,7 @@ output_indirect_string (h, v)
/* Clear the marks for a die and its children.
-Be cool if the mark isn't set. */
+Be cool if the mark isn't set. */
static void
prune_unmark_dies (die)
@@ -12829,7 +12829,7 @@ prune_unused_types_mark (die, dokids)
for (c = die->die_child; c; c = c->die_sib)
{
/* If this is an array type, we need to make sure our
-kids get marked, even if they're types. */
+kids get marked, even if they're types. */
if (die->die_tag == DW_TAG_array_type)
prune_unused_types_mark (c, 1);
else

ggc-common.c

@@ -694,7 +694,7 @@ ggc_min_heapsize_heuristic()
/* Adjust for rlimits. */
min_heap_kbytes = ggc_rlimit_bound (min_heap_kbytes);
-min_heap_kbytes /= 1024; /* convert to Kbytes. */
+min_heap_kbytes /= 1024; /* convert to Kbytes. */
/* The heuristic is RAM/8, with a lower bound of 4M and an upper
bound of 128M (when RAM >= 1GB). */

langhooks.c

@@ -450,7 +450,7 @@ lhd_expr_size (exp)
}
/* lang_hooks.decls.final_write_globals: perform final processing on
-global variables. */
+global variables. */
void
write_global_declarations ()
{

loop-unroll.c

@@ -346,7 +346,7 @@ decide_peel_completely (loops, loop, flags)
return;
}
-/* npeel = number of iterations to peel. */
+/* npeel = number of iterations to peel. */
npeel = PARAM_VALUE (PARAM_MAX_COMPLETELY_PEELED_INSNS) / loop->ninsns;
if (npeel > (unsigned) PARAM_VALUE (PARAM_MAX_COMPLETELY_PEEL_TIMES))
npeel = PARAM_VALUE (PARAM_MAX_COMPLETELY_PEEL_TIMES);
@@ -974,7 +974,7 @@ decide_peel_simple (loops, loop, flags)
if (rtl_dump_file)
fprintf (rtl_dump_file, ";; Considering simply peeling loop\n");
-/* npeel = number of iterations to peel. */
+/* npeel = number of iterations to peel. */
npeel = PARAM_VALUE (PARAM_MAX_PEELED_INSNS) / loop->ninsns;
if (npeel > (unsigned) PARAM_VALUE (PARAM_MAX_PEEL_TIMES))
npeel = PARAM_VALUE (PARAM_MAX_PEEL_TIMES);

loop.c

@@ -225,7 +225,7 @@ struct movable
unsigned int is_equiv : 1; /* 1 means a REG_EQUIV is present on INSN. */
unsigned int insert_temp : 1; /* 1 means we copy to a new pseudo and replace
the original insn with a copy from that
-pseudo, rather than deleting it. */
+pseudo, rather than deleting it. */
struct movable *match; /* First entry for same value */
struct movable *forces; /* An insn that must be moved if this is */
struct movable *next;
@@ -846,7 +846,7 @@ scan_loop (loop, flags)
;
/* Don't move the source and add a reg-to-reg copy with -Os
(this certainly increases size) or if the source is
-already a reg (the motion will gain nothing). */
+already a reg (the motion will gain nothing). */
else if (insert_temp
&& (optimize_size || GET_CODE (SET_SRC (set)) == REG
|| (CONSTANT_P (SET_SRC (set))
@@ -1977,7 +1977,7 @@ move_movables (loop, movables, threshold, insn_count)
if (m->insert_temp)
{
/* Replace the original insn with a move from
-our newly created temp. */
+our newly created temp. */
start_sequence ();
emit_move_insn (m->set_dest, newreg);
seq = get_insns ();
@@ -2221,7 +2221,7 @@ move_movables (loop, movables, threshold, insn_count)
{
rtx seq;
/* Replace the original insn with a move from
-our newly created temp. */
+our newly created temp. */
start_sequence ();
emit_move_insn (m->set_dest, newreg);
seq = get_insns ();

ra-build.c

@@ -1729,7 +1729,7 @@ compare_and_free_webs (link)
&& (web1->num_uses != web2->num_uses
|| web1->num_defs != web2->num_defs))
/* Similarly, if the framepointer was unreferenced originally
-but we added spills, these fields may not match. */
+but we added spills, these fields may not match. */
|| (web1->type != PRECOLORED
&& web1->crosses_call != web2->crosses_call)
|| (web1->type != PRECOLORED

sbitmap.c

@@ -94,7 +94,7 @@ sbitmap_resize (bmap, n_elms, def)
}
else if (n_elms < bmap->n_bits)
{
-/* Clear the surplus bits in the last word. */
+/* Clear the surplus bits in the last word. */
last_bit = n_elms % SBITMAP_ELT_BITS;
if (last_bit)
bmap->elms[size - 1]

toplev.c

@@ -1967,7 +1967,7 @@ wrapup_global_declarations (vec, len)
decl = vec[i];
/* We're not deferring this any longer. Assignment is
-conditional to avoid needlessly dirtying PCH pages. */
+conditional to avoid needlessly dirtying PCH pages. */
if (DECL_DEFER_OUTPUT (decl) != 0)
DECL_DEFER_OUTPUT (decl) = 0;
@@ -3542,7 +3542,7 @@ rest_of_compilation (decl)
open_dump_file (DFI_bbro, decl);
/* Last attempt to optimize CFG, as scheduling, peepholing and insn
-splitting possibly introduced more crossjumping opportunities. */
+splitting possibly introduced more crossjumping opportunities. */
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
| (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));