c-common.c, [...]: Fix comment typos.

* c-common.c, calls.c, cfgcleanup.c, cgraph.c, cgraphunit.c,
	ddg.c, ddg.h, df.c, df.h, except.c, expr.c, flags.h,
	fold-const.c, gcc.c, gimplify.c, haifa-sched.c,
	modulo-sched.c, tree-inline.c, tree-into-ssa.c, tree-nested.c,
	tree-nrv.c, tree-ssa-ccp.c, tree-ssa-dom.c, tree-ssa-live.c,
	tree-ssa-loop.c, tree-ssa-pre.c, tree-tailcall.c, tree.h: Fix
	comment typos.  Follow spelling conventions.

From-SVN: r82439
This commit is contained in:
Kazu Hirata 2004-05-30 07:12:58 +00:00 committed by Kazu Hirata
parent b453c95fd3
commit 1ea7e6ad84
29 changed files with 67 additions and 57 deletions

View File

@@ -1,3 +1,13 @@
+2004-05-30 Kazu Hirata <kazu@cs.umass.edu>
+* c-common.c, calls.c, cfgcleanup.c, cgraph.c, cgraphunit.c,
+ddg.c, ddg.h, df.c, df.h, except.c, expr.c, flags.h,
+fold-const.c, gcc.c, gimplify.c, haifa-sched.c,
+modulo-sched.c, tree-inline.c, tree-into-ssa.c, tree-nested.c,
+tree-nrv.c, tree-ssa-ccp.c, tree-ssa-dom.c, tree-ssa-live.c,
+tree-ssa-loop.c, tree-ssa-pre.c, tree-tailcall.c, tree.h: Fix
+comment typos. Follow spelling conventions.
 2004-05-29 Geoffrey Keating <geoffk@apple.com>
 * gengtype-yacc.y: Add NESTED_PTR token.

View File

@@ -4215,7 +4215,7 @@ finish_label_address_expr (tree label)
 /* Hook used by expand_expr to expand language-specific tree codes. */
 /* The only things that should go here are bits needed to expand
-constant initalizers. Everything else should be handled by the
+constant initializers. Everything else should be handled by the
 gimplification routines. */
 rtx

View File

@@ -3313,7 +3313,7 @@ expand_call (tree exp, rtx target, int ignore)
 break;
 }
-/* If tail call production suceeded, we need to remove REG_EQUIV notes on
+/* If tail call production succeeded, we need to remove REG_EQUIV notes on
 arguments too, as argument area is now clobbered by the call. */
 if (tail_call_insns)
 {

View File

@@ -19,7 +19,7 @@ along with GCC; see the file COPYING. If not, write to the Free
 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 02111-1307, USA. */
-/* This file contains optimizer of the control flow. The main entrypoint is
+/* This file contains optimizer of the control flow. The main entry point is
 cleanup_cfg. Following optimizations are performed:
 - Unreachable blocks removal

View File

@@ -61,7 +61,7 @@ The callgraph:
 The function inlining information is decided in advance and maintained
 in the callgraph as so called inline plan.
 For each inlined call, the callee's node is cloned to represent the
-new function copy produced by inlininer.
+new function copy produced by inliner.
 Each inlined call gets a unique corresponding clone node of the callee
 and the data structure is updated while inlining is performed, so
 the clones are eliminated and their callee edges redirected to the

View File

@@ -1020,7 +1020,7 @@ cgraph_estimate_growth (struct cgraph_node *node)
 /* E is expected to be an edge being inlined. Clone destination node of
 the edge and redirect it to the new clone.
-DUPLICATE is used for bookeeping on whether we are actually creating new
+DUPLICATE is used for bookkeeping on whether we are actually creating new
 clones or re-using node originally representing out-of-line function call.
 */
 void
@@ -1191,7 +1191,7 @@ cgraph_recursive_inlining_p (struct cgraph_node *to,
 recursive = what->decl == to->global.inlined_to->decl;
 else
 recursive = what->decl == to->decl;
-/* Marking recursive function inlinine has sane semantic and thus we should
+/* Marking recursive function inline has sane semantic and thus we should
 not warn on it. */
 if (recursive && reason)
 *reason = (what->local.disregard_inline_limits

View File

@@ -80,7 +80,7 @@ mark_mem_use_1 (rtx *x, void *data)
 for_each_rtx (x, mark_mem_use, data);
 }
-/* Returns non-zero if INSN reads from memory. */
+/* Returns nonzero if INSN reads from memory. */
 static bool
 mem_read_insn_p (rtx insn)
 {
@@ -96,7 +96,7 @@ mark_mem_store (rtx loc, rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSE
 mem_ref_p = true;
 }
-/* Returns non-zero if INSN writes to memory. */
+/* Returns nonzero if INSN writes to memory. */
 static bool
 mem_write_insn_p (rtx insn)
 {
@@ -105,7 +105,7 @@ mem_write_insn_p (rtx insn)
 return mem_ref_p;
 }
-/* Returns non-zero if X has access to memory. */
+/* Returns nonzero if X has access to memory. */
 static bool
 rtx_mem_access_p (rtx x)
 {
@@ -138,7 +138,7 @@ rtx_mem_access_p (rtx x)
 return false;
 }
-/* Returns non-zero if INSN reads to or writes from memory. */
+/* Returns nonzero if INSN reads to or writes from memory. */
 static bool
 mem_access_insn_p (rtx insn)
 {
@@ -383,7 +383,7 @@ build_intra_loop_deps (ddg_ptr g)
 get_block_head_tail (g->bb->index, &head, &tail);
 sched_analyze (&tmp_deps, head, tail);
-/* Build intra-loop data dependecies using the schedular dependecy
+/* Build intra-loop data dependecies using the scheduler dependecy
 analysis. */
 for (i = 0; i < g->num_nodes; i++)
 {
@@ -980,7 +980,7 @@ find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
 /* Updates the counts of U_NODE's successors (that belong to NODES) to be
 at-least as large as the count of U_NODE plus the latency between them.
 Sets a bit in TMP for each successor whose count was changed (increased).
-Returns non-zero if any count was changed. */
+Returns nonzero if any count was changed. */
 static int
 update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
 {

View File

@@ -51,10 +51,10 @@ struct ddg_node
 /* The insn represented by the node. */
 rtx insn;
-/* A note preceeding INSN (or INSN itself), such that all insns linked
+/* A note preceding INSN (or INSN itself), such that all insns linked
 from FIRST_NOTE until INSN (inclusive of both) are moved together
 when reordering the insns. This takes care of notes that should
-continue to preceed INSN. */
+continue to precede INSN. */
 rtx first_note;
 /* Incoming and outgoing dependency edges. */

View File

@@ -55,7 +55,7 @@ Here's an example of using the dataflow routines.
 df_init simply creates a poor man's object (df) that needs to be
 passed to all the dataflow routines. df_finish destroys this
-object and frees up any allocated memory. DF_ALL says to analyse
+object and frees up any allocated memory. DF_ALL says to analyze
 everything.
 df_analyze performs the following:

View File

@@ -229,7 +229,7 @@ struct df_map
 #define DF_INSN_USES(DF, INSN) ((DF)->insns[INSN_UID (INSN)].uses)
-/* Functions to build and analyse dataflow information. */
+/* Functions to build and analyze dataflow information. */
 extern struct df *df_init (void);

View File

@@ -1187,7 +1187,7 @@ remove_unreachable_regions (rtx insns)
 case ERT_MUST_NOT_THROW:
 /* MUST_NOT_THROW regions are implementable solely in the
-runtime, but their existance continues to affect calls
+runtime, but their existence continues to affect calls
 within that region. Never delete them here. */
 kill_it = false;
 break;

View File

@@ -4387,7 +4387,7 @@ store_expr (tree exp, rtx target, int want_value)
 return target;
 }
-/* Examine CTOR. Discover how many scalar fields are set to non-zero
+/* Examine CTOR. Discover how many scalar fields are set to nonzero
 values and place it in *P_NZ_ELTS. Discover how many scalar fields
 are set to non-constant values and place it in *P_NC_ELTS. */

View File

@@ -424,7 +424,7 @@ extern int flag_gen_aux_info;
 extern int flag_shared_data;
-/* Controls the activiation of SMS modulo scheduling. */
+/* Controls the activation of SMS modulo scheduling. */
 extern int flag_modulo_sched;
 /* flag_schedule_insns means schedule insns within basic blocks (before

View File

@@ -2319,7 +2319,7 @@ truth_value_p (enum tree_code code)
 /* Return nonzero if two operands (typically of the same tree node)
 are necessarily equal. If either argument has side-effects this
-function returns zero. FLAGS modifies behaviour as follows:
+function returns zero. FLAGS modifies behavior as follows:
 If OEP_ONLY_CONST is set, only return nonzero for constants.
 This function tests whether the operands are indistinguishable;
@@ -5243,7 +5243,7 @@ fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
 }
 /* Subroutine of fold() that optimizes comparisons of a division by
-a non-zero integer constant against an integer constant, i.e.
+a nonzero integer constant against an integer constant, i.e.
 X/C1 op C2.
 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,

View File

@@ -887,9 +887,9 @@ struct compiler
 const char *cpp_spec; /* If non-NULL, substitute this spec
 for `%C', rather than the usual
 cpp_spec. */
-const int combinable; /* If non-zero, compiler can deal with
+const int combinable; /* If nonzero, compiler can deal with
 multiple source files at once (IMA). */
-const int needs_preprocessing; /* If non-zero, source files need to
+const int needs_preprocessing; /* If nonzero, source files need to
 be run through a preprocessor. */
 };

View File

@@ -282,7 +282,7 @@ create_artificial_label (void)
 return lab;
 }
-/* Create a new temporary name with PREFIX. Returns an indentifier. */
+/* Create a new temporary name with PREFIX. Returns an identifier. */
 static GTY(()) unsigned int tmp_var_id_num;
@@ -797,7 +797,7 @@ voidify_wrapper_expr (tree wrapper)
 }
 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
-temporary through that they comunicate. */
+a temporary through which they communicate. */
 static void
 build_stack_save_restore (tree *save, tree *restore)
@@ -1310,7 +1310,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
 /* ??? This bit ought not be needed. For any element not present
 in the initializer, we should simply set them to zero. Except
 we'd need to *find* the elements that are not present, and that
-requires trickery to avoid quadratic compile-time behaviour in
+requires trickery to avoid quadratic compile-time behavior in
 large cases or excessive memory use in small cases. */
 else
 {
@@ -2600,7 +2600,7 @@ gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
 }
 /* Gimplifies a statement list. These may be created either by an
-enlightend front-end, or by shortcut_cond_expr. */
+enlightened front-end, or by shortcut_cond_expr. */
 static enum gimplify_status
 gimplify_statement_list (tree *expr_p)

View File

@@ -259,7 +259,7 @@ static rtx note_list;
 description interface, MAX_INSN_QUEUE_INDEX is a power of two minus
 one which is larger than maximal time of instruction execution
 computed by genattr.c on the base maximal time of functional unit
-reservations and geting a result. This is the longest time an
+reservations and getting a result. This is the longest time an
 insn may be queued. */
 #define MAX_INSN_QUEUE_INDEX max_insn_queue_index_macro_value

View File

@@ -57,7 +57,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 IEEE Trans. on Comps., 50(3), March 2001
 [2] J. Llosa, A. Gonzalez, E. Ayguade, and M. Valero.
 Swing Modulo Scheduling: A Lifetime Sensitive Approach.
-PACT '96 , pages 80-87, October 1996 (Boston - Massachussets - USA).
+PACT '96 , pages 80-87, October 1996 (Boston - Massachusetts - USA).
 The basic structure is:
 1. Build a data-dependence graph (DDG) for each loop.
@@ -156,7 +156,7 @@ void rotate_partial_schedule (partial_schedule_ptr, int);
 void set_row_column_for_ps (partial_schedule_ptr);
-/* This page defines constants and structures for the modulo scheduiing
+/* This page defines constants and structures for the modulo scheduling
 driver. */
 /* As in haifa-sched.c: */
@@ -202,7 +202,7 @@ typedef struct node_sched_params
 original register defined by the node. */
 rtx first_reg_move;
-/* The number of register-move instructions added, immediately preceeding
+/* The number of register-move instructions added, immediately preceding
 first_reg_move. */
 int nreg_moves;
@@ -210,7 +210,7 @@ typedef struct node_sched_params
 int stage; /* Holds time / ii. */
 /* The column of a node inside the ps. If nodes u, v are on the same row,
-u will preceed v if column (u) < column (v). */
+u will precede v if column (u) < column (v). */
 int column;
 } *node_sched_params_ptr;
@@ -472,7 +472,7 @@ generate_reg_moves (partial_schedule_ptr ps)
 {
 int nreg_moves4e = (SCHED_TIME (e->dest) - SCHED_TIME (e->src)) / ii;
-/* If dest preceeds src in the schedule of the kernel, then dest
+/* If dest precedes src in the schedule of the kernel, then dest
 will read before src writes and we can save one reg_copy. */
 if (SCHED_ROW (e->dest) == SCHED_ROW (e->src)
 && SCHED_COLUMN (e->dest) < SCHED_COLUMN (e->src))
@@ -605,7 +605,7 @@ duplicate_insns_of_cycles (partial_schedule_ptr ps, int from_stage,
 if (for_prolog)
 {
 /* SCHED_STAGE (u_node) >= from_stage == 0. Generate increasing
-number of reg_moves starting with the second occurance of
+number of reg_moves starting with the second occurrence of
 u_node, which is generated if its SCHED_STAGE <= to_stage. */
 i_reg_moves = to_stage - SCHED_STAGE (u_node);
 i_reg_moves = MAX (i_reg_moves, 0);

View File

@@ -1343,7 +1343,7 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
 *count++;
 break;
-/* Few special cases of expensive operations. This is usefull
+/* Few special cases of expensive operations. This is useful
 to avoid inlining on functions having too many of these. */
 case TRUNC_DIV_EXPR:
 case CEIL_DIV_EXPR:

View File

@@ -473,7 +473,7 @@ rewrite_initialize_block_local_data (struct dom_walk_data *walk_data ATTRIBUTE_U
 /* We get cleared memory from the allocator, so if the memory is
 not cleared, then we are re-using a previously allocated entry. In
-that case, we can also re-use the underlying virtal arrays. Just
+that case, we can also re-use the underlying virtual arrays. Just
 make sure we clear them before using them! */
 if (recycled && bd->block_defs && VARRAY_ACTIVE_SIZE (bd->block_defs) > 0)
 abort ();

View File

@@ -474,7 +474,7 @@ get_nl_goto_field (struct nesting_info *info)
 /* For __builtin_nonlocal_goto, we need N words. The first is the
 frame pointer, the rest is for the target's stack pointer save
-area. The number of words is controled by STACK_SAVEAREA_MODE;
+area. The number of words is controlled by STACK_SAVEAREA_MODE;
 not the best interface, but it'll do for now. */
 if (Pmode == ptr_mode)
 type = ptr_type_node;
@@ -987,7 +987,7 @@ convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
 /* The original user label may also be use for a normal goto, therefore
 we must create a new label that will actually receive the abnormal
 control transfer. This new label will be marked LABEL_NONLOCAL; this
-mark will trigger proper behaviour in the cfg, as well as cause the
+mark will trigger proper behavior in the cfg, as well as cause the
 (hairy target-specific) non-local goto receiver code to be generated
 when we expand rtl. */
 new_label = create_artificial_label ();
@@ -1316,7 +1316,7 @@ finalize_nesting_tree_1 (struct nesting_info *root)
 sf->has_nonlocal_label = 1;
 }
-/* Make sure all new local variables get insertted into the
+/* Make sure all new local variables get inserted into the
 proper BIND_EXPR. */
 if (root->new_local_var_chain)
 declare_tmp_vars (root->new_local_var_chain,

View File

@@ -54,7 +54,7 @@ struct nrv_data
 this function's RETURN_EXPR statements. */
 tree var;
-/* This is the function's RESULT_DECL. We will replace all occurences
+/* This is the function's RESULT_DECL. We will replace all occurrences
 of VAR with RESULT_DECL when we apply this optimization. */
 tree result;
 };
@@ -84,7 +84,7 @@ finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
 to RESULT. */
 else if (TREE_CODE (*tp) == RETURN_EXPR)
 TREE_OPERAND (*tp, 0) = dp->result;
-/* Replace all occurences of VAR with RESULT. */
+/* Replace all occurrences of VAR with RESULT. */
 else if (*tp == dp->var)
 *tp = dp->result;

View File

@@ -1578,7 +1578,7 @@ maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
 found:
 /* If we get here, we've got an aggregate field, and a possibly
-non-zero offset into them. Recurse and hope for a valid match. */
+nonzero offset into them. Recurse and hope for a valid match. */
 if (base_is_ptr)
 base = build1 (INDIRECT_REF, record_type, base);
 base = build (COMPONENT_REF, field_type, base, f);

View File

@@ -642,7 +642,7 @@ tree_ssa_dominator_optimize (void)
 /* We emptyed the hash table earlier, now delete it completely. */
 htab_delete (avail_exprs);
-/* It is not nocessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
+/* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
 CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
 of the do-while loop above. */
@@ -1076,7 +1076,7 @@ remove_local_expressions_from_table (varray_type locals,
 }
 /* Use the SSA_NAMES in LOCALS to restore TABLE to its original
-state, stopping when there are LIMIT entires left in LOCALs. */
+state, stopping when there are LIMIT entries left in LOCALs. */
 static void
 restore_nonzero_vars_to_original_value (varray_type locals,
@@ -1095,7 +1095,7 @@ restore_nonzero_vars_to_original_value (varray_type locals,
 }
 /* Use the source/dest pairs in LOCALS to restore TABLE to its original
-state, stopping when there are LIMIT entires left in LOCALs. */
+state, stopping when there are LIMIT entries left in LOCALs. */
 static void
 restore_vars_to_original_value (varray_type locals,
@@ -1365,7 +1365,7 @@ record_equivalences_from_phis (struct dom_walk_data *walk_data, basic_block bb)
 breaking out of the loop, then we have a PHI which may create
 a useful equivalence. We do not need to record unwind data for
 this, since this is a true assignment and not an equivalence
-infered from a comparison. All uses of this ssa name are dominated
+inferred from a comparison. All uses of this ssa name are dominated
 by this assignment, so unwinding just costs time and space. */
 if (i == PHI_NUM_ARGS (phi)
 && may_propagate_copy (lhs, rhs))
@@ -1659,7 +1659,7 @@ record_equality (tree x, tree y, varray_type *block_const_and_copies_p)
 /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
 variable compared against zero. If we're honoring signed zeros,
 then we cannot record this value unless we know that the value is
-non-zero. */
+nonzero. */
 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
 && (TREE_CODE (y) != REAL_CST
 || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
@@ -2405,7 +2405,7 @@ record_equivalences_from_stmt (tree stmt,
 /* If the RHS of the assignment is a constant or another variable that
 may be propagated, register it in the CONST_AND_COPIES table. We
 do not need to record unwind data for this, since this is a true
-assignment and not an equivalence infered from a comparison. All
+assignment and not an equivalence inferred from a comparison. All
 uses of this ssa name are dominated by this assignment, so unwinding
 just costs time and space. */
 if (may_optimize_p

View File

@@ -838,7 +838,7 @@ tpa_delete (tpa_p tpa)
 }
-/* This function will remove any tree entires from TPA which have only a single
+/* This function will remove any tree entries from TPA which have only a single
 element. This will help keep the size of the conflict graph down. The
 function returns the number of remaining tree lists. */
@@ -1419,7 +1419,7 @@ build_tree_conflict_graph (tree_live_info_p liveinfo, tpa_p tpa,
 /* Anything which is still live at this point interferes.
 In order to implement this efficiently, only conflicts between
 partitions which have the same TPA root need be added.
-TPA roots which have been seen are tracked in 'tpa_nodes'. A non-zero
+TPA roots which have been seen are tracked in 'tpa_nodes'. A nonzero
 entry points to an index into 'partition_link', which then indexes
 into itself forming a linked list of partitions sharing a tpa root
 which have been seen as live up to this point. Since partitions start

View File

@@ -268,7 +268,7 @@ copy_loop_headers (void)
 create_preheaders (loops, CP_SIMPLE_PREHEADERS);
-/* We do not try to keep the information about irreductible regions
+/* We do not try to keep the information about irreducible regions
 up-to-date. */
 loops->state &= ~LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS;

View File

@@ -1596,7 +1596,7 @@ process_delayed_rename (struct expr_info *ei, tree use, tree real_occ)
 /* For the uninitiated, the algorithm is a modified SSA renaming
 algorithm (working on expressions rather than variables) . We
 attempt to determine which expression occurrences have the same
-ESSA version (we call it class, for equivalence/redunancy class,
+ESSA version (we call it class, for equivalence/redundancy class,
 which is what the papers call it. Open64 calls it e-version), and
 which occurrences are actually operands for an EPHI (since this has
 to be discovered from the program).
@@ -2149,7 +2149,7 @@ insert_one_operand (struct expr_info *ei, tree ephi, int opnd_indx,
 /* First step of finalization. Determine which expressions are being
 saved and which are being deleted.
-This is done as a simple dominator based availabilty calculation,
+This is done as a simple dominator based availability calculation,
 using the e-versions/redundancy classes. */
 static bool

View File

@@ -37,7 +37,7 @@ Boston, MA 02111-1307, USA. */
 #include "langhooks.h"
 /* The file implements the tail recursion elimination. It is also used to
-analyse the tail calls in general, passing the results to the rtl level
+analyze the tail calls in general, passing the results to the rtl level
 where they are used for sibcall optimization.
 In addition to the standard tail recursion elimination, we handle the most
@@ -80,7 +80,7 @@ Boston, MA 02111-1307, USA. */
 We rewrite this to a gimple equivalent of return m_acc * x + a_acc.
 2) return f (...), where f is the current function, is rewritten in a
-clasical tail-recursion elimination way, into assignment of arguments
+classical tail-recursion elimination way, into assignment of arguments
 and jump to the start of the function. Values of the accumulators
 are unchanged.
@@ -574,7 +574,7 @@ adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
 }
 }
-/* Adjust value of the return at the end of BB accodring to M and A
+/* Adjust value of the return at the end of BB according to M and A
 accumulators. */
 static void

View File

@@ -1008,7 +1008,7 @@ struct tree_vec GTY(())
 /* Define fields and accessors for some nodes that represent expressions. */
-/* Non-zero if NODE is an emtpy statement (NOP_EXPR <0>). */
+/* Nonzero if NODE is an empty statement (NOP_EXPR <0>). */
 #define IS_EMPTY_STMT(NODE) (TREE_CODE (NODE) == NOP_EXPR \
 && VOID_TYPE_P (TREE_TYPE (NODE)) \
 && integer_zerop (TREE_OPERAND (NODE, 0)))