cgraphunit.c, [...]: Fix comment typos.

* cgraphunit.c, config/arm/arm.c, config/m68k/m68k.c,
	ipa-inline.c, tree-profile.c, tree-ssa-live.c,
	tree-ssa-math-opts.c, tree-ssanames.c, tree-vect-analyze.c,
	value-prof.c: Fix comment typos.

From-SVN: r121374
Kazu Hirata 2007-01-31 03:45:38 +00:00 committed by Kazu Hirata
parent 65de695fa5
commit 88512ba0b8
11 changed files with 28 additions and 21 deletions

gcc/ChangeLog

@@ -1,3 +1,10 @@
+2007-01-31  Kazu Hirata  <kazu@codesourcery.com>
+
+	* cgraphunit.c, config/arm/arm.c, config/m68k/m68k.c,
+	ipa-inline.c, tree-profile.c, tree-ssa-live.c,
+	tree-ssa-math-opts.c, tree-ssanames.c, tree-vect-analyze.c,
+	value-prof.c: Fix comment typos.
+
2007-01-30 Manuel Lopez-Ibanez <manu@gcc.gnu.org>
PR c++/24745

gcc/cgraphunit.c

@@ -783,7 +783,7 @@ process_function_and_variable_attributes (struct cgraph_node *first,
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
each reachable functions) and build cgraph.
The function can be called multiple times after inserting new nodes
-into beggining of queue. Just the new part of queue is re-scanned then. */
+into beginning of queue. Just the new part of queue is re-scanned then. */
static void
cgraph_analyze_functions (void)

gcc/config/arm/arm.c

@@ -7710,7 +7710,7 @@ get_jump_table_size (rtx insn)
switch (modesize)
{
case 1:
-/* Round up size of TBB table to a haflword boundary. */
+/* Round up size of TBB table to a halfword boundary. */
size = (size + 1) & ~(HOST_WIDE_INT)1;
break;
case 2:

gcc/config/m68k/m68k.c

@@ -1957,7 +1957,7 @@ m68k_rtx_costs (rtx x, int code, int outer_code, int *total)
}
}
-/* Return an instruction to move CONST_INT OPERANDS[1] into data regsiter
+/* Return an instruction to move CONST_INT OPERANDS[1] into data register
OPERANDS[0]. */
static const char *
@@ -2838,7 +2838,7 @@ notice_update_cc (rtx exp, rtx insn)
codes. Normal moves _do_ set the condition codes, but not in
a way that is appropriate for comparison with 0, because -0.0
would be treated as a negative nonzero number. Note that it
-isn't appropriate to conditionalize this restiction on
+isn't appropriate to conditionalize this restriction on
HONOR_SIGNED_ZEROS because that macro merely indicates whether
we care about the difference between -0.0 and +0.0. */
else if (!FP_REG_P (SET_DEST (exp))

gcc/ipa-inline.c

@@ -91,11 +91,11 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
maintained by pass manager). The functions after inlining are early
optimized so the early inliner sees unoptimized function itself, but
all considered callees are already optimized allowing it to unfold
-abstraction penalty on C++ effectivly and cheaply.
+abstraction penalty on C++ effectively and cheaply.
pass_ipa_early_inlining
-With profiling, the early inlining is also neccesary to reduce
+With profiling, the early inlining is also necessary to reduce
instrumentation costs on program with high abstraction penalty (doing
many redundant calls). This can't happen in parallel with early
optimization and profile instrumentation, because we would end up
@@ -751,7 +751,7 @@ cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
e->inline_failed = reason;
}
-/* Given whole compilation unit esitmate of INSNS, compute how large we can
+/* Given whole compilation unit estimate of INSNS, compute how large we can
allow the unit to grow. */
static int
compute_max_insns (int insns)
@@ -1043,7 +1043,7 @@ cgraph_decide_inlining (void)
e->caller->global.insns);
}
/* Inlining self recursive function might introduce new calls to
-thsemselves we didn't see in the loop above. Fill in the proper
+themselves we didn't see in the loop above. Fill in the proper
reason why inline failed. */
for (e = node->callers; e; e = e->next_caller)
if (e->inline_failed)
@@ -1126,7 +1126,7 @@ cgraph_decide_inlining (void)
recursive inlining, but as an special case, we want to try harder inline
ALWAYS_INLINE functions: consider callgraph a->b->c->b, with a being
flatten, b being always inline. Flattening 'a' will collapse
-a->b->c before hitting cycle. To accomondate always inline, we however
+a->b->c before hitting cycle. To accommodate always inline, we however
need to inline a->b->c->b.
So after hitting cycle first time, we switch into ALWAYS_INLINE mode and
@@ -1145,7 +1145,7 @@ try_inline (struct cgraph_edge *e, enum inlining_mode mode, int depth)
mode yet. and the function in question is always_inline. */
if (always_inline && mode != INLINE_ALWAYS_INLINE)
mode = INLINE_ALWAYS_INLINE;
-/* Otheriwse it is time to give up. */
+/* Otherwise it is time to give up. */
else
{
if (dump_file)

gcc/tree-profile.c

@@ -64,7 +64,7 @@ static GTY(()) tree ptr_void;
/* Add code:
static gcov* __gcov_indirect_call_counters; // pointer to actual counter
-static void* __gcov_indirect_call_callee; // actual callie addres
+static void* __gcov_indirect_call_callee; // actual callee address
*/
static void
tree_init_ic_make_global_vars (void)
@@ -269,7 +269,7 @@ tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
/* Output instructions as GIMPLE trees for code to find the most
common called function in indirect call.
-VALUE is the call expression whose indirect callie is profiled.
+VALUE is the call expression whose indirect callee is profiled.
TAG is the tag of the section for counters, BASE is offset of the
counter position. */
@@ -308,7 +308,7 @@ tree_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
/* Output instructions as GIMPLE trees for code to find the most
common called function in indirect call. Insert instructions at the
-begining of every possible called function.
+beginning of every possible called function.
*/
static void

gcc/tree-ssa-live.c

@@ -504,7 +504,7 @@ remove_unused_locals (void)
/* Remove unused variables from REFERENCED_VARs. As an special exception
keep the variables that are believed to be aliased. Those can't be
-easilly removed from the alias sets and and operand caches.
+easily removed from the alias sets and and operand caches.
They will be removed shortly after next may_alias pass is performed. */
FOR_EACH_REFERENCED_VAR (t, rvi)
if (!is_global_var (t)

gcc/tree-ssa-math-opts.c

@@ -521,9 +521,9 @@ struct tree_opt_pass pass_cse_reciprocals =
0 /* letter */
};
-/* Records an occurance at statement USE_STMT in the vector of trees
+/* Records an occurrence at statement USE_STMT in the vector of trees
STMTS if it is dominated by *TOP_BB or dominates it or this basic block
-is not yet initialized. Returns true if the occurance was pushed on
+is not yet initialized. Returns true if the occurrence was pushed on
the vector. Adjusts *TOP_BB to be the basic block dominating all
statements in the vector. */

gcc/tree-ssanames.c

@@ -318,7 +318,7 @@ release_dead_ssa_names (void)
referenced_var_iterator rvi;
/* Current defs point to various dead SSA names that in turn points to dead
-statements so bunch of dead memory is holded from releasing. */
+statements so bunch of dead memory is held from releasing. */
FOR_EACH_REFERENCED_VAR (t, rvi)
set_current_def (t, NULL);
/* Now release the freelist. */
@@ -328,7 +328,7 @@ release_dead_ssa_names (void)
/* Dangling pointers might make GGC to still see dead SSA names, so it is
important to unlink the list and avoid GGC from seeing all subsequent
SSA names. In longer run we want to have all dangling pointers here
-removed (since they usually go trhough dead statements that consume
+removed (since they usually go through dead statements that consume
considerable amounts of memory). */
TREE_CHAIN (t) = NULL_TREE;
n++;

gcc/tree-vect-analyze.c

@@ -164,7 +164,7 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
arguments (e.g. demotion, promotion), vectype will be reset
appropriately (later). Note that we have to visit the smallest
datatype in this function, because that determines the VF.
-If the samallest datatype in the loop is present only as the
+If the smallest datatype in the loop is present only as the
rhs of a promotion operation - we'd miss it here.
However, in such a case, that a variable of this datatype
does not appear in the lhs anywhere in the loop, it shouldn't
@@ -1752,7 +1752,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
return false;
}
-/* Check that there is no load-store dependecies for this loads
+/* Check that there is no load-store dependencies for this loads
to prevent a case of load-store-load to the same location. */
if (DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (next))
|| DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (prev)))

gcc/value-prof.c

@@ -63,7 +63,7 @@ static struct value_prof_hooks *value_prof_hooks;
3) Indirect/virtual call specialization. If we can determine most
common function callee in indirect/virtual call. We can use this
-information to improve code effectivity (espetialy info for
+information to improve code effectiveness (especially info for
inliner).
Every such optimization should add its requirements for profiled values to