ChangeLog.7: Fix comment typos.

* ChangeLog.7: Fix comment typos.
	* c-common.c: Likewise.
	* c-pretty-print.c: Likewise.
	* cgraphunit.c: Likewise.
	* et-forest.h: Likewise.
	* expr.c: Likewise.
	* gcse.c: Likewise.
	* genautomata.c: Likewise.
	* genrecog.c: Likewise.
	* gensupport.c: Likewise.
	* ggc-zone.c: Likewise.
	* haifa-sched.c: Likewise.
	* ifcvt.c: Likewise.
	* loop.c: Likewise.
	* optabs.c: Likewise.
	* pretty-print.h: Likewise.
	* regrename.c: Likewise.
	* rtl.h: Likewise.
	* sched-rgn.c: Likewise.
	* target.h: Likewise.
	* value-prof.c: Likewise.
	* web.c: Likewise.

From-SVN: r73795
This commit is contained in:
Kazu Hirata 2003-11-21 06:52:23 +00:00
parent 9313cfddf9
commit d91edf86b0
22 changed files with 26 additions and 26 deletions

View File

@@ -12648,7 +12648,7 @@ Thu Mar 28 16:35:31 2002 Jeffrey A Law (law@redhat.com)
Thu Mar 28 19:13:36 CET 2002 Jan Hubicka <jh@suse.cz>
* ifcvt.c (if_convert): Clear aux_for_blocks early enought.
* ifcvt.c (if_convert): Clear aux_for_blocks early enough.
Thu Mar 28 13:21:53 CET 2002 Jan Hubicka <jh@suse.cz>

View File

@@ -5774,7 +5774,7 @@ c_estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
return NULL;
switch (TREE_CODE (x))
{
/* Reconginze assignments of large structures and constructors of
/* Recognize assignments of large structures and constructors of
big arrays. */
case MODIFY_EXPR:
case CONSTRUCTOR:

View File

@@ -1820,7 +1820,7 @@ pp_c_expression (c_pretty_printer *pp, tree e)
/* statement:
labeled-statement
coumpound-statement
compound-statement
expression-statement
selection-statement
iteration-statement
@@ -1870,7 +1870,7 @@ pp_c_statement (c_pretty_printer *pp, tree stmt)
pp_needs_newline (pp) = true;
break;
/* coumpound-statement:
/* compound-statement:
{ block-item-list(opt) }
block-item-list:

View File

@@ -931,7 +931,7 @@ cgraph_default_inline_p (struct cgraph_node *n)
All inline candidates are put into prioritized heap based on estimated
growth of the overall number of instructions and then update the estimates.
INLINED and INLINED_CALEES are just pointers to arrays large enought
INLINED and INLINED_CALEES are just pointers to arrays large enough
to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
static void

View File

@@ -20,7 +20,7 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
for tree operations (insertion and removal of nodes and edges) and
poly-logarithmic time for nearest common ancestor.
ET tree stores its structue as a sequence of symbols obtained
ET tree stores its structure as a sequence of symbols obtained
by dfs(root)
dfs (node)

View File

@@ -9341,7 +9341,7 @@ expand_increment (tree exp, int post, int ignore)
{
/* We have a true reference to the value in OP0.
If there is an insn to add or subtract in this mode, queue it.
Queueing the increment insn avoids the register shuffling
Queuing the increment insn avoids the register shuffling
that often results if we must increment now and first save
the old value for subsequent use. */

View File

@@ -2205,7 +2205,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
&& oprs_available_p (pat, tmp))))
insert_set_in_table (pat, insn, table);
}
/* In case of store we want to consider the memory value as avaiable in
/* In case of store we want to consider the memory value as available in
the REG stored in that memory. This makes it possible to remove
redundant loads from due to stores to the same location. */
else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)

View File

@@ -6282,7 +6282,7 @@ merge_states (automaton_t automaton, vla_ptr_t *equiv_classes)
}
}
/* Its is important that alt states were sorted before and
after merging to have the same quering results. */
after merging to have the same querying results. */
new_state->component_states = uniq_sort_alt_states (alt_states);
}
else

View File

@@ -633,7 +633,7 @@ validate_pattern (rtx pattern, rtx insn, rtx set, int set_code)
if (GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
/* Find the referant for a DUP. */
/* Find the referent for a DUP. */
if (GET_CODE (dest) == MATCH_DUP
|| GET_CODE (dest) == MATCH_OP_DUP

View File

@@ -259,7 +259,7 @@ process_include (rtx desc, int lineno)
fclose (input_file);
}
/* Process a top level rtx in some way, queueing as appropriate. */
/* Process a top level rtx in some way, queuing as appropriate. */
static void
process_rtx (rtx desc, int lineno)

View File

@@ -1001,7 +1001,7 @@ ggc_alloc_zone_1 (size_t size, struct alloc_zone *zone, short type)
return result;
}
/* Allocate a SIZE of chunk memory of GTE type, into an approriate zone
/* Allocate a SIZE of chunk memory of GTE type, into an appropriate zone
for that type. */
void *
@@ -1288,7 +1288,7 @@ sweep_pages (struct alloc_zone *zone)
}
/* Large pages are all or none affairs. Either they are
completely empty, or they are completeley full.
completely empty, or they are completely full.
Thus, if the above didn't catch it, we need not do anything
except remove the mark and reset the bytes_free.

View File

@@ -973,7 +973,7 @@ priority (rtx insn)
}
/* Macros and functions for keeping the priority queue sorted, and
dealing with queueing and dequeueing of instructions. */
dealing with queuing and dequeuing of instructions. */
#define SCHED_SORT(READY, N_READY) \
do { if ((N_READY) == 2) \

View File

@@ -1652,7 +1652,7 @@ noce_try_abs (struct noce_if_info *if_info)
target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
/* ??? It's a quandry whether cmove would be better here, especially
/* ??? It's a quandary whether cmove would be better here, especially
for integers. Perhaps combine will clean things up. */
if (target && negate)
target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

View File

@@ -6223,7 +6223,7 @@ basic_induction_var (const struct loop *loop, rtx x, enum machine_mode mode,
/* convert_modes can emit new instructions, e.g. when arg is a loop
invariant MEM and dest_reg has a different mode.
These instructions would be emitted after the end of the function
and then *inc_val would be an unitialized pseudo.
and then *inc_val would be an uninitialized pseudo.
Detect this and bail in this case.
Other alternatives to solve this can be introducing a convert_modes
variant which is allowed to fail but not allowed to emit new

View File

@@ -5371,7 +5371,7 @@ init_optabs (void)
abs_optab->handlers[TYPE_MODE (complex_double_type_node)].libfunc
= init_one_libfunc ("cabs");
/* The ffs function op[1erates on `int'. */
/* The ffs function operates on `int'. */
ffs_optab->handlers[(int) mode_for_size (INT_TYPE_SIZE, MODE_INT, 0)].libfunc
= init_one_libfunc ("ffs");

View File

@@ -234,7 +234,7 @@ struct pretty_print_info
#define pp_buffer(PP) pp_base (PP)->buffer
/* Clients that directly derive from pretty_printer need to override
this macro to return a pointer to the base pretty_printer structrure. */
this macro to return a pointer to the base pretty_printer structure. */
#define pp_base(PP) (PP)
extern void pp_construct (pretty_printer *, const char *, int);

View File

@@ -1734,7 +1734,7 @@ copyprop_hardreg_forward (void)
/* If a block has a single predecessor, that we've already
processed, begin with the value data that was live at
the end of the predecessor block. */
/* ??? Ought to use more intelligent queueing of blocks. */
/* ??? Ought to use more intelligent queuing of blocks. */
if (bb->pred)
for (bbp = bb; bbp && bbp != bb->pred->src; bbp = bbp->prev_bb);
if (bb->pred

View File

@@ -1742,7 +1742,7 @@ rtx alloc_EXPR_LIST (int, rtx, rtx);
/* regclass.c */
/* Maximum number of parallel sets and clobbers in any insn in this fn.
Always at least 3, since the combiner could put that many togetherm
Always at least 3, since the combiner could put that many together
and we want this to remain correct for all the remaining passes. */
extern int max_parallel;

View File

@@ -2036,7 +2036,7 @@ add_branch_dependences (rtx head, rtx tail)
end since moving them results in worse register allocation. Uses remain
at the end to ensure proper register allocation.
cc0 setters remaim at the end because they can't be moved away from
cc0 setters remain at the end because they can't be moved away from
their cc0 user.
Insns setting CLASS_LIKELY_SPILLED_P registers (usually return values)

View File

@@ -207,7 +207,7 @@ struct gcc_target
correspondingly starts and finishes. The function defined by
init_dfa_pre_cycle_insn and init_dfa_post_cycle_insn are used
to initialize the corresponding insns. The default values of
the memebers result in not changing the automaton state when
the members result in not changing the automaton state when
the new simulated processor cycle correspondingly starts and
finishes. */
void (* init_dfa_pre_cycle_insn) (void);

View File

@@ -132,7 +132,7 @@ insn_divmod_values_to_profile (rtx insn, unsigned *n_values,
(*n_values)++;
}
/* For mod, check whether it is not often a noop (or replacable by
/* For mod, check whether it is not often a noop (or replaceable by
a few subtractions). */
if (GET_CODE (set_src) == UMOD && !side_effects_p (op1))
{
@@ -446,9 +446,9 @@ divmod_fixed_value_transform (rtx insn)
histogram = XEXP (histogram, 1);
all = INTVAL (XEXP (histogram, 0));
/* We requiere that count is at least half of all; this means
/* We require that count is at least half of all; this means
that for the transformation to fire the value must be constant
at least 50% of time (and 75% gives the garantee of usage). */
at least 50% of time (and 75% gives the guarantee of usage). */
if (!rtx_equal_p (op2, value) || 2 * count < all)
return false;

View File

@@ -20,7 +20,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* Simple optimization pass that splits independent uses of each pseudo,
increasing effectivity of other optimizations. The optimization can
increasing effectiveness of other optimizations. The optimization can
serve as an example of use for the dataflow module.
We don't split registers with REG_USERVAR set unless -fmessy-debugging