tree-ssanames.c (release_free_names_and_compact_live_names): Replace "the the" with "the" in the comments.

	* tree-ssanames.c (release_free_names_and_compact_live_names): Replace
	"the the" with "the" in the comments.
	* ipa-devirt.c (build_type_inheritance_graph,
	update_type_inheritance_graph): Likewise.
	* tree.c (build_function_type_list_1): Likewise.
	* cfgloopmanip.c (scale_loop_profile): Likewise.
	* tree-ssa-loop-ivopts.c (get_shiftadd_cost): Likewise.
	* gimple-ssa-split-paths.c
	(find_block_to_duplicate_for_splitting_paths): Likewise.
	* tree-sra.c (init_subtree_with_zero, clobber_subtree): Likewise.
	* expr.c (convert_move): Likewise.
	* var-tracking.c (vt_stack_adjustments): Likewise.
	* tree-vect-data-refs.c (vect_enhance_data_refs_alignment): Likewise.
	* tree-vrp.c (test_for_singularity): Likewise.

From-SVN: r232765
Jakub Jelinek 2016-01-23 15:13:53 +01:00 committed by Jakub Jelinek
parent 5958f9e248
commit 6af801f52a
12 changed files with 30 additions and 15 deletions

gcc/ChangeLog

@@ -1,5 +1,20 @@
2016-01-23  Jakub Jelinek  <jakub@redhat.com>

+	* tree-ssanames.c (release_free_names_and_compact_live_names): Replace
+	"the the" with "the" in the comments.
+	* ipa-devirt.c (build_type_inheritance_graph,
+	update_type_inheritance_graph): Likewise.
+	* tree.c (build_function_type_list_1): Likewise.
+	* cfgloopmanip.c (scale_loop_profile): Likewise.
+	* tree-ssa-loop-ivopts.c (get_shiftadd_cost): Likewise.
+	* gimple-ssa-split-paths.c
+	(find_block_to_duplicate_for_splitting_paths): Likewise.
+	* tree-sra.c (init_subtree_with_zero, clobber_subtree): Likewise.
+	* expr.c (convert_move): Likewise.
+	* var-tracking.c (vt_stack_adjustments): Likewise.
+	* tree-vect-data-refs.c (vect_enhance_data_refs_alignment): Likewise.
+	* tree-vrp.c (test_for_singularity): Likewise.
+
	* tree-vect-stmts.c (vectorizable_condition): Build a VEC_COND_EXPR
	directly instead of building a temporary tree.

gcc/cfgloopmanip.c

@@ -569,7 +569,7 @@ scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
}
/* Roughly speaking we want to reduce the loop body profile by the
-the difference of loop iterations. We however can do better if
+difference of loop iterations. We however can do better if
we look at the actual profile, if it is available. */
scale = RDIV (iteration_bound * scale, iterations);
if (loop->header->count)

gcc/expr.c

@@ -485,7 +485,7 @@ convert_move (rtx to, rtx from, int unsignedp)
/* No special multiword conversion insn; do it by hand. */
start_sequence ();
-/* Since we will turn this into a no conflict block, we must ensure the
+/* Since we will turn this into a no conflict block, we must ensure
the source does not overlap the target so force it into an isolated
register when maybe so. Likewise for any MEM input, since the
conversion sequence might require several references to it and we

gcc/gimple-ssa-split-paths.c

@@ -74,7 +74,7 @@ find_block_to_duplicate_for_splitting_paths (basic_block latch)
return NULL;
/* And that BB's immediate dominator's successors are the
-the predecessors of BB. */
+predecessors of BB. */
if (!find_edge (bb_idom, EDGE_PRED (bb, 0)->src)
|| !find_edge (bb_idom, EDGE_PRED (bb, 1)->src))
return NULL;

gcc/ipa-devirt.c

@@ -2246,7 +2246,7 @@ build_type_inheritance_graph (void)
odr_vtable_hash = new odr_vtable_hash_type (23);
/* We reconstruct the graph starting of types of all methods seen in the
-the unit. */
+unit. */
FOR_EACH_SYMBOL (n)
if (is_a <cgraph_node *> (n)
&& DECL_VIRTUAL_P (n->decl)
@@ -3406,7 +3406,7 @@ update_type_inheritance_graph (void)
free_polymorphic_call_targets_hash ();
timevar_push (TV_IPA_INHERITANCE);
/* We reconstruct the graph starting from types of all methods seen in the
-the unit. */
+unit. */
FOR_EACH_FUNCTION (n)
if (DECL_VIRTUAL_P (n->decl)
&& !n->definition

gcc/tree-sra.c

@@ -2813,7 +2813,7 @@ generate_subtree_copies (struct access *access, tree agg,
}
/* Assign zero to all scalar replacements in an access subtree. ACCESS is the
-the root of the subtree to be processed. GSI is the statement iterator used
+root of the subtree to be processed. GSI is the statement iterator used
for inserting statements which are added after the current statement if
INSERT_AFTER is true or before it otherwise. */
@@ -2853,7 +2853,7 @@ init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
init_subtree_with_zero (child, gsi, insert_after, loc);
}
-/* Clobber all scalar replacements in an access subtree. ACCESS is the the
+/* Clobber all scalar replacements in an access subtree. ACCESS is the
root of the subtree to be processed. GSI is the statement iterator used
for inserting statements which are added after the current statement if
INSERT_AFTER is true or before it otherwise. */

gcc/tree-ssa-loop-ivopts.c

@@ -4234,7 +4234,7 @@ get_address_cost (bool symbol_present, bool var_present,
}
/* Calculate the SPEED or size cost of shiftadd EXPR in MODE. MULT is the
-the EXPR operand holding the shift. COST0 and COST1 are the costs for
+EXPR operand holding the shift. COST0 and COST1 are the costs for
calculating the operands of EXPR. Returns true if successful, and returns
the cost in COST. */

gcc/tree-ssanames.c

@@ -759,8 +759,8 @@ replace_ssa_name_symbol (tree ssa_name, tree sym)
TREE_TYPE (ssa_name) = TREE_TYPE (sym);
}
-/* Release the vector of free SSA_NAMEs and compact the the
-vector of SSA_NAMEs that are live. */
+/* Release the vector of free SSA_NAMEs and compact the vector of SSA_NAMEs
+that are live. */
static void
release_free_names_and_compact_live_names (function *fun)

gcc/tree-vect-data-refs.c

@@ -1495,7 +1495,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
size (vector size / 8). Vectorization factor will 8. If both
access are misaligned by 3, the first one needs one scalar
iteration to be aligned, and the second one needs 5. But the
-the first one will be aligned also by peeling 5 scalar
+first one will be aligned also by peeling 5 scalar
iterations, and in that case both accesses will be aligned.
Hence, except for the immediate peeling amount, we also want
to try to add full vector size, while we don't exceed

gcc/tree-vrp.c

@@ -9257,8 +9257,8 @@ test_for_singularity (enum tree_code cond_code, tree op0,
tree min = NULL;
tree max = NULL;
-/* Extract minimum/maximum values which satisfy the
-the conditional as it was written. */
+/* Extract minimum/maximum values which satisfy the conditional as it was
+written. */
if (cond_code == LE_EXPR || cond_code == LT_EXPR)
{
/* This should not be negative infinity; there is no overflow

gcc/tree.c

@@ -8406,7 +8406,7 @@ build_function_type (tree value_type, tree arg_types)
/* Build a function type. The RETURN_TYPE is the type returned by the
function. If VAARGS is set, no void_type_node is appended to the
-the list. ARGP must be always be terminated be a NULL_TREE. */
+list. ARGP must be always be terminated be a NULL_TREE. */
static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)

gcc/var-tracking.c

@@ -871,7 +871,7 @@ vt_stack_adjustments (void)
pointer is often restored via a load-multiple instruction
and so no stack_adjust offset is recorded for it. This means
that the stack offset at the end of the epilogue block is the
-the same as the offset before the epilogue, whereas other paths
+same as the offset before the epilogue, whereas other paths
to the exit block will have the correct stack_adjust.
It is safe to ignore these differences because (a) we never