genmatch.c (expr::gen_transform): Use fold_buildN_loc and build_call_expr_loc.

2014-10-24  Richard Biener  <rguenther@suse.de>

	* genmatch.c (expr::gen_transform): Use fold_buildN_loc
	and build_call_expr_loc.
	(dt_simplify::gen): Drop non_lvalue for GIMPLE, use
	non_lvalue_loc to build it for GENERIC.
	(decision_tree::gen_generic): Add location argument to
	generic_simplify prototype.
	(capture_info): New class.
	(capture_info::capture_info): New constructor.
	(capture_info::walk_match): New method.
	(capture_info::walk_result): New method.
	(capture_info::walk_c_expr): New method.
	(dt_simplify::gen): Handle preserving side-effects for
	GENERIC code generation.
	(decision_tree::gen_generic): Do not reject operands
	with TREE_SIDE_EFFECTS.
	* generic-match.h: New file.
	* generic-match-head.c: Include generic-match.h, not gimple-match.h.
	* match.pd: Add some constant folding patterns from fold-const.c.
	* fold-const.c: Include generic-match.h.
	(fold_unary_loc): Dispatch to generic_simplify.
	(fold_ternary_loc): Likewise.
	(fold_binary_loc): Likewise.  Remove patterns now implemented
	by generic_simplify.
	* gimple-fold.c (replace_stmt_with_simplification): New function.
	(fold_stmt_1): Add valueize parameter, dispatch to gimple_simplify.
	(no_follow_ssa_edges): New function.
	(fold_stmt): New overload with valueization hook.  Use
	no_follow_ssa_edges for the overload without hook.
	(fold_stmt_inplace): Likewise.
	* gimple-fold.h (no_follow_ssa_edges): Declare.

From-SVN: r216631
Richard Biener <rguenther@suse.de>
2014-10-24 11:00:08 +00:00
commit e0ee10ed5a (parent 77efea3120)
8 changed files with 552 additions and 92 deletions
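Before the per-file hunks, here is the shape of the two new dispatch points in condensed form. This is an illustrative paraphrase of the fold-const.c and gimple-fold.c hunks below, not a verbatim excerpt; all names come from those hunks.

/* GENERIC: fold_unary/binary/ternary_loc now try the generated
   simplifier first (binary case shown).  */
tem = generic_simplify (loc, code, type, op0, op1);
if (tem)
  return tem;

/* GIMPLE: fold_stmt_1 dispatches to gimple_simplify and applies the
   result via replace_stmt_with_simplification.  */
gimple_seq seq = NULL;
code_helper rcode;
tree ops[3] = {};
if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
  {
    if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
      changed = true;
    else
      gimple_seq_discard (seq);
  }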

gcc/ChangeLog

@@ -1,3 +1,36 @@
(new entry; identical to the log message above)
gcc/fold-const.c

@@ -70,6 +70,7 @@ along with GCC; see the file COPYING3. If not see
#include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
@@ -7564,6 +7565,10 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 1);
tem = generic_simplify (loc, code, type, op0);
if (tem)
return tem;
arg0 = op0;
if (arg0)
{
@@ -9913,6 +9918,10 @@ fold_binary_loc (location_t loc,
&& tree_swap_operands_p (arg0, arg1, true))
return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
tem = generic_simplify (loc, code, type, op0, op1);
if (tem)
return tem;
/* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
First check for cases where an arithmetic operation is applied to a
@@ -10035,10 +10044,6 @@ fold_binary_loc (location_t loc,
if (integer_zerop (arg0))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
/* PTR +p 0 -> PTR */
if (integer_zerop (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
@@ -10159,9 +10164,6 @@ fold_binary_loc (location_t loc,
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
@@ -10648,8 +10650,6 @@ fold_binary_loc (location_t loc,
{
if (integer_zerop (arg0))
return negate_expr (fold_convert_loc (loc, type, arg1));
if (integer_zerop (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Fold A - (A & B) into ~B & A. */
if (!TREE_SIDE_EFFECTS (arg0)
@@ -10742,16 +10742,6 @@ fold_binary_loc (location_t loc,
}
}
/* Fold &x - &x. This can happen from &x.foo - &x.
This is unsafe for certain floats even in non-IEEE formats.
In IEEE, it is unsafe because it does wrong for NaNs.
Also note that operand_equal_p is always false if an operand
is volatile. */
if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
&& operand_equal_p (arg0, arg1, 0))
return omit_one_operand_loc (loc, type, build_zero_cst (type), arg0);
/* A - B -> A + (-B) if B is easily negatable. */
if (negate_expr_p (arg1)
&& ((FLOAT_TYPE_P (type)
@@ -10829,10 +10819,6 @@ fold_binary_loc (location_t loc,
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
return omit_one_operand_loc (loc, type, arg1, arg0);
if (integer_onep (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Transform x * -1 into -x. Make sure to do the negation
on the original operand with conversions not stripped
because we can only strip non-sign-changing conversions. */
@@ -11129,10 +11115,6 @@ fold_binary_loc (location_t loc,
case BIT_IOR_EXPR:
bit_ior:
if (integer_all_onesp (arg1))
return omit_one_operand_loc (loc, type, arg1, arg0);
if (integer_zerop (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
@@ -11271,12 +11253,8 @@ fold_binary_loc (location_t loc,
goto bit_rotate;
case BIT_XOR_EXPR:
if (integer_zerop (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_all_onesp (arg1))
return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* ~X ^ X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
@@ -11434,8 +11412,6 @@ fold_binary_loc (location_t loc,
case BIT_AND_EXPR:
if (integer_all_onesp (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
return omit_one_operand_loc (loc, type, arg1, arg0);
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
@@ -12186,8 +12162,6 @@ fold_binary_loc (location_t loc,
case ROUND_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
if (integer_onep (arg1))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
return NULL_TREE;
/* X / -1 is -X. */
@@ -12257,21 +12231,6 @@ fold_binary_loc (location_t loc,
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
/* X % 1 is always zero, but be sure to preserve any side
effects in X. */
if (integer_onep (arg1))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* X % 0, return X % 0 unchanged so that we can get the
proper warnings and errors. */
if (integer_zerop (arg1))
return NULL_TREE;
/* 0 % X is always zero, but be sure to preserve any side
effects in X. Place this after checking for X == 0. */
if (integer_zerop (arg0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
@@ -13804,6 +13763,10 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
&& tree_swap_operands_p (op0, op1, true))
return fold_build3_loc (loc, code, type, op1, op0, op2);
tem = generic_simplify (loc, code, type, op0, op1, op2);
if (tem)
return tem;
/* Strip any conversions that don't change the mode. This is safe
for every expression, except for a comparison expression because
its signedness is derived from its operands. So, in the latter

gcc/generic-match-head.c

@@ -43,6 +43,6 @@ along with GCC; see the file COPYING3. If not see
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "dumpfile.h"
#include "gimple-match.h"
#include "generic-match.h"

gcc/generic-match.h (new file)

@@ -0,0 +1,33 @@
/* Generic simplify definitions.
Copyright (C) 2011-2014 Free Software Foundation, Inc.
Contributed by Richard Guenther <rguenther@suse.de>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#ifndef GCC_GENERIC_MATCH_H
#define GCC_GENERIC_MATCH_H
/* Note the following functions are supposed to be only used from
fold_unary_loc, fold_binary_loc and fold_ternary_loc respectively.
They are not considered a public API. */
tree generic_simplify (location_t, enum tree_code, tree, tree);
tree generic_simplify (location_t, enum tree_code, tree, tree, tree);
tree generic_simplify (location_t, enum tree_code, tree, tree, tree, tree);
#endif /* GCC_GENERIC_MATCH_H */

gcc/genmatch.c

@@ -1374,10 +1374,11 @@ expr::gen_transform (FILE *f, const char *dest, bool gimple, int depth,
else
{
if (operation->kind == id_base::CODE)
fprintf (f, " res = fold_build%d (%s, %s",
fprintf (f, " res = fold_build%d_loc (loc, %s, %s",
ops.length(), operation->id, type);
else
fprintf (f, " res = build_call_expr (builtin_decl_implicit (%s), %d",
fprintf (f, " res = build_call_expr_loc (loc, "
"builtin_decl_implicit (%s), %d",
operation->id, ops.length());
for (unsigned i = 0; i < ops.length (); ++i)
fprintf (f, ", ops%d[%u]", depth, i);
@@ -1860,6 +1861,167 @@ dt_operand::gen (FILE *f, bool gimple)
fprintf (f, "}\n");
}
/* For GENERIC we have to take care of wrapping multiple-used
expressions with side-effects in save_expr and preserve side-effects
of expressions with omit_one_operand. Analyze captures in
match, result and with expressions and perform early-outs
on the outermost match expression operands for cases we cannot
handle. */
struct capture_info
{
capture_info (simplify *s);
void walk_match (operand *o, unsigned toplevel_arg, bool);
void walk_result (operand *o, bool);
void walk_c_expr (c_expr *);
struct cinfo
{
bool expr_p;
bool cse_p;
bool force_no_side_effects_p;
unsigned long toplevel_msk;
int result_use_count;
};
auto_vec<cinfo> info;
unsigned long force_no_side_effects;
};
/* Analyze captures in S. */
capture_info::capture_info (simplify *s)
{
expr *e;
if (!s->result
|| ((e = dyn_cast <expr *> (s->result))
&& is_a <predicate_id *> (e->operation)))
{
force_no_side_effects = -1;
return;
}
force_no_side_effects = 0;
info.safe_grow_cleared (s->capture_max + 1);
e = as_a <expr *> (s->match);
for (unsigned i = 0; i < e->ops.length (); ++i)
walk_match (e->ops[i], i, false);
walk_result (s->result, false);
for (unsigned i = 0; i < s->ifexpr_vec.length (); ++i)
if (s->ifexpr_vec[i].is_with)
walk_c_expr (as_a <c_expr *>(s->ifexpr_vec[i].cexpr));
}
/* Analyze captures in the match expression piece O. */
void
capture_info::walk_match (operand *o, unsigned toplevel_arg, bool conditional_p)
{
if (capture *c = dyn_cast <capture *> (o))
{
info[c->where].toplevel_msk |= 1 << toplevel_arg;
info[c->where].force_no_side_effects_p |= conditional_p;
/* Mark expr (non-leaf) captures and recurse. */
if (c->what
&& is_a <expr *> (c->what))
{
info[c->where].expr_p = true;
walk_match (c->what, toplevel_arg, conditional_p);
}
}
else if (expr *e = dyn_cast <expr *> (o))
{
for (unsigned i = 0; i < e->ops.length (); ++i)
{
bool cond_p = conditional_p;
if (i == 0
&& *e->operation == COND_EXPR)
cond_p = true;
else if (*e->operation == TRUTH_ANDIF_EXPR
|| *e->operation == TRUTH_ORIF_EXPR)
cond_p = true;
walk_match (e->ops[i], toplevel_arg, cond_p);
}
}
else if (is_a <predicate *> (o))
{
/* Mark non-captured leafs toplevel arg for checking. */
force_no_side_effects |= 1 << toplevel_arg;
}
else
gcc_unreachable ();
}
/* Analyze captures in the result expression piece O. */
void
capture_info::walk_result (operand *o, bool conditional_p)
{
if (capture *c = dyn_cast <capture *> (o))
{
info[c->where].result_use_count++;
/* If we substitute an expression capture we don't know
which captures this will end up using (well, we don't
compute that). Force the uses to be side-effect free
which means forcing the toplevels that reach the
expression side-effect free. */
if (info[c->where].expr_p)
force_no_side_effects |= info[c->where].toplevel_msk;
/* Mark CSE capture uses as forced to have
no side-effects. */
if (c->what
&& is_a <expr *> (c->what))
{
info[c->where].cse_p = true;
walk_result (c->what, true);
}
}
else if (expr *e = dyn_cast <expr *> (o))
{
for (unsigned i = 0; i < e->ops.length (); ++i)
{
bool cond_p = conditional_p;
if (i == 0
&& *e->operation == COND_EXPR)
cond_p = true;
else if (*e->operation == TRUTH_ANDIF_EXPR
|| *e->operation == TRUTH_ORIF_EXPR)
cond_p = true;
walk_result (e->ops[i], cond_p);
}
}
else if (c_expr *e = dyn_cast <c_expr *> (o))
walk_c_expr (e);
else
gcc_unreachable ();
}
/* Look for captures in the C expr E. */
void
capture_info::walk_c_expr (c_expr *e)
{
/* Give up for C exprs mentioning captures. */
for (unsigned i = 0; i < e->code.length (); ++i)
if (e->code[i].type == CPP_ATSIGN
&& (e->code[i+1].type == CPP_NUMBER
|| e->code[i+1].type == CPP_NAME)
&& !(e->code[i+1].flags & PREV_WHITE))
{
const cpp_token *n = &e->code[i+1];
const char *id;
if (n->type == CPP_NUMBER)
id = (const char *)n->val.str.text;
else
id = (const char *)CPP_HASHNODE (n->val.node.node)->ident.str;
info[(*e->capture_ids)[id]].force_no_side_effects_p = true;
}
}
/* Generate code for the '(if ...)', '(with ..)' and actual transform
step of a '(simplify ...)' or '(match ...)'. This handles everything
that is not part of the decision tree (simplify->match). */
@@ -1928,21 +2090,54 @@ dt_simplify::gen (FILE *f, bool gimple)
n_braces++;
}
/* Analyze captures and perform early-outs on the incoming arguments
that cover cases we cannot handle. */
capture_info cinfo (s);
expr *e;
if (!gimple
&& s->result
&& !((e = dyn_cast <expr *> (s->result))
&& is_a <predicate_id *> (e->operation)))
{
for (unsigned i = 0; i < as_a <expr *> (s->match)->ops.length (); ++i)
if (cinfo.force_no_side_effects & (1 << i))
fprintf (f, "if (TREE_SIDE_EFFECTS (op%d)) return NULL_TREE;\n", i);
for (int i = 0; i <= s->capture_max; ++i)
if (cinfo.info[i].cse_p)
;
else if (cinfo.info[i].force_no_side_effects_p
&& (cinfo.info[i].toplevel_msk
& cinfo.force_no_side_effects) == 0)
fprintf (f, "if (TREE_SIDE_EFFECTS (captures[%d])) "
"return NULL_TREE;\n", i);
else if ((cinfo.info[i].toplevel_msk
& cinfo.force_no_side_effects) != 0)
/* Mark capture as having no side-effects if we had to verify
that via forced toplevel operand checks. */
cinfo.info[i].force_no_side_effects_p = true;
}
fprintf (f, "if (dump_file && (dump_flags & TDF_DETAILS)) "
"fprintf (dump_file, \"Applying pattern ");
output_line_directive (f, s->result_location, true);
fprintf (f, ", %%s:%%d\\n\", __FILE__, __LINE__);\n");
if (!s->result)
operand *result = s->result;
if (!result)
{
/* If there is no result then this is a predicate implementation. */
fprintf (f, "return true;\n");
}
else if (gimple)
{
if (s->result->type == operand::OP_EXPR)
/* For GIMPLE simply drop NON_LVALUE_EXPR (which only appears
in outermost position). */
if (result->type == operand::OP_EXPR
&& *as_a <expr *> (result)->operation == NON_LVALUE_EXPR)
result = as_a <expr *> (result)->ops[0];
if (result->type == operand::OP_EXPR)
{
expr *e = as_a <expr *> (s->result);
expr *e = as_a <expr *> (result);
bool is_predicate = is_a <predicate_id *> (e->operation);
if (!is_predicate)
fprintf (f, "*res_code = %s;\n", e->operation->id);
@@ -1964,10 +2159,10 @@ dt_simplify::gen (FILE *f, bool gimple)
fprintf (f, "gimple_resimplify%d (seq, res_code, type, "
"res_ops, valueize);\n", e->ops.length ());
}
else if (s->result->type == operand::OP_CAPTURE
|| s->result->type == operand::OP_C_EXPR)
else if (result->type == operand::OP_CAPTURE
|| result->type == operand::OP_C_EXPR)
{
s->result->gen_transform (f, "res_ops[0]", true, 1, "type", indexes);
result->gen_transform (f, "res_ops[0]", true, 1, "type", indexes);
fprintf (f, "*res_code = TREE_CODE (res_ops[0]);\n");
}
else
@@ -1976,10 +2171,22 @@ dt_simplify::gen (FILE *f, bool gimple)
}
else /* GENERIC */
{
if (s->result->type == operand::OP_EXPR)
bool is_predicate = false;
if (result->type == operand::OP_EXPR)
{
expr *e = as_a <expr *> (s->result);
bool is_predicate = is_a <predicate_id *> (e->operation);
expr *e = as_a <expr *> (result);
is_predicate = is_a <predicate_id *> (e->operation);
/* Search for captures used multiple times in the result expression
and dependent on TREE_SIDE_EFFECTS emit a SAVE_EXPR. */
if (!is_predicate)
for (int i = 0; i < s->capture_max + 1; ++i)
{
if (!cinfo.info[i].force_no_side_effects_p
&& cinfo.info[i].result_use_count > 1)
fprintf (f, " if (TREE_SIDE_EFFECTS (captures[%d]))\n"
" captures[%d] = save_expr (captures[%d]);\n",
i, i, i);
}
for (unsigned j = 0; j < e->ops.length (); ++j)
{
char dest[32];
@@ -1997,32 +2204,56 @@ dt_simplify::gen (FILE *f, bool gimple)
? NULL : "TREE_TYPE (res_op0)");
e->ops[j]->gen_transform (f, dest, false, 1, optype, indexes);
}
if (is_a <predicate_id *> (e->operation))
if (is_predicate)
fprintf (f, "return true;\n");
else
{
/* Re-fold the toplevel result. */
if (e->operation->kind == id_base::CODE)
fprintf (f, " return fold_build%d (%s, type",
e->ops.length (), e->operation->id);
fprintf (f, " tree res;\n");
/* Re-fold the toplevel result. Use non_lvalue to
build NON_LVALUE_EXPRs so they get properly
ignored when in GIMPLE form. */
if (*e->operation == NON_LVALUE_EXPR)
fprintf (f, " res = non_lvalue_loc (loc, res_op0);\n");
else
fprintf (f, " return build_call_expr "
"(builtin_decl_implicit (%s), %d",
e->operation->id, e->ops.length());
for (unsigned j = 0; j < e->ops.length (); ++j)
fprintf (f, ", res_op%d", j);
fprintf (f, ");\n");
{
if (e->operation->kind == id_base::CODE)
fprintf (f, " res = fold_build%d_loc (loc, %s, type",
e->ops.length (), e->operation->id);
else
fprintf (f, " res = build_call_expr_loc "
"(loc, builtin_decl_implicit (%s), %d",
e->operation->id, e->ops.length());
for (unsigned j = 0; j < e->ops.length (); ++j)
fprintf (f, ", res_op%d", j);
fprintf (f, ");\n");
}
}
}
else if (s->result->type == operand::OP_CAPTURE
|| s->result->type == operand::OP_C_EXPR)
else if (result->type == operand::OP_CAPTURE
|| result->type == operand::OP_C_EXPR)
{
fprintf (f, " tree res;\n");
s->result->gen_transform (f, " res", false, 1, "type", indexes);
fprintf (f, " return res;\n");
}
else
gcc_unreachable ();
if (!is_predicate)
{
/* Search for captures not used in the result expression and dependent
on TREE_SIDE_EFFECTS emit omit_one_operand. */
for (int i = 0; i < s->capture_max + 1; ++i)
{
if (!cinfo.info[i].force_no_side_effects_p
&& !cinfo.info[i].expr_p
&& cinfo.info[i].result_use_count == 0)
fprintf (f, " if (TREE_SIDE_EFFECTS (captures[%d]))\n"
" res = build2_loc (loc, COMPOUND_EXPR, type,"
" fold_ignored_result (captures[%d]), res);\n",
i, i);
}
fprintf (f, " return res;\n");
}
}
for (unsigned i = 0; i < n_braces; ++i)
@@ -2086,25 +2317,13 @@ decision_tree::gen_generic (FILE *f)
for (unsigned n = 1; n <= 3; ++n)
{
fprintf (f, "\ntree\n"
"generic_simplify (enum tree_code code, "
"generic_simplify (location_t loc, enum tree_code code, "
"tree type ATTRIBUTE_UNUSED");
for (unsigned i = 0; i < n; ++i)
fprintf (f, ", tree op%d", i);
fprintf (f, ")\n");
fprintf (f, "{\n");
/* ??? For now reject all simplifications on operands with
side-effects as we are not prepared to properly wrap
omitted parts with omit_one_operand and friends. In
principle we can do that automagically for a subset of
transforms (and only reject the remaining cases).
This fixes for example gcc.c-torture/execute/20050131-1.c. */
fprintf (f, "if ((op0 && TREE_SIDE_EFFECTS (op0))");
for (unsigned i = 1; i < n; ++i)
fprintf (f, "|| (op%d && TREE_SIDE_EFFECTS (op%d))", i, i);
fprintf (f, ")\n"
" return NULL_TREE;\n");
fprintf (f, "switch (code)\n"
"{\n");
for (unsigned i = 0; i < root->kids.length (); i++)
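To make the capture_info side-effects handling above concrete: for a match.pd pattern whose result drops a captured operand, e.g. (simplify (mult @0 integer_zerop@1) @1), the GENERIC code that dt_simplify::gen emits now has roughly the following shape. This is a hand-written approximation of the generated generic-match.c, not actual genmatch output.

/* captures[0] is the multiplied operand, captures[1] the literal zero.
   The result only uses captures[1]; instead of rejecting operands with
   TREE_SIDE_EFFECTS up front, the unused capture is preserved in a
   COMPOUND_EXPR.  */
{
  tree res;
  res = captures[1];
  if (TREE_SIDE_EFFECTS (captures[0]))
    res = build2_loc (loc, COMPOUND_EXPR, type,
		      fold_ignored_result (captures[0]), res);
  return res;
}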

gcc/gimple-fold.c

@@ -61,6 +61,8 @@ along with GCC; see the file COPYING3. If not see
#include "dbgcnt.h"
#include "builtins.h"
#include "output.h"
#include "tree-eh.h"
#include "gimple-match.h"
/* Return true when DECL can be referenced from current unit.
FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
@@ -2792,6 +2794,121 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
return changed;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
gimple_simplify.
Replaces *GSI with the simplification result in RCODE and OPS
and the associated statements in *SEQ. Does the replacement
according to INPLACE and returns true if the operation succeeded. */
static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
code_helper rcode, tree *ops,
gimple_seq *seq, bool inplace)
{
gimple stmt = gsi_stmt (*gsi);
/* Play safe and do not allow abnormals to be mentioned in
newly created statements. See also maybe_push_res_to_seq. */
if ((TREE_CODE (ops[0]) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
|| (ops[1]
&& TREE_CODE (ops[1]) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
|| (ops[2]
&& TREE_CODE (ops[2]) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
return false;
if (gimple_code (stmt) == GIMPLE_COND)
{
gcc_assert (rcode.is_tree_code ());
if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
/* GIMPLE_CONDs condition may not throw. */
&& (!flag_exceptions
|| !cfun->can_throw_non_call_exceptions
|| !operation_could_trap_p (rcode,
FLOAT_TYPE_P (TREE_TYPE (ops[0])),
false, NULL_TREE)))
gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
else if (rcode == SSA_NAME)
gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
build_zero_cst (TREE_TYPE (ops[0])));
else if (rcode == INTEGER_CST)
{
if (integer_zerop (ops[0]))
gimple_cond_make_false (stmt);
else
gimple_cond_make_true (stmt);
}
else if (!inplace)
{
tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
ops, seq);
if (!res)
return false;
gimple_cond_set_condition (stmt, NE_EXPR, res,
build_zero_cst (TREE_TYPE (res)));
}
else
return false;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "gimple_simplified to ");
if (!gimple_seq_empty_p (*seq))
print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
print_gimple_stmt (dump_file, gsi_stmt (*gsi),
0, TDF_SLIM);
}
gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
return true;
}
else if (is_gimple_assign (stmt)
&& rcode.is_tree_code ())
{
if (!inplace
|| gimple_num_ops (stmt) <= get_gimple_rhs_num_ops (rcode))
{
maybe_build_generic_op (rcode,
TREE_TYPE (gimple_assign_lhs (stmt)),
&ops[0], ops[1], ops[2]);
gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
ops[0], ops[1], ops[2]);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "gimple_simplified to ");
if (!gimple_seq_empty_p (*seq))
print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
print_gimple_stmt (dump_file, gsi_stmt (*gsi),
0, TDF_SLIM);
}
gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
return true;
}
}
else if (!inplace)
{
if (gimple_has_lhs (stmt))
{
tree lhs = gimple_get_lhs (stmt);
maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
ops, seq, lhs);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "gimple_simplified to ");
print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
}
gsi_replace_with_seq_vops (gsi, *seq);
return true;
}
else
gcc_unreachable ();
}
return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation. */
static bool
@@ -2878,7 +2995,7 @@ maybe_canonicalize_mem_ref_addr (tree *t)
distinguishes both cases. */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
bool changed = false;
gimple stmt = gsi_stmt (*gsi);
@@ -2956,6 +3073,25 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
default:;
}
/* Dispatch to pattern-based folding. */
if (!inplace
|| is_gimple_assign (stmt)
|| gimple_code (stmt) == GIMPLE_COND)
{
gimple_seq seq = NULL;
code_helper rcode;
tree ops[3] = {};
if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
{
if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
changed = true;
else
gimple_seq_discard (seq);
}
}
stmt = gsi_stmt (*gsi);
/* Fold the main computation performed by the statement. */
switch (gimple_code (stmt))
{
@@ -3095,6 +3231,14 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
return changed;
}
/* Valueization callback that ends up not following SSA edges. */
tree
no_follow_ssa_edges (tree)
{
return NULL_TREE;
}
/* Fold the statement pointed to by GSI. In some cases, this function may
replace the whole statement with a new one. Returns true iff folding
makes any changes.
@@ -3105,7 +3249,13 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
return fold_stmt_1 (gsi, false);
return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI. Only operations like
@@ -3120,7 +3270,7 @@ bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
gimple stmt = gsi_stmt (*gsi);
bool changed = fold_stmt_1 (gsi, true);
bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
gcc_assert (gsi_stmt (*gsi) == stmt);
return changed;
}
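The new fold_stmt overload lets a caller decide how gimple_simplify may look through SSA definitions: the hook receives an SSA name and returns its known value, or NULL_TREE to keep folding local, which is exactly what the default no_follow_ssa_edges does. A minimal sketch under an assumed pass context; my_pass_valueize and my_pass_lookup_value are hypothetical names, not part of this commit.

/* Hypothetical valueization hook backed by some pass-local lattice.  */
static tree
my_pass_valueize (tree name)
{
  tree val = my_pass_lookup_value (name);  /* assumed pass-specific lookup */
  return val ? val : NULL_TREE;            /* NULL_TREE: do not follow     */
}

/* Fold a statement, following SSA edges through the hook; the existing
   fold_stmt (gsi) and fold_stmt_inplace (gsi) keep using
   no_follow_ssa_edges.  */
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
fold_stmt (&gsi, my_pass_valueize);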

gcc/gimple-fold.h

@@ -26,11 +26,13 @@ extern tree canonicalize_constructor_val (tree, tree);
extern tree get_symbol_constant_value (tree);
extern void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
extern bool fold_stmt (gimple_stmt_iterator *);
extern bool fold_stmt (gimple_stmt_iterator *, tree (*) (tree));
extern bool fold_stmt_inplace (gimple_stmt_iterator *);
extern tree maybe_fold_and_comparisons (enum tree_code, tree, tree,
enum tree_code, tree, tree);
extern tree maybe_fold_or_comparisons (enum tree_code, tree, tree,
enum tree_code, tree, tree);
extern tree no_follow_ssa_edges (tree);
extern tree gimple_fold_stmt_to_constant_1 (gimple, tree (*) (tree));
extern tree gimple_fold_stmt_to_constant (gimple, tree (*) (tree));
extern tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));

gcc/match.pd

@@ -28,3 +28,63 @@ along with GCC; see the file COPYING3. If not see
integer_onep integer_zerop integer_all_onesp
real_zerop real_onep
CONSTANT_CLASS_P)
/* Simplifications of operations with one constant operand and
simplifications to constants. */
(for op (plus pointer_plus minus bit_ior bit_xor)
(simplify
(op @0 integer_zerop)
(non_lvalue @0)))
/* Simplify x - x.
This is unsafe for certain floats even in non-IEEE formats.
In IEEE, it is unsafe because it does wrong for NaNs.
Also note that operand_equal_p is always false if an operand
is volatile. */
(simplify
(minus @0 @0)
(if (!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
{ build_zero_cst (type); }))
(simplify
(mult @0 integer_zerop@1)
@1)
/* Make sure to preserve divisions by zero. This is the reason why
we don't simplify x / x to 1 or 0 / x to 0. */
(for op (mult trunc_div ceil_div floor_div round_div exact_div)
(simplify
(op @0 integer_onep)
(non_lvalue @0)))
/* Same applies to modulo operations, but fold is inconsistent here
and simplifies 0 % x to 0, only preserving literal 0 % 0. */
(for op (ceil_mod floor_mod round_mod trunc_mod)
/* 0 % X is always zero. */
(simplify
(trunc_mod integer_zerop@0 @1)
/* But not for 0 % 0 so that we can get the proper warnings and errors. */
(if (!integer_zerop (@1))
@0))
/* X % 1 is always zero. */
(simplify
(trunc_mod @0 integer_onep)
{ build_zero_cst (type); }))
/* x | ~0 -> ~0 */
(simplify
(bit_ior @0 integer_all_onesp@1)
@1)
/* x & 0 -> 0 */
(simplify
(bit_and @0 integer_zerop@1)
@1)
/* x ^ x -> 0 */
(simplify
(bit_xor @0 @0)
{ build_zero_cst (type); })
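Tying the pieces together: the mult-by-zero pattern above, plus the removal of the TREE_SIDE_EFFECTS early-out in the generated generic_simplify, means the omit_one_operand-style fold that used to live in fold_binary_loc is now produced by the pattern machinery. A hedged sketch; it assumes a side-effecting CALL_EXPR tree call of integer type itype and a location loc are already available.

/* Illustrative use through the regular fold entry point; fold_build2_loc
   ends up dispatching to the generated generic_simplify first.  */
tree zero = build_zero_cst (itype);
tree folded = fold_build2_loc (loc, MULT_EXPR, itype, call, zero);
/* folded can be COMPOUND_EXPR <call, 0>: the call's side-effects are kept
   while the multiplication still folds to zero, matching what
   omit_one_operand_loc used to produce in fold_binary_loc.  */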