Makefile.in (OBJS): Add gimple-ssa-isolate-paths.o

* Makefile.in (OBJS): Add gimple-ssa-isolate-paths.o
	* common.opt (-fisolate-erroneous-paths): Add option and
	documentation.
	* gimple-ssa-isolate-paths.c: New file.
	* gimple.c (check_loadstore): New function.
	(infer_nonnull_range): Moved into gimple.c from tree-vrp.c.
	Verify OP is in the argument list and the argument corresponding
	to OP is a pointer type.  Use operand_equal_p rather than
	pointer equality when testing if OP is on the nonnull list.
	Use check_loadstore rather than count_ptr_derefs.  Handle
	GIMPLE_RETURN statements.
	* tree-vrp.c (infer_nonnull_range): Remove.
	* gimple.h (infer_nonnull_range): Declare.
	* opts.c (default_options_table): Add OPT_fisolate_erroneous_paths.
	* passes.def: Add pass_isolate_erroneous_paths.
	* timevar.def (TV_ISOLATE_ERRONEOUS_PATHS): New timevar.
	* tree-pass.h (make_pass_isolate_erroneous_paths): Declare.
	* tree-ssa.c (struct count_ptr_d): Remove.
	(count_ptr_derefs, count_uses_and_derefs): Remove.
	* tree-ssa.h (count_uses_and_derefs): Remove.

	* gcc.dg/pr38984.c: Add -fno-isolate-erroneous-paths.
	* gcc.dg/tree-ssa/isolate-1.c: New test.
	* gcc.dg/tree-ssa/isolate-2.c: New test.
	* gcc.dg/tree-ssa/isolate-3.c: New test.
	* gcc.dg/tree-ssa/isolate-4.c: New test.

From-SVN: r204414
Jeff Law 2013-11-05 12:47:44 -07:00 committed by Jeff Law
parent 50fae5a679
commit 8fdc414d43
19 changed files with 656 additions and 150 deletions

gcc/ChangeLog

@@ -1,3 +1,26 @@
2013-11-05 Jeff Law <law@redhat.com>
* Makefile.in (OBJS): Add gimple-ssa-isolate-paths.o
* common.opt (-fisolate-erroneous-paths): Add option and
documentation.
* gimple-ssa-isolate-paths.c: New file.
* gimple.c (check_loadstore): New function.
(infer_nonnull_range): Moved into gimple.c from tree-vrp.c.
Verify OP is in the argument list and the argument corresponding
to OP is a pointer type. Use operand_equal_p rather than
pointer equality when testing if OP is on the nonnull list.
Use check_loadstore rather than count_ptr_derefs. Handle
GIMPLE_RETURN statements.
* tree-vrp.c (infer_nonnull_range): Remove.
* gimple.h (infer_nonnull_range): Declare.
* opts.c (default_options_table): Add OPT_fisolate_erroneous_paths.
* passes.def: Add pass_isolate_erroneous_paths.
* timevar.def (TV_ISOLATE_ERRONEOUS_PATHS): New timevar.
* tree-pass.h (make_pass_isolate_erroneous_paths): Declare.
* tree-ssa.c (struct count_ptr_d): Remove.
(count_ptr_derefs, count_uses_and_derefs): Remove.
* tree-ssa.h (count_uses_and_derefs): Remove.
2013-11-05 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/58997

gcc/Makefile.in

@@ -1235,6 +1235,7 @@ OBJS = \
gimple-fold.o \
gimple-low.o \
gimple-pretty-print.o \
gimple-ssa-isolate-paths.o \
gimple-ssa-strength-reduction.o \
gimple-streamer-in.o \
gimple-streamer-out.o \

gcc/common.opt

@@ -2109,6 +2109,12 @@ foptimize-strlen
Common Report Var(flag_optimize_strlen) Optimization
Enable string length optimizations on trees
fisolate-erroneous-paths
Common Report Var(flag_isolate_erroneous_paths) Optimization
Detect paths which trigger erroneous or undefined behaviour. Isolate those
paths from the main control flow and turn the statement with erroneous or
undefined behaviour into a trap.
ftree-loop-distribution
Common Report Var(flag_tree_loop_distribution) Optimization
Enable loop distribution on trees
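
As a rough sketch of the kind of code the new option targets (this example is not part of the commit; the function names and shape are hypothetical), consider a pointer that is NULL on one incoming path and unconditionally dereferenced afterwards:

/* Hypothetical example.  When limit <= 0 the pointer q is NULL, yet the
   dereference below is unconditional, so no conforming execution can take
   that path.  With -fisolate-erroneous-paths the new pass is expected to
   duplicate the block containing the dereference, redirect the NULL-producing
   edge to the copy, and replace the dereference there with __builtin_trap ().  */
extern int *make_buffer (int limit);

int
first_element (int limit)
{
  int *q = limit > 0 ? make_buffer (limit) : (int *) 0;
  return *q;
}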

gcc/gimple-ssa-isolate-paths.c

@@ -0,0 +1,325 @@
/* Detect paths through the CFG which can never be executed in a conforming
program and isolate them.
Copyright (C) 2013
Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "gimple.h"
#include "tree-ssa.h"
#include "tree-ssanames.h"
#include "gimple-ssa.h"
#include "tree-ssa-operands.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "cfgloop.h"
#include "tree-pass.h"
static bool cfg_altered;
/* Insert a trap before SI and remove SI and all statements after SI. */
static void
insert_trap_and_remove_trailing_statements (gimple_stmt_iterator *si_p)
{
gimple_seq seq = NULL;
gimple stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
gimple_seq_add_stmt (&seq, stmt);
gsi_insert_before (si_p, seq, GSI_SAME_STMT);
/* Now delete all remaining statements in this block. */
for (; !gsi_end_p (*si_p);)
{
stmt = gsi_stmt (*si_p);
unlink_stmt_vdef (stmt);
gsi_remove (si_p, true);
release_defs (stmt);
}
}
/* BB when reached via incoming edge E will exhibit undefined behaviour
at STMT. Isolate and optimize the path which exhibits undefined
behaviour.
Isolation is simple. Duplicate BB and redirect E to BB'.
Optimization is simple as well. Replace STMT in BB' with an
unconditional trap and remove all outgoing edges from BB'.
DUPLICATE is a pre-existing duplicate, use it as BB' if it exists.
Return BB'. */
basic_block
isolate_path (basic_block bb, basic_block duplicate, edge e, gimple stmt)
{
gimple_stmt_iterator si, si2;
edge_iterator ei;
edge e2;
/* First duplicate BB if we have not done so already and remove all
the duplicate's outgoing edges as duplicate is going to unconditionally
trap. Removing the outgoing edges is both an optimization and ensures
we don't need to do any PHI node updates. */
if (!duplicate)
{
duplicate = duplicate_block (bb, NULL, NULL);
for (ei = ei_start (duplicate->succs); (e2 = ei_safe_edge (ei)); )
remove_edge (e2);
}
/* Complete the isolation step by redirecting E to reach DUPLICATE. */
e2 = redirect_edge_and_branch (e, duplicate);
if (e2)
flush_pending_stmts (e2);
/* There may be more than one statement in DUPLICATE which exhibits
undefined behaviour. Ultimately we want the first such statement in
DUPLICATE so that we're able to delete as much code as possible.
So each time we discover undefined behaviour in DUPLICATE, search for
the statement which triggers undefined behaviour. If found, then
transform the statement into a trap and delete everything after the
statement. If not found, then this particular instance was subsumed by
an earlier instance of undefined behaviour and there's nothing to do.
This is made more complicated by the fact that we have STMT, which is in
BB rather than in DUPLICATE. So we set up two iterators, one for each
block and walk forward looking for STMT in BB, advancing each iterator at
each step.
When we find STMT the second iterator should point to STMT's equivalent in
duplicate. If DUPLICATE ends before STMT is found in BB, then there's
nothing to do.
Ignore labels and debug statements. */
si = gsi_start_nondebug_after_labels_bb (bb);
si2 = gsi_start_nondebug_after_labels_bb (duplicate);
while (!gsi_end_p (si) && !gsi_end_p (si2) && gsi_stmt (si) != stmt)
{
gsi_next_nondebug (&si);
gsi_next_nondebug (&si2);
}
/* This would be an indicator that we never found STMT in BB, which should
never happen. */
gcc_assert (!gsi_end_p (si));
/* If we did not run to the end of DUPLICATE, then SI points to STMT and
SI2 points to the duplicate of STMT in DUPLICATE. Insert a trap
before SI2 and remove SI2 and all trailing statements. */
if (!gsi_end_p (si2))
insert_trap_and_remove_trailing_statements (&si2);
return duplicate;
}
/* Search the function for statements which, if executed, would cause
the program to fault such as a dereference of a NULL pointer.
Such a program can't be valid if such a statement were to execute
according to ISO standards.
We detect explicit NULL pointer dereferences as well as those implied
by a PHI argument having a NULL value which unconditionally flows into
a dereference in the same block as the PHI.
In the former case we replace the offending statement with an
unconditional trap and eliminate the outgoing edges from the statement's
basic block. This may expose secondary optimization opportunities.
In the latter case, we isolate the path(s) with the NULL PHI
feeding the dereference. We can then replace the offending statement
and eliminate the outgoing edges in the duplicate. Again, this may
expose secondary optimization opportunities.
A warning for both cases may be advisable as well.
Other statically detectable violations of the ISO standard could be
handled in a similar way, such as out-of-bounds array indexing. */
static unsigned int
gimple_ssa_isolate_erroneous_paths (void)
{
basic_block bb;
initialize_original_copy_tables ();
/* Search all the blocks for edges which, if traversed, will
result in undefined behaviour. */
cfg_altered = false;
FOR_EACH_BB (bb)
{
gimple_stmt_iterator si;
/* First look for a PHI which sets a pointer to NULL and which
is then dereferenced within BB. This is somewhat overly
conservative, but probably catches most of the interesting
cases. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
gimple phi = gsi_stmt (si);
tree lhs = gimple_phi_result (phi);
/* If the result is not a pointer, then there is no need to
examine the arguments. */
if (!POINTER_TYPE_P (TREE_TYPE (lhs)))
continue;
/* PHI produces a pointer result. See if any of the PHI's
arguments are NULL.
When we remove an edge, we want to reprocess the current
index, hence the ugly way we update I for each iteration. */
basic_block duplicate = NULL;
for (unsigned i = 0, next_i = 0;
i < gimple_phi_num_args (phi);
i = next_i)
{
tree op = gimple_phi_arg_def (phi, i);
next_i = i + 1;
if (!integer_zerop (op))
continue;
edge e = gimple_phi_arg_edge (phi, i);
imm_use_iterator iter;
gimple use_stmt;
/* We've got a NULL PHI argument. Now see if the
PHI's result is dereferenced within BB. */
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
/* We only care about uses in BB. Catching cases in
other blocks would require more complex path
isolation code. */
if (gimple_bb (use_stmt) != bb)
continue;
if (infer_nonnull_range (use_stmt, lhs))
{
duplicate = isolate_path (bb, duplicate,
e, use_stmt);
/* When we remove an incoming edge, we need to
reprocess the Ith element. */
next_i = i;
cfg_altered = true;
}
}
}
}
/* Now look at the statements in the block and see if any of
them explicitly dereference a NULL pointer. This happens
because of jump threading and constant propagation. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
/* By passing null_pointer_node, we can use infer_nonnull_range
to detect explicit NULL pointer dereferences and other uses
where a non-NULL value is required. */
if (infer_nonnull_range (stmt, null_pointer_node))
{
insert_trap_and_remove_trailing_statements (&si);
/* And finally, remove all outgoing edges from BB. */
edge e;
for (edge_iterator ei = ei_start (bb->succs);
(e = ei_safe_edge (ei)); )
remove_edge (e);
/* Ignore any more operands on this statement and
continue the statement iterator (which should
terminate its loop immediately).  */
cfg_altered = true;
break;
}
}
}
free_original_copy_tables ();
/* We scramble the CFG and loop structures a bit, clean up
appropriately. We really should incrementally update the
loop structures, in theory it shouldn't be that hard. */
if (cfg_altered)
{
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
loops_state_set (LOOPS_NEED_FIXUP);
return TODO_cleanup_cfg | TODO_update_ssa;
}
return 0;
}
static bool
gate_isolate_erroneous_paths (void)
{
/* If we do not have a suitable builtin function for the trap statement,
then do not perform the optimization. */
return (flag_isolate_erroneous_paths != 0
&& builtin_decl_explicit (BUILT_IN_TRAP) != NULL);
}
namespace {
const pass_data pass_data_isolate_erroneous_paths =
{
GIMPLE_PASS, /* type */
"isolate-paths", /* name */
OPTGROUP_NONE, /* optinfo_flags */
true, /* has_gate */
true, /* has_execute */
TV_ISOLATE_ERRONEOUS_PATHS, /* tv_id */
( PROP_cfg | PROP_ssa ), /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_verify_ssa, /* todo_flags_finish */
};
class pass_isolate_erroneous_paths : public gimple_opt_pass
{
public:
pass_isolate_erroneous_paths (gcc::context *ctxt)
: gimple_opt_pass (pass_data_isolate_erroneous_paths, ctxt)
{}
/* opt_pass methods: */
opt_pass * clone () { return new pass_isolate_erroneous_paths (m_ctxt); }
bool gate () { return gate_isolate_erroneous_paths (); }
unsigned int execute () { return gimple_ssa_isolate_erroneous_paths (); }
}; // class pass_isolate_erroneous_paths
}
gimple_opt_pass *
make_pass_isolate_erroneous_paths (gcc::context *ctxt)
{
return new pass_isolate_erroneous_paths (ctxt);
}
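
Conceptually, the pass also handles dereferences that are already explicitly NULL in the IL, without any PHI involved. A minimal, hypothetical C example of that second case (not part of the commit; exact behaviour depends on optimization level):

/* After constant propagation the store below becomes an explicit write
   through a literal NULL pointer.  infer_nonnull_range (stmt,
   null_pointer_node) should flag it, the trap insertion should replace it
   with a call to __builtin_trap (), and the block's outgoing edges should
   be removed.  */
void
zero_slot (void)
{
  int *p = (int *) 0;
  *p = 123;
}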

gcc/gimple.c

@@ -3347,3 +3347,84 @@ nonfreeing_call_p (gimple call)
return false;
}
/* Callback for walk_stmt_load_store_ops.
Return TRUE if OP will dereference the tree stored in DATA, FALSE
otherwise.
This routine only makes a superficial check for a dereference. Thus
it must only be used if it is safe to return a false negative. */
static bool
check_loadstore (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
&& operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
return true;
return false;
}
/* If OP can be inferred to be non-zero after STMT executes, return true. */
bool
infer_nonnull_range (gimple stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
if (!flag_delete_null_pointer_checks
|| !POINTER_TYPE_P (TREE_TYPE (op))
|| gimple_code (stmt) == GIMPLE_ASM)
return false;
if (walk_stmt_load_store_ops (stmt, (void *)op,
check_loadstore, check_loadstore))
return true;
if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
{
tree fntype = gimple_call_fntype (stmt);
tree attrs = TYPE_ATTRIBUTES (fntype);
for (; attrs; attrs = TREE_CHAIN (attrs))
{
attrs = lookup_attribute ("nonnull", attrs);
/* If "nonnull" wasn't specified, we know nothing about
the argument. */
if (attrs == NULL_TREE)
return false;
/* If "nonnull" applies to all the arguments, then ARG
is non-null if it's in the argument list. */
if (TREE_VALUE (attrs) == NULL_TREE)
{
for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
{
if (operand_equal_p (op, gimple_call_arg (stmt, i), 0)
&& POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i))))
return true;
}
return false;
}
/* Now see if op appears in the nonnull list. */
for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
{
int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
tree arg = gimple_call_arg (stmt, idx);
if (operand_equal_p (op, arg, 0))
return true;
}
}
}
/* If this function is marked as returning non-null, then we can
infer OP is non-null if it is used in the return statement. */
if (gimple_code (stmt) == GIMPLE_RETURN
&& gimple_return_retval (stmt)
&& operand_equal_p (gimple_return_retval (stmt), op, 0)
&& lookup_attribute ("returns_nonnull",
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
return true;
return false;
}
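
Besides loads and stores, the relocated infer_nonnull_range also draws non-NULL facts from the nonnull and returns_nonnull attributes. The following sketch (illustrative declarations only, not from the commit) shows the two attribute-driven situations it is meant to recognize:

/* A NULL value flowing into the first argument of consume can be treated
   as erroneous, because the nonnull attribute promises the argument is
   never NULL.  */
extern void consume (void *p) __attribute__ ((nonnull (1)));

static int cell;

void
caller (int flag)
{
  consume (flag ? (void *) &cell : (void *) 0);  /* NULL when flag == 0.  */
}

/* Returning NULL from a function declared returns_nonnull is the new
   GIMPLE_RETURN case: the returned value is inferred to be non-NULL, so a
   literal 0 here marks an erroneous path.  */
void *provide (void) __attribute__ ((returns_nonnull));

void *
provide (void)
{
  return (void *) 0;
}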

gcc/gimple.h

@@ -1038,6 +1038,7 @@ extern gimple_predicate rhs_predicate_for (tree);
extern tree canonicalize_cond_expr_cond (tree);
extern void dump_decl_set (FILE *, bitmap);
extern bool nonfreeing_call_p (gimple);
extern bool infer_nonnull_range (gimple, tree);
/* In trans-mem.c. */
extern void diagnose_tm_safe_errors (tree);

gcc/opts.c

@@ -493,6 +493,7 @@ static const struct default_options default_options_table[] =
{ OPT_LEVELS_2_PLUS, OPT_fvect_cost_model_, NULL, VECT_COST_MODEL_CHEAP },
{ OPT_LEVELS_2_PLUS_SPEED_ONLY, OPT_foptimize_strlen, NULL, 1 },
{ OPT_LEVELS_2_PLUS, OPT_fhoist_adjacent_loads, NULL, 1 },
{ OPT_LEVELS_2_PLUS, OPT_fisolate_erroneous_paths, NULL, 1 },
/* -O3 optimizations. */
{ OPT_LEVELS_3_PLUS, OPT_ftree_loop_distribute_patterns, NULL, 1 },

gcc/passes.def

@@ -167,9 +167,16 @@ along with GCC; see the file COPYING3. If not see
is removed, and this place fits nicely. Remember this when
trying to move or duplicate pass_dominator somewhere earlier. */
NEXT_PASS (pass_dominator);
/* At this point the majority of const/copy propagations
are exposed. Go ahead and identify paths that should never
be executed in a conforming program and isolate those paths.
This will expose more degenerate PHIs in the main path and
expose more PRE/DOM optimization opportunities. */
NEXT_PASS (pass_isolate_erroneous_paths);
 /* The only const/copy propagation opportunities left after
-   DOM should be due to degenerate PHI nodes.  So rather than
-   run the full propagators, run a specialized pass which
+   DOM and erroneous path isolation should be due to degenerate PHI nodes.
+   So rather than run the full propagators, run a specialized pass which
    only examines PHIs to discover const/copy propagation
    opportunities.  */
NEXT_PASS (pass_phi_only_cprop);

gcc/testsuite/ChangeLog

@@ -1,3 +1,11 @@
2013-11-05 Jeff Law <law@redhat.com>
* gcc.dg/pr38984.c: Add -fno-isolate-erroneous-paths.
* gcc.dg/tree-ssa/isolate-1.c: New test.
* gcc.dg/tree-ssa/isolate-2.c: New test.
* gcc.dg/tree-ssa/isolate-3.c: New test.
* gcc.dg/tree-ssa/isolate-4.c: New test.
2013-11-05 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/58997

gcc/testsuite/gcc.dg/pr38984.c

@@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fno-delete-null-pointer-checks -fdump-tree-optimized" }
/* { dg-options "-O2 -fno-delete-null-pointer-checks -fdump-tree-optimized -fno-isolate-erroneous-paths" }
* */
int f(int *p)

gcc/testsuite/gcc.dg/tree-ssa/isolate-1.c

@@ -0,0 +1,58 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-isolate-paths" } */
struct demangle_component
{
int type;
int zzz;
};
struct d_info
{
struct demangle_component *comps;
int next_comp;
int num_comps;
};
static struct demangle_component *
d_make_empty (struct d_info *di)
{
struct demangle_component *p;
if (di->next_comp >= di->num_comps)
return ((void *)0);
p = &di->comps[di->next_comp];
return p;
}
struct demangle_component *
d_type (struct d_info *di)
{
struct demangle_component *ret;
ret = d_make_empty (di);
ret->type = 42;
ret->zzz = -1;
return ret;
}
/* We're testing two aspects of isolation here.  First that isolation
occurs, second that if we have two null dereferences in a block,
we delete everything from the first dereference to the end of the
block, regardless of which comes first in the immediate use iterator. */
/* { dg-final { scan-tree-dump-times "__builtin_trap" 1 "isolate-paths"} } */
/* { dg-final { scan-tree-dump-times "->type" 1 "isolate-paths"} } */
/* { dg-final { scan-tree-dump-times "->zzz" 1 "isolate-paths"} } */
/* { dg-final { cleanup-tree-dump "isolate-paths" } } */

gcc/testsuite/gcc.dg/tree-ssa/isolate-2.c

@@ -0,0 +1,43 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-isolate-paths -fdump-tree-phicprop1" } */
int z;
int y;
int * foo(int a) __attribute__((returns_nonnull));
int * bar(void) __attribute__((returns_nonnull));
int *
foo(int a)
{
switch (a)
{
case 0:
return &z;
default:
return (int *)0;
}
}
int *
bar (void)
{
return 0;
}
/* We're testing that the path isolation code can take advantage of the
returns non-null attribute to isolate a path where NULL flows into
a return statement.  We test this twice, once where the NULL flows
from a PHI, the second with an explicit return 0 in the IL.
We also verify that after isolation phi-cprop simplifies the
return statement so that it returns &z directly.  */
/* { dg-final { scan-tree-dump-times "__builtin_trap" 2 "isolate-paths"} } */
/* { dg-final { scan-tree-dump-times "return &z;" 1 "phicprop1"} } */
/* { dg-final { cleanup-tree-dump "isolate-paths" } } */
/* { dg-final { cleanup-tree-dump "phicprop1" } } */

gcc/testsuite/gcc.dg/tree-ssa/isolate-3.c

@@ -0,0 +1,65 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-isolate-paths" } */
typedef long unsigned int size_t;
extern void *memset (void *__s, int __c, size_t __n)
__attribute__ ((__nothrow__, __leaf__)) __attribute__ ((__nonnull__ (1)));
struct rtx_def;
typedef struct rtx_def *rtx;
typedef struct VEC_rtx_base
{
unsigned num;
unsigned alloc;
rtx vec[1];
} VEC_rtx_base;
static __inline__ rtx *
VEC_rtx_base_address (VEC_rtx_base * vec_)
{
return vec_ ? vec_->vec : 0;
}
typedef struct VEC_rtx_gc
{
VEC_rtx_base base;
} VEC_rtx_gc;
static __inline__ void
VEC_rtx_gc_safe_grow (VEC_rtx_gc ** vec_, int size_, const char *file_,
unsigned line_, const char *function_)
{
((*vec_) ? &(*vec_)->base : 0)->num = size_;
}
static __inline__ void
VEC_rtx_gc_safe_grow_cleared (VEC_rtx_gc ** vec_, int size_,
const char *file_, unsigned line_,
const char *function_, int oldsize)
{
VEC_rtx_gc_safe_grow (vec_, size_, file_, line_, function_);
memset (&(VEC_rtx_base_address ((*vec_) ? &(*vec_)->base : 0))[oldsize], 0,
sizeof (rtx) * (size_ - oldsize));
}
static VEC_rtx_gc *reg_base_value;
void
init_alias_analysis (void)
{
unsigned int maxreg = max_reg_num ();
(VEC_rtx_gc_safe_grow_cleared
(&(reg_base_value), maxreg, "../../../gcc-4.6.0/gcc/alias.c", 2755,
__FUNCTION__, arf ()));
}
/* This is an example of how a NULL pointer dereference can show up
without a PHI.  Note VEC_rtx_gc_safe_grow.  If an earlier pass
(such as VRP) isolates the NULL path for some reason or another
we end up with an explicit NULL dereference in the IL.  Yes, it
started with a PHI, but by the time the path isolation code runs
it's explicit in the IL.  */
/* { dg-final { scan-tree-dump-times "__builtin_trap" 1 "isolate-paths"} } */
/* { dg-final { cleanup-tree-dump "isolate-paths" } } */

gcc/testsuite/gcc.dg/tree-ssa/isolate-4.c

@@ -0,0 +1,32 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-isolate-paths -fdump-tree-phicprop1" } */
extern void foo(void *) __attribute__ ((__nonnull__ (1)));
int z;
void
com (int a)
{
foo (a == 42 ? &z : (void *) 0);
}
void
bar (void)
{
foo ((void *)0);
}
/* We're testing that the path isolation code can take advantage of the
nonnull argument attribute to isolate a path where NULL flows into
an argument that must not be NULL.
We also verify that after isolation phi-cprop simplifies the
call so that it passes &z directly.  */
/* { dg-final { scan-tree-dump-times "__builtin_trap" 2 "isolate-paths"} } */
/* { dg-final { scan-tree-dump-times "foo .&z.;" 1 "phicprop1"} } */
/* { dg-final { cleanup-tree-dump "isolate-paths" } } */
/* { dg-final { cleanup-tree-dump "phicprop1" } } */

gcc/timevar.def

@@ -144,6 +144,7 @@ DEFTIMEVAR (TV_TREE_SSA_INCREMENTAL , "tree SSA incremental")
DEFTIMEVAR (TV_TREE_OPS , "tree operand scan")
DEFTIMEVAR (TV_TREE_SSA_DOMINATOR_OPTS , "dominator optimization")
DEFTIMEVAR (TV_TREE_SRA , "tree SRA")
DEFTIMEVAR (TV_ISOLATE_ERRONEOUS_PATHS , "isolate erroneous paths")
DEFTIMEVAR (TV_TREE_CCP , "tree CCP")
DEFTIMEVAR (TV_TREE_PHI_CPROP , "tree PHI const/copy prop")
DEFTIMEVAR (TV_TREE_SPLIT_EDGES , "tree split crit edges")

gcc/tree-pass.h

@@ -425,6 +425,7 @@ extern gimple_opt_pass *make_pass_sink_code (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_fre (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_check_data_deps (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_copy_prop (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_isolate_erroneous_paths (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_vrp (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_uncprop (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_return_slot (gcc::context *ctxt);

gcc/tree-ssa.c

@@ -236,100 +236,6 @@ flush_pending_stmts (edge e)
redirect_edge_var_map_clear (e);
}
/* Data structure used to count the number of dereferences to PTR
inside an expression. */
struct count_ptr_d
{
tree ptr;
unsigned num_stores;
unsigned num_loads;
};
/* Helper for count_uses_and_derefs. Called by walk_tree to look for
(ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
/* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
pointer 'ptr' is *not* dereferenced, it is simply used to compute
the address of 'fld' as 'ptr + offsetof(fld)'. */
if (TREE_CODE (*tp) == ADDR_EXPR)
{
*walk_subtrees = 0;
return NULL_TREE;
}
if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
{
if (wi_p->is_lhs)
count_p->num_stores++;
else
count_p->num_loads++;
}
return NULL_TREE;
}
/* Count the number of direct and indirect uses for pointer PTR in
statement STMT. The number of direct uses is stored in
*NUM_USES_P. Indirect references are counted separately depending
on whether they are store or load operations. The counts are
stored in *NUM_STORES_P and *NUM_LOADS_P. */
void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
unsigned *num_loads_p, unsigned *num_stores_p)
{
ssa_op_iter i;
tree use;
*num_uses_p = 0;
*num_loads_p = 0;
*num_stores_p = 0;
/* Find out the total number of uses of PTR in STMT. */
FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
if (use == ptr)
(*num_uses_p)++;
/* Now count the number of indirect references to PTR. This is
truly awful, but we don't have much choice. There are no parent
pointers inside INDIRECT_REFs, so an expression like
'*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
find all the indirect and direct uses of x_1 inside. The only
shortcut we can take is the fact that GIMPLE only allows
INDIRECT_REFs inside the expressions below. */
if (is_gimple_assign (stmt)
|| gimple_code (stmt) == GIMPLE_RETURN
|| gimple_code (stmt) == GIMPLE_ASM
|| is_gimple_call (stmt))
{
struct walk_stmt_info wi;
struct count_ptr_d count;
count.ptr = ptr;
count.num_stores = 0;
count.num_loads = 0;
memset (&wi, 0, sizeof (wi));
wi.info = &count;
walk_gimple_op (stmt, count_ptr_derefs, &wi);
*num_stores_p = count.num_stores;
*num_loads_p = count.num_loads;
}
gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
expression with a different value.

gcc/tree-ssa.h

@@ -39,8 +39,6 @@ extern edge_var_map_vector *redirect_edge_var_map_vector (edge);
extern void redirect_edge_var_map_destroy (void);
extern edge ssa_redirect_edge (edge, basic_block);
extern void flush_pending_stmts (edge);
extern void count_uses_and_derefs (tree, gimple, unsigned *, unsigned *,
unsigned *);
extern void gimple_replace_ssa_lhs (gimple, tree);
extern tree target_for_debug_bind (tree);
extern void insert_debug_temp_for_var_def (gimple_stmt_iterator *, tree);

gcc/tree-vrp.c

@@ -4476,57 +4476,6 @@ fp_predicate (gimple stmt)
return FLOAT_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)));
}
/* If OP can be inferred to be non-zero after STMT executes, return true. */
static bool
infer_nonnull_range (gimple stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
if (!flag_delete_null_pointer_checks
|| !POINTER_TYPE_P (TREE_TYPE (op))
|| gimple_code (stmt) == GIMPLE_ASM)
return false;
unsigned num_uses, num_loads, num_stores;
count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
if (num_loads + num_stores > 0)
return true;
if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
{
tree fntype = gimple_call_fntype (stmt);
tree attrs = TYPE_ATTRIBUTES (fntype);
for (; attrs; attrs = TREE_CHAIN (attrs))
{
attrs = lookup_attribute ("nonnull", attrs);
/* If "nonnull" wasn't specified, we know nothing about
the argument. */
if (attrs == NULL_TREE)
return false;
/* If "nonnull" applies to all the arguments, then ARG
is non-null. */
if (TREE_VALUE (attrs) == NULL_TREE)
return true;
/* Now see if op appears in the nonnull list. */
for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
{
int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
tree arg = gimple_call_arg (stmt, idx);
if (op == arg)
return true;
}
}
}
return false;
}
/* If the range of values taken by OP can be inferred after STMT executes,
return the comparison code (COMP_CODE_P) and value (VAL_P) that
describes the inferred range. Return true if a range could be