gengtype.c (write_field_root): Avoid out-of-scope access of newv.

	* gengtype.c (write_field_root): Avoid out-of-scope access of newv.

	* tree-stdarg.c (execute_optimize_stdarg): Accept clobbers.

	* tree.h (TREE_CLOBBER_P): New macro.
	* gimple.h (gimple_clobber_p): New inline function.
	* gimplify.c (gimplify_bind_expr): Add clobbers for all variables
	that go out of scope and live in memory.
	* tree-ssa-operands.c (get_expr_operands): Transfer volatility also
	for constructors.
	* cfgexpand.c (decl_to_stack_part): New static variable.
	(add_stack_var): Allocate it, and remember mapping.
	(fini_vars_expansion): Deallocate it.
	(stack_var_conflict_p): Add early outs.
	(visit_op, visit_conflict, add_scope_conflicts_1,
	add_scope_conflicts): New static functions.
	(expand_used_vars_for_block): Don't call add_stack_var_conflict, tidy.
	(expand_used_vars): Add scope conflicts.
	(expand_gimple_stmt_1): Expand clobbers to nothing.
	(expand_debug_expr): Ditto.

	* tree-pretty-print.c (dump_generic_node): Dump clobbers nicely.
	* tree-ssa-live.c (remove_unused_locals): Remove clobbers that
	refer to otherwise unused locals.
	* tree-sra.c (build_accesses_from_assign): Ignore clobbers.
	* tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Clobbers of
	SSA names aren't necessary.
	(propagate_necessity): Accept and ignore constructors on the rhs,
	tidy.
	* gimple.c (walk_gimple_op): Accept constructors like mem_rhs.
	* tree-ssa-structalias.c (find_func_aliases): Clobbers don't store
	any known value.
	* tree-ssa-sccvn.c (vn_reference_lookup_3): Ditto, in particular they
	don't zero-initialize something.
	* tree-ssa-phiopt.c (cond_if_else_store_replacement_1): Ignore
	clobber RHS, we don't want PHI nodes with those.

testsuite/
	* gcc.dg/tree-ssa/20031015-1.c: Adjust.
	* g++.dg/tree-ssa/ehcleanup-1.C: Ditto.
	* g++.dg/eh/builtin1.C: Rewrite to not use local variables.
	* g++.dg/eh/builtin2.C: Ditto.
	* g++.dg/eh/builtin3.C: Ditto.

From-SVN: r181172
commit 47598145be
parent a58a38b32c
Author: Michael Matz <matz@suse.de>
Date:   2011-11-08 16:47:16 +0000

22 changed files with 406 additions and 65 deletions
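
In user-visible terms, the end-of-scope clobbers give the RTL expander a
precise liveness range for every stack variable, so variables whose ranges
do not overlap may share a stack slot.  A minimal sketch of the kind of
code this affects (illustrative only, not part of the commit; all names
invented):

    /* test.c -- e.g. gcc -O1 -fdump-tree-gimple test.c  */
    struct S { char buf[4096]; };
    extern void use (struct S *);

    void
    f (void)
    {
      {
        struct S a;   /* address-taken, so it must live in memory */
        use (&a);
      }               /* gimplify now appends:  a ={v} {CLOBBER};  */
      {
        struct S b;   /* b's live range starts after a's clobber,  */
        use (&b);     /* so b may reuse a's stack slot             */
      }
    }

The clobber itself is a volatile empty CONSTRUCTOR on the right-hand side
of an assignment (see TREE_CLOBBER_P in the tree.h hunk below);
tree-pretty-print.c renders it as {CLOBBER}.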

gcc/ChangeLog

@@ -1,3 +1,42 @@
2011-11-08 Michael Matz <matz@suse.de>

	[entries identical to the ChangeLog in the commit message above]

2011-11-08 Jakub Jelinek <jakub@redhat.com>
* config/i386/i386.c (ix86_preferred_simd_mode): Even for TARGET_AVX

gcc/cfgexpand.c

@@ -135,7 +135,7 @@ set_rtl (tree t, rtx x)
/* If we don't yet have something recorded, just record it now. */
if (!DECL_RTL_SET_P (var))
SET_DECL_RTL (var, x);
-      /* If we have it set alrady to "multiple places" don't
+      /* If we have it set already to "multiple places" don't
change this. */
else if (DECL_RTL (var) == pc_rtx)
;
@@ -184,6 +184,7 @@ struct stack_var
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;
/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
is non-decreasing. */
@@ -262,7 +263,11 @@ add_stack_var (tree decl)
stack_vars
= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
}
if (!decl_to_stack_part)
decl_to_stack_part = pointer_map_create ();
v = &stack_vars[stack_vars_num];
* (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
v->decl = decl;
v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
@@ -309,6 +314,14 @@ stack_var_conflict_p (size_t x, size_t y)
{
struct stack_var *a = &stack_vars[x];
struct stack_var *b = &stack_vars[y];
if (x == y)
return false;
/* Partitions containing an SSA name result from gimple registers
with things like unsupported modes. They are top-level and
hence conflict with everything else. */
if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
return true;
if (!a->conflicts || !b->conflicts)
return false;
return bitmap_bit_p (a->conflicts, y);
@@ -379,6 +392,163 @@ add_alias_set_conflicts (void)
}
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var,
   enter its partition number into bitmap DATA.  */
static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
if (op
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
if (v)
bitmap_set_bit (active, *v);
}
return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var,
   record conflicts between it and all other currently active partitions
   from bitmap DATA.  */
static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
if (op
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
size_t *v =
(size_t *) pointer_map_contains (decl_to_stack_part, op);
if (v && bitmap_set_bit (active, *v))
{
size_t num = *v;
bitmap_iterator bi;
unsigned i;
gcc_assert (num < stack_vars_num);
EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
add_stack_var_conflict (num, i);
}
}
return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
at the end of BB, leaving the result in WORK. We're called to generate
conflicts when FOR_CONFLICT is true, otherwise we're just tracking
liveness. */
static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
edge e;
edge_iterator ei;
gimple_stmt_iterator gsi;
bool (*visit)(gimple, tree, void *);
bitmap_clear (work);
FOR_EACH_EDGE (e, ei, bb->preds)
bitmap_ior_into (work, (bitmap)e->src->aux);
if (for_conflict)
{
/* We need to add conflicts for everything live at the start of
   this block.  Unlike classical liveness for named objects we can't
rely on seeing a def/use of the names we're interested in.
There might merely be indirect loads/stores. We'd not add any
conflicts for such partitions. */
bitmap_iterator bi;
unsigned i;
EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
{
unsigned j;
bitmap_iterator bj;
EXECUTE_IF_SET_IN_BITMAP (work, i, j, bj)
add_stack_var_conflict (i, j);
}
visit = visit_conflict;
}
else
visit = visit_op;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt))
walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
if (gimple_clobber_p (stmt))
{
tree lhs = gimple_assign_lhs (stmt);
size_t *v;
/* Nested function lowering might introduce LHSs
that are COMPONENT_REFs. */
if (TREE_CODE (lhs) != VAR_DECL)
continue;
if (DECL_RTL_IF_SET (lhs) == pc_rtx
&& (v = (size_t *)
pointer_map_contains (decl_to_stack_part, lhs)))
bitmap_clear_bit (work, *v);
}
else if (!is_gimple_debug (stmt))
walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
}
}
/* Generate stack partition conflicts between all partitions that are
simultaneously live. */
static void
add_scope_conflicts (void)
{
basic_block bb;
bool changed;
bitmap work = BITMAP_ALLOC (NULL);
/* We approximate the life range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope
death clobber added by gimplify as ending point(s) of the range.
This overapproximates in the case where we, for instance, moved an
address-taken operation upward without also moving a dereference to
it upwards.  But it's conservatively correct as a variable can never
hold values before its name is mentioned at least once.
We then do a mostly classical bitmap liveness algorithm.  */
FOR_ALL_BB (bb)
bb->aux = BITMAP_ALLOC (NULL);
changed = true;
while (changed)
{
changed = false;
FOR_EACH_BB (bb)
{
bitmap active = (bitmap)bb->aux;
add_scope_conflicts_1 (bb, work, false);
if (bitmap_ior_into (active, work))
changed = true;
}
}
FOR_EACH_BB (bb)
add_scope_conflicts_1 (bb, work, true);
BITMAP_FREE (work);
FOR_ALL_BB (bb)
BITMAP_FREE (bb->aux);
}
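
/* (Summary sketch, not from the commit: the fixpoint loop above is
   classical forward liveness over the stack-variable partitions,

       live-in (BB)  = union of live-out (PRED) over all predecessors
       live-out (BB) = live-in (BB) updated by walking BB's statements,

   where the first mention of a partition's decl sets its bit, a gen,
   and its end-of-scope clobber clears it, a kill.  The final call to
   add_scope_conflicts_1 with for_conflict = true then records a
   conflict for every pair of partitions that is live at the same
   time.)  */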
/* A subroutine of partition_stack_vars. A comparison function for qsort,
sorting an array of indices by the properties of the object. */
@@ -1095,11 +1265,8 @@ expand_one_var (tree var, bool toplevel, bool really_expand)
static void
expand_used_vars_for_block (tree block, bool toplevel)
{
-  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
tree t;
-  old_sv_num = toplevel ? 0 : stack_vars_num;
/* Expand all variables at this level. */
for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
if (TREE_USED (t)
@@ -1107,24 +1274,9 @@ expand_used_vars_for_block (tree block, bool toplevel)
|| !DECL_NONSHAREABLE (t)))
expand_one_var (t, toplevel, true);
-  this_sv_num = stack_vars_num;
/* Expand all variables at containing levels. */
for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
expand_used_vars_for_block (t, false);
-  /* Since we do not track exact variable lifetimes (which is not even
-     possible for variables whose address escapes), we mirror the block
-     tree in the interference graph.  Here we cause all variables at this
-     level, and all sublevels, to conflict.  */
-  if (old_sv_num < this_sv_num)
-    {
-      new_sv_num = stack_vars_num;
-      for (i = old_sv_num; i < new_sv_num; ++i)
-	for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
-	  add_stack_var_conflict (i, j);
-    }
}
/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
@@ -1312,6 +1464,8 @@ fini_vars_expansion (void)
XDELETEVEC (stack_vars_sorted);
stack_vars = NULL;
stack_vars_alloc = stack_vars_num = 0;
pointer_map_destroy (decl_to_stack_part);
decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
@@ -1466,6 +1620,7 @@ expand_used_vars (void)
if (stack_vars_num > 0)
{
add_scope_conflicts ();
/* Due to the way alias sets work, no variables with non-conflicting
alias sets may be assigned the same address. Add conflicts to
reflect this. */
@@ -2008,8 +2163,13 @@ expand_gimple_stmt_1 (gimple stmt)
== GIMPLE_SINGLE_RHS);
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
SET_EXPR_LOCATION (rhs, gimple_location (stmt));
-	  expand_assignment (lhs, rhs,
-			     gimple_assign_nontemporal_move_p (stmt));
+	  if (TREE_CLOBBER_P (rhs))
+	    /* This is a clobber marking that the LHS goes out of
+	       scope here.  */
+	    ;
+	  else
+	    expand_assignment (lhs, rhs,
+			       gimple_assign_nontemporal_move_p (stmt));
}
else
{
@@ -3199,7 +3359,9 @@ expand_debug_expr (tree exp)
/* Fall through. */
case CONSTRUCTOR:
-      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
+      if (TREE_CLOBBER_P (exp))
+	return NULL;
+      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
{
unsigned i;
tree val;

gcc/gengtype.c

@@ -3651,14 +3651,13 @@ write_field_root (outf_p f, pair_p v, type_p type, const char *name,
int has_length, struct fileloc *line, const char *if_marked,
bool emit_pch, type_p field_type, const char *field_name)
{
-  struct pair newv;
/* If the field reference is relative to V, rather than to some
subcomponent of V, we can mark any subarrays with a single stride.
We're effectively treating the field as a global variable in its
own right. */
if (v && type == v->type)
{
+      struct pair newv;
newv = *v;
newv.type = field_type;
newv.name = ACONCAT ((v->name, ".", field_name, NULL));

gcc/gimple.c

@@ -1499,7 +1499,9 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
{
/* If the RHS has more than 1 operand, it is not appropriate
for the memory. */
-	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
+	  wi->val_only = !(is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
+			   || TREE_CODE (gimple_assign_rhs1 (stmt))
+			      == CONSTRUCTOR)
|| !gimple_assign_single_p (stmt);
wi->is_lhs = true;
}

gcc/gimple.h

@@ -2053,6 +2053,14 @@ gimple_assign_cast_p (gimple s)
return false;
}
/* Return true if S is a clobber statement. */
static inline bool
gimple_clobber_p (gimple s)
{
return gimple_assign_single_p (s)
&& TREE_CLOBBER_P (gimple_assign_rhs1 (s));
}
/* Return true if GS is a GIMPLE_CALL. */
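
A typical consumer of the new predicate (an illustrative sketch, not code
from this commit) simply skips clobbers while scanning statements, since a
clobber neither computes nor stores a usable value:

    gimple_stmt_iterator gsi;

    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        /* End-of-scope clobber: nothing is computed or stored.  */
        if (gimple_clobber_p (stmt))
          continue;
        /* ... handle all other statements here ...  */
      }

Several of the pass changes below (tree-sra.c, tree-ssa-phiopt.c,
tree-ssa-sccvn.c, tree-stdarg.c) apply this pattern in one form or another.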

gcc/gimplify.c

@@ -1135,7 +1135,8 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
gimple gimple_bind;
-  gimple_seq body;
+  gimple_seq body, cleanup;
+  gimple stack_save;
tree temp = voidify_wrapper_expr (bind_expr, NULL);
@@ -1181,22 +1182,50 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
gimple_bind_set_body (gimple_bind, body);
cleanup = NULL;
stack_save = NULL;
if (gimplify_ctxp->save_stack)
{
-      gimple stack_save, stack_restore, gs;
-      gimple_seq cleanup, new_body;
+      gimple stack_restore;
/* Save stack on entry and restore it on exit. Add a try_finally
block to achieve this. Note that mudflap depends on the
format of the emitted code: see mx_register_decls(). */
build_stack_save_restore (&stack_save, &stack_restore);
-      cleanup = new_body = NULL;
gimplify_seq_add_stmt (&cleanup, stack_restore);
}
/* Add clobbers for all variables that go out of scope. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL
&& !is_global_var (t)
&& DECL_CONTEXT (t) == current_function_decl
&& !DECL_HARD_REGISTER (t)
&& !TREE_THIS_VOLATILE (t)
&& !DECL_HAS_VALUE_EXPR_P (t)
/* Only care for variables that have to be in memory. Others
will be rewritten into SSA names, hence moved to the top-level. */
&& needs_to_live_in_memory (t))
{
tree clobber = build_constructor (TREE_TYPE (t), NULL);
TREE_THIS_VOLATILE (clobber) = 1;
gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
}
}
if (cleanup)
{
gimple gs;
gimple_seq new_body;
new_body = NULL;
gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
GIMPLE_TRY_FINALLY);
-      gimplify_seq_add_stmt (&new_body, stack_save);
+      if (stack_save)
+	gimplify_seq_add_stmt (&new_body, stack_save);
gimplify_seq_add_stmt (&new_body, gs);
gimple_bind_set_body (gimple_bind, new_body);
}
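
The net effect on the gimplifier's output, for a block with an addressable
local, looks roughly like this (hand-written dump excerpt; struct S, s and
use are invented names):

    void f ()
    {
      struct S s;

      try
        {
          use (&s);
        }
      finally
        {
          s ={v} {CLOBBER};
        }
    }

When the bind block also needs a stack save/restore (e.g. for a
variable-length array), the restore ends up in the same "finally" block,
ahead of the clobbers.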

gcc/testsuite/ChangeLog

@@ -1,3 +1,11 @@
2011-11-08 Michael Matz <matz@suse.de>

	[entries identical to the testsuite ChangeLog in the commit message
	above]

2011-11-08 Jakub Jelinek <jakub@redhat.com>
* gcc.target/i386/avx-cvt-2.c: Adjust for 32-byte integer vectors

gcc/testsuite/g++.dg/eh/builtin1.C

@@ -6,20 +6,26 @@
extern "C" int printf (const char *, ...);
struct A { A (); ~A (); int i; };
extern void callme (void) throw();
int
-foo ()
+foo (int i)
{
-  A a;
-  printf ("foo %d\n", a.i);
+  try {
+    printf ("foo %d\n", i);
+  } catch (...) {
+    callme();
+  }
}
int
-bar ()
+bar (int i)
{
-  A a;
-  __builtin_printf ("foo %d\n", a.i);
+  try {
+    __builtin_printf ("foo %d\n", i);
+  } catch (...) {
+    callme();
+  }
}
/* { dg-final { scan-tree-dump-times "resx" 2 "eh" } } */

gcc/testsuite/g++.dg/eh/builtin2.C

@@ -5,20 +5,26 @@
extern "C" int printf (const char *, ...) throw();
struct A { A (); ~A (); int i; };
extern void callme (void) throw();
int
-foo ()
+foo (int i)
{
-  A a;
-  printf ("foo %d\n", a.i);
+  try {
+    printf ("foo %d\n", i);
+  } catch (...) {
+    callme();
+  }
}
int
-bar ()
+bar (int i)
{
-  A a;
-  __builtin_printf ("foo %d\n", a.i);
+  try {
+    __builtin_printf ("foo %d\n", i);
+  } catch (...) {
+    callme();
+  }
}
/* { dg-final { scan-tree-dump-times "resx" 0 "eh" } } */

gcc/testsuite/g++.dg/eh/builtin3.C

@@ -3,13 +3,16 @@
// { dg-do compile }
// { dg-options "-fdump-tree-eh" }
struct A { A (); ~A (); int i; };
extern void callme (void) throw();
int
-bar ()
+bar (int i)
{
-  A a;
-  __builtin_printf ("foo %d\n", a.i);
+  try {
+    __builtin_printf ("foo %d\n", i);
+  } catch (...) {
+    callme();
+  }
}
/* { dg-final { scan-tree-dump-times "resx" 1 "eh" } } */

gcc/testsuite/g++.dg/tree-ssa/ehcleanup-1.C

@@ -16,9 +16,9 @@ t (void)
can_throw ();
}
// We ought to remove implicit cleanup, since destructor is empty.
// { dg-final { scan-tree-dump-times "Empty EH handler" 1 "ehcleanup1" } }
// { dg-final { scan-tree-dump-times "Empty EH handler" 2 "ehcleanup1" } }
//
// And as a result also contained control flow.
// { dg-final { scan-tree-dump-times "Removing unreachable" 2 "ehcleanup1" } }
// { dg-final { scan-tree-dump-times "Removing unreachable" 4 "ehcleanup1" } }
//
// { dg-final { cleanup-tree-dump "ehcleanup1" } }

gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c

@@ -13,6 +13,6 @@ main(void)
return 0;
}
-/* The VDEF comes from the initial assignment and the asm.  */
-/* { dg-final { scan-tree-dump-times "DEF" 2 "alias" } } */
+/* The VDEF comes from the initial assignment, the asm, and the clobber.  */
+/* { dg-final { scan-tree-dump-times "DEF" 3 "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */

gcc/tree-pretty-print.c

@@ -1271,8 +1271,10 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
bool is_array_init = false;
double_int curidx = double_int_zero;
pp_character (buffer, '{');
-	if (TREE_CODE (TREE_TYPE (node)) == RECORD_TYPE
-	    || TREE_CODE (TREE_TYPE (node)) == UNION_TYPE)
+	if (TREE_CLOBBER_P (node))
+	  pp_string (buffer, "CLOBBER");
+	else if (TREE_CODE (TREE_TYPE (node)) == RECORD_TYPE
+		 || TREE_CODE (TREE_TYPE (node)) == UNION_TYPE)
is_struct_init = true;
else if (TREE_CODE (TREE_TYPE (node)) == ARRAY_TYPE
&& TYPE_DOMAIN (TREE_TYPE (node))

gcc/tree-sra.c

@@ -1103,7 +1103,9 @@ build_accesses_from_assign (gimple stmt)
tree lhs, rhs;
struct access *lacc, *racc;
-  if (!gimple_assign_single_p (stmt))
+  if (!gimple_assign_single_p (stmt)
+      /* Scope clobbers don't influence scalarization.  */
+      || gimple_clobber_p (stmt))
return false;
lhs = gimple_assign_lhs (stmt);

gcc/tree-ssa-dce.c

@@ -351,6 +351,12 @@ mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
mark_stmt_necessary (stmt, true);
break;
case GIMPLE_ASSIGN:
if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
&& TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
return;
break;
default:
break;
}
@@ -917,19 +923,17 @@ propagate_necessity (struct edge_list *el)
else if (gimple_assign_single_p (stmt))
{
tree rhs;
-	  bool rhs_aliased = false;
/* If this is a load mark things necessary. */
rhs = gimple_assign_rhs1 (stmt);
if (TREE_CODE (rhs) != SSA_NAME
-	      && !is_gimple_min_invariant (rhs))
+	      && !is_gimple_min_invariant (rhs)
+	      && TREE_CODE (rhs) != CONSTRUCTOR)
{
if (!ref_may_be_aliased (rhs))
mark_aliased_reaching_defs_necessary (stmt, rhs);
else
-		rhs_aliased = true;
+		mark_all_reaching_defs_necessary (stmt);
}
-	  if (rhs_aliased)
-	    mark_all_reaching_defs_necessary (stmt);
}
else if (gimple_code (stmt) == GIMPLE_RETURN)
{
@@ -937,7 +941,8 @@ propagate_necessity (struct edge_list *el)
/* A return statement may perform a load. */
if (rhs
&& TREE_CODE (rhs) != SSA_NAME
-	      && !is_gimple_min_invariant (rhs))
+	      && !is_gimple_min_invariant (rhs)
+	      && TREE_CODE (rhs) != CONSTRUCTOR)
{
if (!ref_may_be_aliased (rhs))
mark_aliased_reaching_defs_necessary (stmt, rhs);
@@ -955,6 +960,7 @@ propagate_necessity (struct edge_list *el)
tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
if (TREE_CODE (op) != SSA_NAME
&& !is_gimple_min_invariant (op)
&& TREE_CODE (op) != CONSTRUCTOR
&& !ref_may_be_aliased (op))
mark_aliased_reaching_defs_necessary (stmt, op);
}

gcc/tree-ssa-live.c

@@ -688,6 +688,7 @@ remove_unused_locals (void)
referenced_var_iterator rvi;
bitmap global_unused_vars = NULL;
unsigned srcidx, dstidx, num;
bool have_local_clobbers = false;
/* Removing declarations from lexical blocks when not optimizing is
not only a waste of time, it actually causes differences in stack
@@ -720,6 +721,12 @@ remove_unused_locals (void)
if (is_gimple_debug (stmt))
continue;
if (gimple_clobber_p (stmt))
{
have_local_clobbers = true;
continue;
}
if (b)
TREE_USED (b) = true;
@@ -753,6 +760,41 @@ remove_unused_locals (void)
TREE_USED (e->goto_block) = true;
}
/* We take a two-pass approach to the out-of-scope clobbers.  We want
   to remove them if they are the only references to a local variable,
   but we want to retain them when there's any other reference.  So the
   first pass ignores them, and the second pass (if there were any)
   tries to remove them.  */
if (have_local_clobbers)
FOR_EACH_BB (bb)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
gimple stmt = gsi_stmt (gsi);
tree b = gimple_block (stmt);
if (gimple_clobber_p (stmt))
{
tree lhs = gimple_assign_lhs (stmt);
lhs = get_base_address (lhs);
if (TREE_CODE (lhs) == SSA_NAME)
lhs = SSA_NAME_VAR (lhs);
if (DECL_P (lhs) && (!var_ann (lhs) || !is_used_p (lhs)))
{
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
release_defs (stmt);
continue;
}
if (b)
TREE_USED (b) = true;
}
gsi_next (&gsi);
}
}
cfun->has_local_explicit_reg_vars = false;
/* Remove unmarked local vars from local_decls. */

gcc/tree-ssa-operands.c

@@ -956,6 +956,12 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
	/* A volatile constructor is actually TREE_CLOBBER_P; transfer
	   the volatility to the statement.  We don't use TREE_CLOBBER_P
	   here, to mirror the other uses of THIS_VOLATILE in this file.  */
if (TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)

gcc/tree-ssa-phiopt.c

@@ -1318,8 +1318,10 @@ cond_if_else_store_replacement_1 (basic_block then_bb, basic_block else_bb,
if (then_assign == NULL
|| !gimple_assign_single_p (then_assign)
+      || gimple_clobber_p (then_assign)
|| else_assign == NULL
-      || !gimple_assign_single_p (else_assign))
+      || !gimple_assign_single_p (else_assign)
+      || gimple_clobber_p (else_assign))
return false;
lhs = gimple_assign_lhs (then_assign);
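
For reference, the transformation guarded here sinks two conditional
stores into one unconditional store fed by a PHI; a rough sketch (notation
mine, not from the commit):

    /* Before:                     After:
         if (cond_1)                 if (cond_1) ... else ...
           *p_2 = a_3;               # tmp_5 = PHI <a_3, b_4>
         else                        *p_2 = tmp_5;
           *p_2 = b_4;                                          */

A clobber right-hand side carries no value that could flow through such a
PHI, hence the early-outs added above.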

gcc/tree-ssa-sccvn.c

@@ -1388,8 +1388,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
if (maxsize == -1)
return (void *)-1;
/* We can't deduce anything useful from clobbers. */
if (gimple_clobber_p (def_stmt))
return (void *)-1;
/* def_stmt may-defs *ref. See if we can derive a value for *ref
-     from that defintion.
+     from that definition.
1) Memset. */
if (is_gimple_reg_type (vr->type)
&& gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)

gcc/tree-ssa-structalias.c

@@ -4484,7 +4484,11 @@ find_func_aliases (gimple origt)
tree lhsop = gimple_assign_lhs (t);
tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
-      if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
+      if (rhsop && TREE_CLOBBER_P (rhsop))
+	/* Ignore clobbers, they don't actually store anything into
+	   the LHS.  */
+	;
+      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
do_structure_copy (lhsop, rhsop);
else
{

gcc/tree-stdarg.c

@@ -872,8 +872,11 @@ execute_optimize_stdarg (void)
if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
== GIMPLE_SINGLE_RHS)
{
/* Check for ap ={v} {}. */
if (TREE_CLOBBER_P (rhs))
continue;
/* Check for ap[0].field = temp. */
-	      if (va_list_counter_struct_op (&si, lhs, rhs, true))
+	      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
continue;
/* Check for temp = ap[0].field. */

gcc/tree.h

@@ -1643,6 +1643,14 @@ struct GTY(()) tree_vec {
#define CONSTRUCTOR_BITFIELD_P(NODE) \
(DECL_BIT_FIELD (FIELD_DECL_CHECK (NODE)) && DECL_MODE (NODE) != BLKmode)
/* True if NODE is a clobber right hand side, an expression of indeterminate
value that clobbers the LHS in a copy instruction. We use a volatile
empty CONSTRUCTOR for this, as it matches most of the necessary semantics.
In particular, the volatile flag causes us not to prematurely remove
such clobber instructions.  */
#define TREE_CLOBBER_P(NODE) \
(TREE_CODE (NODE) == CONSTRUCTOR && TREE_THIS_VOLATILE (NODE))
/* A single element of a CONSTRUCTOR. VALUE holds the actual value of the
element.  INDEX can optionally designate the position of VALUE: in arrays,
it is the index where VALUE has to be placed; in structures, it is the