2006-02-07 19:31:27 +01:00
|
|
|
/* SSA Jump Threading
|
2012-11-07 08:50:01 +01:00
|
|
|
Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
|
2011-01-03 21:52:22 +01:00
|
|
|
Free Software Foundation, Inc.
|
2006-02-07 19:31:27 +01:00
|
|
|
Contributed by Jeff Law <law@redhat.com>
|
|
|
|
|
|
|
|
This file is part of GCC.
|
|
|
|
|
|
|
|
GCC is free software; you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
2007-07-26 10:37:01 +02:00
|
|
|
the Free Software Foundation; either version 3, or (at your option)
|
2006-02-07 19:31:27 +01:00
|
|
|
any later version.
|
|
|
|
|
|
|
|
GCC is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
2007-07-26 10:37:01 +02:00
|
|
|
along with GCC; see the file COPYING3. If not see
|
|
|
|
<http://www.gnu.org/licenses/>. */
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
#include "config.h"
|
|
|
|
#include "system.h"
|
|
|
|
#include "coretypes.h"
|
|
|
|
#include "tm.h"
|
|
|
|
#include "tree.h"
|
|
|
|
#include "flags.h"
|
|
|
|
#include "tm_p.h"
|
|
|
|
#include "basic-block.h"
|
|
|
|
#include "cfgloop.h"
|
|
|
|
#include "function.h"
|
|
|
|
#include "timevar.h"
|
system.h (dump_file): Do not define.
gcc/
* system.h (dump_file): Do not define.
* tree-pass.h: Include dumpfile.h, which is a new file containing...
(enum tree_dump_index, TDF_*, get_dump_file_name, dump_enabled_p,
dump_initialized_p, dump_begin, dump_end, dump_node, dump_switch_p,
dump_flag_name, dump_file, dump_flags, dump_file_name,
get_dump_file_info, struct dump_file_info): all of this, moved to...
* dumpfile.h: Here, new file.
* tree-dump.h: Include dumpfile.h, but not tree-pass.h.
(dump_stmt): Remove prototype for C++ specific function.
(dump_enumerated_decls): Move prototype from here...
* tree-flow.h (dump_enumerated_decls): ... to here.
(get_ref_base_and_extent) Move prototype from here ...
* tree.h (get_ref_base_and_extent) ... to here.
* tree-ssa-live.c: Do not inclde tree-pretty-print.h, because
gimple-pretty-print.h is enough. Do not include tree-dump.h,
include timevar.h and dumpfile.h instead.
(struct numbered_tree_d, compare_decls_by_uid,
dump_enumerated_decls_push, dump_enumerated_decls): Move from here ...
* tree-dfa.c:(struct numbered_tree_d, compare_decls_by_uid,
dump_enumerated_decls_push, dump_enumerated_decls):... to here.
Do not include timevar.h.
* tree.c: Do not include timevar.h.
* tree-cfg.c: Do not include langhooks.h, tree-pretty-print.h, and
timevar.h.
(dump_cfg_stats): Use current_function_name.
(gimple_cfg2vcg): Likewise.
(dump_function_to_file): Likewise.
* df-scan.c: Do not include tree-pass.h and timevar.h.
Include dumpfile.h.
(df_entry_block_bitmap_verify, df_exit_block_bitmap_verify): Do not
use print_current_pass.
* df-problems.c: Include dumpfile.h.
Always define REG_DEAD_DEBUGGING, avoid #ifdef code, because it
leads to errors in the code not selected.
(df_note_compute): Do not print_rtl_with_bb here. Fix compilation
bug if REG_DEAD_DEBUGGING is not 0, get_insns is not available here.
* lcm.c: Include dumpfile.h.
Remove obsolete include of insn-attr.h.
* dojump.c (do_compare_rtx_and_jump): Remove failure printing for
missing probability notes.
* stmt.c: Include dumpfile.h.
(emit_case_decision_tree): Re-enable printing expand details only
if TDF_DETAILS.
* alias.c, auto-inc-dec.c, bb-reorder.c, caller-save.c, cfg.c,
cfgcleanup.c, cfgexpand.c, cfgloop.c, cfgloopmanip.c, cgraph.c,
cgraphclones.c, cgraphunit.c, combine.c, combine-stack-adj.c,
coverage.c, cprop.c, cse.c, cselib.c, dbgcnt.c, dce.c, df-core.c,
dse.c, dwarf2out.c, emit-rtl.c, except.c, expr.c, final.c,
function.c, fwprop.c, gcse.c, gimple-fold.c,
gimple-pretty-print.c, gimple-ssa-strength-reduction.c,
gimplify.c, graphite-blocking.c, graphite-clast-to-gimple.c,
graphite-dependences.c, graphite-interchange.c,
graphite-optimize-isl.c, graphite-poly.c,
graphite-sese-to-poly.c, haifa-sched.c, hw-doloop.c, ifcvt.c,
ipa.c, ipa-cp.c, ipa-inline-analysis.c, ipa-inline.c,
ipa-inline-transform.c, ipa-prop.c, ipa-pure-const.c,
ipa-reference.c, ipa-split.c, ipa-utils.c, ira.c, ira-emit.c,
jump.c, loop-doloop.c, loop-init.c, loop-invariant.c, loop-iv.c,
loop-unroll.c, loop-unswitch.c, lower-subreg.c,
lto-section-out.c, lto-streamer-in.c, matrix-reorg.c, mcf.c,
mode-switching.c, modulo-sched.c, omega.c, omp-low.c, passes.c,
plugin.c, postreload.c, postreload-gcse.c, predict.c, print-rtl.c,
print-tree.c, profile.c, recog.c, ree.c, regcprop.c, reginfo.c,
regmove.c, regrename.c, reg-stack.c, reload1.c, reorg.c,
sched-rgn.c, sched-vis.c, sel-sched.c, sel-sched-ir.c,
store-motion.c, tracer.c, trans-mem.c, tree-affine.c,
tree-call-cdce.c, tree-cfgcleanup.c, tree-chrec.c,
tree-data-ref.c, tree-diagnostic.c, tree-dump.c,
tree-eh.c, tree-flow-inline.h, tree-if-conv.c, tree-into-ssa.c,
tree-mudflap.c, tree-nrv.c, tree-object-size.c,
tree-optimize.c, tree-outof-ssa.c, tree-predcom.c,
tree-pretty-print.c, tree-profile.c, tree-scalar-evolution.c,
tree-sra.c, tree-ssa-address.c, tree-ssa-alias.c, tree-ssa.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-copy.c,
tree-ssa-copyrename.c,, tree-ssa-dce.c, tree-ssa-dom.c,
tree-ssa-dse.c, tree-ssa-forwprop.c, tree-ssa-ifcombine.c,
tree-ssa-loop.c, tree-ssa-loop-ch.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-ivopts.c,
tree-ssa-loop-manip.c, tree-ssa-loop-niter.c,
tree-ssa-loop-prefetch.c, tree-ssa-loop-unswitch.c,
tree-ssa-math-opts.c, tree-ssa-operands.c, tree-ssa-phiopt.c,
tree-ssa-phiprop.c, tree-ssa-pre.c, tree-ssa-propagate.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-sink.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c, tree-ssa-ter.c,
tree-ssa-threadedge.c, tree-ssa-threadupdate.c,
tree-ssa-uncprop.c, tree-ssa-uninit.c,
tree-switch-conversion.c, tree-tailcall.c,
tree-vect-data-refs.c, tree-vect-loop.c,
tree-vect-loop-manip.c, tree-vectorizer.c,
tree-vect-patterns.c, tree-vect-slp.c, tree-vect-stmts.c,
tree-vrp.c, value-prof.c, var-tracking.c, web.c: Include tree-pass.h
only if needed. If tree-pass.h is included, do not include timevar.h
and dumpfile.h. If tree-pass.h is not included but dump_file, or
dump_flags, or the TDF_* flags are used, include dumpfile.h.
If gimple-pretty-print.h is included, don't include tree-pretty-print.h.
Remove assorted unnecessary includes.
* config/mn10300/mn10300.c, config/c6x/c6x.c, config/ia64/ia64.c,
config/arm/arm.c, config/bfin/bfin.c, config/frv/frv.c,
config/spu/spu.c, config/mep/mep.c, config/i386/i386.c:
Include dumpfile.h.
* config/rl78/rl78.c: Include dumpfile.h instead of tree-pass.h.
* arm/t-arm, avr/t-avr, i386/t-i386, ia64/t-ia64, mep/t-mep,
spu/t-spu-elf: Fix dependencies.
c-family/
* c-gimplify.c: Include dumpfile.h instead of tree-dump.h.
* c-ada-spec.c: Likewise.
* c-dump.c (dump_stmt): Move to cp/dump.c, the only user.
c/
* c-decl.c: Include dumpfile.h instead of tree-dump.h.
* Make-lang.in: Fix dependencies.
cp/
* dump.c (dump_stmt): Moved here from c-dump.c.
* optimize.c: Include dumpfile.h instead of tree-dump.h.
* class.c: Likewise.
* decl2.c: Likewise.
* Make-lang.in: Fix dependencies.
fortran/
* f95-lang.c: Include dumpfile.h instead of tree-dump.h.
* Make-lang.in: Fix dependencies.
java/
* java-gimplify.c Include dumpfile.h instead of tree-dump.h
* Make-lang.in: Fix dependencies.
lto/
* lto.c: Do not include timevar.h.
* Make-lang.in: Fix dependencies.
ada/
* gcc-interface/utils.c: Include timevar.h.
* Make-lang.in: Fix dependencies.
From-SVN: r189519
2012-07-16 13:32:42 +02:00
|
|
|
#include "dumpfile.h"
|
2006-02-07 19:31:27 +01:00
|
|
|
#include "tree-flow.h"
|
|
|
|
#include "tree-ssa-propagate.h"
|
|
|
|
#include "langhooks.h"
|
|
|
|
#include "params.h"
|
|
|
|
|
|
|
|
/* To avoid code explosion due to jump threading, we limit the
|
|
|
|
number of statements we are going to copy. This variable
|
|
|
|
holds the number of statements currently seen that we'll have
|
|
|
|
to copy as part of the jump threading process. */
|
|
|
|
static int stmt_count;
|
|
|
|
|
2009-04-28 10:50:19 +02:00
|
|
|
/* Array to record value-handles per SSA_NAME. */
|
|
|
|
VEC(tree,heap) *ssa_name_values;
|
|
|
|
|
|
|
|
/* Set the value for the SSA name NAME to VALUE. */
|
|
|
|
|
|
|
|
void
|
|
|
|
set_ssa_name_value (tree name, tree value)
|
|
|
|
{
|
|
|
|
if (SSA_NAME_VERSION (name) >= VEC_length (tree, ssa_name_values))
|
|
|
|
VEC_safe_grow_cleared (tree, heap, ssa_name_values,
|
|
|
|
SSA_NAME_VERSION (name) + 1);
|
|
|
|
VEC_replace (tree, ssa_name_values, SSA_NAME_VERSION (name), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Initialize the per SSA_NAME value-handles array. Returns it. */
|
|
|
|
void
|
|
|
|
threadedge_initialize_values (void)
|
|
|
|
{
|
|
|
|
gcc_assert (ssa_name_values == NULL);
|
|
|
|
ssa_name_values = VEC_alloc(tree, heap, num_ssa_names);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Free the per SSA_NAME value-handle array. */
|
|
|
|
void
|
|
|
|
threadedge_finalize_values (void)
|
|
|
|
{
|
|
|
|
VEC_free(tree, heap, ssa_name_values);
|
|
|
|
}
|
|
|
|
|
2006-02-07 19:31:27 +01:00
|
|
|
/* Return TRUE if we may be able to thread an incoming edge into
|
|
|
|
BB to an outgoing edge from BB. Return FALSE otherwise. */
|
|
|
|
|
|
|
|
bool
|
|
|
|
potentially_threadable_block (basic_block bb)
|
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
gimple_stmt_iterator gsi;
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
/* If BB has a single successor or a single predecessor, then
|
|
|
|
there is no threading opportunity. */
|
|
|
|
if (single_succ_p (bb) || single_pred_p (bb))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
/* If BB does not end with a conditional, switch or computed goto,
|
|
|
|
then there is no threading opportunity. */
|
2008-07-28 16:33:56 +02:00
|
|
|
gsi = gsi_last_bb (bb);
|
|
|
|
if (gsi_end_p (gsi)
|
|
|
|
|| ! gsi_stmt (gsi)
|
|
|
|
|| (gimple_code (gsi_stmt (gsi)) != GIMPLE_COND
|
|
|
|
&& gimple_code (gsi_stmt (gsi)) != GIMPLE_GOTO
|
|
|
|
&& gimple_code (gsi_stmt (gsi)) != GIMPLE_SWITCH))
|
2006-02-07 19:31:27 +01:00
|
|
|
return false;
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Return the LHS of any ASSERT_EXPR where OP appears as the first
|
|
|
|
argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
|
|
|
|
BB. If no such ASSERT_EXPR is found, return OP. */
|
|
|
|
|
|
|
|
static tree
|
2008-07-28 16:33:56 +02:00
|
|
|
lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
|
2006-02-07 19:31:27 +01:00
|
|
|
{
|
|
|
|
imm_use_iterator imm_iter;
|
2008-07-28 16:33:56 +02:00
|
|
|
gimple use_stmt;
|
2006-04-27 22:22:17 +02:00
|
|
|
use_operand_p use_p;
|
2006-02-07 19:31:27 +01:00
|
|
|
|
2006-04-27 22:22:17 +02:00
|
|
|
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
|
2006-02-07 19:31:27 +01:00
|
|
|
{
|
2006-04-27 22:22:17 +02:00
|
|
|
use_stmt = USE_STMT (use_p);
|
2006-02-07 19:31:27 +01:00
|
|
|
if (use_stmt != stmt
|
2008-07-28 16:33:56 +02:00
|
|
|
&& gimple_assign_single_p (use_stmt)
|
|
|
|
&& TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
|
|
|
|
&& TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
|
|
|
|
&& dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
|
2006-04-27 22:22:17 +02:00
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
return gimple_assign_lhs (use_stmt);
|
2006-04-27 22:22:17 +02:00
|
|
|
}
|
2006-02-07 19:31:27 +01:00
|
|
|
}
|
|
|
|
return op;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* We record temporary equivalences created by PHI nodes or
|
|
|
|
statements within the target block. Doing so allows us to
|
|
|
|
identify more jump threading opportunities, even in blocks
|
|
|
|
with side effects.
|
|
|
|
|
|
|
|
We keep track of those temporary equivalences in a stack
|
|
|
|
structure so that we can unwind them when we're done processing
|
|
|
|
a particular edge. This routine handles unwinding the data
|
|
|
|
structures. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
remove_temporary_equivalences (VEC(tree, heap) **stack)
|
|
|
|
{
|
|
|
|
while (VEC_length (tree, *stack) > 0)
|
|
|
|
{
|
|
|
|
tree prev_value, dest;
|
|
|
|
|
|
|
|
dest = VEC_pop (tree, *stack);
|
|
|
|
|
builtins.c, [...]: Fix comment typos.
* builtins.c, c-pragma.h, c-typeck.c, cgraph.c, cgraphunit.c,
combine.c, common.opt, config/dfp-bit.c, config/i386/i386.c,
config/m68k/m68k.c, config/m68k/m68k.md, config/mt/mt.c,
config/mt/mt.h, config/s390/s390.md, df-core.c, df-problems.c,
df-scan.c, df.h, diagnostic.c, expr.c, function.h, gimplify.c,
loop-invariant.c, omp-low.c, opts.c, passes.c,
rtl-factoring.c, rtlanal.c, struct-equiv.c, tree-cfgcleanup.c,
tree-ssa-loop-niter.c, tree-ssa-loop-prefetch.c,
tree-ssa-structalias.c, tree-ssa-threadedge.c,
tree-ssa-threadupdate.c, tree-vect-patterns.c,
tree-vect-transform.c, tree-vectorizer.h, tree-vrp.c,
unwind-dw2.c: Fix comment typos. Follow spelling conventions.
From-SVN: r111721
2006-03-05 00:05:24 +01:00
|
|
|
/* A NULL value indicates we should stop unwinding, otherwise
|
2006-02-07 19:31:27 +01:00
|
|
|
pop off the next entry as they're recorded in pairs. */
|
|
|
|
if (dest == NULL)
|
|
|
|
break;
|
|
|
|
|
|
|
|
prev_value = VEC_pop (tree, *stack);
|
2009-04-28 10:50:19 +02:00
|
|
|
set_ssa_name_value (dest, prev_value);
|
2006-02-07 19:31:27 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Record a temporary equivalence, saving enough information so that
|
|
|
|
we can restore the state of recorded equivalences when we're
|
|
|
|
done processing the current edge. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
|
|
|
|
{
|
|
|
|
tree prev_x = SSA_NAME_VALUE (x);
|
|
|
|
|
|
|
|
if (TREE_CODE (y) == SSA_NAME)
|
|
|
|
{
|
|
|
|
tree tmp = SSA_NAME_VALUE (y);
|
|
|
|
y = tmp ? tmp : y;
|
|
|
|
}
|
|
|
|
|
2009-04-28 10:50:19 +02:00
|
|
|
set_ssa_name_value (x, y);
|
2006-02-07 19:31:27 +01:00
|
|
|
VEC_reserve (tree, heap, *stack, 2);
|
|
|
|
VEC_quick_push (tree, *stack, prev_x);
|
|
|
|
VEC_quick_push (tree, *stack, x);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Record temporary equivalences created by PHIs at the target of the
|
2009-11-25 11:55:54 +01:00
|
|
|
edge E. Record unwind information for the equivalences onto STACK.
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
If a PHI which prevents threading is encountered, then return FALSE
|
|
|
|
indicating we should not thread this edge, else return TRUE. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
|
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
gimple_stmt_iterator gsi;
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
/* Each PHI creates a temporary equivalence, record them.
|
|
|
|
These are context sensitive equivalences and will be removed
|
|
|
|
later. */
|
2008-07-28 16:33:56 +02:00
|
|
|
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
|
2006-02-07 19:31:27 +01:00
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
gimple phi = gsi_stmt (gsi);
|
2006-02-07 19:31:27 +01:00
|
|
|
tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
|
2008-07-28 16:33:56 +02:00
|
|
|
tree dst = gimple_phi_result (phi);
|
2006-02-07 19:31:27 +01:00
|
|
|
|
2009-11-25 11:55:54 +01:00
|
|
|
/* If the desired argument is not the same as this PHI's result
|
2006-02-07 19:31:27 +01:00
|
|
|
and it is set by a PHI in E->dest, then we can not thread
|
|
|
|
through E->dest. */
|
|
|
|
if (src != dst
|
|
|
|
&& TREE_CODE (src) == SSA_NAME
|
2008-07-28 16:33:56 +02:00
|
|
|
&& gimple_code (SSA_NAME_DEF_STMT (src)) == GIMPLE_PHI
|
|
|
|
&& gimple_bb (SSA_NAME_DEF_STMT (src)) == e->dest)
|
2006-02-07 19:31:27 +01:00
|
|
|
return false;
|
|
|
|
|
|
|
|
/* We consider any non-virtual PHI as a statement since it
|
|
|
|
count result in a constant assignment or copy operation. */
|
2012-08-14 16:16:18 +02:00
|
|
|
if (!virtual_operand_p (dst))
|
2006-02-07 19:31:27 +01:00
|
|
|
stmt_count++;
|
|
|
|
|
|
|
|
record_temporary_equivalence (dst, src, stack);
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
/* Fold the RHS of an assignment statement and return it as a tree.
|
|
|
|
May return NULL_TREE if no simplification is possible. */
|
|
|
|
|
|
|
|
static tree
|
|
|
|
fold_assignment_stmt (gimple stmt)
|
|
|
|
{
|
|
|
|
enum tree_code subcode = gimple_assign_rhs_code (stmt);
|
|
|
|
|
|
|
|
switch (get_gimple_rhs_class (subcode))
|
|
|
|
{
|
|
|
|
case GIMPLE_SINGLE_RHS:
|
2011-09-01 13:46:08 +02:00
|
|
|
return fold (gimple_assign_rhs1 (stmt));
|
2010-06-25 10:56:24 +02:00
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
case GIMPLE_UNARY_RHS:
|
|
|
|
{
|
|
|
|
tree lhs = gimple_assign_lhs (stmt);
|
|
|
|
tree op0 = gimple_assign_rhs1 (stmt);
|
|
|
|
return fold_unary (subcode, TREE_TYPE (lhs), op0);
|
|
|
|
}
|
2010-06-25 10:56:24 +02:00
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
case GIMPLE_BINARY_RHS:
|
|
|
|
{
|
|
|
|
tree lhs = gimple_assign_lhs (stmt);
|
|
|
|
tree op0 = gimple_assign_rhs1 (stmt);
|
|
|
|
tree op1 = gimple_assign_rhs2 (stmt);
|
|
|
|
return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
|
|
|
|
}
|
2010-06-25 10:56:24 +02:00
|
|
|
|
|
|
|
case GIMPLE_TERNARY_RHS:
|
|
|
|
{
|
|
|
|
tree lhs = gimple_assign_lhs (stmt);
|
|
|
|
tree op0 = gimple_assign_rhs1 (stmt);
|
|
|
|
tree op1 = gimple_assign_rhs2 (stmt);
|
|
|
|
tree op2 = gimple_assign_rhs3 (stmt);
|
2011-09-01 13:46:08 +02:00
|
|
|
|
|
|
|
/* Sadly, we have to handle conditional assignments specially
|
|
|
|
here, because fold expects all the operands of an expression
|
|
|
|
to be folded before the expression itself is folded, but we
|
|
|
|
can't just substitute the folded condition here. */
|
|
|
|
if (gimple_assign_rhs_code (stmt) == COND_EXPR)
|
|
|
|
op0 = fold (op0);
|
|
|
|
|
2010-06-25 10:56:24 +02:00
|
|
|
return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2);
|
|
|
|
}
|
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
default:
|
|
|
|
gcc_unreachable ();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2006-02-07 19:31:27 +01:00
|
|
|
/* Try to simplify each statement in E->dest, ultimately leading to
|
|
|
|
a simplification of the COND_EXPR at the end of E->dest.
|
|
|
|
|
|
|
|
Record unwind information for temporary equivalences onto STACK.
|
|
|
|
|
|
|
|
Use SIMPLIFY (a pointer to a callback function) to further simplify
|
2009-11-25 11:55:54 +01:00
|
|
|
statements using pass specific information.
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
We might consider marking just those statements which ultimately
|
|
|
|
feed the COND_EXPR. It's not clear if the overhead of bookkeeping
|
|
|
|
would be recovered by trying to simplify fewer statements.
|
|
|
|
|
|
|
|
If we are able to simplify a statement into the form
|
|
|
|
SSA_NAME = (SSA_NAME | gimple invariant), then we can record
|
2008-07-28 16:33:56 +02:00
|
|
|
a context sensitive equivalence which may help us simplify
|
2006-02-07 19:31:27 +01:00
|
|
|
later statements in E->dest. */
|
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
static gimple
|
2006-02-07 19:31:27 +01:00
|
|
|
record_temporary_equivalences_from_stmts_at_dest (edge e,
|
|
|
|
VEC(tree, heap) **stack,
|
2008-07-28 16:33:56 +02:00
|
|
|
tree (*simplify) (gimple,
|
|
|
|
gimple))
|
2006-02-07 19:31:27 +01:00
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
gimple stmt = NULL;
|
|
|
|
gimple_stmt_iterator gsi;
|
2006-02-07 19:31:27 +01:00
|
|
|
int max_stmt_count;
|
|
|
|
|
|
|
|
max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
|
|
|
|
|
|
|
|
/* Walk through each statement in the block recording equivalences
|
|
|
|
we discover. Note any equivalences we discover are context
|
|
|
|
sensitive (ie, are dependent on traversing E) and must be unwound
|
|
|
|
when we're finished processing E. */
|
2008-07-28 16:33:56 +02:00
|
|
|
for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
|
2006-02-07 19:31:27 +01:00
|
|
|
{
|
|
|
|
tree cached_lhs = NULL;
|
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
stmt = gsi_stmt (gsi);
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
/* Ignore empty statements and labels. */
|
invoke.texi (-fvar-tracking-assignments): New.
gcc/ChangeLog:
* doc/invoke.texi (-fvar-tracking-assignments): New.
(-fvar-tracking-assignments-toggle): New.
(-fdump-final-insns=file): Mark filename as optional.
(--param min-nondebug-insn-uid): New.
(-gdwarf-@{version}): Mention version 4.
* opts.c (common_handle_option): Accept it.
* tree-vrp.c (find_assert_locations_1): Skip debug stmts.
* regrename.c (regrename_optimize): Drop last. Don't count debug
insns as uses. Don't reject change because of debug insn.
(do_replace): Reject DEBUG_INSN as chain starter. Take base_regno
from the chain starter, and check for inexact matches in
DEBUG_INSNS.
(scan_rtx_reg): Accept inexact matches in DEBUG_INSNs.
(build_def_use): Simplify and fix the marking of DEBUG_INSNs.
* sched-ebb.c (schedule_ebbs): Skip boundary debug insns.
* fwprop.c (forward_propagate_and_simplify): ...into debug insns.
* doc/gimple.texi (is_gimple_debug): New.
(gimple_debug_bind_p): New.
(is_gimple_call, gimple_assign_cast_p): End sentence with period.
* doc/install.texi (bootstrap-debug): More details.
(bootstrap-debug-big, bootstrap-debug-lean): Document.
(bootstrap-debug-lib): More details.
(bootstrap-debug-ckovw): Update.
(bootstrap-time): New.
* tree-into-ssa.c (mark_def_sites): Skip debug stmts.
(insert_phi_nodes_for): Insert debug stmts.
(rewrite_stmt): Take iterator. Insert debug stmts.
(rewrite_enter_block): Adjust.
(maybe_replace_use_in_debug_stmt): New.
(rewrite_update_stmt): Use it.
(mark_use_interesting): Return early for debug stmts.
* tree-ssa-loop-im.c (rewrite_bittest): Propagate DEFs into debug
stmts before replacing stmt.
(move_computations_stmt): Likewise.
* ira-conflicts.c (add_copies): Skip debug insns.
* regstat.c (regstat_init_n_sets_and_refs): Discount debug insns.
(regstat_bb_compute_ri): Skip debug insns.
* tree-ssa-threadupdate.c (redirection_block_p): Skip debug stmts.
* tree-ssa-loop-manip.c (find_uses_to_rename_stmt,
check_loop_closed_ssa_stmt): Skip debug stmts.
* tree-tailcall.c (find_tail_calls): Likewise.
* tree-ssa-loop-ch.c (should_duplicate_loop_header_p): Likewise.
* tree.h (MAY_HAVE_DEBUG_STMTS): New.
(build_var_debug_value_stat): Declare.
(build_var_debug_value): Define.
(target_for_debug_bind): Declare.
* reload.c (find_equiv_reg): Skip debug insns.
* rtlanal.c (reg_used_between_p): Skip debug insns.
(side_effects_p): Likewise.
(canonicalize_condition): Likewise.
* ddg.c (create_ddg_dep_from_intra_loop_link): Check that non-debug
insns never depend on debug insns.
(create_ddg_dep_no_link): Likewise.
(add_cross_iteration_register_deps): Use ANTI_DEP for debug insns.
Don't add inter-loop dependencies for debug insns.
(build_intra_loop_deps): Likewise.
(create_ddg): Count debug insns.
* ddg.h (struct ddg::num_debug): New.
(num_backargs): Pair up with previous int field.
* diagnostic.c (diagnostic_report_diagnostic): Skip notes on
-fcompare-debug-second.
* final.c (get_attr_length_1): Skip debug insns.
(rest_of_clean-state): Don't dump CFA_RESTORE_STATE.
* gcc.c (invoke_as): Call compare-debug-dump-opt.
(driver_self_specs): Map -fdump-final-insns to
-fdump-final-insns=..
(get_local_tick): New.
(compare_debug_dump_opt_spec_function): Test for . argument and
compute output name. Compute temp output spec without flag name.
Compute -frandom-seed.
(OPT): Undef after use.
* cfgloopanal.c (num_loop_insns): Skip debug insns.
(average_num_loop_insns): Likewise.
* params.h (MIN_NONDEBUG_INSN_UID): New.
* gimple.def (GIMPLE_DEBUG): New.
* ipa-reference.c (scan_stmt_for_static_refs): Skip debug stmts.
* auto-inc-dec.c (merge_in_block): Skip debug insns.
(merge_in_block): Fix whitespace.
* toplev.c (flag_var_tracking): Update comment.
(flag_var_tracking_assignments): New.
(flag_var_tracking_assignments_toggle): New.
(process_options): Don't open final insns dump file if we're not
going to write to it. Compute defaults for var_tracking.
* df-scan.c (df_insn_rescan_debug_internal): New.
(df_uses_record): Handle debug insns.
* haifa-sched.c (ready): Initialize n_debug.
(contributes_to_priority): Skip debug insns.
(dep_list_size): New.
(priority): Use it.
(rank_for_schedule): Likewise. Schedule debug insns as soon as
they're ready. Disregard previous debug insns to make decisions.
(queue_insn): Never queue debug insns.
(ready_add, ready_remove_first, ready_remove): Count debug insns.
(schedule_insn): Don't reject debug insns because of issue rate.
(get_ebb_head_tail, no_real_insns_p): Skip boundary debug insns.
(queue_to_ready): Skip and discount debug insns.
(choose_ready): Let debug insns through.
(schedule_block): Check boundary debug insns. Discount debug
insns, schedule them early. Adjust whitespace.
(set_priorities): Check for boundary debug insns.
(add_jump_dependencies): Use dep_list_size.
(prev_non_location_insn): New.
(check_cfg): Use it.
* tree-ssa-loop-ivopts.c (find-interesting_users): Skip debug
stmts.
(remove_unused_ivs): Reset debug stmts.
* modulo-sched.c (const_iteration_count): Skip debug insns.
(res_MII): Discount debug insns.
(loop_single_full_bb_p): Skip debug insns.
(sms_schedule): Likewise.
(sms_schedule_by_order): Likewise.
(ps_has_conflicts): Likewise.
* caller-save.c (refmarker_fn): New.
(save_call_clobbered_regs): Replace regs with saved mem in
debug insns.
(mark_referenced_regs): Take pointer, mark and arg. Adjust.
Call refmarker_fn mark for hardregnos.
(mark_reg_as_referenced): New.
(replace_reg_with_saved_mem): New.
* ipa-pure-const.c (check_stmt): Skip debug stmts.
* cse.c (cse_insn): Canonicalize debug insns. Skip them when
searching back.
(cse_extended_basic_block): Skip debug insns.
(count_reg_usage): Likewise.
(is_dead_reg): New, split out of...
(set_live_p): ... here.
(insn_live_p): Use it for debug insns.
* tree-stdarg.c (check_all_va_list_escapes): Skip debug stmts.
(execute_optimize_stdarg): Likewise.
* tree-ssa-dom.c (propagate_rhs_into_lhs): Likewise.
* tree-ssa-propagate.c (substitute_and_fold): Don't regard
changes in debug stmts as changes.
* sel-sched.c (moving_insn_creates_bookkeeping_block_p): New.
(moveup_expr): Don't move across debug insns. Don't move
debug insn if it would create a bookkeeping block.
(moveup_expr_cached): Don't use cache for debug insns that
are heads of blocks.
(compute_av_set_inside_bb): Skip debug insns.
(sel_rank_for_schedule): Schedule debug insns first. Remove
dead code.
(block_valid_for_bookkeeping_p); Support lax searches.
(create_block_for_bookkeeping): Adjust block numbers when
encountering debug-only blocks.
(find_place_for_bookkeeping): Deal with debug-only blocks.
(generate_bookkeeping_insn): Accept no place to insert.
(remove_temp_moveop_nops): New argument full_tidying.
(prepare_place_to_insert): Deal with debug insns.
(advance_state_on_fence): Debug insns don't start cycles.
(update_boundaries): Take fence as argument. Deal with
debug insns.
(schedule_expr_on_boundary): No full_tidying on debug insns.
(fill_insns): Deal with debug insns.
(track_scheduled_insns_and_blocks): Don't count debug insns.
(need_nop_to_preserve_insn_bb): New, split out of...
(remove_insn_from_stream): ... this.
(fur_orig_expr_not_found): Skip debug insns.
* rtl.def (VALUE): Move up.
(DEBUG_INSN): New.
* tree-ssa-sink.c (all_immediate_uses_same_place): Skip debug
stmts.
(nearest_common_dominator_of_uses): Take debug_stmts argument.
Set it if debug stmts are found.
(statement_sink_location): Skip debug stmts. Propagate
moving defs into debug stmts.
* ifcvt.c (first_active_insn): Skip debug insns.
(last_active_insns): Likewise.
(cond_exec_process_insns): Likewise.
(noce_process_if_block): Likewise.
(check_cond_move_block): Likewise.
(cond_move_convert_if_block): Likewise.
(block_jumps_and_fallthru_p): Likewise.
(dead_or_predicable): Likewise.
* dwarf2out.c (debug_str_hash_forced): New.
(find_AT_string): Add comment.
(gen_label_for_indirect_string): New.
(get_debug_string_label): New.
(AT_string_form): Use it.
(mem_loc_descriptor): Handle non-TLS symbols. Handle MINUS , DIV,
MOD, AND, IOR, XOR, NOT, ABS, NEG, and CONST_STRING. Accept but
discard COMPARE, IF_THEN_ELSE, ROTATE, ROTATERT, TRUNCATE and
several operations that cannot be represented with DWARF opcodes.
(loc_descriptor): Ignore SIGN_EXTEND and ZERO_EXTEND. Require
dwarf_version 4 for DW_OP_implicit_value and DW_OP_stack_value.
(dwarf2out_var_location): Take during-call mark into account.
(output_indirect_string): Update comment. Output if there are
label and references.
(prune_indirect_string): New.
(prune_unused_types): Call it if debug_str_hash_forced.
More in dwarf2out.c, from Jakub Jelinek <jakub@redhat.com>:
(dw_long_long_const): Remove.
(struct dw_val_struct): Change val_long_long type to rtx.
(print_die, attr_checksum, same_dw_val_p, loc_descriptor): Adjust for
val_long_long change to CONST_DOUBLE rtx from a long hi/lo pair.
(output_die): Likewise. Use HOST_BITS_PER_WIDE_INT size of each
component instead of HOST_BITS_PER_LONG.
(output_loc_operands): Likewise. For const8* assert
HOST_BITS_PER_WIDE_INT rather than HOST_BITS_PER_LONG is >= 64.
(output_loc_operands_raw): For const8* assert HOST_BITS_PER_WIDE_INT
rather than HOST_BITS_PER_LONG is >= 64.
(add_AT_long_long): Remove val_hi and val_lo arguments, add
val_const_double.
(size_of_die): Use HOST_BITS_PER_WIDE_INT size multiplier instead of
HOST_BITS_PER_LONG for dw_val_class_long_long.
(add_const_value_attribute): Adjust add_AT_long_long caller. Don't
handle TLS SYMBOL_REFs. If CONST wraps a constant, tail recurse.
(dwarf_stack_op_name): Handle DW_OP_implicit_value and
DW_OP_stack_value.
(size_of_loc_descr, output_loc_operands, output_loc_operands_raw):
Handle DW_OP_implicit_value.
(extract_int): Move prototype earlier.
(mem_loc_descriptor): For SUBREG punt if inner
mode size is wider than DWARF2_ADDR_SIZE. Handle SIGN_EXTEND
and ZERO_EXTEND by DW_OP_shl and DW_OP_shr{a,}. Handle
EQ, NE, GT, GE, LT, LE, GTU, GEU, LTU, LEU, SMIN, SMAX, UMIN,
UMAX, SIGN_EXTRACT, ZERO_EXTRACT.
(loc_descriptor): Compare mode size with DWARF2_ADDR_SIZE
instead of Pmode size.
(loc_descriptor): Add MODE argument. Handle CONST_INT, CONST_DOUBLE,
CONST_VECTOR, CONST, LABEL_REF and SYMBOL_REF if mode != VOIDmode,
attempt to handle other expressions. Don't handle TLS SYMBOL_REFs.
(concat_loc_descriptor, concatn_loc_descriptor,
loc_descriptor_from_tree_1): Adjust loc_descriptor callers.
(add_location_or_const_value_attribute): Likewise. For single
location loc_lists attempt to use add_const_value_attribute
for constant decls. Add DW_AT_const_value even if
NOTE_VAR_LOCATION is VAR_LOCATION with CONSTANT_P or CONST_STRING
in its expression.
* cfgbuild.c (inside_basic_block_p): Handle debug insns.
(control_flow_insn_p): Likewise.
* tree-parloops.c (eliminate_local_variables_stmt): Handle debug
stmt.
(separate_decls_in_region_debug_bind): New.
(separate_decls_in_region): Process debug bind stmts afterwards.
* recog.c (verify_changes): Handle debug insns.
(extract_insn): Likewise.
(peephole2_optimize): Skip debug insns.
* dse.c (scan_insn): Skip debug insns.
* sel-sched-ir.c (return_nop_to_pool): Take full_tidying argument.
Pass it on.
(setup_id_for_insn): Handle debug insns.
(maybe_tidy_empty_bb): Adjust whitespace.
(tidy_control_flow): Skip debug insns.
(sel_remove_insn): Adjust for debug insns.
(sel_estimate_number_of_insns): Skip debug insns.
(create_insn_rtx_from_pattern): Handle debug insns.
(create_copy_of_insn_rtx): Likewise.
* sel-sched-.h (sel_bb_end): Declare.
(sel_bb_empty_or_nop_p): New.
(get_all_loop_exits): Use it.
(_eligible_successor_edge_p): Likewise.
(return_nop_to_pool): Adjust.
* tree-eh.c (tre_empty_eh_handler_p): Skip debug stmts.
* ira-lives.c (process_bb_node_lives): Skip debug insns.
* gimple-pretty-print.c (dump_gimple_debug): New.
(dump_gimple_stmt): Use it.
(dump_bb_header): Skip gimple debug stmts.
* regmove.c (optimize_reg_copy_1): Discount debug insns.
(fixup_match_2): Likewise.
(regmove_backward_pass): Likewise. Simplify combined
replacement. Handle debug insns.
* function.c (instantiate_virtual_regs): Handle debug insns.
* function.h (struct emit_status): Add x_cur_debug_insn_uid.
* print-rtl.h: Include cselib.h.
(print_rtx): Print VALUEs. Split out and recurse for
VAR_LOCATIONs.
	* df.h (df_insn_rescan_debug_internal): Declare.
* gcse.c (alloc_hash_table): Estimate n_insns.
(cprop_insn): Don't regard debug insns as changes.
(bypass_conditional_jumps): Skip debug insns.
(one_pre_gcse_pass): Adjust.
(one_code_hoisting_pass): Likewise.
(compute_ld_motion_mems): Skip debug insns.
(one_cprop_pass): Adjust.
* tree-if-conv.c (tree_if_convert_stmt): Reset debug stmts.
(if_convertible_stmt_p): Handle debug stmts.
* init-regs.c (initialize_uninitialized_regs): Skip debug insns.
* tree-vect-loop.c (vect_is_simple_reduction): Skip debug stmts.
* ira-build.c (create_bb_allocnos): Skip debug insns.
* tree-flow-inline.h (has_zero_uses): Discount debug stmts.
(has_single_use): Likewise.
(single_imm_use): Likewise.
(num_imm_uses): Likewise.
* tree-ssa-phiopt.c (empty_block_p): Skip debug stmts.
* tree-ssa-coalesce.c (build_ssa_conflict_graph): Skip debug stmts.
(create_outofssa_var_map): Likewise.
* lower-subreg.c (adjust_decomposed_uses): New.
(resolve_debug): New.
(decompose_multiword_subregs): Use it.
* tree-dfa.c (find_referenced_vars): Skip debug stmts.
* emit-rtl.c: Include params.h.
(cur_debug_insn_uid): Define.
(set_new_first_and_last_insn): Set cur_debug_insn_uid too.
(copy_rtx_if_shared_1): Handle debug insns.
(reset_used_flags): Likewise.
	(set_used_flags): Likewise.
(get_max_insn_count): New.
(next_nondebug_insn): New.
(prev_nondebug_insn): New.
(make_debug_insn_raw): New.
(emit_insn_before_noloc): Handle debug insns.
(emit_jump_insn_before_noloc): Likewise.
(emit_call_insn_before_noloc): Likewise.
(emit_debug_insn_before_noloc): New.
(emit_insn_after_noloc): Handle debug insns.
(emit_jump_insn_after_noloc): Likewise.
(emit_call_insn_after_noloc): Likewise.
(emit_debug_insn_after_noloc): Likewise.
(emit_insn_after): Take loc from earlier non-debug insn.
(emit_jump_insn_after): Likewise.
(emit_call_insn_after): Likewise.
(emit_debug_insn_after_setloc): New.
(emit_debug_insn_after): New.
(emit_insn_before): Take loc from later non-debug insn.
(emit_jump_insn_before): Likewise.
(emit_call_insn_before): Likewise.
(emit_debug_insn_before_setloc): New.
(emit_debug_insn_before): New.
(emit_insn): Handle debug insns.
(emit_debug_insn): New.
(emit_jump_insn): Handle debug insns.
(emit_call_insn): Likewise.
(emit): Likewise.
(init_emit): Take min-nondebug-insn-uid into account.
Initialize cur_debug_insn_uid.
(emit_copy_of_insn_after): Handle debug insns.
* cfgexpand.c (gimple_assign_rhs_to_tree): Do not overwrite
location of single rhs in place.
(maybe_dump_rtl_for_gimple_stmt): Dump lineno.
(floor_sdiv_adjust): New.
(cell_sdiv_adjust): New.
(cell_udiv_adjust): New.
(round_sdiv_adjust): New.
(round_udiv_adjust): New.
(wrap_constant): Moved from cselib.
(unwrap_constant): New.
(expand_debug_expr): New.
(expand_debug_locations): New.
(expand_gimple_basic_block): Drop hiding redeclaration. Expand
debug bind stmts.
(gimple_expand_cfg): Expand debug locations.
* cselib.c: Include tree-pass.h.
(struct expand_value_data): New.
(cselib_record_sets_hook): New.
(PRESERVED_VALUE_P, LONG_TERM_PRESERVED_VALUE_P): New.
	(cselib_clear_table): Move, and implement in terms of...
(cselib_reset_table_with_next_value): ... this.
(cselib_get_next_unknown_value): New.
(discard_useless_locs): Don't discard preserved values.
(cselib_preserve_value): New.
(cselib_preserved_value_p): New.
(cselib_preserve_definitely): New.
(cselib_clear_preserve): New.
(cselib_preserve_only_values): New.
(new_cselib_val): Take rtx argument. Dump it in details.
(cselib_lookup_mem): Adjust.
(expand_loc): Take regs_active in struct. Adjust. Silence
dumps unless details are requested.
(cselib_expand_value_rtx_cb): New.
	(cselib_expand_value_rtx): Rename and reimplement in terms of...
(cselib_expand_value_rtx_1): ... this. Adjust. Silence dumps
without details. Copy more subregs. Try to resolve values
using a callback. Wrap constants.
(cselib_subst_to_values): Adjust.
(cselib_log_lookup): New.
(cselib_lookup): Call it.
(cselib_invalidate_regno): Don't count preserved values as
useless.
(cselib_invalidate_mem): Likewise.
(cselib_record_set): Likewise.
(struct set): Renamed to cselib_set, moved to cselib.h.
(cselib_record_sets): Adjust. Call hook.
(cselib_process_insn): Reset table when it would be cleared.
(dump_cselib_val): New.
(dump_cselib_table): New.
* tree-cfgcleanup.c (tree_forwarded_block_p): Skip debug stmts.
(remove_forwarder_block): Support moving debug stmts.
* cselib.h (cselib_record_sets_hook): Declare.
(cselib_expand_callback): New type.
(cselib_expand_value_rtx_cb): Declare.
(cselib_reset_table_with_next_value): Declare.
(cselib_get_next_unknown_value): Declare.
(cselib_preserve_value): Declare.
(cselib_preserved_value_p): Declare.
(cselib_preserve_only_values): Declare.
(dump_cselib_table): Declare.
* cfgcleanup.c (flow_find_cross_jump): Skip debug insns.
(try_crossjump_to_edge): Likewise.
(delete_unreachable_blocks): Remove dominant GIMPLE blocks after
dominated blocks when debug stmts are present.
* simplify-rtx.c (delegitimize_mem_from_attrs): New.
* tree-ssa-live.c (remove_unused_locals): Skip debug stmts.
(set_var_live_on_entry): Likewise.
* loop-invariant.c (find_invariants_bb): Skip debug insns.
* cfglayout.c (curr_location, last_location): Make static.
(set_curr_insn_source_location): Don't avoid bouncing.
(get_curr_insn_source_location): New.
(get_curr_insn_block): New.
(duplicate_insn_chain): Handle debug insns.
* tree-ssa-forwprop.c (forward_propagate_addr_expr): Propagate
into debug stmts.
* common.opt (fcompare-debug): Move to sort order.
(fdump-unnumbered-links): Likewise.
(fvar-tracking-assignments): New.
(fvar-tracking-assignments-toggle): New.
* tree-ssa-dce.c (mark_stmt_necessary): Don't mark blocks
because of debug stmts.
(mark_stmt_if_obviously_necessary): Mark debug stmts.
(eliminate_unnecessary_stmts): Walk dominated blocks before
dominators.
* tree-ssa-ter.c (find_replaceable_in_bb): Skip debug stmts.
* ira.c (memref_used_between_p): Skip debug insns.
(update_equiv_regs): Likewise.
* sched-deps.c (sd_lists_size): Accept empty list.
(sd_init_insn): Mark debug insns.
(sd_finish_insn): Unmark them.
(sd_add_dep): Reject non-debug deps on debug insns.
(fixup_sched_groups): Give debug insns group treatment.
Skip debug insns.
(sched_analyze_reg): Don't mark debug insns for sched before call.
(sched_analyze_2): Handle debug insns.
(sched_analyze_insn): Compute next non-debug insn. Handle debug
insns.
(deps_analyze_insn): Handle debug insns.
(deps_start_bb): Skip debug insns.
(init_deps): Initialize last_debug_insn.
* tree-ssa.c (target_for_debug_bind): New.
(find_released_ssa_name): New.
(propagate_var_def_into_debug_stmts): New.
(propagate_defs_into_debug_stmts): New.
(verify_ssa): Skip debug bind stmts without values.
	(warn_uninitialized_vars): Skip debug stmts.
* target-def.h (TARGET_DELEGITIMIZE_ADDRESS): Set default.
* rtl.c (rtx_equal_p_cb): Handle VALUEs.
(rtx_equal_p): Likewise.
* ira-costs.c (scan_one_insn): Skip debug insns.
(process_bb_node_for_hard_reg_moves): Likewise.
* rtl.h (DEBUG_INSN_P): New.
(NONDEBUG_INSN_P): New.
(MAY_HAVE_DEBUG_INSNS): New.
(INSN_P): Accept debug insns.
(RTX_FRAME_RELATED_P): Likewise.
(INSN_DELETED_P): Likewise
(PAT_VAR_LOCATION_DECL): New.
(PAT_VAR_LOCATION_LOC): New.
	(PAT_VAR_LOCATION_STATUS): New.
(NOTE_VAR_LOCATION_DECL): Reimplement.
(NOTE_VAR_LOCATION_LOC): Likewise.
(NOTE_VAR_LOCATION_STATUS): Likewise.
(INSN_VAR_LOCATION): New.
(INSN_VAR_LOCATION_DECL): New.
(INSN_VAR_LOCATION_LOC): New.
(INSN_VAR_LOCATION_STATUS): New.
(gen_rtx_UNKNOWN_VAR_LOC): New.
(VAR_LOC_UNKNOWN_P): New.
(NOTE_DURING_CALL_P): New.
(SCHED_GROUP_P): Accept debug insns.
(emit_debug_insn_before): Declare.
(emit_debug_insn_before_noloc): Declare.
	(emit_debug_insn_before_setloc): Declare.
(emit_debug_insn_after): Declare.
(emit_debug_insn_after_noloc): Declare.
(emit_debug_insn_after_setloc): Declare.
(emit_debug_insn): Declare.
(make_debug_insn_raw): Declare.
(prev_nondebug_insn): Declare.
(next_nondebug_insn): Declare.
(delegitimize_mem_from_attrs): Declare.
(get_max_insn_count): Declare.
(wrap_constant): Declare.
(unwrap_constant): Declare.
(get_curr_insn_source_location): Declare.
(get_curr_insn_block): Declare.
* tree-inline.c (insert_debug_decl_map): New.
(processing_debug_stmt): New.
(remap_decl): Don't create new mappings in debug stmts.
(remap_gimple_op_r): Don't add references in debug stmts.
(copy_tree_body_r): Likewise.
(remap_gimple_stmt): Handle debug bind stmts.
(copy_bb): Skip debug stmts.
(copy_edges_for_bb): Likewise.
(copy_debug_stmt): New.
(copy_debug_stmts): New.
(copy_body): Copy debug stmts at the end.
(insert_init_debug_bind): New.
(insert_init_stmt): Take id. Skip and emit debug stmts.
(setup_one_parameter): Remap variable earlier, register debug
mapping.
(estimate_num_insns): Skip debug stmts.
(expand_call_inline): Preserve debug_map.
(optimize_inline_calls): Check for no debug_stmts left-overs.
(unsave_expr_now): Preserve debug_map.
(copy_gimple_seq_and_replace_locals): Likewise.
(tree_function_versioning): Check for no debug_stmts left-overs.
Init and destroy debug_map as needed. Split edges unconditionally.
(build_duplicate_type): Init and destroy debug_map as needed.
* tree-inline.h: Include gimple.h instead of pointer-set.h.
(struct copy_body_data): Add debug_stmts and debug_map.
* sched-int.h (struct ready_list): Add n_debug.
(struct deps): Add last_debug_insn.
(DEBUG_INSN_SCHED_P): New.
(BOUNDARY_DEBUG_INSN_P): New.
(SCHEDULE_DEBUG_INSN_P): New.
(sd_iterator_cond): Accept empty list.
* combine.c (create_log_links): Skip debug insns.
(combine_instructions): Likewise.
(cleanup_auto_inc_dec): New. From Jakub Jelinek: Make sure the
return value is always unshared.
(struct rtx_subst_pair): New.
(auto_adjust_pair): New.
(propagate_for_debug_subst): New.
(propagate_for_debug): New.
(try_combine): Skip debug insns. Propagate removed defs into
debug insns.
(next_nonnote_nondebug_insn): New.
(distribute_notes): Use it. Skip debug insns.
(distribute_links): Skip debug insns.
* tree-outof-ssa.c (set_location_for_edge): Likewise.
* resource.c (mark_target_live_regs): Likewise.
* var-tracking.c: Include cselib.h and target.h.
(enum micro_operation_type): Add MO_VAL_USE, MO_VAL_LOC, and
MO_VAL_SET.
(micro_operation_type_name): New.
(enum emit_note_where): Add EMIT_NOTE_AFTER_CALL_INSN.
(struct micro_operation_def): Update comments.
(decl_or_value): New type. Use instead of decls.
(struct emit_note_data_def): Add vars.
(struct attrs_def): Use decl_or_value.
(struct variable_tracking_info_def): Add permp, flooded.
(struct location_chain_def): Update comment.
(struct variable_part_def): Use decl_or_value.
(struct variable_def): Make var_part a variable length array.
(valvar_pool): New.
(scratch_regs): New.
(cselib_hook_called): New.
(dv_is_decl_p): New.
(dv_is_value_p): New.
(dv_as_decl): New.
(dv_as_value): New.
(dv_as_opaque): New.
(dv_onepart_p): New.
(dv_pool): New.
(IS_DECL_CODE): New.
(check_value_is_not_decl): New.
(dv_from_decl): New.
(dv_from_value): New.
(dv_htab_hash): New.
(variable_htab_hash): Use it.
(variable_htab_eq): Support values.
(variable_htab_free): Free from the right pool.
(attrs_list_member, attrs_list_insert): Use decl_or_value.
(attrs_list_union): Adjust.
(attrs_list_mpdv_union): New.
(tie_break_pointers): New.
(canon_value_cmp): New.
(unshare_variable): Return possibly-modified slot.
(vars_copy_1): Adjust.
(var_reg_decl_set): Adjust. Split out of...
(var_reg_set): ... this.
(get_init_value): Adjust.
(var_reg_delete_and_set): Adjust.
(var_reg_delete): Adjust.
(var_regno_delete): Adjust.
(var_mem_decl_set): Split out of...
(var_mem_set): ... this.
(var_mem_delete_and_set): Adjust.
(var_mem_delete): Adjust.
(val_store): New.
(val_reset): New.
(val_resolve): New.
(variable_union): Adjust. Speed up merge of 1-part vars.
(variable_canonicalize): Use unshared slot.
(VALUED_RECURSED_INTO): New.
(find_loc_in_1pdv): New.
(struct dfset_merge): New.
(insert_into_intersection): New.
(intersect_loc_chains): New.
(loc_cmp): New.
(canonicalize_loc_order_check): New.
(canonicalize_values_mark): New.
(canonicalize_values_star): New.
(variable_merge_over_cur): New.
(variable_merge_over_src): New.
(dataflow_set_merge): New.
(dataflow_set_equiv_regs): New.
(remove_duplicate_values): New.
(struct dfset_post_merge): New.
(variable_post_merge_new_vals): New.
(variable_post_merge_perm_vals): New.
(dataflow_post_merge_adjust): New.
(find_mem_expr_in_1pdv): New.
(dataflow_set_preserve_mem_locs): New.
(dataflow_set_remove_mem_locs): New.
(dataflow_set_clear_at_call): New.
(onepart_variable_different_p): New.
(variable_different_p): Use it.
(dataflow_set_different_1): Adjust. Make detailed dump
more verbose.
(track_expr_p): Add need_rtl parameter. Don't generate rtl
if not needed.
(track_loc_p): Pass it true.
(struct count_use_info): New.
(find_use_val): New.
(replace_expr_with_values): New.
(log_op_type): New.
(use_type): New, partially split out of...
(count_uses): ... this. Count new micro-ops.
(count_uses_1): Adjust.
(count_stores): Adjust.
(count_with_sets): New.
(VAL_NEEDS_RESOLUTION): New.
(VAL_HOLDS_TRACK_EXPR): New.
(VAL_EXPR_IS_COPIED): New.
(VAL_EXPR_IS_CLOBBERED): New.
(add_uses): Adjust. Generate new micro-ops.
(add_uses_1): Adjust.
(add_stores): Generate new micro-ops.
(add_with_sets): New.
(find_src_status): Adjust.
(find_src_set_src): Adjust.
(compute_bb_dataflow): Use dataflow_set_clear_at_call.
	Handle new micro-ops.  Canonicalize value equivalences.
(vt_find_locations): Compute total size of hash tables for
dumping. Perform merge for var-tracking-assignments. Don't
disregard single-block loops.
(dump_attrs_list): Handle decl_or_value.
(dump_variable): Take variable. Deal with decl_or_value.
(dump_variable_slot): New.
(dump_vars): Use it.
(dump_dataflow_sets): Adjust.
(set_slot_part): New, extended to support one-part variables
after splitting out of...
(set_variable_part): ... this.
(clobber_slot_part): New, split out of...
(clobber_variable_part): ... this.
(delete_slot_part): New, split out of...
(delete_variable_part): .... this.
(check_wrap_constant): New.
(vt_expand_loc_callback): New.
(vt_expand_loc): New.
(emit_note_insn_var_location): Adjust. Handle values. Handle
EMIT_NOTE_AFTER_CALL_INSN.
(emit_notes_for_differences_1): Adjust. Handle values.
(emit_notes_for_differences_2): Likewise.
(emit_notes_for_differences): Adjust.
(emit_notes_in_bb): Take pointer to set. Emit AFTER_CALL_INSN
notes. Adjust. Handle new micro-ops.
(vt_add_function_parameters): Adjust. Create and bind values.
(vt_initialize): Adjust. Initialize scratch_regs and
valvar_pool, flooded and perm.. Initialize and use cselib. Log
operations. Move some code to count_with_sets and add_with_sets.
(delete_debug_insns): New.
(vt_debug_insns_local): New.
(vt_finalize): Release permp, valvar_pool, scratch_regs. Finish
cselib.
(var_tracking_main): If var-tracking-assignments is enabled
but var-tracking isn't, delete debug insns and leave. Likewise
if we exceed limits or fail the stack adjustments tests, and
after all var-tracking processing.
More in var-tracking, from Jakub Jelinek <jakub@redhat.com>:
(dataflow_set): Add traversed_vars.
(value_chain, const_value_chain): New typedefs.
(value_chain_pool, value_chains): New variables.
(value_chain_htab_hash, value_chain_htab_eq, add_value_chain,
add_value_chains, add_cselib_value_chains, remove_value_chain,
remove_value_chains, remove_cselib_value_chains): New functions.
(shared_hash_find_slot_unshare_1, shared_hash_find_slot_1,
shared_hash_find_slot_noinsert_1, shared_hash_find_1): New
static inlines.
(shared_hash_find_slot_unshare, shared_hash_find_slot,
shared_hash_find_slot_noinsert, shared_hash_find): Update.
(dst_can_be_shared): New variable.
(unshare_variable): Unshare set->vars if shared, use shared_hash_*.
Clear dst_can_be_shared. If set->traversed_vars is non-NULL and
different from set->vars, look up slot again instead of using the
passed in slot.
(dataflow_set_init): Initialize traversed_vars.
(variable_union): Use shared_hash_*. Use initially NO_INSERT
lookup if set->vars is shared. Don't keep slot cleared before
calling unshare_variable. Unshare set->vars if needed. Adjust
unshare_variable callers. Clear dst_can_be_shared if needed.
Even ->refcount == 1 vars must be unshared if set->vars is shared
and var needs to be modified.
(dataflow_set_union): Set traversed_vars during canonicalization.
(VALUE_CHANGED, DECL_CHANGED): Define.
(set_dv_changed, dv_changed_p): New static inlines.
(track_expr_p): Clear DECL_CHANGED.
(dump_dataflow_sets): Set it.
(variable_was_changed): Call set_dv_changed.
(emit_note_insn_var_location): Likewise.
(changed_variables_stack): New variable.
(check_changed_vars_1, check_changed_vars_2): New functions.
(emit_notes_for_changes): Do nothing if changed_variables is
empty. Traverse changed_variables with check_changed_vars_1,
call check_changed_vars_2 on each changed_variables_stack entry.
(emit_notes_in_bb): Add SET argument. Just clear it at the
beginning, use it instead of local &set, don't destroy it at the
end.
(vt_emit_notes): Call dataflow_set_clear early on all
VTI(bb)->out sets, never use them, instead use emit_notes_in_bb
computed set, dataflow_set_clear also VTI(bb)->in when we are
done with the basic block. Initialize changed_variables_stack,
free it afterwards. If ENABLE_CHECKING verify that after noting
differences to an empty set value_chains hash table is empty.
(vt_initialize): Initialize value_chains and value_chain_pool.
(vt_finalize): Delete value_chains htab, free value_chain_pool.
(variable_tracking_main): Call dump_dataflow_sets before calling
vt_emit_notes, not after it.
* tree-flow.h (propagate_defs_into_debug_stmts): Declare.
(propagate_var_def_into_debug_stmts): Declare.
* df-problems.c (df_lr_bb_local_compute): Skip debug insns.
(df_set_note): Reject debug insns.
(df_whole_mw_reg_dead_p): Take added_notes_p argument. Don't
add notes to debug insns.
(df_note_bb_compute): Adjust. Likewise.
(df_simulate_uses): Skip debug insns.
(df_simulate_initialize_backwards): Likewise.
* reg-stack.c (subst_stack_regs_in_debug_insn): New.
(subst_stack_regs_pat): Reject debug insns.
(convert_regs_1): Handle debug insns.
* Makefile.in (TREE_INLINE_H): Take pointer-set.h from GIMPLE_H.
(print-rtl.o): Depend on cselib.h.
(cselib.o): Depend on TREE_PASS_H.
(var-tracking.o): Depend on cselib.h and TARGET_H.
* sched-rgn.c (rgn_estimate_number_of_insns): Discount
debug insns.
(init_ready_list): Skip boundary debug insns.
(add_branch_dependences): Skip debug insns.
(free_block_dependencies): Check for blocks with only debug
insns.
(compute_priorities): Likewise.
* gimple.c (gss_for_code): Handle GIMPLE_DEBUG.
(gimple_build_with_ops_stat): Take subcode as unsigned. Adjust
all callers.
(gimple_build_debug_bind_stat): New.
(empty_body_p): Skip debug stmts.
(gimple_has_side_effects): Likewise.
(gimple_rhs_has_side_effects): Likewise.
* gimple.h (enum gimple_debug_subcode, GIMPLE_DEBUG_BIND): New.
(gimple_build_debug_bind_stat): Declare.
(gimple_build_debug_bind): Define.
(is_gimple_debug): New.
(gimple_debug_bind_p): New.
(gimple_debug_bind_get_var): New.
(gimple_debug_bind_get_value): New.
(gimple_debug_bind_get_value_ptr): New.
(gimple_debug_bind_set_var): New.
(gimple_debug_bind_set_value): New.
(GIMPLE_DEBUG_BIND_NOVALUE): New internal temporary macro.
(gimple_debug_bind_reset_value): New.
(gimple_debug_bind_has_value_p): New.
(gsi_next_nondebug): New.
(gsi_prev_nondebug): New.
(gsi_start_nondebug_bb): New.
(gsi_last_nondebug_bb): New.
* sched-vis.c (print_pattern): Handle VAR_LOCATION.
(print_insn): Handle DEBUG_INSN.
* tree-cfg.c (remove_bb): Walk stmts backwards. Let loc
of first insn prevail.
(first_stmt): Skip debug stmts.
(first_non_label_stmt): Likewise.
(last_stmt): Likewise.
(has_zero_uses_1): New.
(single_imm_use_1): New.
(verify_gimple_debug): New.
(verify_types_in_gimple_stmt): Handle debug stmts.
(verify_stmt): Likewise.
(debug_loop_num): Skip debug stmts.
(remove_edge_and_dominated_blocks): Remove dominators last.
	* tree-ssa-reassoc.c (rewrite_expr_tree): Propagate into
debug stmts.
(linearize_expr): Likewise.
* config/i386/i386.c (ix86_delegitimize_address): Call
default implementation.
* config/ia64/ia64.c (ia64_safe_itanium_class): Handle debug
insns.
(group_barrier_needed): Skip debug insns.
(emit_insn_group_barriers): Likewise.
(emit_all_insn_group_barriers): Likewise.
(ia64_variable_issue): Handle debug insns.
(ia64_dfa_new_cycle): Likewise.
(final_emit_insn_group_barriers): Skip debug insns.
(ia64_dwarf2out_def_steady_cfa): Take frame argument. Don't
def cfa without frame.
(process_set): Likewise.
(process_for_unwind_directive): Pass frame on.
* config/rs6000/rs6000.c (TARGET_DELEGITIMIZE_ADDRESS): Define.
(rs6000_delegitimize_address): New.
(rs6000_debug_adjust_cost): Handle debug insns.
(is_microcoded_insn): Likewise.
(is_cracked_insn): Likewise.
(is_nonpipeline_insn): Likewise.
(insn_must_be_first_in_group): Likewise.
(insn_must_be_last_in_group): Likewise.
(force_new_group): Likewise.
* cfgrtl.c (rtl_split_block): Emit INSN_DELETED note if block
contains only debug insns.
(rtl_merge_blocks): Skip debug insns.
(purge_dead_edges): Likewise.
(rtl_block_ends_with_call_p): Skip debug insns.
* dce.c (deletable_insn_p): Handle VAR_LOCATION.
(mark_reg_dependencies): Skip debug insns.
* params.def (PARAM_MIN_NONDEBUG_INSN_UID): New.
* tree-ssanames.c (release_ssa_name): Propagate def into
debug stmts.
* tree-ssa-threadedge.c
(record_temporary_equivalences_from_stmts): Skip debug stmts.
* regcprop.c (replace_oldest_value_addr): Skip debug insns.
(replace_oldest_value_mem): Use ALL_REGS for debug insns.
(copyprop_hardreg_forward_1): Handle debug insns.
* reload1.c (reload): Skip debug insns. Replace unassigned
pseudos in debug insns with their equivalences.
(eliminate_regs_in_insn): Skip debug insns.
(emit_input_reload_insns): Skip debug insns at first, adjust
them later.
* tree-ssa-operands.c (add_virtual_operand): Reject debug stmts.
(get_indirect_ref_operands): Pass opf_no_vops on.
(get_expr_operands): Likewise. Skip debug stmts.
(parse_ssa_operands): Scan debug insns with opf_no_vops.
gcc/testsuite/ChangeLog:
* gcc.dg/guality/guality.c: New.
* gcc.dg/guality/guality.h: New.
* gcc.dg/guality/guality.exp: New.
* gcc.dg/guality/example.c: New.
* lib/gcc-dg.exp (cleanup-dump): Remove .gk files.
(cleanup-saved-temps): Likewise, .gkd files too.
gcc/cp/ChangeLog:
* cp-tree.h (TFF_NO_OMIT_DEFAULT_TEMPLATE_ARGUMENTS): New.
* cp-lang.c (cxx_dwarf_name): Pass it.
* error.c (count_non_default_template_args): Take flags as
argument. Adjust all callers. Skip counting of default
arguments if the new flag is given.
ChangeLog:
* Makefile.tpl (BUILD_CONFIG): Default to bootstrap-debug.
* Makefile.in: Rebuilt.
contrib/ChangeLog:
* compare-debug: Look for .gkd files and compare them.
config/ChangeLog:
* bootstrap-debug.mk: Add comments.
* bootstrap-debug-big.mk: New.
* bootstrap-debug-lean.mk: New.
* bootstrap-debug-ckovw.mk: Add comments.
* bootstrap-debug-lib.mk: Drop CFLAGS for stages. Use -g0
for TFLAGS in stage1. Drop -fvar-tracking-assignments-toggle.
From-SVN: r151312
2009-09-02 04:42:21 +02:00
|
|
|
if (gimple_code (stmt) == GIMPLE_NOP
|
|
|
|
|| gimple_code (stmt) == GIMPLE_LABEL
|
|
|
|
|| is_gimple_debug (stmt))
|
2006-02-07 19:31:27 +01:00
|
|
|
continue;
|
|
|
|
|
|
|
|
/* If the statement has volatile operands, then we assume we
|
|
|
|
can not thread through this block. This is overly
|
|
|
|
conservative in some ways. */
|
2008-07-28 16:33:56 +02:00
|
|
|
if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
|
2006-02-07 19:31:27 +01:00
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* If duplicating this block is going to cause too much code
|
|
|
|
expansion, then do not thread through this block. */
|
|
|
|
stmt_count++;
|
|
|
|
if (stmt_count > max_stmt_count)
|
|
|
|
return NULL;
|
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
/* If this is not a statement that sets an SSA_NAME to a new
|
2006-02-07 19:31:27 +01:00
|
|
|
value, then do not try to simplify this statement as it will
|
|
|
|
not simplify in any way that is helpful for jump threading. */
|
2008-07-28 16:33:56 +02:00
|
|
|
if ((gimple_code (stmt) != GIMPLE_ASSIGN
|
|
|
|
|| TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
|
|
|
|
&& (gimple_code (stmt) != GIMPLE_CALL
|
|
|
|
|| gimple_call_lhs (stmt) == NULL_TREE
|
|
|
|
|| TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME))
|
2006-02-07 19:31:27 +01:00
|
|
|
continue;
|
|
|
|
|
2008-04-30 19:21:55 +02:00
|
|
|
/* The result of __builtin_object_size depends on all the arguments
|
|
|
|
of a phi node. Temporarily using only one edge produces invalid
|
|
|
|
results. For example
|
|
|
|
|
|
|
|
if (x < 6)
|
|
|
|
goto l;
|
|
|
|
else
|
|
|
|
goto l;
|
|
|
|
|
|
|
|
l:
|
|
|
|
r = PHI <&w[2].a[1](2), &a.a[6](3)>
|
|
|
|
__builtin_object_size (r, 0)
|
|
|
|
|
|
|
|
The result of __builtin_object_size is defined to be the maximum of
|
|
|
|
remaining bytes. If we use only one edge on the phi, the result will
|
2009-01-16 16:01:24 +01:00
|
|
|
change to be the remaining bytes for the corresponding phi argument.
|
|
|
|
|
|
|
|
Similarly for __builtin_constant_p:
|
|
|
|
|
|
|
|
r = PHI <1(2), 2(3)>
|
|
|
|
__builtin_constant_p (r)
|
|
|
|
|
|
|
|
Both PHI arguments are constant, but x ? 1 : 2 is still not
|
|
|
|
constant. */
|
2008-04-30 19:21:55 +02:00
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
if (is_gimple_call (stmt))
|
2008-04-30 19:21:55 +02:00
|
|
|
{
|
2008-07-28 16:33:56 +02:00
|
|
|
tree fndecl = gimple_call_fndecl (stmt);
|
2009-01-16 16:01:24 +01:00
|
|
|
if (fndecl
|
|
|
|
&& (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE
|
|
|
|
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P))
|
2008-04-30 19:21:55 +02:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2006-02-07 19:31:27 +01:00
|
|
|
/* At this point we have a statement which assigns an RHS to an
|
|
|
|
SSA_VAR on the LHS. We want to try and simplify this statement
|
|
|
|
to expose more context sensitive equivalences which in turn may
|
2009-11-25 11:55:54 +01:00
|
|
|
allow us to simplify the condition at the end of the loop.
|
2006-02-07 19:31:27 +01:00
|
|
|
|
|
|
|
Handle simple copy operations as well as implied copies from
|
|
|
|
ASSERT_EXPRs. */
|
2008-07-28 16:33:56 +02:00
|
|
|
if (gimple_assign_single_p (stmt)
|
|
|
|
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
|
|
|
|
cached_lhs = gimple_assign_rhs1 (stmt);
|
|
|
|
else if (gimple_assign_single_p (stmt)
|
|
|
|
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
|
|
|
|
cached_lhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
|
2006-02-07 19:31:27 +01:00
|
|
|
else
|
|
|
|
{
|
|
|
|
/* A statement that is not a trivial copy or ASSERT_EXPR.
|
|
|
|
We're going to temporarily copy propagate the operands
|
|
|
|
and see if that allows us to simplify this statement. */
|
2008-07-28 16:33:56 +02:00
|
|
|
tree *copy;
|
2006-02-07 19:31:27 +01:00
|
|
|
ssa_op_iter iter;
|
|
|
|
use_operand_p use_p;
|
|
|
|
unsigned int num, i = 0;
|
|
|
|
|
|
|
|
num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
|
|
|
|
copy = XCNEWVEC (tree, num);
|
|
|
|
|
|
|
|
/* Make a copy of the uses & vuses into USES_COPY, then cprop into
|
|
|
|
the operands. */
|
|
|
|
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
|
|
|
|
{
|
|
|
|
tree tmp = NULL;
|
|
|
|
tree use = USE_FROM_PTR (use_p);
|
|
|
|
|
|
|
|
copy[i++] = use;
|
|
|
|
if (TREE_CODE (use) == SSA_NAME)
|
|
|
|
tmp = SSA_NAME_VALUE (use);
|
2008-07-08 18:11:06 +02:00
|
|
|
if (tmp)
|
2006-02-07 19:31:27 +01:00
|
|
|
SET_USE (use_p, tmp);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Try to fold/lookup the new expression. Inserting the
|
2008-07-28 16:33:56 +02:00
|
|
|
expression into the hash table is unlikely to help. */
|
|
|
|
if (is_gimple_call (stmt))
|
|
|
|
cached_lhs = fold_call_stmt (stmt, false);
|
2006-02-07 19:31:27 +01:00
|
|
|
else
|
2008-07-28 16:33:56 +02:00
|
|
|
cached_lhs = fold_assignment_stmt (stmt);
|
2006-02-07 19:31:27 +01:00
|
|
|
|
2008-07-28 16:33:56 +02:00
|
|
|
if (!cached_lhs
|
|
|
|
|| (TREE_CODE (cached_lhs) != SSA_NAME
|
|
|
|
&& !is_gimple_min_invariant (cached_lhs)))
|
|
|
|
cached_lhs = (*simplify) (stmt, stmt);
|
2009-11-25 11:55:54 +01:00
|
|
|
|
2006-02-07 19:31:27 +01:00
|
|
|
/* Restore the statement's original uses/defs. */
|
|
|
|
i = 0;
|
|
|
|
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
|
|
|
|
SET_USE (use_p, copy[i++]);
|
|
|
|
|
|
|
|
free (copy);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Record the context sensitive equivalence if we were able
|
|
|
|
to simplify this statement. */
|
|
|
|
if (cached_lhs
|
|
|
|
&& (TREE_CODE (cached_lhs) == SSA_NAME
|
|
|
|
|| is_gimple_min_invariant (cached_lhs)))
|
2008-07-28 16:33:56 +02:00
|
|
|
record_temporary_equivalence (gimple_get_lhs (stmt), cached_lhs, stack);
|
2006-02-07 19:31:27 +01:00
|
|
|
}
|
|
|
|
return stmt;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch GIMPLE_COND
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */

static tree
simplify_control_stmt_condition (edge e,
				 gimple stmt,
				 gimple dummy_cond,
				 tree (*simplify) (gimple, gimple),
				 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;
  enum gimple_code code = gimple_code (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (code == GIMPLE_COND)
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = gimple_cond_lhs (stmt);
      op1 = gimple_cond_rhs (stmt);
      cond_code = gimple_cond_code (stmt);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op0);
	  if (tmp)
	    op0 = tmp;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op1);
	  if (tmp)
	    op1 = tmp;
	}

      if (handle_dominating_asserts)
	{
	  /* Now see if the operand was consumed by an ASSERT_EXPR
	     which dominates E->src.  If so, we want to replace the
	     operand with the LHS of the ASSERT_EXPR.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    op0 = lhs_of_dominating_assert (op0, e->src, stmt);

	  if (TREE_CODE (op1) == SSA_NAME)
	    op1 = lhs_of_dominating_assert (op1, e->src, stmt);
	}

      /* We may need to canonicalize the comparison.  For
	 example, op0 might be a constant while op1 is an
	 SSA_NAME.  Failure to canonicalize will cause us to
	 miss threading opportunities.  */
      if (tree_swap_operands_p (op0, op1, false))
	{
	  tree tmp;
	  cond_code = swap_tree_comparison (cond_code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      /* Stuff the operator and operands into our dummy conditional
	 expression.  */
      gimple_cond_set_code (dummy_cond, cond_code);
      gimple_cond_set_lhs (dummy_cond, op0);
      gimple_cond_set_rhs (dummy_cond, op1);

      /* We absolutely do not care about any type conversions
         we only care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
      /* Strip any conversions the fold result was wrapped in; only the
	 zero/nonzero-ness of the value matters here.  */
      if (cached_lhs)
	while (CONVERT_EXPR_P (cached_lhs))
          cached_lhs = TREE_OPERAND (cached_lhs, 0);

      /* Only emit the deferred overflow warnings if folding actually
	 produced an invariant we are going to act on.  */
      fold_undefer_overflow_warnings ((cached_lhs
                                       && is_gimple_min_invariant (cached_lhs)),
				      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
	 then use the pass specific callback to simplify the condition.  */
      if (!cached_lhs
          || !is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (dummy_cond, stmt);

      return cached_lhs;
    }

  if (code == GIMPLE_SWITCH)
    cond = gimple_switch_index (stmt);
  else if (code == GIMPLE_GOTO)
    cond = gimple_goto_dest (stmt);
  else
    gcc_unreachable ();

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalence chains.

	 It is possible to get loops in the SSA_NAME_VALUE chains
	 (consider threading the backedge of a loop where we have
	 a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
	  && TREE_CODE (cached_lhs) == SSA_NAME
	  && SSA_NAME_VALUE (cached_lhs))
	cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
	 update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
	cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
	 pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}
|
|
|
|
|
2012-10-23 22:33:49 +02:00
|
|
|
/* Return TRUE if the statement at the end of e->dest depends on
   the output of any statement in BB.   Otherwise return FALSE.

   This is used when we are threading a backedge and need to ensure
   that temporary equivalences from BB do not affect the condition
   in e->dest.  */

static bool
cond_arg_set_in_bb (edge e, basic_block bb)
{
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple last = last_stmt (e->dest);

  /* E->dest does not have to end with a control transferring
     instruction.  This can occur when we try to extend a jump
     threading opportunity deeper into the CFG.  In that case
     it is safe for this check to return false.  */
  if (!last)
    return false;

  /* Only control transfer statements are of interest here; anything
     else means there is no condition whose arguments could be set.  */
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_GOTO
      && gimple_code (last) != GIMPLE_SWITCH)
    return false;

  /* Walk every real and virtual use of the control statement; a use
     defined by a non-PHI statement in BB means the condition depends
     on BB's output.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      tree use = USE_FROM_PTR (use_p);

      if (TREE_CODE (use) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (use)) != GIMPLE_PHI
	  && gimple_bb (SSA_NAME_DEF_STMT (use)) == bb)
	return true;
    }
  return false;
}
|
|
|
|
|
2012-11-06 18:58:52 +01:00
|
|
|
/* Declare a stack-allocated VEC of trees for use by
   propagate_threaded_block_debug_into below.  */
DEF_VEC_O(tree);
DEF_VEC_ALLOC_O_STACK(tree);
#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
|
|
|
|
|
2012-11-04 19:44:13 +01:00
|
|
|
/* Copy debug stmts from DEST's chain of single predecessors up to
   SRC, so that we don't lose the bindings as PHI nodes are introduced
   when DEST gains new predecessors.  */
void
propagate_threaded_block_debug_into (basic_block dest, basic_block src)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* Nothing to do unless DEST currently has exactly one predecessor;
     the walk below relies on single_pred ().  */
  if (!single_pred_p (dest))
    return;

  gcc_checking_assert (dest != src);

  gimple_stmt_iterator gsi = gsi_after_labels (dest);
  int i = 0;
  const int alloc_count = 16; // ?? Should this be a PARAM?

  /* Estimate the number of debug vars overridden in the beginning of
     DEST, to tell how many we're going to need to begin with.  */
  for (gimple_stmt_iterator si = gsi;
       i * 4 <= alloc_count * 3 && !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      if (!is_gimple_debug (stmt))
	break;
      i++;
    }

  /* Exactly one of these is used at any time: FEWVARS for small
     counts, VARS (a pointer set) once the count grows.  */
  VEC(tree, stack) *fewvars = NULL;
  pointer_set_t *vars = NULL;

  /* If we're already starting with 3/4 of alloc_count, go for a
     pointer_set, otherwise start with an unordered stack-allocated
     VEC.  */
  if (i * 4 > alloc_count * 3)
    vars = pointer_set_create ();
  else if (alloc_count)
    fewvars = VEC_alloc (tree, stack, alloc_count);

  /* Now go through the initial debug stmts in DEST again, this time
     actually inserting in VARS or FEWVARS.  Don't bother checking for
     duplicates in FEWVARS.  */
  for (gimple_stmt_iterator si = gsi; !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      if (!is_gimple_debug (stmt))
	break;

      tree var;

      if (gimple_debug_bind_p (stmt))
	var = gimple_debug_bind_get_var (stmt);
      else if (gimple_debug_source_bind_p (stmt))
	var = gimple_debug_source_bind_get_var (stmt);
      else
	gcc_unreachable ();

      if (vars)
	pointer_set_insert (vars, var);
      else
	VEC_quick_push (tree, fewvars, var);
    }

  /* Walk the chain of single predecessors, copying debug stmts for
     variables not already bound at the start of DEST.  */
  basic_block bb = dest;

  do
    {
      bb = single_pred (bb);
      /* Iterate backwards so the latest binding of each variable in
	 BB is the one seen (and recorded) first.  */
      for (gimple_stmt_iterator si = gsi_last_bb (bb);
	   !gsi_end_p (si); gsi_prev (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  if (!is_gimple_debug (stmt))
	    continue;

	  tree var;

	  if (gimple_debug_bind_p (stmt))
	    var = gimple_debug_bind_get_var (stmt);
	  else if (gimple_debug_source_bind_p (stmt))
	    var = gimple_debug_source_bind_get_var (stmt);
	  else
	    gcc_unreachable ();

	  /* Discard debug bind overlaps.  ??? Unlike stmts from src,
	     copied into a new block that will precede BB, debug bind
	     stmts in bypassed BBs may actually be discarded if
	     they're overwritten by subsequent debug bind stmts, which
	     might be a problem once we introduce stmt frontier notes
	     or somesuch.  Adding `&& bb == src' to the condition
	     below will preserve all potentially relevant debug
	     notes.  */
	  if (vars && pointer_set_insert (vars, var))
	    continue;
	  else if (!vars)
	    {
	      /* Linear scan of FEWVARS; acceptable since its length is
		 bounded by ALLOC_COUNT.  */
	      int i = VEC_length (tree, fewvars);
	      while (i--)
		if (VEC_index (tree, fewvars, i) == var)
		  break;
	      if (i >= 0)
		continue;

	      if (VEC_length (tree, fewvars) < alloc_count)
		VEC_quick_push (tree, fewvars, var);
	      else
		{
		  /* FEWVARS is full: migrate its contents into a
		     pointer set and switch representations.  */
		  vars = pointer_set_create ();
		  for (i = 0; i < alloc_count; i++)
		    pointer_set_insert (vars, VEC_index (tree, fewvars, i));
		  VEC_free (tree, stack, fewvars);
		  pointer_set_insert (vars, var);
		}
	    }

	  stmt = gimple_copy (stmt);
	  /* ??? Should we drop the location of the copy to denote
	     they're artificial bindings? */
	  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
	}
    }
  while (bb != src && single_pred_p (bb));

  if (vars)
    pointer_set_destroy (vars);
  else if (fewvars)
    VEC_free (tree, stack, fewvars);
}
|
|
|
|
|
2011-03-28 20:33:42 +02:00
|
|
|
/* TAKEN_EDGE represents the an edge taken as a result of jump threading.
|
|
|
|
See if we can thread around TAKEN_EDGE->dest as well. If so, return
|
|
|
|
the edge out of TAKEN_EDGE->dest that we can statically compute will be
|
|
|
|
traversed.
|
|
|
|
|
|
|
|
We are much more restrictive as to the contents of TAKEN_EDGE->dest
|
|
|
|
as the path isolation code in tree-ssa-threadupdate.c isn't prepared
|
|
|
|
to handle copying intermediate blocks on a threaded path.
|
|
|
|
|
|
|
|
Long term a more consistent and structured approach to path isolation
|
|
|
|
would be a huge help. */
|
|
|
|
static edge
|
|
|
|
thread_around_empty_block (edge taken_edge,
|
|
|
|
gimple dummy_cond,
|
|
|
|
bool handle_dominating_asserts,
|
|
|
|
tree (*simplify) (gimple, gimple),
|
|
|
|
bitmap visited)
|
|
|
|
{
|
|
|
|
basic_block bb = taken_edge->dest;
|
|
|
|
gimple_stmt_iterator gsi;
|
|
|
|
gimple stmt;
|
|
|
|
tree cond;
|
|
|
|
|
|
|
|
/* This block must have a single predecessor (E->dest). */
|
|
|
|
if (!single_pred_p (bb))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* This block must have more than one successor. */
|
|
|
|
if (single_succ_p (bb))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* This block can have no PHI nodes. This is overly conservative. */
|
|
|
|
if (!gsi_end_p (gsi_start_phis (bb)))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* Skip over DEBUG statements at the start of the block. */
|
|
|
|
gsi = gsi_start_nondebug_bb (bb);
|
|
|
|
|
|
|
|
if (gsi_end_p (gsi))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* This block can have no statements other than its control altering
|
|
|
|
statement. This is overly conservative. */
|
|
|
|
stmt = gsi_stmt (gsi);
|
|
|
|
if (gimple_code (stmt) != GIMPLE_COND
|
|
|
|
&& gimple_code (stmt) != GIMPLE_GOTO
|
|
|
|
&& gimple_code (stmt) != GIMPLE_SWITCH)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
/* Extract and simplify the condition. */
|
|
|
|
cond = simplify_control_stmt_condition (taken_edge, stmt, dummy_cond,
|
|
|
|
simplify, handle_dominating_asserts);
|
|
|
|
|
|
|
|
/* If the condition can be statically computed and we have not already
|
|
|
|
visited the destination edge, then add the taken edge to our thread
|
|
|
|
path. */
|
|
|
|
if (cond && is_gimple_min_invariant (cond))
|
|
|
|
{
|
|
|
|
edge taken_edge = find_taken_edge (bb, cond);
|
|
|
|
|
|
|
|
if (bitmap_bit_p (visited, taken_edge->dest->index))
|
|
|
|
return NULL;
|
|
|
|
bitmap_set_bit (visited, taken_edge->dest->index);
|
|
|
|
return taken_edge;
|
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2011-06-16 23:52:00 +02:00
|
|
|
/* E1 and E2 are edges into the same basic block. Return TRUE if the
|
|
|
|
PHI arguments associated with those edges are equal or there are no
|
|
|
|
PHI arguments, otherwise return FALSE. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
phi_args_equal_on_edges (edge e1, edge e2)
|
|
|
|
{
|
|
|
|
gimple_stmt_iterator gsi;
|
|
|
|
int indx1 = e1->dest_idx;
|
|
|
|
int indx2 = e2->dest_idx;
|
|
|
|
|
|
|
|
for (gsi = gsi_start_phis (e1->dest); !gsi_end_p (gsi); gsi_next (&gsi))
|
|
|
|
{
|
|
|
|
gimple phi = gsi_stmt (gsi);
|
|
|
|
|
|
|
|
if (!operand_equal_p (gimple_phi_arg_def (phi, indx1),
|
|
|
|
gimple_phi_arg_def (phi, indx2), 0))
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
2011-03-28 20:33:42 +02:00
|
|
|
|
2006-02-07 19:31:27 +01:00
|
|
|
/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.

   DUMMY_COND is a shared cond_expr used by condition simplification as scratch,
   to avoid allocating memory.

   HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
   the simplified condition with left-hand sides of ASSERT_EXPRs they are
   used in.

   STACK is used to undo temporary equivalences created during the walk of
   E->dest.

   SIMPLIFY is a pass-specific function used to simplify statements.  */

void
thread_across_edge (gimple dummy_cond,
		    edge e,
		    bool handle_dominating_asserts,
		    VEC(tree, heap) **stack,
		    tree (*simplify) (gimple, gimple))
{
  gimple stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      if (cond_arg_set_in_bb (e, e->dest))
	goto fail;
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_GOTO
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
					      handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
	{
	  edge taken_edge = find_taken_edge (e->dest, cond);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);
	  bitmap visited;
	  edge e2;

	  /* Threading E->dest to itself would create a trivial loop;
	     give up.  */
	  if (dest == e->dest)
	    goto fail;

	  /* DEST could be null for a computed jump to an absolute
	     address.  If DEST is not null, then see if we can thread
	     through it as well, this helps capture secondary effects
	     of threading without having to re-run DOM or VRP.  */
	  if (dest
	      && ((e->flags & EDGE_DFS_BACK) == 0
		  || ! cond_arg_set_in_bb (taken_edge, e->dest)))
	    {
	      /* We don't want to thread back to a block we have already
		 visited.  This may be overly conservative.  */
	      visited = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (visited, dest->index);
	      bitmap_set_bit (visited, e->dest->index);
	      /* Keep extending the threaded path through empty blocks
		 until no further progress is possible.  */
	      do
		{
		  e2 = thread_around_empty_block (taken_edge,
						  dummy_cond,
						  handle_dominating_asserts,
						  simplify,
						  visited);
		  if (e2)
		    taken_edge = e2;
		}
	      while (e2);
	      BITMAP_FREE (visited);
	    }

	  remove_temporary_equivalences (stack);
	  if (!taken_edge)
	    return;
	  propagate_threaded_block_debug_into (taken_edge->dest, e->dest);
	  register_jump_thread (e, taken_edge, NULL);
	  return;
	}
    }

 /* We were unable to determine what out edge from E->dest is taken.  However,
    we might still be able to thread through successors of E->dest.  This
    often occurs when E->dest is a joiner block which then fans back out
    based on redundant tests.

    If so, we'll copy E->dest and redirect the appropriate predecessor to
    the copy.  Within the copy of E->dest, we'll thread one or more edges
    to points deeper in the CFG.

    This is a stopgap until we have a more structured approach to path
    isolation.  */
  {
    edge e2, e3, taken_edge;
    edge_iterator ei;
    bool found = false;
    bitmap visited = BITMAP_ALLOC (NULL);

    /* Look at each successor of E->dest to see if we can thread through it.  */
    FOR_EACH_EDGE (taken_edge, ei, e->dest->succs)
      {
	/* Avoid threading to any block we have already visited.  */
	bitmap_clear (visited);
	bitmap_set_bit (visited, taken_edge->dest->index);
	bitmap_set_bit (visited, e->dest->index);

	/* Record whether or not we were able to thread through a successor
	   of E->dest.  */
	found = false;
	e3 = taken_edge;
	do
	  {
	    /* For backedges, only extend the path when the condition does
	       not depend on statements in E->dest (see the comment at the
	       top of this function).  */
	    if ((e->flags & EDGE_DFS_BACK) == 0
		|| ! cond_arg_set_in_bb (e3, e->dest))
	      e2 = thread_around_empty_block (e3,
					      dummy_cond,
					      handle_dominating_asserts,
					      simplify,
					      visited);
	    else
	      e2 = NULL;

	    if (e2)
	      {
		e3 = e2;
		found = true;
	      }
	  }
	while (e2);

	/* If we were able to thread through a successor of E->dest, then
	   record the jump threading opportunity.  */
	if (found)
	  {
	    edge tmp;
	    /* If there is already an edge from the block to be duplicated
	       (E2->src) to the final target (E3->dest), then make sure that
	       the PHI args associated with the edges E2 and E3 are the
	       same.  */
	    tmp = find_edge (taken_edge->src, e3->dest);
	    if (!tmp || phi_args_equal_on_edges (tmp, e3))
	      {
		propagate_threaded_block_debug_into (e3->dest,
						     taken_edge->dest);
		register_jump_thread (e, taken_edge, e3);
	      }
	  }
      }
    BITMAP_FREE (visited);
  }

 fail:
  remove_temporary_equivalences (stack);
}
|