re PR tree-optimization/32604 (miscompilation at -O2)

2007-07-04  Daniel Berlin  <dberlin@dberlin.org>

	PR tree-optimization/32604
	PR tree-optimization/32606
	
	* tree-ssa-pre.c (bb_bitmap_sets): Removed antic_safe_loads.
	(compute_antic_safe): Removed.
	(ANTIC_SAFE_LOADS): Ditto.
	(compute_antic_aux): Don't print ANTIC_SAFE_LOADS.
	(execute_pre): Don't call compute_antic_safe.
	(vuse_equiv): New function.
	(make_values_for_stmt): Use it.
	* tree-ssa-sccvn.c (set_ssa_val_to): Remove assert, since it is
	not always true.

From-SVN: r126338
Daniel Berlin 2007-07-04 22:11:14 +00:00 committed by Daniel Berlin
parent 44190aed5e
commit c1130b62fc
6 changed files with 128 additions and 89 deletions

gcc/ChangeLog

@@ -1,3 +1,18 @@
2007-07-04  Daniel Berlin  <dberlin@dberlin.org>

	PR tree-optimization/32604
	PR tree-optimization/32606

	* tree-ssa-pre.c (bb_bitmap_sets): Removed antic_safe_loads.
	(compute_antic_safe): Removed.
	(ANTIC_SAFE_LOADS): Ditto.
	(compute_antic_aux): Don't print ANTIC_SAFE_LOADS.
	(execute_pre): Don't call compute_antic_safe.
	(vuse_equiv): New function.
	(make_values_for_stmt): Use it.
	* tree-ssa-sccvn.c (set_ssa_val_to): Remove assert, since it is
	not always true.

2007-07-04  Anatoly Sokolov  <aesok@post.ru>

	PR target/31331

gcc/testsuite/ChangeLog

@@ -1,3 +1,8 @@
2007-07-04  Daniel Berlin  <dberlin@dberlin.org>

	* gcc.c-torture/compile/pr32606.c: New.
	* gfortran.fortran-torture/execute/pr32604.f90: New.

2007-07-04  Mark Mitchell  <mark@codesourcery.com>

	* gcc.dg/visibility-12.c: New test.

gcc/testsuite/gcc.c-torture/compile/pr32606.c

@@ -0,0 +1,30 @@
int inb(int);
void is870(unsigned int wkport, unsigned char j)
{
  unsigned int tmport;
  unsigned char i;
  for (i = 0; i < 16; i++)
    {
      tmport = wkport + 0x18;
      tmport += 0x07;
      while ((inb(tmport) & 0x80) == 0)
        {
          if ((inb(tmport) & 0x01) != 0)
            {
              tmport -= 0x06;
              tmport += 0x06;
            }
        }
      tmport = wkport + 0x14;
      tmport += 0x04;
      tmport += 0x07;
    widep_in1:
      if ((j & 0x01) != 0)
        {
          tmport -= 0x06;
          tmport += 0x06;
          goto widep_in1;
        }
      while ((inb(tmport) & 0x80) == 0) {}
    }
}

gcc/testsuite/gfortran.fortran-torture/execute/pr32604.f90

@@ -0,0 +1,61 @@
MODULE TEST
  IMPLICIT NONE
  INTEGER, PARAMETER :: dp=KIND(0.0D0)
  TYPE mulliken_restraint_type
    INTEGER :: ref_count
    REAL(KIND = dp) :: strength
    REAL(KIND = dp) :: TARGET
    INTEGER :: natoms
    INTEGER, POINTER, DIMENSION(:) :: atoms
  END TYPE mulliken_restraint_type
CONTAINS
  SUBROUTINE INIT(mulliken)
    TYPE(mulliken_restraint_type), INTENT(INOUT) :: mulliken
    ALLOCATE(mulliken%atoms(1))
    mulliken%atoms(1)=1
    mulliken%natoms=1
    mulliken%target=0
    mulliken%strength=0
  END SUBROUTINE INIT
  SUBROUTINE restraint_functional(mulliken_restraint_control,charges, &
                                  charges_deriv,energy,order_p)
    TYPE(mulliken_restraint_type), &
      INTENT(IN) :: mulliken_restraint_control
    REAL(KIND=dp), DIMENSION(:, :), POINTER :: charges, charges_deriv
    REAL(KIND=dp), INTENT(OUT) :: energy, order_p
    INTEGER :: I
    REAL(KIND=dp) :: dum
    charges_deriv=0.0_dp
    order_p=0.0_dp
    DO I=1,mulliken_restraint_control%natoms
      order_p=order_p+charges(mulliken_restraint_control%atoms(I),1) &
              -charges(mulliken_restraint_control%atoms(I),2)
    ENDDO
    energy=mulliken_restraint_control%strength*(order_p-mulliken_restraint_control%target)**2
    dum=2*mulliken_restraint_control%strength*(order_p-mulliken_restraint_control%target)
    DO I=1,mulliken_restraint_control%natoms
      charges_deriv(mulliken_restraint_control%atoms(I),1)= dum
      charges_deriv(mulliken_restraint_control%atoms(I),2)= -dum
    ENDDO
  END SUBROUTINE restraint_functional
END MODULE
USE TEST
IMPLICIT NONE
TYPE(mulliken_restraint_type) :: mulliken
REAL(KIND=dp), DIMENSION(:, :), POINTER :: charges, charges_deriv
REAL(KIND=dp) :: energy,order_p
ALLOCATE(charges(1,2),charges_deriv(1,2))
charges(1,1)=2.0_dp
charges(1,2)=1.0_dp
CALL INIT(mulliken)
CALL restraint_functional(mulliken,charges,charges_deriv,energy,order_p)
write(6,*) order_p
END

gcc/tree-ssa-pre.c

@@ -302,10 +302,6 @@ typedef struct bb_bitmap_sets
the current iteration. */
bitmap_set_t new_sets;
/* These are the loads that will be ANTIC_IN at the top of the
block, and are actually generated in the block. */
bitmap_set_t antic_safe_loads;
/* True if we have visited this block during ANTIC calculation. */
unsigned int visited:1;
@@ -321,7 +317,6 @@ typedef struct bb_bitmap_sets
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
@@ -1545,9 +1540,7 @@ valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree expr,
&& !union_contains_value (set1, set2, op3))
return false;
}
return bitmap_set_contains_value (ANTIC_SAFE_LOADS (block),
vh)
|| !value_dies_in_block_x (vh, block);
return !value_dies_in_block_x (vh, block);
}
}
return false;
@@ -1786,9 +1779,6 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
if (ANTIC_OUT)
print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
if (ANTIC_SAFE_LOADS (block))
print_bitmap_set (dump_file, ANTIC_SAFE_LOADS (block),
"ANTIC_SAFE_LOADS", block->index);
print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
block->index);
@@ -2042,74 +2032,6 @@ compute_antic (void)
sbitmap_free (changed_blocks);
}
/*
ANTIC_SAFE_LOADS are those loads generated in a block that actually
occur before any kill to their vuses in the block, and thus, are
safe at the top of the block. This function computes the set by
walking the EXP_GEN set for the block, and checking the VUSES.
This set could be computed as ANTIC calculation is proceeding, but
because it does not actually change during that computation, it is
quicker to pre-calculate the results and use them than to do it on
the fly (particularly in the presence of multiple iteration). */
static void
compute_antic_safe (void)
{
basic_block bb;
bitmap_iterator bi;
unsigned int i;
FOR_EACH_BB (bb)
{
FOR_EACH_EXPR_ID_IN_SET (EXP_GEN (bb), i, bi)
{
tree expr = expression_for_id (i);
if (REFERENCE_CLASS_P (expr))
{
tree vh = get_value_handle (expr);
tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
ssa_op_iter i;
tree vuse;
tree stmt;
bool okay = true;
if (!maybe)
continue;
stmt = SSA_NAME_DEF_STMT (maybe);
if (TREE_CODE (stmt) == PHI_NODE)
continue;
FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i,
SSA_OP_VIRTUAL_USES)
{
tree def = SSA_NAME_DEF_STMT (vuse);
if (bb_for_stmt (def) != bb)
continue;
/* See if the vuse is defined by a statement that
comes before us in the block. Phi nodes are not
stores, so they do not count. */
if (TREE_CODE (def) != PHI_NODE
&& stmt_ann (def)->uid < stmt_ann (stmt)->uid)
{
okay = false;
break;
}
}
if (okay)
{
if (ANTIC_SAFE_LOADS (bb) == NULL)
ANTIC_SAFE_LOADS (bb) = bitmap_set_new ();
bitmap_value_insert_into_set (ANTIC_SAFE_LOADS (bb),
expr);
}
}
}
}
}
/* Return true if we can value number the call in STMT. This is true
if we have a pure or constant call. */
@@ -3367,6 +3289,21 @@ make_values_for_phi (tree phi, basic_block block)
}
}
/* Return true if both the statement and the value handle have no
vuses, or both the statement and the value handle do have vuses.
Unlike SCCVN, PRE needs not only to know equivalence, but what the
actual vuses are so it can translate them through blocks. Thus,
we have to make a new value handle if the existing one has no
vuses but needs them. */
static bool
vuse_equiv (tree vh1, tree stmt)
{
bool stmt_has_vuses = !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES);
return (VALUE_HANDLE_VUSES (vh1) && stmt_has_vuses)
|| (!VALUE_HANDLE_VUSES (vh1) && !stmt_has_vuses);
}
/* Create value handles for STMT in BLOCK. Return true if we handled
the statement. */
@@ -3416,7 +3353,7 @@ make_values_for_stmt (tree stmt, basic_block block)
{
/* If we already have a value number for the LHS, reuse
it rather than creating a new one. */
if (lhsval)
if (lhsval && vuse_equiv (lhsval, stmt))
{
set_value_handle (newt, lhsval);
if (!is_gimple_min_invariant (lhsval))
@@ -4001,7 +3938,6 @@ execute_pre (bool do_fre)
computing ANTIC, either, even though it's plenty fast. */
if (!do_fre && n_basic_blocks < 4000)
{
compute_antic_safe ();
compute_antic ();
insert ();
}

gcc/tree-ssa-sccvn.c

@@ -1017,14 +1017,6 @@ set_ssa_val_to (tree from, tree to)
tree currval;
gcc_assert (to != NULL);
/* Make sure we don't create chains of copies, so that we get the
best value numbering. visit_copy has code to make sure this doesn't
happen, we are doing this here to assert that nothing else breaks
this. */
gcc_assert (TREE_CODE (to) != SSA_NAME
|| TREE_CODE (SSA_VAL (to)) != SSA_NAME
|| SSA_VAL (to) == to
|| to == from);
/* The only thing we allow as value numbers are ssa_names and
invariants. So assert that here. */
gcc_assert (TREE_CODE (to) == SSA_NAME || is_gimple_min_invariant (to));