common.opt (ftree-store-ccp): Mark as preserved for backward compatibility.

2008-08-29  Richard Guenther  <rguenther@suse.de>

	* common.opt (ftree-store-ccp): Mark as preserved for
	backward compatibility.
	* doc/invoke.texi (-ftree-store-ccp): Remove documentation.
	* tree-pass.h (pass_store_ccp): Remove.
	* tree-ssa-propagate.h (struct prop_value_d): Remove mem_ref field.
	(first_vdef): Remove declaration.
	(get_value_loaded_by): Likewise.
	* tree-ssa-ccp.c (do_store_ccp): Remove.
	(get_default_value): Simplify as do_store_ccp is always false
	now.  Do not initialize mem_ref.
	(set_value_varying): Likewise.
	(canonicalize_float_value): Likewise.
	(set_lattice_value): Likewise.
	(likely_value): Likewise.
	(surely_varying_stmt_p): Likewise.
	(ccp_initialize): Likewise.
	(ccp_lattice_meet): Likewise.
	(ccp_visit_phi_node): Likewise.
	(ccp_fold): Likewise.
	(evaluate_stmt): Likewise.
	(visit_assignment): Likewise.
	(ccp_visit_stmt): Likewise.
	(execute_ssa_ccp): Fold into ...
	(do_ssa_ccp): ... this.
	(do_ssa_store_ccp): Remove.
	(gate_store_ccp): Likewise.
	(pass_store_ccp): Likewise.
	* tree-ssa-copy.c (copy_prop_visit_phi_node): Do not
	initialize mem_ref.
	* tree-ssa-propagate.c (first_vdef): Remove.
	(get_value_loaded_by): Likewise.
	(replace_vuses_in): Likewise.
	(substitute_and_fold): Do not call replace_vuses_in.
	* opts.c (decode_options): Do not set flag_tree_store_ccp.

From-SVN: r139764
This commit is contained in:
Richard Guenther 2008-08-29 11:43:22 +00:00 committed by Richard Biener
parent 443aa7d576
commit dce2b2f6cb
9 changed files with 56 additions and 356 deletions

View File

@@ -1,3 +1,40 @@
2008-08-29 Richard Guenther <rguenther@suse.de>
* common.opt (ftree-store-ccp): Mark as preserved for
backward compatibility.
* doc/invoke.texi (-ftree-store-ccp): Remove documentation.
* tree-pass.h (pass_store_ccp): Remove.
* tree-ssa-propagate.h (struct prop_value_d): Remove mem_ref field.
(first_vdef): Remove declaration.
(get_value_loaded_by): Likewise.
* tree-ssa-ccp.c (do_store_ccp): Remove.
(get_default_value): Simplify as do_store_ccp is always false
now. Do not initialize mem_ref.
(set_value_varying): Likewise.
(canonicalize_float_value): Likewise.
(set_lattice_value): Likewise.
(likely_value): Likewise.
(surely_varying_stmt_p): Likewise.
(ccp_initialize): Likewise.
(ccp_lattice_meet): Likewise.
(ccp_visit_phi_node): Likewise.
(ccp_fold): Likewise.
(evaluate_stmt): Likewise.
(visit_assignment): Likewise.
(ccp_visit_stmt): Likewise.
(execute_ssa_ccp): Fold into ...
(do_ssa_ccp): ... this.
(do_ssa_store_ccp): Remove.
(gate_store_ccp): Likewise.
(pass_store_ccp): Likewise.
* tree-ssa-copy.c (copy_prop_visit_phi_node): Do not
initialize mem_ref.
* tree-ssa-propagate.c (first_vdef): Remove.
(get_value_loaded_by): Likewise.
(replace_vuses_in): Likewise.
(substitute_and_fold): Do not call replace_vuses_in.
* opts.c (decode_options): Do not set flag_tree_store_ccp.
2008-08-29 Richard Guenther <rguenther@suse.de>
PR middle-end/37236

View File

@@ -1111,8 +1111,8 @@ Common Report Var(flag_tree_ccp) Optimization
Enable SSA-CCP optimization on trees
ftree-store-ccp
Common Report Var(flag_tree_store_ccp) Optimization
Enable SSA-CCP optimization for stores and loads
Common
Does nothing. Preserved for backward compatibility.
ftree-ch
Common Report Var(flag_tree_ch) Optimization

View File

@@ -365,7 +365,7 @@ Objective-C and Objective-C++ Dialects}.
-ftree-loop-distribution @gol
-ftree-loop-ivcanon -ftree-loop-linear -ftree-loop-optimize @gol
-ftree-parallelize-loops=@var{n} -ftree-pre -ftree-reassoc @gol
-ftree-sink -ftree-sra -ftree-store-ccp -ftree-switch-conversion @gol
-ftree-sink -ftree-sra -ftree-switch-conversion @gol
-ftree-ter -ftree-vect-loop-version -ftree-vectorize -ftree-vrp @gol
-funit-at-a-time -funroll-all-loops -funroll-loops @gol
-funsafe-loop-optimizations -funsafe-math-optimizations -funswitch-loops @gol
@@ -5938,13 +5938,6 @@ Perform sparse conditional constant propagation (CCP) on trees.  This
pass only operates on local scalar variables and is enabled by default
at @option{-O} and higher.
@item -ftree-store-ccp
@opindex ftree-store-ccp
Perform sparse conditional constant propagation (CCP) on trees. This
pass operates on both local scalar variables and memory stores and
loads (global variables, structures, arrays, etc). This flag is
enabled by default at @option{-O2} and higher.
@item -ftree-switch-conversion
Perform conversion of simple initializations in a switch to
initializations from a scalar array. This flag is enabled by default

View File

@@ -951,14 +951,13 @@ decode_options (unsigned int argc, const char **argv)
flag_delete_null_pointer_checks = opt2;
flag_reorder_blocks = opt2;
flag_reorder_functions = opt2;
flag_tree_store_ccp = opt2;
flag_tree_vrp = opt2;
flag_tree_builtin_call_dce = opt2;
flag_tree_pre = opt2;
flag_tree_switch_conversion = 1;
flag_ipa_cp = opt2;
/* Allow more virtual operators to increase alias precision. */
/* Allow more virtual operators to increase alias precision. */
set_param_value ("max-aliased-vops",
(opt2) ? 500 : initial_max_aliased_vops);

View File

@@ -380,7 +380,6 @@ extern struct gimple_opt_pass pass_fre;
extern struct gimple_opt_pass pass_linear_transform;
extern struct gimple_opt_pass pass_check_data_deps;
extern struct gimple_opt_pass pass_copy_prop;
extern struct gimple_opt_pass pass_store_ccp;
extern struct gimple_opt_pass pass_vrp;
extern struct gimple_opt_pass pass_uncprop;
extern struct gimple_opt_pass pass_return_slot;

View File

@@ -227,9 +227,6 @@ typedef enum
doing the store). */
static prop_value_t *const_val;
/* True if we are also propagating constants in stores and loads. */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
static void
@@ -321,10 +318,10 @@ static prop_value_t
get_default_value (tree var)
{
tree sym = SSA_NAME_VAR (var);
prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
prop_value_t val = { UNINITIALIZED, NULL_TREE };
tree cst_val;
if (!do_store_ccp && !is_gimple_reg (var))
if (!is_gimple_reg (var))
{
/* Short circuit for regular CCP. We are not interested in any
non-register when DO_STORE_CCP is false. */
@@ -336,7 +333,6 @@ get_default_value (tree var)
initial value. */
val.lattice_val = CONSTANT;
val.value = cst_val;
val.mem_ref = sym;
}
else
{
@@ -401,7 +397,6 @@ set_value_varying (tree var)
val->lattice_val = VARYING;
val->value = NULL_TREE;
val->mem_ref = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
@@ -447,7 +442,6 @@ canonicalize_float_value (prop_value_t *val)
{
val->lattice_val = UNDEFINED;
val->value = NULL;
val->mem_ref = NULL;
return;
}
}
@@ -469,8 +463,7 @@ set_lattice_value (tree var, prop_value_t new_val)
gcc_assert (old_val->lattice_val < new_val.lattice_val
|| (old_val->lattice_val == new_val.lattice_val
&& ((!old_val->value && !new_val.value)
|| operand_equal_p (old_val->value, new_val.value, 0))
&& old_val->mem_ref == new_val.mem_ref));
|| operand_equal_p (old_val->value, new_val.value, 0))));
if (old_val->lattice_val != new_val.lattice_val)
{
@@ -524,8 +517,7 @@ likely_value (gimple stmt)
/* If we are not doing store-ccp, statements with loads
and/or stores will never fold into a constant. */
if (!do_store_ccp
&& !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
return VARYING;
/* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
@@ -613,15 +605,7 @@ surely_varying_stmt_p (gimple stmt)
return true;
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
{
if (!do_store_ccp)
return true;
/* We can only handle simple loads and stores. */
if (!stmt_makes_single_load (stmt)
&& !stmt_makes_single_store (stmt))
return true;
}
return true;
/* If it is a call and does not return a value or is not a
builtin and not an indirect call, it is varying. */
@@ -692,7 +676,7 @@ ccp_initialize (void)
{
gimple phi = gsi_stmt (i);
if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
if (!is_gimple_reg (gimple_phi_result (phi)))
prop_set_simulate_again (phi, false);
else
prop_set_simulate_again (phi, true);
@@ -747,14 +731,10 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
/* any M VARYING = VARYING. */
val1->lattice_val = VARYING;
val1->value = NULL_TREE;
val1->mem_ref = NULL_TREE;
}
else if (val1->lattice_val == CONSTANT
&& val2->lattice_val == CONSTANT
&& simple_cst_equal (val1->value, val2->value) == 1
&& (!do_store_ccp
|| (val1->mem_ref && val2->mem_ref
&& operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
&& simple_cst_equal (val1->value, val2->value) == 1)
{
/* Ci M Cj = Ci if (i == j)
Ci M Cj = VARYING if (i != j)
@@ -763,14 +743,12 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
they come from the same memory reference. */
val1->lattice_val = CONSTANT;
val1->value = val1->value;
val1->mem_ref = val1->mem_ref;
}
else
{
/* Any other combination is VARYING. */
val1->lattice_val = VARYING;
val1->value = NULL_TREE;
val1->mem_ref = NULL_TREE;
}
}
@@ -805,7 +783,6 @@ ccp_visit_phi_node (gimple phi)
case UNDEFINED:
new_val.lattice_val = UNDEFINED;
new_val.value = NULL_TREE;
new_val.mem_ref = NULL_TREE;
break;
default:
@@ -837,7 +814,6 @@ ccp_visit_phi_node (gimple phi)
{
arg_val.lattice_val = CONSTANT;
arg_val.value = arg;
arg_val.mem_ref = NULL_TREE;
}
else
arg_val = *(get_value (arg));
@@ -941,25 +917,6 @@ ccp_fold (gimple stmt)
}
}
else if (do_store_ccp && stmt_makes_single_load (stmt))
{
/* If the RHS is a memory load, see if the VUSEs associated with
it are a valid constant for that memory load. */
prop_value_t *val = get_value_loaded_by (stmt, const_val);
if (val && val->mem_ref)
{
if (operand_equal_p (val->mem_ref, rhs, 0))
return val->value;
/* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
complex type with a known constant value, return it. */
if ((TREE_CODE (rhs) == REALPART_EXPR
|| TREE_CODE (rhs) == IMAGPART_EXPR)
&& operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
}
}
if (kind == tcc_reference)
{
if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
@@ -1324,8 +1281,6 @@ evaluate_stmt (gimple stmt)
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant;
val.mem_ref = NULL_TREE;
fold_defer_overflow_warnings ();
/* If the statement is likely to have a CONSTANT result, then try
@@ -1429,21 +1384,6 @@ visit_assignment (gimple stmt, tree *output_p)
prop_value_t *nval = get_value (rhs);
val = *nval;
}
else if (do_store_ccp && stmt_makes_single_load (stmt))
{
/* Same as above, but the RHS is not a gimple register and yet
has a known VUSE. If STMT is loading from the same memory
location that created the SSA_NAMEs for the virtual operands,
we can propagate the value on the RHS. */
prop_value_t *nval = get_value_loaded_by (stmt, const_val);
if (nval
&& nval->mem_ref
&& operand_equal_p (nval->mem_ref, rhs, 0))
val = *nval;
else
val = evaluate_stmt (stmt);
}
else
val = evaluate_stmt (stmt);
}
@@ -1468,46 +1408,6 @@ visit_assignment (gimple stmt, tree *output_p)
retval = SSA_PROP_INTERESTING;
}
}
else if (do_store_ccp && stmt_makes_single_store (stmt))
{
/* Otherwise, set the names in VDEF operands to the new
constant value and mark the LHS as the memory reference
associated with VAL. */
ssa_op_iter i;
tree vdef;
bool changed;
/* Mark VAL as stored in the LHS of this assignment. */
if (val.lattice_val == CONSTANT)
val.mem_ref = lhs;
/* Set the value of every VDEF to VAL. */
changed = false;
FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
{
/* See PR 29801. We may have VDEFs for read-only variables
(see the handling of unmodifiable variables in
add_virtual_operand); do not attempt to change their value. */
if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
continue;
changed |= set_lattice_value (vdef, val);
}
/* Note that for propagation purposes, we are only interested in
visiting statements that load the exact same memory reference
stored here. Those statements will have the exact same list
of virtual uses, so it is enough to set the output of this
statement to be its first virtual definition. */
*output_p = first_vdef (stmt);
if (changed)
{
if (val.lattice_val == VARYING)
retval = SSA_PROP_VARYING;
else
retval = SSA_PROP_INTERESTING;
}
}
return retval;
}
@@ -1595,7 +1495,7 @@ ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
Mark them VARYING. */
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
{
prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
prop_value_t v = { VARYING, NULL_TREE };
set_lattice_value (def, v);
}
@@ -1606,9 +1506,8 @@ ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
/* Main entry point for SSA Conditional Constant Propagation. */
static unsigned int
execute_ssa_ccp (bool store_ccp)
do_ssa_ccp (void)
{
do_store_ccp = store_ccp;
ccp_initialize ();
ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
if (ccp_finalize ())
@@ -1618,13 +1517,6 @@ execute_ssa_ccp (bool store_ccp)
}
static unsigned int
do_ssa_ccp (void)
{
return execute_ssa_ccp (false);
}
static bool
gate_ccp (void)
{
@@ -1653,43 +1545,6 @@ struct gimple_opt_pass pass_ccp =
};
static unsigned int
do_ssa_store_ccp (void)
{
/* If STORE-CCP is not enabled, we just run regular CCP. */
return execute_ssa_ccp (flag_tree_store_ccp != 0);
}
static bool
gate_store_ccp (void)
{
/* STORE-CCP is enabled only with -ftree-store-ccp, but when
-fno-tree-store-ccp is specified, we should run regular CCP.
That's why the pass is enabled with either flag. */
return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_store_ccp =
{
{
GIMPLE_PASS,
"store_ccp", /* name */
gate_store_ccp, /* gate */
do_ssa_store_ccp, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
TV_TREE_STORE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_verify_ssa
| TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
}
};
/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
is the desired result type. */

View File

@@ -829,7 +829,7 @@ copy_prop_visit_phi_node (gimple phi)
{
enum ssa_prop_result retval;
unsigned i;
prop_value_t phi_val = { 0, NULL_TREE, NULL_TREE };
prop_value_t phi_val = { 0, NULL_TREE };
tree lhs = gimple_phi_result (phi);

View File

@@ -823,22 +823,6 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
}
/* Return the first VDEF operand for STMT. */
tree
first_vdef (gimple stmt)
{
ssa_op_iter iter;
tree op;
/* Simply return the first operand we arrive at. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
return (op);
gcc_unreachable ();
}
/* Return true if STMT is of the form 'LHS = mem_ref', where 'mem_ref'
is a non-volatile pointer dereference, a structure reference or a
reference to a single _DECL. Ignore volatile memory references
@@ -898,30 +882,6 @@ stmt_makes_single_store (gimple stmt)
}
/* If STMT makes a single memory load and all the virtual use operands
have the same value in array VALUES, return it. Otherwise, return
NULL. */
prop_value_t *
get_value_loaded_by (gimple stmt, prop_value_t *values)
{
ssa_op_iter i;
tree vuse;
prop_value_t *prev_val = NULL;
prop_value_t *val = NULL;
FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
{
val = &values[SSA_NAME_VERSION (vuse)];
if (prev_val && prev_val->value != val->value)
return NULL;
prev_val = val;
}
return val;
}
/* Propagation statistics. */
struct prop_stats_d
{
@@ -972,131 +932,6 @@ replace_uses_in (gimple stmt, prop_value_t *prop_value)
}
/* Replace the VUSE references in statement STMT with the values
stored in PROP_VALUE. Return true if a reference was replaced.
Replacing VUSE operands is slightly more complex than replacing
regular USEs. We are only interested in two types of replacements
here:
1- If the value to be replaced is a constant or an SSA name for a
GIMPLE register, then we are making a copy/constant propagation
from a memory store. For instance,
# a_3 = VDEF <a_2>
a.b = x_1;
...
# VUSE <a_3>
y_4 = a.b;
This replacement is only possible iff STMT is an assignment
whose RHS is identical to the LHS of the statement that created
the VUSE(s) that we are replacing. Otherwise, we may do the
wrong replacement:
# a_3 = VDEF <a_2>
# b_5 = VDEF <b_4>
*p = 10;
...
# VUSE <b_5>
x_8 = b;
Even though 'b_5' acquires the value '10' during propagation,
there is no way for the propagator to tell whether the
replacement is correct in every reached use, because values are
computed at definition sites. Therefore, when doing final
substitution of propagated values, we have to check each use
site. Since the RHS of STMT ('b') is different from the LHS of
the originating statement ('*p'), we cannot replace 'b' with
'10'.
Similarly, when merging values from PHI node arguments,
propagators need to take care not to merge the same values
stored in different locations:
if (...)
# a_3 = VDEF <a_2>
a.b = 3;
else
# a_4 = VDEF <a_2>
a.c = 3;
# a_5 = PHI <a_3, a_4>
It would be wrong to propagate '3' into 'a_5' because that
operation merges two stores to different memory locations.
2- If the value to be replaced is an SSA name for a virtual
register, then we simply replace each VUSE operand with its
value from PROP_VALUE. This is the same replacement done by
replace_uses_in. */
static bool
replace_vuses_in (gimple stmt, prop_value_t *prop_value)
{
bool replaced = false;
ssa_op_iter iter;
use_operand_p vuse;
if (stmt_makes_single_load (stmt))
{
/* If STMT is an assignment whose RHS is a single memory load,
see if we are trying to propagate a constant or a GIMPLE
register (case #1 above). */
prop_value_t *val = get_value_loaded_by (stmt, prop_value);
tree rhs = gimple_assign_rhs1 (stmt);
if (val
&& val->value
&& (is_gimple_reg (val->value)
|| is_gimple_min_invariant (val->value))
&& simple_cst_equal (rhs, val->mem_ref) == 1)
{
/* We can only perform the substitution if the load is done
from the same memory location as the original store.
Since we already know that there are no intervening
stores between DEF_STMT and STMT, we only need to check
that the RHS of STMT is the same as the memory reference
propagated together with the value. */
gimple_assign_set_rhs1 (stmt, val->value);
if (TREE_CODE (val->value) != SSA_NAME)
prop_stats.num_const_prop++;
else
prop_stats.num_copy_prop++;
/* Since we have replaced the whole RHS of STMT, there
is no point in checking the other VUSEs, as they will
all have the same value. */
return true;
}
}
/* Otherwise, the values for every VUSE operand must be other
SSA_NAMEs that can be propagated into STMT. */
FOR_EACH_SSA_USE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
{
tree var = USE_FROM_PTR (vuse);
tree val = prop_value[SSA_NAME_VERSION (var)].value;
if (val == NULL_TREE || var == val)
continue;
/* Constants and copies propagated between real and virtual
operands are only possible in the cases handled above. They
should be ignored in any other context. */
if (is_gimple_min_invariant (val) || is_gimple_reg (val))
continue;
propagate_value (vuse, val);
prop_stats.num_copy_prop++;
replaced = true;
}
return replaced;
}
/* Replace propagated values into all the arguments for PHI using the
values from PROP_VALUE. */
@@ -1321,17 +1156,11 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
gcc_assert (gsi_stmt (i) == stmt);
}
if (prop_value)
{
/* Only replace real uses if we couldn't fold the
statement using value range information (value range
information is not collected on virtuals, so we only
need to check this for real uses). */
if (!did_replace)
did_replace |= replace_uses_in (stmt, prop_value);
did_replace |= replace_vuses_in (stmt, prop_value);
}
/* Only replace real uses if we couldn't fold the
statement using value range information. */
if (prop_value
&& !did_replace)
did_replace |= replace_uses_in (stmt, prop_value);
/* If we made a replacement, fold and cleanup the statement. */
if (did_replace)

View File

@@ -69,16 +69,6 @@ struct prop_value_d {
/* Propagated value. */
tree value;
/* If this value is held in an SSA name for a non-register
variable, this field holds the actual memory reference
associated with this value. This field is taken from
the LHS of the assignment that generated the associated SSA
name. However, in the case of PHI nodes, this field is copied
from the PHI arguments (assuming that all the arguments have
the same memory reference). See replace_vuses_in for a more
detailed description. */
tree mem_ref;
};
typedef struct prop_value_d prop_value_t;
@@ -128,10 +118,8 @@ bool valid_gimple_rhs_p (tree);
bool valid_gimple_call_p (tree);
void move_ssa_defining_stmt_for_defs (gimple, gimple);
bool update_call_from_tree (gimple_stmt_iterator *, tree);
tree first_vdef (gimple);
bool stmt_makes_single_load (gimple);
bool stmt_makes_single_store (gimple);
prop_value_t *get_value_loaded_by (gimple, prop_value_t *);
bool substitute_and_fold (prop_value_t *, bool);
#endif /* _TREE_SSA_PROPAGATE_H */