diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index faeade76f98..f35a7a7b6bf 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,73 @@
+2010-03-16  Jakub Jelinek
+
+	PR debug/43051
+	PR debug/43092
+	* cselib.c (cselib_preserve_constants,
+	cfa_base_preserved_val): New static variables.
+	(preserve_only_constants): New function.
+	(cselib_reset_table): If cfa_base_preserved_val is non-NULL, don't
+	clear its REG_VALUES.  If cselib_preserve_constants, don't
+	empty the whole hash table, but preserve the VALUEs with constants,
+	cfa_base_preserved_val and cfa_base_preserved_val plus constant.
+	(cselib_preserve_cfa_base_value): New function.
+	(cselib_invalidate_regno): Don't invalidate cfa_base_preserved_val.
+	(cselib_init): Change argument to int bitfield.  Set
+	cselib_preserve_constants to whether CSELIB_PRESERVE_CONSTANTS
+	is in it.
+	(cselib_finish): Clear cselib_preserve_constants and
+	cfa_base_preserved_val.
+	* cselib.h (enum cselib_record_what): New enum.
+	(cselib_init): Change argument to int.
+	(cselib_preserve_cfa_base_value): New prototype.
+	* postreload.c (reload_cse_regs_1): Adjust cselib_init caller.
+	* dse.c (dse_step1): Likewise.
+	* cfgcleanup.c (thread_jump): Likewise.
+	* sched-deps.c (sched_analyze): Likewise.
+	* gcse.c (local_cprop_pass): Likewise.
+	* simplify-rtx.c (simplify_replace_fn_rtx): Add argument to callback.
+	If FN is non-NULL, call the callback always and whenever it returns
+	non-NULL just return that.  Only do rtx_equal_p if FN is NULL.
+	* rtl.h (simplify_replace_fn_rtx): Add argument to callback.
+	* combine.c (propagate_for_debug_subst): Add old_rtx argument,
+	compare from with old_rtx and if it isn't rtx_equal_p, return NULL.
+	* Makefile.in (var-tracking.o): Depend on $(RECOG_H).
+	* var-tracking.c: Include recog.h.
+	(bb_stack_adjust_offset): Remove.
+	(vt_stack_adjustments): Don't call it, instead just gather the
+	adjustments using insn_stack_adjust_offset_pre_post on each bb insn.
+	(adjust_stack_reference): Remove.
+	(compute_cfa_pointer): New function.
+	(hard_frame_pointer_adjustment, cfa_base_rtx): New static variables.
+	(struct adjust_mem_data): New type.
+	(adjust_mems, adjust_mem_uses, adjust_mem_stores, adjust_insn): New
+	functions.
+	(get_address_mode): New function.
+	(replace_expr_with_values): Use it.
+	(use_type): Don't do cselib_lookup for VAR_LOC_UNKNOWN_P.
+	Use get_address_mode.  For cfa_base_rtx return MO_CLOBBER.
+	(adjust_sets): Remove.
+	(add_uses): Don't add extra MO_VAL_USE for cfa_base_rtx plus constant.
+	Use get_address_mode.
+	(get_adjusted_src): Remove.
+	(add_stores): Don't call it.  Never reuse expr SET.  Don't add extra
+	MO_VAL_USE for cfa_base_rtx plus constant.  Use get_address_mode.
+	(add_with_sets): Don't call adjust_sets.
+	(fp_setter, vt_init_cfa_base): New functions.
+	(vt_initialize): Change return type to bool.  Move most of pools etc.
+	initialization to the beginning of the function from end.  Pass
+	CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS to cselib_init.
+	If !frame_pointer_needed, call vt_stack_adjustments before mos
+	vector is filled, call vt_init_cfa_base if argp/framep has been
+	eliminated to sp.  If frame_pointer_needed and argp/framep has
+	been eliminated to hard frame pointer, set
+	hard_frame_pointer_adjustment and call vt_init_cfa_base after
+	encountering fp setter in the prologue.  For MO_ADJUST, call
+	log_op_type before pushing the op into mos vector, not afterwards.
+	Call adjust_insn before cselib_process_insn/add_with_sets,
+	call cancel_changes (0) afterwards.
+	(variable_tracking_main_1): Adjust for vt_initialize calling
+	vt_stack_adjustments and returning whether it succeeded or not.
+
 2010-03-15  Aldy Hernandez
 
 	* graphite-sese-to-poly.c (rewrite_cross_bb_scalar_deps): Skip
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index 9a9ca45b324..448050576e5 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -3032,7 +3032,8 @@ var-tracking.o : var-tracking.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
 	$(RTL_H) $(TREE_H) hard-reg-set.h insn-config.h reload.h $(FLAGS_H) \
 	$(BASIC_BLOCK_H) output.h sbitmap.h alloc-pool.h $(FIBHEAP_H) $(HASHTAB_H) \
 	$(REGS_H) $(EXPR_H) $(TIMEVAR_H) $(TREE_PASS_H) $(TREE_FLOW_H) \
-	cselib.h $(TARGET_H) $(TOPLEV_H) $(PARAMS_H) $(DIAGNOSTIC_H) pointer-set.h
+	cselib.h $(TARGET_H) $(TOPLEV_H) $(PARAMS_H) $(DIAGNOSTIC_H) pointer-set.h \
+	$(RECOG_H)
 profile.o : profile.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
 	$(TREE_H) $(FLAGS_H) output.h $(REGS_H) $(EXPR_H) $(FUNCTION_H) \
 	$(TOPLEV_H) $(COVERAGE_H) $(TREE_FLOW_H) value-prof.h cfghooks.h \
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index ffe36e45a9e..01b165a701a 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -1,6 +1,6 @@
 /* Control flow optimization code for GNU compiler.
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
+   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -337,7 +337,7 @@ thread_jump (edge e, basic_block b)
       return NULL;
     }
 
-  cselib_init (false);
+  cselib_init (0);
 
   /* First process all values computed in the source basic block.  */
   for (insn = NEXT_INSN (BB_HEAD (e->src));
diff --git a/gcc/combine.c b/gcc/combine.c
index e5d4c5aafc2..f4f2fa06fdb 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -1,6 +1,6 @@
 /* Optimize by combining instructions for GNU compiler.
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -2286,10 +2286,12 @@ struct rtx_subst_pair
    substituted.  */
 
 static rtx
-propagate_for_debug_subst (rtx from ATTRIBUTE_UNUSED, void *data)
+propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
 {
   struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;
 
+  if (!rtx_equal_p (from, old_rtx))
+    return NULL_RTX;
   if (!pair->adjusted)
     {
       pair->adjusted = true;
diff --git a/gcc/cselib.c b/gcc/cselib.c
index f63ea3f5f1c..9073b9928be 100644
--- a/gcc/cselib.c
+++ b/gcc/cselib.c
@@ -45,6 +45,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target.h"
 
 static bool cselib_record_memory;
+static bool cselib_preserve_constants;
 static int entry_and_rtx_equal_p (const void *, const void *);
 static hashval_t get_value_hash (const void *);
 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
@@ -135,6 +136,11 @@ static int values_became_useless;
    presence in the list by checking the next pointer.  */
 static cselib_val dummy_val;
 
+/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
+   that is constant through the whole function and should never be
+   eliminated.  */
+static cselib_val *cfa_base_preserved_val;
+
 /* Used to list all values that contain memory reference.
    May or may not contain the useless values - the list is compacted
    each time memory is invalidated.
   */
@@ -229,6 +235,35 @@ cselib_clear_table (void)
   cselib_reset_table (1);
 }
 
+/* Remove from hash table all VALUEs except constants.  */
+
+static int
+preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
+{
+  cselib_val *v = (cselib_val *)*x;
+
+  if (v->locs != NULL
+      && v->locs->next == NULL)
+    {
+      if (CONSTANT_P (v->locs->loc)
+	  && (GET_CODE (v->locs->loc) != CONST
+	      || !references_value_p (v->locs->loc, 0)))
+	return 1;
+      if (cfa_base_preserved_val)
+	{
+	  if (v == cfa_base_preserved_val)
+	    return 1;
+	  if (GET_CODE (v->locs->loc) == PLUS
+	      && CONST_INT_P (XEXP (v->locs->loc, 1))
+	      && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
+	    return 1;
+	}
+    }
+
+  htab_clear_slot (cselib_hash_table, x);
+  return 1;
+}
+
 /* Remove all entries from the hash table, arranging for the next
    value to be numbered NUM.  */
 
@@ -237,15 +272,37 @@ cselib_reset_table (unsigned int num)
 {
   unsigned int i;
 
-  for (i = 0; i < n_used_regs; i++)
-    REG_VALUES (used_regs[i]) = 0;
-
   max_value_regs = 0;
 
-  n_used_regs = 0;
+  if (cfa_base_preserved_val)
+    {
+      unsigned int regno = REGNO (cfa_base_preserved_val->locs->loc);
+      unsigned int new_used_regs = 0;
+      for (i = 0; i < n_used_regs; i++)
+	if (used_regs[i] == regno)
+	  {
+	    new_used_regs = 1;
+	    continue;
+	  }
+	else
+	  REG_VALUES (used_regs[i]) = 0;
+      gcc_assert (new_used_regs == 1);
+      n_used_regs = new_used_regs;
+      used_regs[0] = regno;
+      max_value_regs
+	= hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
+    }
+  else
+    {
+      for (i = 0; i < n_used_regs; i++)
+	REG_VALUES (used_regs[i]) = 0;
+      n_used_regs = 0;
+    }
 
-  /* ??? Preserve constants?  */
-  htab_empty (cselib_hash_table);
+  if (cselib_preserve_constants)
+    htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
+  else
+    htab_empty (cselib_hash_table);
 
   n_useless_values = 0;
 
@@ -434,6 +491,18 @@ cselib_preserved_value_p (cselib_val *v)
   return PRESERVED_VALUE_P (v->val_rtx);
 }
 
+/* Arrange for a REG value to be assumed constant through the whole function,
+   never invalidated and preserved across cselib_reset_table calls.  */
+
+void
+cselib_preserve_cfa_base_value (cselib_val *v)
+{
+  if (cselib_preserve_constants
+      && v->locs
+      && REG_P (v->locs->loc))
+    cfa_base_preserved_val = v;
+}
+
 /* Clean all non-constant expressions in the hash table, but retain
    their values.  */
 
@@ -1600,7 +1669,7 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
 	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
 	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
 
-	  if (this_last < regno || v == NULL)
+	  if (this_last < regno || v == NULL || v == cfa_base_preserved_val)
 	    {
 	      l = &(*l)->next;
 	      continue;
 	    }
@@ -2018,7 +2087,7 @@ cselib_process_insn (rtx insn)
    init_alias_analysis.  */
 
 void
-cselib_init (bool record_memory)
+cselib_init (int record_what)
 {
   elt_list_pool = create_alloc_pool ("elt_list", sizeof (struct elt_list),
 				     10);
@@ -2027,7 +2096,8 @@ cselib_init (bool record_memory)
   cselib_val_pool = create_alloc_pool ("cselib_val_list",
 				       sizeof (cselib_val), 10);
   value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
-  cselib_record_memory = record_memory;
+  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
+  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
 
   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
      see canon_true_dependence.  This is only created once.
     */
@@ -2061,6 +2131,8 @@ void
 cselib_finish (void)
 {
   cselib_discard_hook = NULL;
+  cselib_preserve_constants = false;
+  cfa_base_preserved_val = NULL;
   free_alloc_pool (elt_list_pool);
   free_alloc_pool (elt_loc_list_pool);
   free_alloc_pool (cselib_val_pool);
diff --git a/gcc/cselib.h b/gcc/cselib.h
index 67e59920254..015d628bf9e 100644
--- a/gcc/cselib.h
+++ b/gcc/cselib.h
@@ -66,12 +66,18 @@ struct cselib_set
   cselib_val *dest_addr_elt;
 };
 
+enum cselib_record_what
+{
+  CSELIB_RECORD_MEMORY = 1,
+  CSELIB_PRESERVE_CONSTANTS = 2
+};
+
 extern void (*cselib_discard_hook) (cselib_val *);
 extern void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
 					int n_sets);
 
 extern cselib_val *cselib_lookup (rtx, enum machine_mode, int);
-extern void cselib_init (bool record_memory);
+extern void cselib_init (int);
 extern void cselib_clear_table (void);
 extern void cselib_finish (void);
 extern void cselib_process_insn (rtx);
@@ -92,5 +98,6 @@ extern unsigned int cselib_get_next_uid (void);
 extern void cselib_preserve_value (cselib_val *);
 extern bool cselib_preserved_value_p (cselib_val *);
 extern void cselib_preserve_only_values (void);
+extern void cselib_preserve_cfa_base_value (cselib_val *);
 extern void dump_cselib_table (FILE *);
 
diff --git a/gcc/dse.c b/gcc/dse.c
index 239d0fbe1ab..2be8a942c75 100644
--- a/gcc/dse.c
+++ b/gcc/dse.c
@@ -1,5 +1,6 @@
 /* RTL dead store elimination.
-   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
+   Free Software Foundation, Inc.
 
    Contributed by Richard Sandiford
    and Kenneth Zadeck
@@ -2616,7 +2617,7 @@ dse_step1 (void)
   basic_block bb;
   bitmap regs_live = BITMAP_ALLOC (NULL);
 
-  cselib_init (false);
+  cselib_init (0);
   all_blocks = BITMAP_ALLOC (NULL);
   bitmap_set_bit (all_blocks, ENTRY_BLOCK);
   bitmap_set_bit (all_blocks, EXIT_BLOCK);
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 432e34d478e..8e31ee11a58 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -1,7 +1,7 @@
 /* Global common subexpression elimination/Partial redundancy elimination
    and global constant/copy propagation for GNU compiler.
    Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
-   2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+   2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -2724,7 +2724,7 @@ local_cprop_pass (void)
   struct reg_use *reg_used;
   bool changed = false;
 
-  cselib_init (false);
+  cselib_init (0);
   FOR_EACH_BB (bb)
     {
       FOR_BB_INSNS (bb, insn)
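
As a usage sketch (not part of the patch itself): cselib_init now takes an int bitfield of enum cselib_record_what flags instead of a bool, so callers that passed true now pass CSELIB_RECORD_MEMORY, callers that passed false pass 0, and var-tracking below additionally ORs in CSELIB_PRESERVE_CONSTANTS:

    /* Sketch only: the new cselib_init calling convention; the pass body
       between init and finish is elided.  */
    cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
    /* ... call cselib_process_insn on each insn ...  */
    cselib_finish ();
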
diff --git a/gcc/postreload.c b/gcc/postreload.c
index f8785821f63..8b622a7365a 100644
--- a/gcc/postreload.c
+++ b/gcc/postreload.c
@@ -1,7 +1,7 @@
 /* Perform simple optimizations to clean up the result of reload.
    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
-   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
-   Free Software Foundation, Inc.
+   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+   2010 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -198,7 +198,7 @@ reload_cse_regs_1 (rtx first)
   rtx insn;
   rtx testreg = gen_rtx_REG (VOIDmode, -1);
 
-  cselib_init (true);
+  cselib_init (CSELIB_RECORD_MEMORY);
   init_alias_analysis ();
 
   for (insn = first; insn; insn = NEXT_INSN (insn))
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 1748e738527..b8563b33d16 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -1778,7 +1778,7 @@ extern rtx simplify_subreg (enum machine_mode, rtx, enum machine_mode,
 extern rtx simplify_gen_subreg (enum machine_mode, rtx, enum machine_mode,
 				unsigned int);
 extern rtx simplify_replace_fn_rtx (rtx, const_rtx,
-				    rtx (*fn) (rtx, void *), void *);
+				    rtx (*fn) (rtx, const_rtx, void *), void *);
 extern rtx simplify_replace_rtx (rtx, const_rtx, rtx);
 extern rtx simplify_rtx (const_rtx);
 extern rtx avoid_constant_pool_reference (rtx);
diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c
index 02de5f8e820..40ebc5654de 100644
--- a/gcc/sched-deps.c
+++ b/gcc/sched-deps.c
@@ -1,7 +1,7 @@
 /* Instruction scheduling pass.  This file computes dependencies between
    instructions.
    Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
    Free Software Foundation, Inc.
    Contributed by Michael Tiemann (tiemann@cygnus.com) Enhanced by,
    and currently maintained by, Jim Wilson (wilson@cygnus.com)
@@ -3383,7 +3383,7 @@ sched_analyze (struct deps *deps, rtx head, rtx tail)
   rtx insn;
 
   if (sched_deps_info->use_cselib)
-    cselib_init (true);
+    cselib_init (CSELIB_RECORD_MEMORY);
 
   deps_start_bb (deps, head);
 
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index 5ad5b1aceb9..d189d7066f2 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -350,15 +350,14 @@ simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
   return gen_rtx_fmt_ee (code, mode, op0, op1);
 }
 
-/* Replace all occurrences of OLD_RTX in X with FN (X', DATA), where X'
-   is an expression in X that is equal to OLD_RTX.  Canonicalize and
-   simplify the result.
-
-   If FN is null, assume FN (X', DATA) == copy_rtx (DATA).  */
+/* If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA)
+   and simplify the result.  If FN is non-NULL, call this callback on each
+   X, if it returns non-NULL, replace X with its return value and simplify the
+   result.  */
 
 rtx
 simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
-			 rtx (*fn) (rtx, void *), void *data)
+			 rtx (*fn) (rtx, const_rtx, void *), void *data)
 {
   enum rtx_code code = GET_CODE (x);
   enum machine_mode mode = GET_MODE (x);
@@ -368,17 +367,14 @@ simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
   rtvec vec, newvec;
   int i, j;
 
-  /* If X is OLD_RTX, return FN (X, DATA), with a null FN.  Otherwise,
-     if this is an expression, try to build a new expression, substituting
-     recursively.  If we can't do anything, return our input.  */
-
-  if (rtx_equal_p (x, old_rtx))
+  if (__builtin_expect (fn != NULL, 0))
     {
-      if (fn)
-	return fn (x, data);
-      else
-	return copy_rtx ((rtx) data);
+      newx = fn (x, old_rtx, data);
+      if (newx)
+	return newx;
     }
+  else if (rtx_equal_p (x, old_rtx))
+    return copy_rtx ((rtx) data);
 
   switch (GET_RTX_CLASS (code))
     {
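
For reference, a minimal sketch (illustrative names, not part of the patch) of a callback written against the new simplify_replace_fn_rtx contract: FN is now invoked on every subexpression with OLD_RTX passed through, a non-NULL return value replaces that subexpression, and NULL_RTX lets the walk recurse:

    /* Illustrative callback: behave like the FN == NULL case, i.e. replace
       anything equal to OLD_RTX with a copy of the rtx passed in DATA.  */
    static rtx
    replace_equal_rtx (rtx x, const_rtx old_rtx, void *data)
    {
      if (rtx_equal_p (x, old_rtx))
        return copy_rtx ((rtx) data);
      return NULL_RTX;
    }

propagate_for_debug_subst in combine.c and adjust_mems in var-tracking.c follow the same pattern, returning NULL_RTX for subexpressions they do not want to touch.
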
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 3f609c3f769..dfe0246adb9 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2010-03-16  Jakub Jelinek
+
+	PR debug/43051
+	* gcc.dg/guality/pr43051-1.c: New test.
+
 2010-03-15  Janis Johnson
 
 	PR testsuite/43363
diff --git a/gcc/testsuite/gcc.dg/guality/pr43051-1.c b/gcc/testsuite/gcc.dg/guality/pr43051-1.c
new file mode 100644
index 00000000000..77325c97e26
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/guality/pr43051-1.c
@@ -0,0 +1,57 @@
+/* PR debug/43051 */
+/* { dg-do run } */
+/* { dg-options "-g" } */
+
+extern void abort (void);
+
+static void __attribute__ ((noinline))
+foo (const char *x, long long y, int z)
+{
+  asm volatile ("" : : "r" (x), "r" ((int) y), "r" (z) : "memory");
+}
+
+struct S
+{
+  struct S *n;
+  int v;
+};
+
+struct S a[10];
+
+struct S * __attribute__ ((noinline))
+bar (struct S *c, int v, struct S *e)
+{
+#ifdef __i386__
+  register int si asm ("esi"), di asm ("edi"), bx
+# if !defined (__pic__) && !defined (__APPLE__)
+  asm ("ebx")
+# endif
+  ;
+  asm volatile ("" : "=r" (si), "=r" (di), "=r" (bx));
+#endif
+  while (c < e)
+    {
+      foo ("c", (__UINTPTR_TYPE__) c, 0);	/* { dg-final { gdb-test 34 "c" "\&a\[0\]" } } */
+      foo ("v", v, 1);				/* { dg-final { gdb-test 35 "v" "1" } } */
+      foo ("e", (__UINTPTR_TYPE__) e, 2);	/* { dg-final { gdb-test 36 "e" "\&a\[1\]" } } */
+      if (c->v == v)
+	return c;
+      foo ("c", (__UINTPTR_TYPE__) c, 3);	/* { dg-final { gdb-test 39 "c" "\&a\[0\]" } } */
+      foo ("v", v, 4);				/* { dg-final { gdb-test 40 "v" "1" } } */
+      foo ("e", (__UINTPTR_TYPE__) e, 5);	/* { dg-final { gdb-test 41 "e" "\&a\[1\]" } } */
+      c++;
+    }
+#ifdef __i386__
+  asm volatile ("" : : "r" (si), "r" (di), "r" (bx));
+#endif
+  return 0;
+}
+
+int
+main ()
+{
+  asm volatile ("" : : "r" (&a[0]) : "memory");
+  if (bar (&a[a[0].v], a[0].v + 1, &a[a[0].v + 1]))
+    abort ();
+  return 0;
+}
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c
index 430e16830a8..362a8815174 100644
--- a/gcc/var-tracking.c
+++ b/gcc/var-tracking.c
@@ -113,6 +113,7 @@
 #include "params.h"
 #include "diagnostic.h"
 #include "pointer-set.h"
+#include "recog.h"
 
 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
    has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
@@ -405,9 +406,8 @@ static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
 					   HOST_WIDE_INT *);
 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
 					       HOST_WIDE_INT *);
-static void bb_stack_adjust_offset (basic_block);
 static bool vt_stack_adjustments (void);
-static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
+static rtx compute_cfa_pointer (HOST_WIDE_INT);
 static hashval_t variable_htab_hash (const void *);
 static int variable_htab_eq (const void *, const void *);
 static void variable_htab_free (void *);
@@ -490,7 +490,7 @@ static void vt_emit_notes (void);
 
 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
 static void vt_add_function_parameters (void);
-static void vt_initialize (void);
+static bool vt_initialize (void);
 static void vt_finalize (void);
 
 /* Given a SET, calculate the amount of stack adjustment it contains
@@ -617,29 +617,6 @@ insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
     }
 }
 
-/* Compute stack adjustment in basic block BB.
*/ - -static void -bb_stack_adjust_offset (basic_block bb) -{ - HOST_WIDE_INT offset; - unsigned int i; - micro_operation *mo; - - offset = VTI (bb)->in.stack_adjust; - for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++) - { - if (mo->type == MO_ADJUST) - offset += mo->u.adjust; - else if (mo->type != MO_CALL) - { - if (MEM_P (mo->u.loc)) - mo->u.loc = adjust_stack_reference (mo->u.loc, -offset); - } - } - VTI (bb)->out.stack_adjust = offset; -} - /* Compute stack adjustments for all blocks by traversing DFS tree. Return true when the adjustments on all incoming edges are consistent. Heavily borrowed from pre_and_rev_post_order_compute. */ @@ -652,6 +629,7 @@ vt_stack_adjustments (void) /* Initialize entry block. */ VTI (ENTRY_BLOCK_PTR)->visited = true; + VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET; VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET; /* Allocate stack for back-tracking up CFG. */ @@ -675,9 +653,22 @@ vt_stack_adjustments (void) /* Check if the edge destination has been visited yet. */ if (!VTI (dest)->visited) { + rtx insn; + HOST_WIDE_INT pre, post, offset; VTI (dest)->visited = true; - VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust; - bb_stack_adjust_offset (dest); + VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust; + + if (dest != EXIT_BLOCK_PTR) + for (insn = BB_HEAD (dest); + insn != NEXT_INSN (BB_END (dest)); + insn = NEXT_INSN (insn)) + if (INSN_P (insn)) + { + insn_stack_adjust_offset_pre_post (insn, &pre, &post); + offset += pre + post; + } + + VTI (dest)->out.stack_adjust = offset; if (EDGE_COUNT (dest->succs) > 0) /* Since the DEST node has been visited for the first @@ -706,13 +697,12 @@ vt_stack_adjustments (void) return true; } -/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative - to the argument pointer. Return the new rtx. */ +/* Compute a CFA-based value for the stack pointer. */ static rtx -adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment) +compute_cfa_pointer (HOST_WIDE_INT adjustment) { - rtx addr, cfa, tmp; + rtx cfa; #ifdef FRAME_POINTER_CFA_OFFSET adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl); @@ -722,12 +712,216 @@ adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment) cfa = plus_constant (arg_pointer_rtx, adjustment); #endif - addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa); - tmp = simplify_rtx (addr); - if (tmp) - addr = tmp; + return cfa; +} - return replace_equiv_address_nv (mem, addr); +/* Adjustment for hard_frame_pointer_rtx to cfa base reg, + or -1 if the replacement shouldn't be done. */ +static HOST_WIDE_INT hard_frame_pointer_adjustment = -1; + +/* Data for adjust_mems callback. */ + +struct adjust_mem_data +{ + bool store; + enum machine_mode mem_mode; + HOST_WIDE_INT stack_adjust; + rtx side_effects; +}; + +/* Helper function for adjusting used MEMs. */ + +static rtx +adjust_mems (rtx loc, const_rtx old_rtx, void *data) +{ + struct adjust_mem_data *amd = (struct adjust_mem_data *) data; + rtx mem, addr = loc, tem; + enum machine_mode mem_mode_save; + bool store_save; + switch (GET_CODE (loc)) + { + case REG: + /* Don't do any sp or fp replacements outside of MEM addresses. 
*/ + if (amd->mem_mode == VOIDmode) + return loc; + if (loc == stack_pointer_rtx + && !frame_pointer_needed) + return compute_cfa_pointer (amd->stack_adjust); + else if (loc == hard_frame_pointer_rtx + && frame_pointer_needed + && hard_frame_pointer_adjustment != -1) + return compute_cfa_pointer (hard_frame_pointer_adjustment); + return loc; + case MEM: + mem = loc; + if (!amd->store) + { + mem = targetm.delegitimize_address (mem); + if (mem != loc && !MEM_P (mem)) + return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data); + } + + addr = XEXP (mem, 0); + mem_mode_save = amd->mem_mode; + amd->mem_mode = GET_MODE (mem); + store_save = amd->store; + amd->store = false; + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + amd->store = store_save; + amd->mem_mode = mem_mode_save; + if (mem == loc) + addr = targetm.delegitimize_address (addr); + if (addr != XEXP (mem, 0)) + mem = replace_equiv_address_nv (mem, addr); + if (!amd->store) + mem = avoid_constant_pool_reference (mem); + return mem; + case PRE_INC: + case PRE_DEC: + addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), + GEN_INT (GET_CODE (loc) == PRE_INC + ? GET_MODE_SIZE (amd->mem_mode) + : -GET_MODE_SIZE (amd->mem_mode))); + case POST_INC: + case POST_DEC: + if (addr == loc) + addr = XEXP (loc, 0); + gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), + GEN_INT ((GET_CODE (loc) == PRE_INC + || GET_CODE (loc) == POST_INC) + ? GET_MODE_SIZE (amd->mem_mode) + : -GET_MODE_SIZE (amd->mem_mode))); + amd->side_effects = alloc_EXPR_LIST (0, + gen_rtx_SET (VOIDmode, + XEXP (loc, 0), + tem), + amd->side_effects); + return addr; + case PRE_MODIFY: + addr = XEXP (loc, 1); + case POST_MODIFY: + if (addr == loc) + addr = XEXP (loc, 0); + gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + amd->side_effects = alloc_EXPR_LIST (0, + gen_rtx_SET (VOIDmode, + XEXP (loc, 0), + XEXP (loc, 1)), + amd->side_effects); + return addr; + case SUBREG: + /* First try without delegitimization of whole MEMs and + avoid_constant_pool_reference, which is more likely to succeed. */ + store_save = amd->store; + amd->store = true; + addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems, + data); + amd->store = store_save; + mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + if (mem == SUBREG_REG (loc)) + return loc; + tem = simplify_gen_subreg (GET_MODE (loc), mem, + GET_MODE (SUBREG_REG (loc)), + SUBREG_BYTE (loc)); + if (tem) + return tem; + tem = simplify_gen_subreg (GET_MODE (loc), addr, + GET_MODE (SUBREG_REG (loc)), + SUBREG_BYTE (loc)); + if (tem) + return tem; + return gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc)); + default: + break; + } + return NULL_RTX; +} + +/* Helper function for replacement of uses. */ + +static void +adjust_mem_uses (rtx *x, void *data) +{ + rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data); + if (new_x != *x) + validate_change (NULL_RTX, x, new_x, true); +} + +/* Helper function for replacement of stores. 
*/ + +static void +adjust_mem_stores (rtx loc, const_rtx expr, void *data) +{ + if (MEM_P (loc)) + { + rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX, + adjust_mems, data); + if (new_dest != SET_DEST (expr)) + { + rtx xexpr = CONST_CAST_RTX (expr); + validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true); + } + } +} + +/* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes, + replace them with their value in the insn and add the side-effects + as other sets to the insn. */ + +static void +adjust_insn (basic_block bb, rtx insn) +{ + struct adjust_mem_data amd; + rtx set; + amd.mem_mode = VOIDmode; + amd.stack_adjust = -VTI (bb)->out.stack_adjust; + amd.side_effects = NULL_RTX; + + amd.store = true; + note_stores (PATTERN (insn), adjust_mem_stores, &amd); + + amd.store = false; + note_uses (&PATTERN (insn), adjust_mem_uses, &amd); + + /* For read-only MEMs containing some constant, prefer those + constants. */ + set = single_set (insn); + if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set))) + { + rtx note = find_reg_equal_equiv_note (insn); + + if (note && CONSTANT_P (XEXP (note, 0))) + validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true); + } + + if (amd.side_effects) + { + rtx *pat, new_pat, s; + int i, oldn, newn; + + pat = &PATTERN (insn); + if (GET_CODE (*pat) == COND_EXEC) + pat = &COND_EXEC_CODE (*pat); + if (GET_CODE (*pat) == PARALLEL) + oldn = XVECLEN (*pat, 0); + else + oldn = 1; + for (s = amd.side_effects, newn = 0; s; newn++) + s = XEXP (s, 1); + new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn)); + if (GET_CODE (*pat) == PARALLEL) + for (i = 0; i < oldn; i++) + XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i); + else + XVECEXP (new_pat, 0, 0) = *pat; + for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1)) + XVECEXP (new_pat, 0, i) = XEXP (s, 0); + free_EXPR_LIST_list (&amd.side_effects); + validate_change (NULL_RTX, pat, new_pat, true); + } } /* Return true if a decl_or_value DV is a DECL or NULL. */ @@ -4326,6 +4520,10 @@ var_lowpart (enum machine_mode mode, rtx loc) return gen_rtx_REG_offset (loc, mode, regno, offset); } +/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or + hard_frame_pointer_rtx is being mapped to it. */ +static rtx cfa_base_rtx; + /* Carry information about uses and stores while walking rtx. */ struct count_use_info @@ -4371,6 +4569,17 @@ find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui) return NULL; } +/* Helper function to get mode of MEM's address. */ + +static inline enum machine_mode +get_address_mode (rtx mem) +{ + enum machine_mode mode = GET_MODE (XEXP (mem, 0)); + if (mode != VOIDmode) + return mode; + return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); +} + /* Replace all registers and addresses in an expression with VALUE expressions that map back to them, unless the expression is a register. If no mapping is or can be performed, returns NULL. 
*/ @@ -4382,9 +4591,8 @@ replace_expr_with_values (rtx loc) return NULL; else if (MEM_P (loc)) { - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc)); - cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0); + cselib_val *addr = cselib_lookup (XEXP (loc, 0), + get_address_mode (loc), 0); if (addr) return replace_equiv_address_nv (loc, addr->val_rtx); else @@ -4409,12 +4617,15 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false)) { rtx ploc = PAT_VAR_LOCATION_LOC (loc); - cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1); + if (! VAR_LOC_UNKNOWN_P (ploc)) + { + cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1); - /* ??? flag_float_store and volatile mems are never - given values, but we could in theory use them for - locations. */ - gcc_assert (val || 1); + /* ??? flag_float_store and volatile mems are never + given values, but we could in theory use them for + locations. */ + gcc_assert (val || 1); + } return MO_VAL_LOC; } else @@ -4429,7 +4640,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) { if (REG_P (loc) || (find_use_val (loc, GET_MODE (loc), cui) - && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0))) + && cselib_lookup (XEXP (loc, 0), + get_address_mode (loc), 0))) return MO_VAL_SET; } else @@ -4446,6 +4658,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) { gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER); + if (loc == cfa_base_rtx) + return MO_CLOBBER; expr = REG_EXPR (loc); if (!expr) @@ -4490,30 +4704,6 @@ log_op_type (rtx x, basic_block bb, rtx insn, fputc ('\n', out); } -/* Adjust sets if needed. Currently this optimizes read-only MEM loads - if REG_EQUAL/REG_EQUIV note is present. */ - -static void -adjust_sets (rtx insn, struct cselib_set *sets, int n_sets) -{ - if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src)) - { - /* For read-only MEMs containing some constant, prefer those - constants. */ - rtx note = find_reg_equal_equiv_note (insn), src; - - if (note && CONSTANT_P (XEXP (note, 0))) - { - sets[0].src = src = XEXP (note, 0); - if (GET_CODE (PATTERN (insn)) == COND_EXEC) - src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest), - COND_EXEC_TEST (PATTERN (insn)), - src, sets[0].dest); - sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1); - } - } -} - /* Tell whether the CONCAT used to holds a VALUE and its location needs value resolution, i.e., an attempt of mapping the location back to other incoming values. 
*/ @@ -4577,11 +4767,14 @@ add_uses (rtx *ploc, void *data) gcc_assert (cui->sets); if (MEM_P (vloc) - && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0))) + && !REG_P (XEXP (vloc, 0)) + && !MEM_P (XEXP (vloc, 0)) + && (GET_CODE (XEXP (vloc, 0)) != PLUS + || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1)))) { rtx mloc = vloc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); + enum machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); @@ -4646,11 +4839,14 @@ add_uses (rtx *ploc, void *data) gcc_assert (cui->sets); if (MEM_P (oloc) - && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0))) + && !REG_P (XEXP (oloc, 0)) + && !MEM_P (XEXP (oloc, 0)) + && (GET_CODE (XEXP (oloc, 0)) != PLUS + || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1)))) { rtx mloc = oloc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); + enum machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); @@ -4814,21 +5010,6 @@ reverse_op (rtx val, const_rtx expr) return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret); } -/* Return SRC, or, if it is a read-only MEM for which adjust_sets - replated it with a constant from REG_EQUIV/REG_EQUAL note, - that constant. */ - -static inline rtx -get_adjusted_src (struct count_use_info *cui, rtx src) -{ - if (cui->n_sets == 1 - && MEM_P (src) - && MEM_READONLY_P (src) - && CONSTANT_P (cui->sets[0].src)) - return cui->sets[0].src; - return src; -} - /* Add stores (register and memory references) LOC which will be tracked to VTI (bb)->mos. EXPR is the RTL expression containing the store. CUIP->insn is instruction which the LOC is part of. 
*/ @@ -4854,6 +5035,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) if (REG_P (loc)) { + gcc_assert (loc != cfa_base_rtx); if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET) || !(track_p = use_type (loc, NULL, &mode2) == MO_USE) || GET_CODE (expr) == CLOBBER) @@ -4864,10 +5046,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) else { if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) - { - src = get_adjusted_src (cui, SET_SRC (expr)); - src = var_lowpart (mode2, src); - } + src = var_lowpart (mode2, SET_SRC (expr)); loc = var_lowpart (mode2, loc); if (src == NULL) @@ -4877,10 +5056,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else { - rtx xexpr = CONST_CAST_RTX (expr); - - if (SET_SRC (expr) != src) - xexpr = gen_rtx_SET (VOIDmode, loc, src); + rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc))) mo.type = MO_COPY; else @@ -4895,12 +5071,16 @@ add_stores (rtx loc, const_rtx expr, void *cuip) || cui->sets)) { if (MEM_P (loc) && type == MO_VAL_SET - && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0))) + && !REG_P (XEXP (loc, 0)) + && !MEM_P (XEXP (loc, 0)) + && (GET_CODE (XEXP (loc, 0)) != PLUS + || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (loc, 0), 1)))) { rtx mloc = loc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); - cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); + enum machine_mode address_mode = get_address_mode (mloc); + cselib_val *val = cselib_lookup (XEXP (mloc, 0), + address_mode, 0); if (val && !cselib_preserved_value_p (val)) { @@ -4924,10 +5104,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) else { if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) - { - src = get_adjusted_src (cui, SET_SRC (expr)); - src = var_lowpart (mode2, src); - } + src = var_lowpart (mode2, SET_SRC (expr)); loc = var_lowpart (mode2, loc); if (src == NULL) @@ -4937,10 +5114,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else { - rtx xexpr = CONST_CAST_RTX (expr); - - if (SET_SRC (expr) != src) - xexpr = gen_rtx_SET (VOIDmode, loc, src); + rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); if (same_variable_part_p (SET_SRC (xexpr), MEM_EXPR (loc), INT_MEM_OFFSET (loc))) @@ -4997,13 +5171,12 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else if (resolve && GET_CODE (mo.u.loc) == SET) { - src = get_adjusted_src (cui, SET_SRC (expr)); - nloc = replace_expr_with_values (src); + nloc = replace_expr_with_values (SET_SRC (expr)); /* Avoid the mode mismatch between oexpr and expr. */ if (!nloc && mode != mode2) { - nloc = src; + nloc = SET_SRC (expr); gcc_assert (oloc == SET_DEST (expr)); } @@ -5102,8 +5275,6 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets) cselib_hook_called = true; - adjust_sets (insn, sets, n_sets); - cui.insn = insn; cui.bb = bb; cui.sets = sets; @@ -6690,7 +6861,7 @@ emit_note_insn_var_location (void **varp, void *data) complete = true; last_limit = 0; n_var_parts = 0; - if (!MAY_HAVE_DEBUG_STMTS) + if (!MAY_HAVE_DEBUG_INSNS) { for (i = 0; i < var->n_var_parts; i++) if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain) @@ -7688,124 +7859,69 @@ vt_add_function_parameters (void) } +/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. 
*/ + +static bool +fp_setter (rtx insn) +{ + rtx pat = PATTERN (insn); + if (RTX_FRAME_RELATED_P (insn)) + { + rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX); + if (expr) + pat = XEXP (expr, 0); + } + if (GET_CODE (pat) == SET) + return SET_DEST (pat) == hard_frame_pointer_rtx; + else if (GET_CODE (pat) == PARALLEL) + { + int i; + for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) + if (GET_CODE (XVECEXP (pat, 0, i)) == SET + && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx) + return true; + } + return false; +} + +/* Initialize cfa_base_rtx, create a preserved VALUE for it and + ensure it isn't flushed during cselib_reset_table. + Can be called only if frame_pointer_rtx resp. arg_pointer_rtx + has been eliminated. */ + +static void +vt_init_cfa_base (void) +{ + cselib_val *val; + +#ifdef FRAME_POINTER_CFA_OFFSET + cfa_base_rtx = frame_pointer_rtx; +#else + cfa_base_rtx = arg_pointer_rtx; +#endif + if (!MAY_HAVE_DEBUG_INSNS) + return; + + val = cselib_lookup (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1); + preserve_value (val); + cselib_preserve_cfa_base_value (val); + val->locs->setting_insn = get_insns (); + var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx, + VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx), + 0, NULL_RTX, INSERT); +} + /* Allocate and initialize the data structures for variable tracking and parse the RTL to get the micro operations. */ -static void +static bool vt_initialize (void) { - basic_block bb; + basic_block bb, prologue_bb = NULL; + HOST_WIDE_INT fp_cfa_offset = -1; alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def)); - if (MAY_HAVE_DEBUG_INSNS) - { - cselib_init (true); - scratch_regs = BITMAP_ALLOC (NULL); - valvar_pool = create_alloc_pool ("small variable_def pool", - sizeof (struct variable_def), 256); - preserved_values = VEC_alloc (rtx, heap, 256); - } - else - { - scratch_regs = NULL; - valvar_pool = NULL; - } - - FOR_EACH_BB (bb) - { - rtx insn; - HOST_WIDE_INT pre, post = 0; - basic_block first_bb, last_bb; - - if (MAY_HAVE_DEBUG_INSNS) - { - cselib_record_sets_hook = add_with_sets; - if (dump_file && (dump_flags & TDF_DETAILS)) - fprintf (dump_file, "first value: %i\n", - cselib_get_next_uid ()); - } - - first_bb = bb; - for (;;) - { - edge e; - if (bb->next_bb == EXIT_BLOCK_PTR - || ! single_pred_p (bb->next_bb)) - break; - e = find_edge (bb, bb->next_bb); - if (! e || (e->flags & EDGE_FALLTHRU) == 0) - break; - bb = bb->next_bb; - } - last_bb = bb; - - /* Add the micro-operations to the vector. 
*/ - FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb) - { - for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); - insn = NEXT_INSN (insn)) - { - if (INSN_P (insn)) - { - if (!frame_pointer_needed) - { - insn_stack_adjust_offset_pre_post (insn, &pre, &post); - if (pre) - { - micro_operation mo; - mo.type = MO_ADJUST; - mo.u.adjust = pre; - mo.insn = insn; - VEC_safe_push (micro_operation, heap, VTI (bb)->mos, - &mo); - - if (dump_file && (dump_flags & TDF_DETAILS)) - log_op_type (PATTERN (insn), bb, insn, - MO_ADJUST, dump_file); - } - } - - cselib_hook_called = false; - if (MAY_HAVE_DEBUG_INSNS) - { - cselib_process_insn (insn); - if (dump_file && (dump_flags & TDF_DETAILS)) - { - print_rtl_single (dump_file, insn); - dump_cselib_table (dump_file); - } - } - if (!cselib_hook_called) - add_with_sets (insn, 0, 0); - - if (!frame_pointer_needed && post) - { - micro_operation mo; - mo.type = MO_ADJUST; - mo.u.adjust = post; - mo.insn = insn; - VEC_safe_push (micro_operation, heap, VTI (bb)->mos, - &mo); - - if (dump_file && (dump_flags & TDF_DETAILS)) - log_op_type (PATTERN (insn), bb, insn, - MO_ADJUST, dump_file); - } - } - } - } - - bb = last_bb; - - if (MAY_HAVE_DEBUG_INSNS) - { - cselib_preserve_only_values (); - cselib_reset_table (cselib_get_next_uid ()); - cselib_record_sets_hook = NULL; - } - } - attrs_pool = create_alloc_pool ("attrs_def pool", sizeof (struct attrs_def), 1024); var_pool = create_alloc_pool ("variable_def pool", @@ -7843,8 +7959,182 @@ vt_initialize (void) VTI (bb)->permp = NULL; } + if (MAY_HAVE_DEBUG_INSNS) + { + cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS); + scratch_regs = BITMAP_ALLOC (NULL); + valvar_pool = create_alloc_pool ("small variable_def pool", + sizeof (struct variable_def), 256); + preserved_values = VEC_alloc (rtx, heap, 256); + } + else + { + scratch_regs = NULL; + valvar_pool = NULL; + } + + if (!frame_pointer_needed) + { + rtx reg, elim; + + if (!vt_stack_adjustments ()) + return false; + +#ifdef FRAME_POINTER_CFA_OFFSET + reg = frame_pointer_rtx; +#else + reg = arg_pointer_rtx; +#endif + elim = eliminate_regs (reg, VOIDmode, NULL_RTX); + if (elim != reg) + { + if (GET_CODE (elim) == PLUS) + elim = XEXP (elim, 0); + if (elim == stack_pointer_rtx) + vt_init_cfa_base (); + } + } + else if (!crtl->stack_realign_tried) + { + rtx reg, elim; + +#ifdef FRAME_POINTER_CFA_OFFSET + reg = frame_pointer_rtx; + fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); +#else + reg = arg_pointer_rtx; + fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); +#endif + elim = eliminate_regs (reg, VOIDmode, NULL_RTX); + if (elim != reg) + { + if (GET_CODE (elim) == PLUS) + { + fp_cfa_offset -= INTVAL (XEXP (elim, 1)); + elim = XEXP (elim, 0); + } + if (elim != hard_frame_pointer_rtx) + fp_cfa_offset = -1; + else + prologue_bb = single_succ (ENTRY_BLOCK_PTR); + } + } + + hard_frame_pointer_adjustment = -1; + + FOR_EACH_BB (bb) + { + rtx insn; + HOST_WIDE_INT pre, post = 0; + basic_block first_bb, last_bb; + + if (MAY_HAVE_DEBUG_INSNS) + { + cselib_record_sets_hook = add_with_sets; + if (dump_file && (dump_flags & TDF_DETAILS)) + fprintf (dump_file, "first value: %i\n", + cselib_get_next_uid ()); + } + + first_bb = bb; + for (;;) + { + edge e; + if (bb->next_bb == EXIT_BLOCK_PTR + || ! single_pred_p (bb->next_bb)) + break; + e = find_edge (bb, bb->next_bb); + if (! e || (e->flags & EDGE_FALLTHRU) == 0) + break; + bb = bb->next_bb; + } + last_bb = bb; + + /* Add the micro-operations to the vector. 
*/ + FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb) + { + HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust; + VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust; + for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); + insn = NEXT_INSN (insn)) + { + if (INSN_P (insn)) + { + if (!frame_pointer_needed) + { + insn_stack_adjust_offset_pre_post (insn, &pre, &post); + if (pre) + { + micro_operation mo; + mo.type = MO_ADJUST; + mo.u.adjust = pre; + mo.insn = insn; + if (dump_file && (dump_flags & TDF_DETAILS)) + log_op_type (PATTERN (insn), bb, insn, + MO_ADJUST, dump_file); + VEC_safe_push (micro_operation, heap, VTI (bb)->mos, + &mo); + VTI (bb)->out.stack_adjust += pre; + } + } + + cselib_hook_called = false; + adjust_insn (bb, insn); + if (MAY_HAVE_DEBUG_INSNS) + { + cselib_process_insn (insn); + if (dump_file && (dump_flags & TDF_DETAILS)) + { + print_rtl_single (dump_file, insn); + dump_cselib_table (dump_file); + } + } + if (!cselib_hook_called) + add_with_sets (insn, 0, 0); + cancel_changes (0); + + if (!frame_pointer_needed && post) + { + micro_operation mo; + mo.type = MO_ADJUST; + mo.u.adjust = post; + mo.insn = insn; + if (dump_file && (dump_flags & TDF_DETAILS)) + log_op_type (PATTERN (insn), bb, insn, + MO_ADJUST, dump_file); + VEC_safe_push (micro_operation, heap, VTI (bb)->mos, + &mo); + VTI (bb)->out.stack_adjust += post; + } + + if (bb == prologue_bb + && hard_frame_pointer_adjustment == -1 + && RTX_FRAME_RELATED_P (insn) + && fp_setter (insn)) + { + vt_init_cfa_base (); + hard_frame_pointer_adjustment = fp_cfa_offset; + } + } + } + gcc_assert (offset == VTI (bb)->out.stack_adjust); + } + + bb = last_bb; + + if (MAY_HAVE_DEBUG_INSNS) + { + cselib_preserve_only_values (); + cselib_reset_table (cselib_get_next_uid ()); + cselib_record_sets_hook = NULL; + } + } + + hard_frame_pointer_adjustment = -1; VTI (ENTRY_BLOCK_PTR)->flooded = true; vt_add_function_parameters (); + cfa_base_rtx = NULL_RTX; + return true; } /* Get rid of all debug insns from the insn stream. */ @@ -7946,15 +8236,11 @@ variable_tracking_main_1 (void) } mark_dfs_back_edges (); - vt_initialize (); - if (!frame_pointer_needed) + if (!vt_initialize ()) { - if (!vt_stack_adjustments ()) - { - vt_finalize (); - vt_debug_insns_local (true); - return 0; - } + vt_finalize (); + vt_debug_insns_local (true); + return 0; } success = vt_find_locations (); @@ -7968,10 +8254,8 @@ variable_tracking_main_1 (void) /* This is later restored by our caller. */ flag_var_tracking_assignments = 0; - vt_initialize (); - - if (!frame_pointer_needed && !vt_stack_adjustments ()) - gcc_unreachable (); + success = vt_initialize (); + gcc_assert (success); success = vt_find_locations (); }
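
The net effect on a stack-relative location, shown schematically (register names and offsets are illustrative, not taken from the patch): adjust_mems rewrites stack and hard frame pointer references inside MEM addresses into CFA-based form via compute_cfa_pointer, so the cselib VALUE protected by cselib_preserve_cfa_base_value can describe the slot across the whole function:

    /* Schematic only.  Before adjust_insn a spill slot address may be
         (mem:SI (plus:SI (reg:SI sp) (const_int 12)))
       where the meaning of "sp + 12" changes at every push and pop.  After
       adjust_mems substitutes compute_cfa_pointer (which folds in the
       per-insn stack adjustment gathered by vt_stack_adjustments), the
       address is expressed against the eliminated arg/frame pointer:
         (mem:SI (plus:SI (reg:SI argp) (const_int C)))
       and the argp VALUE survives cselib_reset_table between extended
       basic blocks because it is preserved as a CFA base value.  */
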