From c75ab022811aa05c49657d6c48471962b48c9666 Mon Sep 17 00:00:00 2001 From: Daniel Berlin Date: Sun, 13 Mar 2005 00:46:07 +0000 Subject: [PATCH] In gcc/ada/ 2005-03-12 Daniel Berlin In gcc/ada/ 2005-03-12 Daniel Berlin * misc.c (gnat_post_options): Turn off structural aliasing for now. In gcc/ 2005-03-12 Daniel Berlin * tree-flow-inline.h (ref_contains_array_ref): New function. (lookup_subvars_for_var): Ditto. (get_subvars_for_var): Ditto. (var_can_have_subvars): Ditto. * tree-flow.h (mem_tag_kind): Add STRUCT_FIELD. (struct subvar): New type. * tree-dfa.c (okay_component_ref_for_subvars): New function. * tree-optimize.c (init_tree_optimization_passes): Call pass_create_structure_vars. * tree-ssa-alias.c: Include vec.h. (init_alias_info): Don't auto-clear call clobbered on struct-field tags. (compute_flow_insensitive_aliasing): Handle subvars. (group_aliases): Handle STRUCT_FIELD aliases. (setup_pointers_and_addressables): Ditto. Don't mark variables non-addressable if they still have addressable subvars. Also mark subvars addressable when the real variable is marked addressable. (add_pointed_to_var): Try to prune the pointed-to set by only pointing to subvars when possible. Otherwise, make sure we set addresses_needed and pt_vars to properly include subvars. (bitpos_of_field): New function. (push_fields_onto_fieldstack): Ditto. (get_or_create_used_part_for): Ditto. (create_overlap_variables_for): Ditto. (find_used_portions): Ditto. (create_structure_vars): Ditto. (pass_create_structure_vars): New structure. * tree-ssa-operands.c (finalize_ssa_v_must_defs): Remove assert. (get_expr_operands): Handle subvars. Also try to turn COMPONENT_REF accesses into must-defs now that we can accurately portray it. (note_addressable): Try to only mark as addressable those subvars we know a COMPONENT_REF touches. (overlap_subvar): New function. * tree-vect-analyze.c (vect_object_analysis): Add new parameter. Handle subvar storing. (vect_address_analysis): Update caller of vect_object_analysis. * tree-vect-transform.c (vect_create_data_ref_ptr): Copy subvars. * tree-vectorizer.h (struct _stmt_vec_info): Add subvars member. (STMT_VINFO_SUBVARS): New macro. * common.opts: add flag_tree_salias. * opts.c (decode_options): flag_tree_salias defaults to on. * doc/invoke.texi: Document fdump-tree-svars and -ftree-salias. * doc/tree-ssa.texi: Document structural alias analysis. From-SVN: r96362 --- gcc/ChangeLog | 62 +++ gcc/ada/ChangeLog | 5 + gcc/ada/misc.c | 2 + gcc/common.opt | 4 + gcc/doc/invoke.texi | 11 + gcc/doc/tree-ssa.texi | 39 +- gcc/opts.c | 1 + gcc/testsuite/gcc.dg/tree-ssa/structopt-1.c | 15 + gcc/testsuite/gcc.dg/tree-ssa/structopt-2.c | 45 ++ gcc/testsuite/gcc.dg/tree-ssa/structopt-3.c | 16 + gcc/tree-dfa.c | 43 ++ gcc/tree-flow-inline.h | 56 ++ gcc/tree-flow.h | 34 +- gcc/tree-optimize.c | 1 + gcc/tree-pass.h | 1 + gcc/tree-ssa-alias.c | 555 +++++++++++++++++++- gcc/tree-ssa-operands.c | 179 +++++-- gcc/tree-vect-analyze.c | 23 +- gcc/tree-vect-transform.c | 4 +- gcc/tree-vectorizer.h | 2 + 20 files changed, 1026 insertions(+), 72 deletions(-) create mode 100644 gcc/testsuite/gcc.dg/tree-ssa/structopt-1.c create mode 100644 gcc/testsuite/gcc.dg/tree-ssa/structopt-2.c create mode 100644 gcc/testsuite/gcc.dg/tree-ssa/structopt-3.c diff --git a/gcc/ChangeLog b/gcc/ChangeLog index f60d4156df8..f5e5829e19d 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,3 +1,65 @@ +2005-03-12 Daniel Berlin + + * tree-flow-inline.h (ref_contains_array_ref): New function. + (lookup_subvars_for_var): Ditto. 
+ (get_subvars_for_var): Ditto. + (var_can_have_subvars): Ditto. + + * tree-flow.h (mem_tag_kind): Add STRUCT_FIELD. + (struct subvar): New type. + + * tree-dfa.c (okay_component_ref_for_subvars): New function. + + * tree-optimize.c (init_tree_optimization_passes): Call + pass_create_structure_vars. + + * tree-ssa-alias.c: Include vec.h. + (init_alias_info): Don't auto-clear call clobbered on struct-field + tags. + (compute_flow_insensitive_aliasing): Handle subvars. + (group_aliases): Handle STRUCT_FIELD aliases. + (setup_pointers_and_addressables): Ditto. + Don't mark variables non-addressable if they still have + addressable subvars. + Also mark subvars addressable when the real variable is marked + addressable. + (add_pointed_to_var): Try to prune the pointed-to set by only + pointing to subvars when possible. + Otherwise, make sure we set addresses_needed and pt_vars to + properly include subvars. + (bitpos_of_field): New function. + (push_fields_onto_fieldstack): Ditto. + (get_or_create_used_part_for): Ditto. + (create_overlap_variables_for): Ditto. + (find_used_portions): Ditto. + (create_structure_vars): Ditto. + (pass_create_structure_vars): New structure. + + * tree-ssa-operands.c (finalize_ssa_v_must_defs): Remove assert. + (get_expr_operands): Handle subvars. Also try to turn + COMPONENT_REF accesses into must-defs now that we can accurately + portray it. + (note_addressable): Try to only mark as addressable those subvars + we know a COMPONENT_REF touches. + (overlap_subvar): New function. + + * tree-vect-analyze.c (vect_object_analysis): Add new parameter. + Handle subvar storing. + (vect_address_analysis): Update caller of vect_object_analysis. + + * tree-vect-transform.c (vect_create_data_ref_ptr): Copy subvars. + + * tree-vectorizer.h (struct _stmt_vec_info): Add subvars member. + (STMT_VINFO_SUBVARS): New macro. + + * common.opts: add flag_tree_salias. + + * opts.c (decode_options): flag_tree_salias defaults to on. + + * doc/invoke.texi: Document fdump-tree-svars and -ftree-salias. + + * doc/tree-ssa.texi: Document structural alias analysis. + 2005-03-12 Steven Bosscher * tree-cfg.c (make_goto_expr_edges): Don't use error_mark_node. diff --git a/gcc/ada/ChangeLog b/gcc/ada/ChangeLog index a6877578d31..328436cba77 100644 --- a/gcc/ada/ChangeLog +++ b/gcc/ada/ChangeLog @@ -1,3 +1,8 @@ +2005-03-12 Daniel Berlin + + * misc.c (gnat_post_options): Turn off structural + aliasing for now. + 2005-03-08 Laurent Guerby * system-linux-sparc.ads: Fix typo in previous commit. diff --git a/gcc/ada/misc.c b/gcc/ada/misc.c index e63277dc345..81f8249c425 100644 --- a/gcc/ada/misc.c +++ b/gcc/ada/misc.c @@ -352,6 +352,8 @@ gnat_post_options (const char **pfilename ATTRIBUTE_UNUSED) flag_no_inline = 1; if (flag_inline_functions) flag_inline_trees = 2; + + flag_tree_salias = 0; return false; } diff --git a/gcc/common.opt b/gcc/common.opt index b10c0e998e5..1fc2289db23 100644 --- a/gcc/common.opt +++ b/gcc/common.opt @@ -872,6 +872,10 @@ ftree-pre Common Report Var(flag_tree_pre) Enable SSA-PRE optimization on trees +ftree-salias +Common Report Var(flag_tree_salias) +Perform structural alias analysis + ftree-sink Common Report Var(flag_tree_sink) Enable SSA code sinking on trees diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi index a1ef8313dba..c30b161888d 100644 --- a/gcc/doc/invoke.texi +++ b/gcc/doc/invoke.texi @@ -266,6 +266,7 @@ Objective-C and Objective-C++ Dialects}. 
-fdump-tree-nrv -fdump-tree-vect @gol -fdump-tree-sink @gol -fdump-tree-sra@r{[}-@var{n}@r{]} @gol +-fdump-tree-salias @gol -fdump-tree-fre@r{[}-@var{n}@r{]} @gol -ftree-vectorizer-verbose=@var{n} @gol -feliminate-dwarf2-dups -feliminate-unused-debug-types @gol @@ -322,6 +323,7 @@ Objective-C and Objective-C++ Dialects}. -ftree-loop-linear -ftree-loop-im -ftree-loop-ivcanon -fivopts @gol -ftree-dominator-opts -ftree-dse -ftree-copyrename -ftree-sink @gol -ftree-ch -ftree-sra -ftree-ter -ftree-lrs -ftree-fre -ftree-vectorize @gol +-ftree-salias @gol --param @var{name}=@var{value} -O -O0 -O1 -O2 -O3 -Os} @@ -3819,6 +3821,11 @@ appending @file{.ch} to the source file name. Dump SSA related information to a file. The file name is made by appending @file{.ssa} to the source file name. +@item salias +@opindex fdump-tree-salias +Dump structure aliasing variable information to a file. This file name +is made by appending @file{.salias} to the source file name. + @item alias @opindex fdump-tree-alias Dump aliasing information for each function. The file name is made by @@ -4695,6 +4702,10 @@ that are computed on all paths leading to the redundant computation. This analysis faster than PRE, though it exposes fewer redundancies. This flag is enabled by default at @option{-O} and higher. +@item -ftree-salias +Perform structural alias analysis on trees. This flag +is enabled by default at @option{-O} and higher. + @item -ftree-sink Perform forward store motion on trees. This flag is enabled by default at @option{-O} and higher. diff --git a/gcc/doc/tree-ssa.texi b/gcc/doc/tree-ssa.texi index 6996e834fea..4679912fa9a 100644 --- a/gcc/doc/tree-ssa.texi +++ b/gcc/doc/tree-ssa.texi @@ -1208,9 +1208,46 @@ hooks to execute custom code at various points during traversal: @cindex flow-sensitive alias analysis @cindex flow-insensitive alias analysis -Alias analysis proceeds in 3 main phases: +Alias analysis proceeds in 4 main phases: @enumerate +@item Structural alias analysis. + +This phase walks the types for structure variables, and determines which +of the fields can overlap using offset and size of each field. For each +field, a ``subvariable'' called a ``Structure field tag'' (SFT)@ is +created, which represents that field as a separate variable. All +accesses that could possibly overlap with a given field will have +virtual operands for the SFT of that field. + +@smallexample +struct foo +@{ + int a; + int b; +@} +struct foo temp; +int bar (void) +@{ + int tmp1, tmp2, tmp3; + SFT.0_2 = V_MUST_DEF + temp.a = 5; + SFT.1_4 = V_MUST_DEF + temp.b = 6; + + VUSE + tmp1_5 = temp.b; + VUSE + tmp2_6 = temp.a; + + tmp3_7 = tmp1_5 + tmp2_6; + return tmp3_7; +@} +@end smallexample + +If you copy the type tag for a variable for some reason, you probably +also want to copy the subvariables for that variable. + @item Points-to and escape analysis. 
This phase walks the use-def chains in the SSA web looking for diff --git a/gcc/opts.c b/gcc/opts.c index 9ab16f0f562..c6940013a50 100644 --- a/gcc/opts.c +++ b/gcc/opts.c @@ -502,6 +502,7 @@ decode_options (unsigned int argc, const char **argv) flag_tree_copyrename = 1; flag_tree_fre = 1; flag_tree_sink = 1; + flag_tree_salias = 1; if (!optimize_size) { diff --git a/gcc/testsuite/gcc.dg/tree-ssa/structopt-1.c b/gcc/testsuite/gcc.dg/tree-ssa/structopt-1.c new file mode 100644 index 00000000000..99abc761ab5 --- /dev/null +++ b/gcc/testsuite/gcc.dg/tree-ssa/structopt-1.c @@ -0,0 +1,15 @@ +/* { dg-do compile } */ +/* { dg-options "-O2 -fdump-tree-lim-details" } */ +int x; int y; +struct { int x; int y; } global; +int foo() { + int i; + for ( i=0; i<10; i++) + y += x*x; + for ( i=0; i<10; i++) + global.y += global.x*global.x; +} + +/* { dg-final { scan-tree-dump-times "Executing store motion of global.y" 1 "lim" } } */ +/* XXX: We should also check for the load motion of global.x, but there is no easy way to do this. */ + diff --git a/gcc/testsuite/gcc.dg/tree-ssa/structopt-2.c b/gcc/testsuite/gcc.dg/tree-ssa/structopt-2.c new file mode 100644 index 00000000000..327b54c162c --- /dev/null +++ b/gcc/testsuite/gcc.dg/tree-ssa/structopt-2.c @@ -0,0 +1,45 @@ +/* { dg-do compile } */ +/* { dg-options "-O2 -fdump-tree-optimized -fno-tree-sra" } */ + +/* Even without SRA being enabled, we should be able to eliminate every structure store and load here. */ +extern void foo (const int); +int main(void) +{ + struct a + { + int e; + int f; + int g; + } a; + struct a b; + int x, c; + a.e = 50; + a.f = 9; + a.g = a.e * a.f; + foo (a.f); + foo (a.g); + x = a.f; + c = a.e; + foo (x); + foo (c); + a.e = 5; + a.f = 40; + a.g = 90; + foo (a.e); + foo (a.f); + foo (a.g); + c = a.f; + foo (c); + b.e = 9; + a.e = b.e + 1 * c; + a.f = 30; + foo (a.e); + foo (a.f); + x = a.e * a.f; + foo (x); + +} +/* { dg-final { scan-tree-dump-times "a.e" 0 "optimized" } } */ +/* { dg-final { scan-tree-dump-times "a.f" 0 "optimized" } } */ +/* { dg-final { scan-tree-dump-times "a.g" 0 "optimized" } } */ +/* { dg-final { scan-tree-dump-times "b.e" 0 "optimized" } } */ diff --git a/gcc/testsuite/gcc.dg/tree-ssa/structopt-3.c b/gcc/testsuite/gcc.dg/tree-ssa/structopt-3.c new file mode 100644 index 00000000000..793ee24feb9 --- /dev/null +++ b/gcc/testsuite/gcc.dg/tree-ssa/structopt-3.c @@ -0,0 +1,16 @@ +/* { dg-do compile } */ +/* { dg-options "-O2 -fdump-tree-optimized" } */ + +struct foo +{ + int a; + int b; +} temp; +/* We should be able to optimize this to return 11. */ +int main(void) +{ + temp.a = 5; + temp.b = 6; + return temp.a + temp.b; +} +/* { dg-final { scan-tree-dump-times "return 11" 1 "optimized" } } */ diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c index 630ee4cbf8d..6bbec8f8d1e 100644 --- a/gcc/tree-dfa.c +++ b/gcc/tree-dfa.c @@ -1053,3 +1053,46 @@ mark_call_clobbered_vars_to_rename (void) bitmap_set_bit (vars_to_rename, var_ann (var)->uid); } } + +/* If REF is a COMPONENT_REF for a structure that can have sub-variables, and + we know where REF is accessing, return the variable in REF that has the + sub-variables. If the return value is not NULL, POFFSET will be the + offset, in bits, of REF inside the return value, and PSIZE will be the + size, in bits, of REF inside the return value. 
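   As a purely illustrative sketch (the type and variable names below are
   made up, and 32-bit ints are assumed), for

     struct pair { int a; int b; } p;
     ... = p.b;

   the COMPONENT_REF p.b would return p, with *POFFSET set to 32 and
   *PSIZE set to 32, provided p has sub-variables.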
*/ + +tree +okay_component_ref_for_subvars (tree ref, HOST_WIDE_INT *poffset, + HOST_WIDE_INT *psize) +{ + tree result = NULL; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; + tree offset; + enum machine_mode mode; + int unsignedp; + int volatilep; + + gcc_assert (!SSA_VAR_P (ref)); + *poffset = 0; + *psize = (unsigned int) -1; + + if (ref_contains_array_ref (ref)) + return result; + ref = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode, + &unsignedp, &volatilep, false); + if (TREE_CODE (ref) == INDIRECT_REF) + return result; + else if (offset == NULL && bitsize != -1 && SSA_VAR_P (ref)) + { + *poffset = bitpos; + *psize = bitsize; + if (get_subvars_for_var (ref) != NULL) + return ref; + } + else if (SSA_VAR_P (ref)) + { + if (get_subvars_for_var (ref) != NULL) + return ref; + } + return NULL_TREE; +} diff --git a/gcc/tree-flow-inline.h b/gcc/tree-flow-inline.h index 70450537698..7701e5cc7fd 100644 --- a/gcc/tree-flow-inline.h +++ b/gcc/tree-flow-inline.h @@ -878,4 +878,60 @@ op_iter_init_mustdef (ssa_op_iter *ptr, tree stmt, use_operand_p *kill, op_iter_init (ptr, stmt, SSA_OP_VMUSTDEFKILL); op_iter_next_mustdef (kill, def, ptr); } + +/* Return true if REF, a COMPONENT_REF, has an ARRAY_REF somewhere in it. */ + +static inline bool +ref_contains_array_ref (tree ref) +{ + while (handled_component_p (ref)) + { + if (TREE_CODE (ref) == ARRAY_REF) + return true; + ref = TREE_OPERAND (ref, 0); + } + return false; +} + +/* Given a variable VAR, lookup and return a pointer to the list of + subvariables for it. */ + +static inline subvar_t * +lookup_subvars_for_var (tree var) +{ + var_ann_t ann = var_ann (var); + gcc_assert (ann); + return &ann->subvars; +} + +/* Given a variable VAR, return a linked list of subvariables for VAR, or + NULL, if there are no subvariables. */ + +static inline subvar_t +get_subvars_for_var (tree var) +{ + subvar_t subvars; + + gcc_assert (SSA_VAR_P (var)); + + if (TREE_CODE (var) == SSA_NAME) + subvars = *(lookup_subvars_for_var (SSA_NAME_VAR (var))); + else + subvars = *(lookup_subvars_for_var (var)); + return subvars; +} + +/* Return true if V is a tree that we can have subvars for. + Normally, this is any aggregate type, however, due to implementation + limitations ATM, we exclude array types as well. */ + +static inline bool +var_can_have_subvars (tree v) +{ + return (AGGREGATE_TYPE_P (TREE_TYPE (v)) && + TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE); +} + + + #endif /* _TREE_FLOW_INLINE_H */ diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h index 479996dfdc3..25d3e5ab081 100644 --- a/gcc/tree-flow.h +++ b/gcc/tree-flow.h @@ -138,7 +138,26 @@ enum mem_tag_kind { TYPE_TAG, /* This variable is a name memory tag (NMT). */ - NAME_TAG + NAME_TAG, + + /* This variable represents a structure field. */ + STRUCT_FIELD +}; +struct subvar; +typedef struct subvar *subvar_t; + +/* This structure represents a fake sub-variable for a structure field. */ + +struct subvar GTY(()) +{ + /* Fake variable name */ + tree var; + /* Offset inside structure. */ + HOST_WIDE_INT offset; + /* Size of field. */ + HOST_WIDE_INT size; + /* Next subvar for this structure. */ + subvar_t next; }; struct var_ann_d GTY(()) @@ -211,6 +230,8 @@ struct var_ann_d GTY(()) live at the same time and this can happen for each call to the dominator optimizer. 
*/ tree current_def; + + subvar_t subvars; }; @@ -556,6 +577,9 @@ extern tree make_rename_temp (tree, const char *); extern void record_vars (tree); extern bool block_may_fallthru (tree block); +typedef tree tree_on_heap; +DEF_VEC_MALLOC_P (tree_on_heap); + /* In tree-ssa-alias.c */ extern void dump_may_aliases_for (FILE *, tree); extern void debug_may_aliases_for (tree); @@ -567,13 +591,15 @@ extern void dump_points_to_info_for (FILE *, tree); extern void debug_points_to_info_for (tree); extern bool may_be_aliased (tree); extern struct ptr_info_def *get_ptr_info (tree); - +static inline subvar_t get_subvars_for_var (tree); +static inline bool ref_contains_array_ref (tree); +extern tree okay_component_ref_for_subvars (tree, HOST_WIDE_INT *, + HOST_WIDE_INT *); +static inline bool var_can_have_subvars (tree); /* Call-back function for walk_use_def_chains(). At each reaching definition, a function with this prototype is called. */ typedef bool (*walk_use_def_chains_fn) (tree, tree, void *); -typedef tree tree_on_heap; -DEF_VEC_MALLOC_P (tree_on_heap); /* In tree-ssa.c */ extern void init_tree_ssa (void); diff --git a/gcc/tree-optimize.c b/gcc/tree-optimize.c index 524ce6aa435..bb31a0edc35 100644 --- a/gcc/tree-optimize.c +++ b/gcc/tree-optimize.c @@ -348,6 +348,7 @@ init_tree_optimization_passes (void) p = &pass_all_optimizations.sub; NEXT_PASS (pass_referenced_vars); + NEXT_PASS (pass_create_structure_vars); NEXT_PASS (pass_build_ssa); NEXT_PASS (pass_may_alias); NEXT_PASS (pass_rename_ssa_copies); diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h index 30c7dd29a16..da5b8994fcf 100644 --- a/gcc/tree-pass.h +++ b/gcc/tree-pass.h @@ -167,5 +167,6 @@ extern struct tree_opt_pass pass_rest_of_compilation; extern struct tree_opt_pass pass_sink_code; extern struct tree_opt_pass pass_fre; extern struct tree_opt_pass pass_linear_transform; +extern struct tree_opt_pass pass_create_structure_vars; #endif /* GCC_TREE_PASS_H */ diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c index a8679747d03..e38a8c2490c 100644 --- a/gcc/tree-ssa-alias.c +++ b/gcc/tree-ssa-alias.c @@ -42,6 +42,7 @@ Boston, MA 02111-1307, USA. */ #include "tree-pass.h" #include "convert.h" #include "params.h" +#include "vec.h" /* 'true' after aliases have been computed (see compute_may_aliases). */ bool aliases_computed_p; @@ -524,8 +525,15 @@ init_alias_info (void) variables, clear the call-clobbered flag. Variables that are intrinsically call-clobbered (globals, local statics, etc) will not be marked by the aliasing code, so we can't - remove them from CALL_CLOBBERED_VARS. */ - if (ann->mem_tag_kind != NOT_A_TAG || !is_global_var (var)) + remove them from CALL_CLOBBERED_VARS. + + NB: STRUCT_FIELDS are still call clobbered if they are for + a global variable, so we *don't* clear their call clobberedness + just because they are tags, though we will clear it if they + aren't for global variables. */ + if (ann->mem_tag_kind == NAME_TAG + || ann->mem_tag_kind == TYPE_TAG + || !is_global_var (var)) clear_call_clobbered (var); } @@ -982,13 +990,28 @@ compute_flow_insensitive_aliasing (struct alias_info *ai) if (may_alias_p (p_map->var, p_map->set, var, v_map->set)) { + subvar_t svars; size_t num_tag_refs, num_var_refs; num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid); num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid); /* Add VAR to TAG's may-aliases set. */ - add_may_alias (tag, var); + + /* If this is an aggregate, we may have subvariables for it + that need to be pointed to. 
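     For instance (an illustration only, reusing the SFT naming from
     doc/tree-ssa.texi), if VAR is a structure whose fields have the
     structure field tags SFT.0 and SFT.1, TAG's may-alias set ends up
     containing SFT.0 and SFT.1 rather than VAR itself, so virtual
     operands refer to the individual fields.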
*/ + if (var_can_have_subvars (var) + && (svars = get_subvars_for_var (var))) + { + subvar_t sv; + + for (sv = svars; sv; sv = sv->next) + add_may_alias (tag, sv->var); + } + else + { + add_may_alias (tag, var); + } /* Update the total number of virtual operands due to aliasing. Since we are adding one more alias to TAG's @@ -1040,7 +1063,7 @@ compute_flow_insensitive_aliasing (struct alias_info *ai) sbitmap may_aliases2 = p_map2->may_aliases; /* If the pointers may not point to each other, do nothing. */ - if (!may_alias_p (p_map1->var, p_map1->set, p_map2->var, p_map2->set)) + if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set)) continue; /* The two pointers may alias each other. If they already have @@ -1293,7 +1316,9 @@ group_aliases (struct alias_info *ai) tree alias = VARRAY_TREE (aliases, j); var_ann_t ann = var_ann (alias); - if (ann->mem_tag_kind == NOT_A_TAG && ann->may_aliases) + if ((ann->mem_tag_kind == NOT_A_TAG + || ann->mem_tag_kind == STRUCT_FIELD) + && ann->may_aliases) { tree new_alias; @@ -1378,13 +1403,19 @@ setup_pointers_and_addressables (struct alias_info *ai) { tree var = referenced_var (i); var_ann_t v_ann = var_ann (var); + subvar_t svars; /* Name memory tags already have flow-sensitive aliasing information, so they need not be processed by compute_flow_insensitive_aliasing. Similarly, type memory tags are already accounted for when we process their - associated pointer. */ - if (v_ann->mem_tag_kind != NOT_A_TAG) + associated pointer. + + Structure fields, on the other hand, have to have some of this + information processed for them, but it's pointless to mark them + non-addressable (since they are fake variables anyway). */ + if (v_ann->mem_tag_kind != NOT_A_TAG + && v_ann->mem_tag_kind != STRUCT_FIELD) continue; /* Remove the ADDRESSABLE flag from every addressable variable whose @@ -1392,20 +1423,36 @@ setup_pointers_and_addressables (struct alias_info *ai) of ADDR_EXPR constants into INDIRECT_REF expressions and the removal of dead pointer assignments done by the early scalar cleanup passes. */ - if (TREE_ADDRESSABLE (var)) + if (TREE_ADDRESSABLE (var) && v_ann->mem_tag_kind != STRUCT_FIELD) { if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid) && TREE_CODE (var) != RESULT_DECL && !is_global_var (var)) { - /* The address of VAR is not needed, remove the - addressable bit, so that it can be optimized as a - regular variable. */ - mark_non_addressable (var); - + bool okay_to_mark = true; /* Since VAR is now a regular GIMPLE register, we will need to rename VAR into SSA afterwards. */ bitmap_set_bit (vars_to_rename, v_ann->uid); + + if (var_can_have_subvars (var) + && (svars = get_subvars_for_var (var))) + { + subvar_t sv; + + for (sv = svars; sv; sv = sv->next) + { + var_ann_t svann = var_ann (sv->var); + if (bitmap_bit_p (ai->addresses_needed, svann->uid)) + okay_to_mark = false; + bitmap_set_bit (vars_to_rename, svann->uid); + } + } + /* The address of VAR is not needed, remove the + addressable bit, so that it can be optimized as a + regular variable. */ + if (okay_to_mark) + mark_non_addressable (var); + } else { @@ -1414,6 +1461,14 @@ setup_pointers_and_addressables (struct alias_info *ai) clobber memory. In those cases, we need to clobber all call-clobbered variables and all addressables. 
*/ bitmap_set_bit (addressable_vars, v_ann->uid); + if (var_can_have_subvars (var) + && (svars = get_subvars_for_var (var))) + { + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + bitmap_set_bit (addressable_vars, var_ann (sv->var)->uid); + } + } } @@ -1422,7 +1477,7 @@ setup_pointers_and_addressables (struct alias_info *ai) if (may_be_aliased (var)) { create_alias_map_for (var, ai); - bitmap_set_bit (vars_to_rename, var_ann (var)->uid); + bitmap_set_bit (vars_to_rename, var_ann (var)->uid); } /* Add pointer variables that have been dereferenced to the POINTERS @@ -1579,7 +1634,17 @@ maybe_create_global_var (struct alias_info *ai) .GLOBAL_VAR has been created, make it an alias for all call-clobbered variables. */ if (global_var && var != global_var) - add_may_alias (var, global_var); + { + subvar_t svars; + add_may_alias (var, global_var); + if (var_can_have_subvars (var) + && (svars = get_subvars_for_var (var))) + { + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + bitmap_set_bit (vars_to_rename, var_ann (sv->var)->uid); + } + } bitmap_set_bit (vars_to_rename, var_ann (var)->uid); } @@ -1644,7 +1709,6 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set, alias_stats.tbaa_resolved++; return false; } - alias_stats.alias_mayalias++; return true; } @@ -1894,23 +1958,73 @@ static void add_pointed_to_var (struct alias_info *ai, tree ptr, tree value) { struct ptr_info_def *pi = get_ptr_info (ptr); - tree pt_var; + tree pt_var = NULL_TREE; + HOST_WIDE_INT offset, size; + tree addrop; size_t uid; + tree ref; + subvar_t svars; gcc_assert (TREE_CODE (value) == ADDR_EXPR); - pt_var = TREE_OPERAND (value, 0); - if (REFERENCE_CLASS_P (pt_var)) - pt_var = get_base_address (pt_var); + addrop = TREE_OPERAND (value, 0); + if (REFERENCE_CLASS_P (addrop)) + pt_var = get_base_address (addrop); + else + pt_var = addrop; - if (pt_var && SSA_VAR_P (pt_var)) + /* If this is a component_ref, see if we can get a smaller number of + variables to take the address of. */ + if (TREE_CODE (addrop) == COMPONENT_REF + && (ref = okay_component_ref_for_subvars (addrop, &offset ,&size))) + { + subvar_t sv; + svars = get_subvars_for_var (ref); + + uid = var_ann (pt_var)->uid; + bitmap_set_bit (ai->addresses_needed, uid); + if (pi->pt_vars == NULL) + pi->pt_vars = BITMAP_GGC_ALLOC (); + /* If the variable is a global, mark the pointer as pointing to + global memory (which will make its tag a global variable). */ + if (is_global_var (pt_var)) + pi->pt_global_mem = 1; + + for (sv = svars; sv; sv = sv->next) + { + if (offset == sv->offset && size == sv->size) + bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid); + else if (offset >= sv->offset && offset < (sv->offset + sv->size)) + bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid); + else if (offset < sv->offset + && (offset + size > sv->offset)) + bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid); + } + } + else if (pt_var && SSA_VAR_P (pt_var)) { + uid = var_ann (pt_var)->uid; bitmap_set_bit (ai->addresses_needed, uid); if (pi->pt_vars == NULL) pi->pt_vars = BITMAP_GGC_ALLOC (); - bitmap_set_bit (pi->pt_vars, uid); + + /* If this is an aggregate, we may have subvariables for it that need + to be pointed to. 
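     E.g. (illustration only): for ptr = &x, where x is a structure that
     has sub-variables, PTR's points-to set receives the uid of each of
     x's SFTs rather than x's own uid, and each SFT is also marked in
     ai->addresses_needed.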
*/ + if (var_can_have_subvars (pt_var) + && (svars = get_subvars_for_var (pt_var))) + { + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + { + uid = var_ann (sv->var)->uid; + bitmap_set_bit (ai->addresses_needed, uid); + bitmap_set_bit (pi->pt_vars, uid); + } + } + else + bitmap_set_bit (pi->pt_vars, uid); /* If the variable is a global, mark the pointer as pointing to global memory (which will make its tag a global variable). */ @@ -2540,3 +2654,400 @@ may_be_aliased (tree var) return true; } + +/* This structure is simply used during pushing fields onto the fieldstack + to track the offset of the field, since bitpos_of_field gives it relative + to its immediate containing type, and we want it relative to the ultimate + containing object. */ + +typedef struct fieldoff +{ + tree field; + HOST_WIDE_INT offset; +} *fieldoff_t; + +DEF_VEC_MALLOC_P(fieldoff_t); + +/* Return the position, in bits, of FIELD_DECL from the beginning of its + structure. + Return -1 if the position is conditional or otherwise non-constant + integer. */ + +static HOST_WIDE_INT +bitpos_of_field (const tree fdecl) +{ + + if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST + || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST) + return -1; + + return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8) + + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1); +} + +/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields + of TYPE onto fieldstack, recording their offsets along the way. + OFFSET is used to keep track of the offset in this entire structure, rather + than just the immediately containing structure. */ + +static void +push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack, + HOST_WIDE_INT offset) +{ + fieldoff_t pair; + tree field = TYPE_FIELDS (type); + if (!field) + return; + if (var_can_have_subvars (field) + && TREE_CODE (field) == FIELD_DECL) + { + size_t before = VEC_length (fieldoff_t, *fieldstack); + /* Empty structures may have actual size, like in C++. So see if we + actually end up pushing a field, and if not, if the size is non-zero, + push the field onto the stack */ + push_fields_onto_fieldstack (TREE_TYPE (field), fieldstack, offset); + if (before == VEC_length (fieldoff_t, *fieldstack) + && DECL_SIZE (field) + && !integer_zerop (DECL_SIZE (field))) + { + pair = xmalloc (sizeof (struct fieldoff)); + pair->field = field; + pair->offset = offset; + VEC_safe_push (fieldoff_t, *fieldstack, pair); + } + } + else if (TREE_CODE (field) == FIELD_DECL) + { + pair = xmalloc (sizeof (struct fieldoff)); + pair->field = field; + pair->offset = offset + bitpos_of_field (field); + VEC_safe_push (fieldoff_t, *fieldstack, pair); + } + for (field = TREE_CHAIN (field); field; field = TREE_CHAIN (field)) + { + if (TREE_CODE (field) != FIELD_DECL) + continue; + if (var_can_have_subvars (field)) + { + push_fields_onto_fieldstack (TREE_TYPE (field), fieldstack, + offset + bitpos_of_field (field)); + } + else + { + pair = xmalloc (sizeof (struct fieldoff)); + pair->field = field; + pair->offset = offset + bitpos_of_field (field); + VEC_safe_push (fieldoff_t, *fieldstack, pair); + } + } +} + + +/* This represents the used range of a variable. */ + +typedef struct used_part +{ + HOST_WIDE_INT minused; + HOST_WIDE_INT maxused; +} *used_part_t; + +/* An array of used_part structures, indexed by variable uid. */ + +static used_part_t *used_portions; + +/* Given a variable uid, UID, get or create the entry in the used portions + table for the variable. 
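   The entry records, as a conservative bit range [minused, maxused), how
   much of the variable the current function touches.  As an illustrative
   example (hypothetical names, 32-bit ints assumed): if the only access
   to a two-field struct p is p.b, find_used_portions leaves the entry
   with minused == 32 and maxused == 64, so create_overlap_variables_for
   will only create a subvariable for the second field.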
*/ + +static used_part_t +get_or_create_used_part_for (size_t uid) +{ + used_part_t up; + if (used_portions[uid] == NULL) + { + up = xcalloc (1, sizeof (struct used_part)); + up->minused = INT_MAX; + up->maxused = 0; + } + else + up = used_portions[uid]; + return up; +} + + + +/* Given an aggregate VAR, create the subvariables that represent its + fields. */ + +static void +create_overlap_variables_for (tree var) +{ + VEC(fieldoff_t) *fieldstack = NULL; + used_part_t up; + size_t uid = var_ann (var)->uid; + + if (used_portions[uid] == NULL) + return; + + push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0); + if (VEC_length (fieldoff_t, fieldstack) != 0) + { + subvar_t *subvars; + fieldoff_t fo; + bool notokay = false; + int i; + + /* Not all fields have DECL_SIZE set, and those that don't, we don't + know their size, and thus, can't handle. + The same is true of fields with DECL_SIZE that is not an integer + constant (such as variable sized fields). + Fields with offsets which are not constant will have an offset < 0 + We *could* handle fields that are constant sized arrays, but + currently don't. Doing so would require some extra changes to + tree-ssa-operands.c. */ + + for (i = 0; VEC_iterate (fieldoff_t, fieldstack, i, fo); i++) + { + if (!DECL_SIZE (fo->field) + || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST + || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE + || fo->offset < 0) + { + notokay = true; + break; + } + } + /* Cleanup after ourselves if we can't create overlap variables. */ + if (notokay) + { + while (VEC_length (fieldoff_t, fieldstack) != 0) + { + fo = VEC_pop (fieldoff_t, fieldstack); + free (fo); + } + VEC_free (fieldoff_t, fieldstack); + return; + } + /* Otherwise, create the variables. */ + subvars = lookup_subvars_for_var (var); + up = used_portions[uid]; + + while (VEC_length (fieldoff_t, fieldstack) != 0) + { + subvar_t sv = ggc_alloc (sizeof (struct subvar)); + HOST_WIDE_INT fosize; + var_ann_t ann; + + fo = VEC_pop (fieldoff_t, fieldstack); + fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field)); + + if ((fo->offset <= up->minused + && fo->offset + fosize <= up->minused) + || fo->offset >= up->maxused) + { + free (fo); + continue; + } + + sv->offset = fo->offset; + sv->size = fosize; + sv->next = *subvars; + sv->var = create_tmp_var_raw (TREE_TYPE (fo->field), "SFT"); + if (dump_file) + { + fprintf (dump_file, "structure field tag %s created for var %s", + get_name (sv->var), get_name (var)); + fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC, + sv->offset); + fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC, + sv->size); + fprintf (dump_file, "\n"); + + } + + /* We need to copy the various flags from var to sv->var, so that + they are is_global_var iff the original variable was. */ + + DECL_EXTERNAL (sv->var) = DECL_EXTERNAL (var); + TREE_PUBLIC (sv->var) = TREE_PUBLIC (var); + TREE_STATIC (sv->var) = TREE_STATIC (var); + TREE_READONLY (sv->var) = TREE_READONLY (var); + + /* Like other memory tags, these need to be marked addressable to + keep is_gimple_reg from thinking they are real. */ + TREE_ADDRESSABLE (sv->var) = 1; + + DECL_CONTEXT (sv->var) = DECL_CONTEXT (var); + + ann = get_var_ann (sv->var); + ann->mem_tag_kind = STRUCT_FIELD; + ann->type_mem_tag = NULL; + add_referenced_tmp_var (sv->var); + + *subvars = sv; + free (fo); + } + } + + VEC_free (fieldoff_t, fieldstack); +} + + +/* Find the conservative answer to the question of what portions of what + structures are used by this statement. 
We assume that if we have a + component ref with a known size + offset, that we only need that part + of the structure. For unknown cases, or cases where we do something + to the whole structure, we assume we need to create fields for the + entire structure. */ + +static tree +find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) +{ + switch (TREE_CODE (*tp)) + { + case COMPONENT_REF: + { + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; + tree offset; + enum machine_mode mode; + int unsignedp; + int volatilep; + tree ref; + ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode, + &unsignedp, &volatilep, false); + if (DECL_P (ref) && offset == NULL && bitsize != -1) + { + size_t uid = var_ann (ref)->uid; + used_part_t up; + + up = get_or_create_used_part_for (uid); + + if (bitpos <= up->minused) + up->minused = bitpos; + if ((bitpos + bitsize >= up->maxused)) + up->maxused = bitpos + bitsize; + + used_portions[uid] = up; + + *walk_subtrees = 0; + return NULL_TREE; + } + else if (DECL_P (ref)) + { + if (DECL_SIZE (ref) + && var_can_have_subvars (ref) + && TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST) + { + used_part_t up; + size_t uid = var_ann (ref)->uid; + + up = get_or_create_used_part_for (uid); + + up->minused = 0; + up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref)); + + used_portions[uid] = up; + + *walk_subtrees = 0; + return NULL_TREE; + } + } + } + break; + case VAR_DECL: + case PARM_DECL: + { + tree var = *tp; + if (DECL_SIZE (var) + && var_can_have_subvars (var) + && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST) + { + used_part_t up; + size_t uid = var_ann (var)->uid; + + up = get_or_create_used_part_for (uid); + + up->minused = 0; + up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var)); + + used_portions[uid] = up; + *walk_subtrees = 0; + return NULL_TREE; + } + } + break; + + default: + break; + + } + return NULL_TREE; +} + +/* We are about to create some new referenced variables, and we need the + before size. */ + +static size_t old_referenced_vars; + + +/* Create structure field variables for structures used in this function. */ + +static void +create_structure_vars (void) +{ + basic_block bb; + size_t i; + + old_referenced_vars = num_referenced_vars; + used_portions = xcalloc (num_referenced_vars, sizeof (used_part_t)); + + FOR_EACH_BB (bb) + { + block_stmt_iterator bsi; + for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) + { + walk_tree_without_duplicates (bsi_stmt_ptr (bsi), + find_used_portions, + NULL); + } + } + for (i = 0; i < old_referenced_vars; i++) + { + tree var = referenced_var (i); + /* The C++ FE creates vars without DECL_SIZE set, for some reason. 
*/ + if (var + && DECL_SIZE (var) + && var_can_have_subvars (var) + && var_ann (var)->mem_tag_kind == NOT_A_TAG + && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST) + create_overlap_variables_for (var); + } + for (i = 0; i < old_referenced_vars; i++) + free (used_portions[i]); + + free (used_portions); +} + +static bool +gate_structure_vars (void) +{ + return flag_tree_salias != 0; +} + +struct tree_opt_pass pass_create_structure_vars = +{ + "salias", /* name */ + gate_structure_vars, /* gate */ + create_structure_vars, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + PROP_cfg, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func, /* todo_flags_finish */ + 0 /* letter */ +}; + diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c index 47b4f1b930f..8db74e1fadb 100644 --- a/gcc/tree-ssa-operands.c +++ b/gcc/tree-ssa-operands.c @@ -683,8 +683,12 @@ finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p, if (num == 0) return NULL; - /* There should only be a single V_MUST_DEF per assignment. */ - gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1); + /* In the presence of subvars, there may be more than one V_MUST_DEF per + statement (one for each subvar). It is a bit expensive to verify that + all must-defs in a statement belong to subvars if there is more than one + MUST-def, so we don't do it. Suffice to say, if you reach here without + having subvars, and have num >1, you have hit a bug. */ + old_ops = *old_ops_p; @@ -907,7 +911,6 @@ build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops, lhs = TREE_OPERAND (lhs, 0); if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF - && TREE_CODE (lhs) != COMPONENT_REF && TREE_CODE (lhs) != BIT_FIELD_REF && TREE_CODE (lhs) != REALPART_EXPR && TREE_CODE (lhs) != IMAGPART_EXPR) @@ -1021,6 +1024,49 @@ get_stmt_operands (tree stmt) } +/* Return true if OFFSET and SIZE define a range that overlaps with some + portion of the range of SV, a subvar. If there was an exact overlap, + *EXACT will be set to true upon return. */ + +static bool +overlap_subvar (HOST_WIDE_INT offset, HOST_WIDE_INT size, + subvar_t sv, bool *exact) +{ + /* There are three possible cases of overlap. + 1. We can have an exact overlap, like so: + |offset, offset + size | + |sv->offset, sv->offset + sv->size | + + 2. We can have offset starting after sv->offset, like so: + + |offset, offset + size | + |sv->offset, sv->offset + sv->size | + + 3. We can have offset starting before sv->offset, like so: + + |offset, offset + size | + |sv->offset, sv->offset + sv->size| + */ + + if (exact) + *exact = false; + if (offset == sv->offset && size == sv->size) + { + if (exact) + *exact = true; + return true; + } + else if (offset >= sv->offset && offset < (sv->offset + sv->size)) + { + return true; + } + else if (offset < sv->offset && (offset + size > sv->offset)) + { + return true; + } + return false; + +} /* Recursively scan the expression pointed by EXPR_P in statement referred to by INFO. FLAGS is one of the OPF_* constants modifying how to interpret the operands found. */ @@ -1068,11 +1114,25 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case PARM_DECL: case RESULT_DECL: case CONST_DECL: - /* If we found a variable, add it to DEFS or USES depending - on the operand flags. 
*/ - add_stmt_operand (expr_p, s_ann, flags); - return; - + { + subvar_t svars; + + /* Add the subvars for a variable if it has subvars, to DEFS or USES. + Otherwise, add the variable itself. + Whether it goes to USES or DEFS depends on the operand flags. */ + if (var_can_have_subvars (expr) + && (svars = get_subvars_for_var (expr))) + { + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + add_stmt_operand (&sv->var, s_ann, flags); + } + else + { + add_stmt_operand (expr_p, s_ann, flags); + } + return; + } case MISALIGNED_INDIRECT_REF: get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); /* fall through */ @@ -1104,30 +1164,39 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case COMPONENT_REF: case REALPART_EXPR: case IMAGPART_EXPR: - /* Similarly to arrays, references to compound variables (complex - types and structures/unions) are globbed. - - FIXME: This means that - - a.x = 6; - a.y = 7; - foo (a.x, a.y); - - will not be constant propagated because the two partial - definitions to 'a' will kill each other. Note that SRA may be - able to fix this problem if 'a' can be scalarized. */ - - /* If the LHS of the compound reference is not a regular variable, - recurse to keep looking for more operands in the subexpression. */ - if (SSA_VAR_P (TREE_OPERAND (expr, 0))) - add_stmt_operand (expr_p, s_ann, flags); - else - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); - - if (code == COMPONENT_REF) - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); - return; + { + tree ref; + HOST_WIDE_INT offset, size; + /* This component ref becomes an access to all of the subvariables + it can touch, if we can determine that, but *NOT* the real one. + If we can't determine which fields we could touch, the recursion + will eventually get to a variable and add *all* of its subvars, or + whatever is the minimum correct subset. */ + ref = okay_component_ref_for_subvars (expr, &offset, &size); + if (ref) + { + subvar_t svars = get_subvars_for_var (ref); + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + { + bool exact; + if (overlap_subvar (offset, size, sv, &exact)) + { + if (exact) + flags &= ~opf_kill_def; + add_stmt_operand (&sv->var, s_ann, flags); + } + } + } + else + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), + flags & ~opf_kill_def); + + if (code == COMPONENT_REF) + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); + return; + } case WITH_SIZE_EXPR: /* WITH_SIZE_EXPR is a pass-through reference to its first argument, and an rvalue reference to its second argument. */ @@ -1158,7 +1227,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) op = TREE_OPERAND (expr, 0); if (TREE_CODE (op) == ARRAY_REF || TREE_CODE (op) == ARRAY_RANGE_REF - || TREE_CODE (op) == COMPONENT_REF || TREE_CODE (op) == REALPART_EXPR || TREE_CODE (op) == IMAGPART_EXPR) subflags = opf_is_def; @@ -1554,9 +1622,10 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) { if (flags & opf_kill_def) { - /* Only regular variables may get a V_MUST_DEF - operand. */ - gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG); + /* Only regular variables or struct fields may get a + V_MUST_DEF operand. */ + gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG + || v_ann->mem_tag_kind == STRUCT_FIELD); /* V_MUST_DEF for non-aliased, non-GIMPLE register variable definitions. */ append_v_must_def (var); @@ -1615,26 +1684,60 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) } } - + /* Record that VAR had its address taken in the statement with annotations S_ANN. 
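   With structure field tags, only the sub-variables an address expression
   can actually touch are marked.  Illustratively (hypothetical names):
   for &p.b, where p has sub-variables, only the SFT overlapping field b
   has its address recorded as taken; for &p itself, p and all of its
   SFTs are marked.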
*/ static void note_addressable (tree var, stmt_ann_t s_ann) { + tree ref; + subvar_t svars; + HOST_WIDE_INT offset; + HOST_WIDE_INT size; + if (!s_ann) return; + + /* If this is a COMPONENT_REF, and we know exactly what it touches, we only + take the address of the subvariables it will touch. + Otherwise, we take the address of all the subvariables, plus the real + ones. */ + if (var && TREE_CODE (var) == COMPONENT_REF + && (ref = okay_component_ref_for_subvars (var, &offset, &size))) + { + subvar_t sv; + svars = get_subvars_for_var (ref); + + if (s_ann->addresses_taken == NULL) + s_ann->addresses_taken = BITMAP_GGC_ALLOC (); + + for (sv = svars; sv; sv = sv->next) + { + if (overlap_subvar (offset, size, sv, NULL)) + bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid); + } + return; + } + var = get_base_address (var); if (var && SSA_VAR_P (var)) { if (s_ann->addresses_taken == NULL) - s_ann->addresses_taken = BITMAP_GGC_ALLOC (); + s_ann->addresses_taken = BITMAP_GGC_ALLOC (); + bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid); + if (var_can_have_subvars (var) + && (svars = get_subvars_for_var (var))) + { + subvar_t sv; + for (sv = svars; sv; sv = sv->next) + bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid); + } } } - /* Add clobbering definitions for .GLOBAL_VAR or for each of the call clobbered variables in the function. */ diff --git a/gcc/tree-vect-analyze.c b/gcc/tree-vect-analyze.c index 29622b2fcf7..2595e497723 100644 --- a/gcc/tree-vect-analyze.c +++ b/gcc/tree-vect-analyze.c @@ -70,7 +70,7 @@ static bool vect_base_addr_differ_p (struct data_reference *, struct data_reference *drb, bool *); static tree vect_object_analysis (tree, tree, bool, tree, struct data_reference **, tree *, tree *, - tree *, bool *, tree *); + tree *, bool *, tree *, subvar_t *); static tree vect_address_analysis (tree, tree, bool, tree, struct data_reference *, tree *, tree *, tree *, bool *); @@ -1377,6 +1377,7 @@ vect_address_analysis (tree expr, tree stmt, bool is_read, tree vectype, tree oprnd0, oprnd1, base_address, offset_expr, base_addr0, base_addr1; tree address_offset = ssize_int (0), address_misalign = ssize_int (0); tree dummy; + subvar_t dummy2; switch (TREE_CODE (expr)) { @@ -1426,9 +1427,10 @@ vect_address_analysis (tree expr, tree stmt, bool is_read, tree vectype, return base_addr0 ? base_addr0 : base_addr1; case ADDR_EXPR: - base_address = vect_object_analysis (TREE_OPERAND (expr, 0), stmt, is_read, - vectype, &dr, offset, misalign, step, - base_aligned, &dummy); + base_address = vect_object_analysis (TREE_OPERAND (expr, 0), stmt, + is_read, vectype, &dr, offset, + misalign, step, base_aligned, + &dummy, &dummy2); return base_address; case SSA_NAME: @@ -1507,6 +1509,7 @@ vect_address_analysis (tree expr, tree stmt, bool is_read, tree vectype, STEP - evolution of the DR_REF in the loop BASE_ALIGNED - indicates if BASE is aligned MEMTAG - memory tag for aliasing purposes + SUBVAR - Sub-variables of the variable If something unexpected is encountered (an unsupported form of data-ref), then NULL_TREE is returned. 
*/ @@ -1515,7 +1518,8 @@ static tree vect_object_analysis (tree memref, tree stmt, bool is_read, tree vectype, struct data_reference **dr, tree *offset, tree *misalign, tree *step, - bool *base_aligned, tree *memtag) + bool *base_aligned, tree *memtag, + subvar_t *subvars) { tree base = NULL_TREE, base_address = NULL_TREE; tree object_offset = ssize_int (0), object_misalign = ssize_int (0); @@ -1611,6 +1615,8 @@ vect_object_analysis (tree memref, tree stmt, bool is_read, us to object. */ DR_BASE_NAME ((*dr)) = memref; + if (SSA_VAR_P (memref) && var_can_have_subvars (memref)) + *subvars = get_subvars_for_var (memref); base_address = build_fold_addr_expr (memref); *memtag = memref; } @@ -1698,6 +1704,9 @@ vect_object_analysis (tree memref, tree stmt, bool is_read, /* MEMREF cannot be analyzed. */ return NULL_TREE; + if (SSA_VAR_P (*memtag) && var_can_have_subvars (*memtag)) + *subvars = get_subvars_for_var (*memtag); + /* Part 2: Combine the results of object and address analysis to calculate INITIAL_OFFSET, STEP and misalignment info. */ *offset = size_binop (PLUS_EXPR, object_offset, address_offset); @@ -1780,6 +1789,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo) tree scalar_type, vectype; tree base, offset, misalign, step, tag; bool base_aligned; + subvar_t subvars; /* Assumption: there exists a data-ref in stmt, if and only if it has vuses/vdefs. */ @@ -1843,7 +1853,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo) dr = NULL; base = vect_object_analysis (memref, stmt, is_read, vectype, &dr, &offset, &misalign, &step, - &base_aligned, &tag); + &base_aligned, &tag, &subvars); if (!base) { if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS, @@ -1860,6 +1870,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo) STMT_VINFO_VECT_MISALIGNMENT (stmt_info) = misalign; STMT_VINFO_VECT_BASE_ALIGNED_P (stmt_info) = base_aligned; STMT_VINFO_MEMTAG (stmt_info) = tag; + STMT_VINFO_SUBVARS (stmt_info) = subvars; STMT_VINFO_VECTYPE (stmt_info) = vectype; VARRAY_PUSH_GENERIC_PTR (*datarefs, dr); STMT_VINFO_DATA_REF (stmt_info) = dr; diff --git a/gcc/tree-vect-transform.c b/gcc/tree-vect-transform.c index 6cd6a1295dd..1a82f288cb2 100644 --- a/gcc/tree-vect-transform.c +++ b/gcc/tree-vect-transform.c @@ -353,12 +353,14 @@ vect_create_data_ref_ptr (tree stmt, block_stmt_iterator *bsi, tree offset, tag = STMT_VINFO_MEMTAG (stmt_info); gcc_assert (tag); get_var_ann (vect_ptr)->type_mem_tag = tag; - + get_var_ann (vect_ptr)->subvars = STMT_VINFO_SUBVARS (stmt_info); + /* Mark for renaming all aliased variables (i.e, the may-aliases of the type-mem-tag). */ nvuses = NUM_VUSES (vuses); nv_may_defs = NUM_V_MAY_DEFS (v_may_defs); nv_must_defs = NUM_V_MUST_DEFS (v_must_defs); + for (i = 0; i < nvuses; i++) { tree use = VUSE_OP (vuses, i); diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h index f974c708786..b761f4d0ef3 100644 --- a/gcc/tree-vectorizer.h +++ b/gcc/tree-vectorizer.h @@ -172,6 +172,7 @@ typedef struct _stmt_vec_info { /* Aliasing information. */ tree memtag; + subvar_t subvars; /** The following fields are used to store the information about data-reference. 
{base_address + initial_offset} is the first location @@ -213,6 +214,7 @@ typedef struct _stmt_vec_info { #define STMT_VINFO_VEC_STMT(S) (S)->vectorized_stmt #define STMT_VINFO_DATA_REF(S) (S)->data_ref_info #define STMT_VINFO_MEMTAG(S) (S)->memtag +#define STMT_VINFO_SUBVARS(S) (S)->subvars #define STMT_VINFO_VECT_DR_BASE_ADDRESS(S)(S)->base_address #define STMT_VINFO_VECT_INIT_OFFSET(S) (S)->initial_offset #define STMT_VINFO_VECT_STEP(S) (S)->step