IPA C++ refactoring 1/N

* cgraph.h (symtab_node):
  (void register_symbol (void)): created from symtab_register_node
  (void remove (void)): created from symtab_remove_node
  (void dump (FILE *f)): created from dump_symtab_node
  (void DEBUG_FUNCTION debug (void)): created from debug_symtab_node
  (void DEBUG_FUNCTION verify (void)): created from verify_symtab_node
  (struct ipa_ref *add_reference (symtab_node *referred_node,
    enum ipa_ref_use use_type)): created from add_reference 
  (struct ipa_ref *add_reference (symtab_node *referred_node,
    enum ipa_ref_use use_type, gimple stmt)): created from add_reference
  (struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
    gimple stmt)): created from maybe_add_reference
  (bool semantically_equivalent_p (symtab_node *target)): created from
    symtab_semantically_equivalent_p
  (void remove_from_same_comdat_group (void)): created from
    remove_from_same_comdat_group
  (void add_to_same_comdat_group (symtab_node *old_node)): created from
    symtab_add_to_same_comdat_group
  (void dissolve_same_comdat_group_list (void)): created from
    symtab_dissolve_same_comdat_group_list
  (bool used_from_object_file_p (void)): created from symtab_used_from_object_file_p
  (symtab_node *ultimate_alias_target (enum availability *avail = NULL)):
    created from symtab_alias_ultimate_target
  (inline symtab_node *next_defined_symbol (void)): created from
    symtab_next_defined_symbol
  (bool resolve_alias (symtab_node *target)): created from
    symtab_resolve_alias
  (bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
    void *data, bool include_overwrite)): created from symtab_for_node_and_aliases
  (symtab_node *noninterposable_alias (void)): created from symtab_nonoverwritable_alias
  (inline symtab_node *get_alias_target (void)): created from symtab_alias_target
  (void set_section (const char *section)): created from set_section_1 
  (enum availability get_availability (void)): created from symtab_node_availability
  (void make_decl_local (void)): created from symtab_make_decl_local
  (bool real_symbol_p (void)): created from symtab_read_node
  (bool can_be_discarded_p (void)): created from symtab_can_be_discarded
  (inline bool comdat_local_p (void)): created from symtab_comdat_local_p
  (inline bool in_same_comdat_group_p (symtab_node *target)): created from
    symtab_in_same_comdat_p
  (bool address_taken_from_non_vtable_p (void)): created from
    address_taken_from_non_vtable_p
  (static inline symtab_node *get (const_tree decl)): created from symtab_get_node
  (static void dump_table (FILE *)): created from dump_symtab
  (static inline DEBUG_FUNCTION void debug_symtab (void)): created from debug_symtab
  (static DEBUG_FUNCTION void verify_symtab_nodes (void)): created from verify_symtab
  (static bool used_from_object_file_p_worker (symtab_node *node)): created from
    symtab_used_from_object_file_p 
  (void dump_base (FILE *)): created from dump_symtab_base
  (bool DEBUG_FUNCTION verify_base (void)): created from verify_symtab_base
  (void unregister (void)): created from symtab_unregister_node
  (struct symbol_priority_map *priority_info (void)): created from symtab_priority_info
  (static bool set_implicit_section (symtab_node *n, void *)): created from set_implicit_section
  (static bool noninterposable_alias (symtab_node *node, void *data)): created from
    symtab_nonoverwritable_alias_1
  * cgraph.h (cgraph_node):
  (bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL)):
    created from cgraph_remove_node_and_inline_clones
  (void record_stmt_references (gimple stmt)): created from ipa_record_stmt_references
  (void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
    bool update_speculative = true)): created from cgraph_set_call_stmt_including_clones
  (cgraph_node *function_symbol (enum availability *avail = NULL)):
    created from cgraph_function_node
  (cgraph_node *create_clone (tree decl, gcov_type count, int freq, bool update_original,
    vec<cgraph_edge *> redirect_callers, bool call_duplication_hook,
    struct cgraph_node *new_inlined_to, bitmap args_to_skip)):
    created from cgraph_create_clone 
  (cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
    vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, const char * suffix)):
    created from cgraph_create_virtual_clone
  (cgraph_node *find_replacement (void)): created from cgraph_find_replacement_node
  (cgraph_node *create_version_clone (tree new_decl, vec<cgraph_edge *> redirect_callers,
    bitmap bbs_to_copy)): created from cgraph_copy_node_for_versioning
  (cgraph_node *create_version_clone_with_body (vec<cgraph_edge *> redirect_callers,
    vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, bool skip_return,
    bitmap bbs_to_copy, basic_block new_entry_block, const char *clone_name)):
    created from cgraph_function_version_info
  (struct cgraph_function_version_info *insert_new_function_version (void)):
    created from insert_new_cgraph_node_version
  (struct cgraph_function_version_info *function_version (void)): created from
    get_cgraph_node_version
  (void analyze (void)): created from analyze_function
  (cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
    HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value, tree virtual_offset,
    tree real_alias)): created from cgraph_add_thunk
  (inline cgraph_node *get_alias_target (void)): created from cgraph_alias_target
  (cgraph_node *ultimate_alias_target (availability *availability = NULL)):
    created from cgraph_function_or_thunk_node
  (bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)):
    created from expand_thunk
  (void reset (void)): created from cgraph_reset_node
  (void create_wrapper (cgraph_node *target)): created from cgraph_make_wrapper
  (void DEBUG_FUNCTION verify_node (void)): created from verify_cgraph_node
  (void remove (void)): created from cgraph_remove_node
  (void dump (FILE *f)): created from dump_cgraph_node
  (void DEBUG_FUNCTION debug (void)): created from debug_cgraph_node
  (bool get_body (void)): created from cgraph_get_body
  (void release_body (void)): created from cgraph_release_function_body
  (void unnest (void)): created from cgraph_unnest_node
  (void make_local (void)): created from cgraph_make_node_local
  (void mark_address_taken (void)): created from cgraph_mark_address_taken_node
  (struct cgraph_edge *create_edge (cgraph_node *callee, gimple call_stmt,
    gcov_type count, int freq)): created from cgraph_create_edge
  (struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
    gcov_type count, int freq)): created from cgraph_create_indirect_edge
  (void create_edge_including_clones (struct cgraph_node *callee, gimple old_stmt,
    gimple stmt, gcov_type count, int freq, cgraph_inline_failed_t reason)):
    created from cgraph_create_edge_including_clones
  (cgraph_edge *get_edge (gimple call_stmt)): created from cgraph_edge
  (vec<cgraph_edge *> collect_callers (void)): created from collect_callers_of_node
  (void remove_callers (void)): created from cgraph_node_remove_callers
  (void remove_callees (void)): created from cgraph_node_remove_callees
  (enum availability get_availability (void)): created from cgraph_function_body_availability
  (void set_nothrow_flag (bool nothrow)): created from cgraph_set_nothrow_flag
  (void set_const_flag (bool readonly, bool looping)): created from cgraph_set_const_flag
  (void set_pure_flag (bool pure, bool looping)): created from cgraph_set_pure_flag
  (void call_duplication_hooks (cgraph_node *node2)): created from
    cgraph_call_node_duplication_hooks
  (bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *, void *),
    void *data, bool include_overwritable)): created from cgraph_for_node_and_aliases
  (bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node, void *data),
    void *data, bool include_overwritable)): created from cgraph_for_node_thunks_and_aliases
  (void call_function_insertion_hooks (void)):
    created from cgraph_call_function_insertion_hooks
  (inline void mark_force_output (void)): created from cgraph_mark_force_output_node
  (bool local_p (void)): created from cgraph_local_node
  (bool can_be_local_p (void)): created from cgraph_node_can_be_local_p
  (bool cannot_return_p (void)): created from cgraph_node_cannot_return
  (bool only_called_directly_p (void)): created from cgraph_only_called_directly_p
  (inline bool only_called_directly_or_aliased_p (void)):
    created from cgraph_only_called_directly_or_aliased_p
  (bool will_be_removed_from_program_if_no_direct_calls_p (void)):
    created from cgraph_will_be_removed_from_program_if_no_direct_calls
  (bool can_remove_if_no_direct_calls_and_refs_p (void)):
    created from cgraph_can_remove_if_no_direct_calls_and_refs_p
  (bool can_remove_if_no_direct_calls_p (void)):
    created from cgraph_can_remove_if_no_direct_calls_p
  (inline bool has_gimple_body_p (void)):
    created from cgraph_function_with_gimple_body_p
  (bool optimize_for_size_p (void)): created from cgraph_optimize_for_size_p
  (static void dump_cgraph (FILE *f)): created from dump_cgraph
  (static inline void debug_cgraph (void)): created from debug_cgraph
  (static void record_function_versions (tree decl1, tree decl2)):
    created from record_function_versions
  (static void delete_function_version (tree decl)):
    created from delete_function_version
  (static void add_new_function (tree fndecl, bool lowered)):
    created from cgraph_add_new_function
  (static inline cgraph_node *get (const_tree decl)): created from cgraph_get_node
  (static cgraph_node * create (tree decl)): created from cgraph_create_node
  (static cgraph_node * create_empty (void)): created from cgraph_create_empty_node
  (static cgraph_node * get_create (tree)): created from cgraph_get_create_node
  (static cgraph_node *get_for_asmname (tree asmname)):
    created from cgraph_node_for_asm
  (static cgraph_node * create_same_body_alias (tree alias, tree decl)):
    created from cgraph_same_body_alias 
  (static bool used_from_object_file_p_worker (cgraph_node *node,
    void *)): new function
  (static bool non_local_p (cgraph_node *node, void *)):
    created from cgraph_non_local_node_p_1
  (static void DEBUG_FUNCTION verify_cgraph_nodes (void)):
    created from verify_cgraph
  (static bool make_local (cgraph_node *node, void *)):
    created from cgraph_make_node_local
  (static cgraph_node *create_alias (tree alias, tree target)):
    created from cgraph_create_function_alias
  (static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
    gimple call_stmt, gcov_type count, int freq, bool indir_unknown_callee)):
    created from cgraph_create_edge_1
  * cgraph.h (varpool_node):
  (void remove (void)): created from varpool_remove_node
  (void dump (FILE *f)): created from dump_varpool_node

From-SVN: r212982
This commit is contained in:
Martin Liska 2014-07-24 14:07:13 +02:00 committed by Martin Liska
parent 785129aa16
commit d52f529517
80 changed files with 2890 additions and 2427 deletions

View File

@ -1,3 +1,177 @@
2014-07-24 Martin Liska <mliska@suse.cz>
* cgraph.h (symtab_node):
(void register_symbol (void)): created from symtab_register_node
(void remove (void)): created from symtab_remove_node
(void dump (FILE *f)): created from dump_symtab_node
(void DEBUG_FUNCTION debug (void)): created from debug_symtab_node
(void DEBUG_FUNCTION verify (void)): created from verify_symtab_node
(struct ipa_ref *add_reference (symtab_node *referred_node,
enum ipa_ref_use use_type)): created from add_reference
(struct ipa_ref *add_reference (symtab_node *referred_node,
enum ipa_ref_use use_type, gimple stmt)): created from add_reference
(struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
gimple stmt)): created from maybe_add_reference
(bool semantically_equivalent_p (symtab_node *target)): created from
symtab_semantically_equivalent_p
(void remove_from_same_comdat_group (void)): created from
remove_from_same_comdat_group
(void add_to_same_comdat_group (symtab_node *old_node)): created from
symtab_add_to_same_comdat_group
(void dissolve_same_comdat_group_list (void)): created from
symtab_dissolve_same_comdat_group_list
(bool used_from_object_file_p (void)): created from symtab_used_from_object_file_p
(symtab_node *ultimate_alias_target (enum availability *avail = NULL)):
created from symtab_alias_ultimate_target
(inline symtab_node *next_defined_symbol (void)): created from
symtab_next_defined_symbol
(bool resolve_alias (symtab_node *target)): created from
symtab_resolve_alias
(bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
void *data, bool include_overwrite)): created from symtab_for_node_and_aliases
(symtab_node *noninterposable_alias (void)): created from symtab_nonoverwritable_alias
(inline symtab_node *get_alias_target (void)): created from symtab_alias_target
(void set_section (const char *section)): created from set_section_1
(enum availability get_availability (void)): created from symtab_node_availability
(void make_decl_local (void)): created from symtab_make_decl_local
(bool real_symbol_p (void)): created from symtab_read_node
(bool can_be_discarded_p (void)): created from symtab_can_be_discarded
(inline bool comdat_local_p (void)): created from symtab_comdat_local_p
(inline bool in_same_comdat_group_p (symtab_node *target)): created from
symtab_in_same_comdat_p
(bool address_taken_from_non_vtable_p (void)): created from
address_taken_from_non_vtable_p
(static inline symtab_node *get (const_tree decl)): created from symtab_get_node
(static void dump_table (FILE *)): created from dump_symtab
(static inline DEBUG_FUNCTION void debug_symtab (void)): created from debug_symtab
(static DEBUG_FUNCTION void verify_symtab_nodes (void)): created from verify_symtab
(static bool used_from_object_file_p_worker (symtab_node *node)): created from
symtab_used_from_object_file_p
(void dump_base (FILE *)): created from dump_symtab_base
(bool DEBUG_FUNCTION verify_base (void)): created from verify_symtab_base
(void unregister (void)): created from symtab_unregister_node
(struct symbol_priority_map *priority_info (void)): created from symtab_priority_info
(static bool set_implicit_section (symtab_node *n, void *)): created from set_implicit_section
(static bool noninterposable_alias (symtab_node *node, void *data)): created from
symtab_nonoverwritable_alias_1
* cgraph.h (cgraph_node):
(bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL)):
created from cgraph_remove_node_and_inline_clones
(void record_stmt_references (gimple stmt)): created from ipa_record_stmt_references
(void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
bool update_speculative = true)): created from cgraph_set_call_stmt_including_clones
(cgraph_node *function_symbol (enum availability *avail = NULL)):
created from cgraph_function_node
(cgraph_node *create_clone (tree decl, gcov_type count, int freq, bool update_original,
vec<cgraph_edge *> redirect_callers, bool call_duplication_hook,
struct cgraph_node *new_inlined_to, bitmap args_to_skip)):
created from cgraph_create_clone
(cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, const char * suffix)):
created from cgraph_create_virtual_clone
(cgraph_node *find_replacement (void)): created from cgraph_find_replacement_node
(cgraph_node *create_version_clone (tree new_decl, vec<cgraph_edge *> redirect_callers,
bitmap bbs_to_copy)): created from cgraph_copy_node_for_versioning
(cgraph_node *create_version_clone_with_body (vec<cgraph_edge *> redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, bool skip_return,
bitmap bbs_to_copy, basic_block new_entry_block, const char *clone_name)):
created from cgraph_function_version_info
(struct cgraph_function_version_info *insert_new_function_version (void)):
created from insert_new_cgraph_node_version
(struct cgraph_function_version_info *function_version (void)): created from
get_cgraph_node_version
(void analyze (void)): created from analyze_function
(cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value, tree virtual_offset,
tree real_alias)): created from cgraph_add_thunk
(inline cgraph_node *get_alias_target (void)): created from cgraph_alias_target
(cgraph_node *ultimate_alias_target (availability *availability = NULL)):
created from cgraph_function_or_thunk_node
(bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)):
created from expand_thunk
(void reset (void)): created from cgraph_reset_node
(void create_wrapper (cgraph_node *target)): created from cgraph_make_wrapper
(void DEBUG_FUNCTION verify_node (void)): created from verify_cgraph_node
(void remove (void)): created from cgraph_remove_node
(void dump (FILE *f)): created from dump_cgraph_node
(void DEBUG_FUNCTION debug (void)): created from debug_cgraph_node
(bool get_body (void)): created from cgraph_get_body
(void release_body (void)): created from cgraph_release_function_body
(void unnest (void)): created from cgraph_unnest_node
(void make_local (void)): created from cgraph_make_node_local
(void mark_address_taken (void)): created from cgraph_mark_address_taken_node
(struct cgraph_edge *create_edge (cgraph_node *callee, gimple call_stmt,
gcov_type count, int freq)): created from cgraph_create_edge
(struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
gcov_type count, int freq)): created from cgraph_create_indirect_edge
(void create_edge_including_clones (struct cgraph_node *callee, gimple old_stmt,
gimple stmt, gcov_type count, int freq, cgraph_inline_failed_t reason)):
created from cgraph_create_edge_including_clones
(cgraph_edge *get_edge (gimple call_stmt)): created from cgraph_edge
(vec<cgraph_edge *> collect_callers (void)): created from collect_callers_of_node
(void remove_callers (void)): created from cgraph_node_remove_callers
(void remove_callees (void)): created from cgraph_node_remove_callees
(enum availability get_availability (void)): created from cgraph_function_body_availability
(void set_nothrow_flag (bool nothrow)): created from cgraph_set_nothrow_flag
(void set_const_flag (bool readonly, bool looping)): created from cgraph_set_const_flag
(void set_pure_flag (bool pure, bool looping)): created from cgraph_set_pure_flag
(void call_duplication_hooks (cgraph_node *node2)): created from
cgraph_call_node_duplication_hooks
(bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *, void *),
void *data, bool include_overwritable)): created from cgraph_for_node_and_aliases
(bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node, void *data),
void *data, bool include_overwritable)): created from cgraph_for_node_thunks_and_aliases
(void call_function_insertion_hooks (void)):
created from cgraph_call_function_insertion_hooks
(inline void mark_force_output (void)): created from cgraph_mark_force_output_node
(bool local_p (void)): created from cgraph_local_node
(bool can_be_local_p (void)): created from cgraph_node_can_be_local_p
(bool cannot_return_p (void)): created from cgraph_node_cannot_return
(bool only_called_directly_p (void)): created from cgraph_only_called_directly_p
(inline bool only_called_directly_or_aliased_p (void)):
created from cgraph_only_called_directly_or_aliased_p
(bool will_be_removed_from_program_if_no_direct_calls_p (void)):
created from cgraph_will_be_removed_from_program_if_no_direct_calls
(bool can_remove_if_no_direct_calls_and_refs_p (void)):
created from cgraph_can_remove_if_no_direct_calls_and_refs_p
(bool can_remove_if_no_direct_calls_p (void)):
created from cgraph_can_remove_if_no_direct_calls_p
(inline bool has_gimple_body_p (void)):
created from cgraph_function_with_gimple_body_p
(bool optimize_for_size_p (void)): created from cgraph_optimize_for_size_p
(static void dump_cgraph (FILE *f)): created from dump_cgraph
(static inline void debug_cgraph (void)): created from debug_cgraph
(static void record_function_versions (tree decl1, tree decl2)):
created from record_function_versions
(static void delete_function_version (tree decl)):
created from delete_function_version
(static void add_new_function (tree fndecl, bool lowered)):
created from cgraph_add_new_function
(static inline cgraph_node *get (const_tree decl)): created from cgraph_get_node
(static cgraph_node * create (tree decl)): created from cgraph_create_node
(static cgraph_node * create_empty (void)): created from cgraph_create_empty_node
(static cgraph_node * get_create (tree)): created from cgraph_get_create_node
(static cgraph_node *get_for_asmname (tree asmname)):
created from cgraph_node_for_asm
(static cgraph_node * create_same_body_alias (tree alias, tree decl)):
created from cgraph_same_body_alias
(static bool used_from_object_file_p_worker (cgraph_node *node,
void *)): new function
(static bool non_local_p (cgraph_node *node, void *)):
created from cgraph_non_local_node_p_1
(static void DEBUG_FUNCTION verify_cgraph_nodes (void)):
created from verify_cgraph
(static bool make_local (cgraph_node *node, void *)):
created from cgraph_make_node_local
(static cgraph_node *create_alias (tree alias, tree target)):
created from cgraph_create_function_alias
(static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
gimple call_stmt, gcov_type count, int freq, bool indir_unknown_callee)):
created from cgraph_create_edge_1
* cgraph.h (varpool_node):
(void remove (void)): created from varpool_remove_node
(void dump (FILE *f)): created from dump_varpool_node
2014-07-24 Richard Biener <rguenther@suse.de>
PR ipa/61823

View File

@ -1302,7 +1302,7 @@ asan_protect_global (tree decl)
to be an array of such vars, putting padding in there
breaks this assumption. */
|| (DECL_SECTION_NAME (decl) != NULL
&& !symtab_get_node (decl)->implicit_section)
&& !symtab_node::get (decl)->implicit_section)
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))

View File

@ -143,7 +143,7 @@ c_genericize (tree fndecl)
}
/* Dump all nested functions now. */
cgn = cgraph_get_create_node (fndecl);
cgn = cgraph_node::get_create (fndecl);
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
c_genericize (cgn->decl);
}

View File

@ -171,7 +171,7 @@ call_graph_add_fn (tree fndecl)
gcc_assert (cfun->decl == outer);
push_cfun (f);
cgraph_create_node (fndecl);
cgraph_node::create (fndecl);
pop_cfun_to (outer);
}

View File

@ -2586,9 +2586,9 @@ duplicate_decls (tree newdecl, tree olddecl)
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
struct symtab_node *snode = symtab_get_node (newdecl);
struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
symtab_remove_node (snode);
snode->remove ();
}
ggc_free (newdecl);
return true;
@ -8699,7 +8699,7 @@ finish_function (void)
This should be cleaned up later and this conditional removed. */
if (cgraph_global_info_ready)
{
cgraph_add_new_function (fndecl, false);
cgraph_node::add_new_function (fndecl, false);
return;
}
cgraph_finalize_function (fndecl, false);
@ -8709,7 +8709,7 @@ finish_function (void)
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. Handy, since the
C front end doesn't have such a list. */
(void) cgraph_get_create_node (fndecl);
(void) cgraph_node::get_create (fndecl);
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -79,9 +79,9 @@ record_reference (tree *tp, int *walk_subtrees, void *data)
decl = get_base_var (*tp);
if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_create_node (decl);
struct cgraph_node *node = cgraph_node::get_create (decl);
if (!ctx->only_vars)
cgraph_mark_address_taken_node (node);
node->mark_address_taken ();
ctx->varpool_node->add_reference (node, IPA_REF_ADDR);
}
@ -142,10 +142,10 @@ record_eh_tables (struct cgraph_node *node, struct function *fun)
if (DECL_FUNCTION_PERSONALITY (node->decl))
{
tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
struct cgraph_node *per_node = cgraph_get_create_node (per_decl);
struct cgraph_node *per_node = cgraph_node::get_create (per_decl);
node->add_reference (per_node, IPA_REF_ADDR);
cgraph_mark_address_taken_node (per_node);
per_node->mark_address_taken ();
}
i = fun->eh->region_tree;
@ -223,8 +223,8 @@ mark_address (gimple stmt, tree addr, tree, void *data)
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_create_node (addr);
cgraph_mark_address_taken_node (node);
struct cgraph_node *node = cgraph_node::get_create (addr);
node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (addr && TREE_CODE (addr) == VAR_DECL
@ -248,8 +248,8 @@ mark_load (gimple stmt, tree t, tree, void *data)
{
/* ??? This can happen on platforms with descriptors when these are
directly manipulated in the code. Pretend that it's an address. */
struct cgraph_node *node = cgraph_get_create_node (t);
cgraph_mark_address_taken_node (node);
struct cgraph_node *node = cgraph_node::get_create (t);
node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (t && TREE_CODE (t) == VAR_DECL
@ -278,11 +278,12 @@ mark_store (gimple stmt, tree t, tree, void *data)
return false;
}
/* Record all references from NODE that are taken in statement STMT. */
/* Record all references from cgraph_node that are taken in statement STMT. */
void
ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
cgraph_node::record_stmt_references (gimple stmt)
{
walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
walk_stmt_load_store_addr_ops (stmt, this, mark_load, mark_store,
mark_address);
}
@ -320,7 +321,7 @@ unsigned int
pass_build_cgraph_edges::execute (function *fun)
{
basic_block bb;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct pointer_set_t *visited_nodes = pointer_set_create ();
gimple_stmt_iterator gsi;
tree decl;
@ -344,37 +345,37 @@ pass_build_cgraph_edges::execute (function *fun)
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
cgraph_create_edge (node, cgraph_get_create_node (decl),
stmt, bb->count, freq);
node->create_edge (cgraph_node::get_create (decl),
stmt, bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
cgraph_create_indirect_edge (node, stmt,
gimple_call_flags (stmt),
bb->count, freq);
node->create_indirect_edge (stmt,
gimple_call_flags (stmt),
bb->count, freq);
}
ipa_record_stmt_references (node, stmt);
node->record_stmt_references (stmt);
if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
&& gimple_omp_parallel_child_fn (stmt))
{
tree fn = gimple_omp_parallel_child_fn (stmt);
node->add_reference (cgraph_get_create_node (fn),
node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
if (gimple_code (stmt) == GIMPLE_OMP_TASK)
{
tree fn = gimple_omp_task_child_fn (stmt);
if (fn)
node->add_reference (cgraph_get_create_node (fn),
node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
fn = gimple_omp_task_copy_fn (stmt);
if (fn)
node->add_reference (cgraph_get_create_node (fn),
node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
ipa_record_stmt_references (node, gsi_stmt (gsi));
node->record_stmt_references (gsi_stmt (gsi));
}
/* Look for initializers of constant variables and private statics. */
@ -422,10 +423,10 @@ unsigned int
rebuild_cgraph_edges (void)
{
basic_block bb;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
cgraph_node_remove_callees (node);
node->remove_callees ();
node->remove_all_references ();
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
@ -443,19 +444,19 @@ rebuild_cgraph_edges (void)
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
bb->count, freq);
node->create_edge (cgraph_node::get_create (decl), stmt,
bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
cgraph_create_indirect_edge (node, stmt,
gimple_call_flags (stmt),
bb->count, freq);
node->create_indirect_edge (stmt,
gimple_call_flags (stmt),
bb->count, freq);
}
ipa_record_stmt_references (node, stmt);
node->record_stmt_references (stmt);
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
ipa_record_stmt_references (node, gsi_stmt (gsi));
node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
gcc_assert (!node->global.inlined_to);
@ -470,7 +471,7 @@ void
cgraph_rebuild_references (void)
{
basic_block bb;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
struct ipa_ref *ref = NULL;
int i;
@ -487,9 +488,9 @@ cgraph_rebuild_references (void)
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
ipa_record_stmt_references (node, gsi_stmt (gsi));
node->record_stmt_references (gsi_stmt (gsi));
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
ipa_record_stmt_references (node, gsi_stmt (gsi));
node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
}
@ -564,8 +565,8 @@ public:
unsigned int
pass_remove_cgraph_callee_edges::execute (function *)
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
cgraph_node_remove_callees (node);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
node->remove_callees ();
node->remove_all_references ();
return 0;
}

View File

@ -128,21 +128,21 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
via cgraph_resolve_speculation and not here. */
&& !e->speculative)
{
struct cgraph_node *callee = cgraph_get_node (decl);
struct cgraph_node *callee = cgraph_node::get (decl);
gcc_checking_assert (callee);
new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
new_edge = n->create_edge (callee, call_stmt, count, freq);
}
else
{
new_edge = cgraph_create_indirect_edge (n, call_stmt,
e->indirect_info->ecf_flags,
count, freq);
new_edge = n->create_indirect_edge (call_stmt,
e->indirect_info->ecf_flags,
count, freq);
*new_edge->indirect_info = *e->indirect_info;
}
}
else
{
new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
new_edge = n->create_edge (e->callee, call_stmt, count, freq);
if (e->indirect_info)
{
new_edge->indirect_info
@ -304,7 +304,7 @@ static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
cgraph_node *new_thunk, *thunk_of;
thunk_of = cgraph_function_or_thunk_node (thunk->callees->callee);
thunk_of = thunk->callees->callee->ultimate_alias_target ();
if (thunk_of->thunk.thunk_p)
node = duplicate_thunk_for_node (thunk_of, node);
@ -341,7 +341,7 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
new_thunk = cgraph_create_node (new_decl);
new_thunk = cgraph_node::create (new_decl);
set_new_clone_decl_and_node_flags (new_thunk);
new_thunk->definition = true;
new_thunk->thunk = thunk->thunk;
@ -350,13 +350,13 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
new_thunk->clone.args_to_skip = node->clone.args_to_skip;
new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
struct cgraph_edge *e = cgraph_create_edge (new_thunk, node, NULL, 0,
CGRAPH_FREQ_BASE);
struct cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
CGRAPH_FREQ_BASE);
e->call_stmt_cannot_inline_p = true;
cgraph_call_edge_duplication_hooks (thunk->callees, e);
if (!expand_thunk (new_thunk, false, false))
if (!new_thunk->expand_thunk (false, false))
new_thunk->analyzed = true;
cgraph_call_node_duplication_hooks (thunk, new_thunk);
thunk->call_duplication_hooks (new_thunk);
return new_thunk;
}
@ -367,7 +367,7 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
void
redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
{
cgraph_node *orig_to = cgraph_function_or_thunk_node (e->callee);
cgraph_node *orig_to = e->callee->ultimate_alias_target ();
if (orig_to->thunk.thunk_p)
n = duplicate_thunk_for_node (orig_to, n);
@ -390,67 +390,67 @@ redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
will see this in node's global.inlined_to, when invoked. Can be NULL if the
node is not inlined. */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
bool update_original,
vec<cgraph_edge_p> redirect_callers,
bool call_duplication_hook,
struct cgraph_node *new_inlined_to,
bitmap args_to_skip)
cgraph_node *
cgraph_node::create_clone (tree decl, gcov_type gcov_count, int freq,
bool update_original,
vec<cgraph_edge *> redirect_callers,
bool call_duplication_hook,
struct cgraph_node *new_inlined_to,
bitmap args_to_skip)
{
struct cgraph_node *new_node = cgraph_create_empty_node ();
struct cgraph_node *new_node = cgraph_node::create_empty ();
struct cgraph_edge *e;
gcov_type count_scale;
unsigned i;
new_node->decl = decl;
symtab_register_node (new_node);
new_node->origin = n->origin;
new_node->lto_file_data = n->lto_file_data;
new_node->register_symbol ();
new_node->origin = origin;
new_node->lto_file_data = lto_file_data;
if (new_node->origin)
{
new_node->next_nested = new_node->origin->nested;
new_node->origin->nested = new_node;
}
new_node->analyzed = n->analyzed;
new_node->definition = n->definition;
new_node->local = n->local;
new_node->analyzed = analyzed;
new_node->definition = definition;
new_node->local = local;
new_node->externally_visible = false;
new_node->local.local = true;
new_node->global = n->global;
new_node->global = global;
new_node->global.inlined_to = new_inlined_to;
new_node->rtl = n->rtl;
new_node->rtl = rtl;
new_node->count = count;
new_node->frequency = n->frequency;
new_node->tp_first_run = n->tp_first_run;
new_node->frequency = frequency;
new_node->tp_first_run = tp_first_run;
new_node->clone.tree_map = NULL;
new_node->clone.args_to_skip = args_to_skip;
if (!args_to_skip)
new_node->clone.combined_args_to_skip = n->clone.combined_args_to_skip;
else if (n->clone.combined_args_to_skip)
new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
else if (clone.combined_args_to_skip)
{
new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
bitmap_ior (new_node->clone.combined_args_to_skip,
n->clone.combined_args_to_skip, args_to_skip);
clone.combined_args_to_skip, args_to_skip);
}
else
new_node->clone.combined_args_to_skip = args_to_skip;
if (n->count)
if (count)
{
if (new_node->count > n->count)
if (new_node->count > count)
count_scale = REG_BR_PROB_BASE;
else
count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
}
else
count_scale = 0;
if (update_original)
{
n->count -= count;
if (n->count < 0)
n->count = 0;
count -= gcov_count;
if (count < 0)
count = 0;
}
FOR_EACH_VEC_ELT (redirect_callers, i, e)
@ -464,23 +464,23 @@ cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
redirect_edge_duplicating_thunks (e, new_node);
}
for (e = n->callees;e; e=e->next_callee)
for (e = callees;e; e=e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
for (e = n->indirect_calls; e; e = e->next_callee)
for (e = indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
new_node->clone_references (n);
new_node->clone_references (this);
new_node->next_sibling_clone = n->clones;
if (n->clones)
n->clones->prev_sibling_clone = new_node;
n->clones = new_node;
new_node->clone_of = n;
new_node->next_sibling_clone = clones;
if (clones)
clones->prev_sibling_clone = new_node;
clones = new_node;
new_node->clone_of = this;
if (call_duplication_hook)
cgraph_call_node_duplication_hooks (n, new_node);
call_duplication_hooks (new_node);
return new_node;
}
@ -516,13 +516,11 @@ clone_function_name (tree decl, const char *suffix)
bitmap interface.
*/
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
vec<cgraph_edge_p> redirect_callers,
vec<ipa_replace_map_p, va_gc> *tree_map,
bitmap args_to_skip,
const char * suffix)
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map,
bitmap args_to_skip, const char * suffix)
{
tree old_decl = old_node->decl;
tree old_decl = decl;
struct cgraph_node *new_node = NULL;
tree new_decl;
size_t len, i;
@ -532,7 +530,7 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
if (!in_lto_p)
gcc_checking_assert (tree_versionable_function_p (old_decl));
gcc_assert (old_node->local.can_change_signature || !args_to_skip);
gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node */
if (!args_to_skip)
@ -560,9 +558,9 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
SET_DECL_RTL (new_decl, NULL);
new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
CGRAPH_FREQ_BASE, false,
redirect_callers, false, NULL, args_to_skip);
new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
redirect_callers, false, NULL, args_to_skip);
/* Update the properties.
Make clone visible only within this translation unit. Make sure
that is not weak also.
@ -581,26 +579,25 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
new_node->maybe_add_reference (map->new_tree, IPA_REF_ADDR, NULL);
if (old_node->ipa_transforms_to_apply.exists ())
if (ipa_transforms_to_apply.exists ())
new_node->ipa_transforms_to_apply
= old_node->ipa_transforms_to_apply.copy ();
cgraph_call_node_duplication_hooks (old_node, new_node);
= ipa_transforms_to_apply.copy ();
call_duplication_hooks (new_node);
return new_node;
}
/* NODE is being removed from symbol table; see if its entry can be replaced by
other inline clone. */
struct cgraph_node *
cgraph_find_replacement_node (struct cgraph_node *node)
/* callgraph node being removed from symbol table; see if its entry can be
replaced by other inline clone. */
cgraph_node *
cgraph_node::find_replacement (void)
{
struct cgraph_node *next_inline_clone, *replacement;
for (next_inline_clone = node->clones;
for (next_inline_clone = clones;
next_inline_clone
&& next_inline_clone->decl != node->decl;
&& next_inline_clone->decl != decl;
next_inline_clone = next_inline_clone->next_sibling_clone)
;
@ -620,32 +617,32 @@ cgraph_find_replacement_node (struct cgraph_node *node)
= next_inline_clone->prev_sibling_clone;
if (next_inline_clone->prev_sibling_clone)
{
gcc_assert (node->clones != next_inline_clone);
gcc_assert (clones != next_inline_clone);
next_inline_clone->prev_sibling_clone->next_sibling_clone
= next_inline_clone->next_sibling_clone;
}
else
{
gcc_assert (node->clones == next_inline_clone);
node->clones = next_inline_clone->next_sibling_clone;
gcc_assert (clones == next_inline_clone);
clones = next_inline_clone->next_sibling_clone;
}
new_clones = node->clones;
node->clones = NULL;
new_clones = clones;
clones = NULL;
/* Copy clone info. */
next_inline_clone->clone = node->clone;
next_inline_clone->clone = clone;
/* Now place it into clone tree at same level at NODE. */
next_inline_clone->clone_of = node->clone_of;
next_inline_clone->clone_of = clone_of;
next_inline_clone->prev_sibling_clone = NULL;
next_inline_clone->next_sibling_clone = NULL;
if (node->clone_of)
if (clone_of)
{
if (node->clone_of->clones)
node->clone_of->clones->prev_sibling_clone = next_inline_clone;
next_inline_clone->next_sibling_clone = node->clone_of->clones;
node->clone_of->clones = next_inline_clone;
if (clone_of->clones)
clone_of->clones->prev_sibling_clone = next_inline_clone;
next_inline_clone->next_sibling_clone = clone_of->clones;
clone_of->clones = next_inline_clone;
}
/* Merge the clone list. */
@ -657,7 +654,7 @@ cgraph_find_replacement_node (struct cgraph_node *node)
{
n = next_inline_clone->clones;
while (n->next_sibling_clone)
n = n->next_sibling_clone;
n = n->next_sibling_clone;
n->next_sibling_clone = new_clones;
new_clones->prev_sibling_clone = n;
}
@ -683,21 +680,20 @@ cgraph_find_replacement_node (struct cgraph_node *node)
call. */
void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
gimple old_stmt, gimple new_stmt,
bool update_speculative)
cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
bool update_speculative)
{
struct cgraph_node *node;
struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
struct cgraph_edge *edge = get_edge (old_stmt);
if (edge)
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
node = orig->clones;
node = clones;
if (node)
while (node != orig)
while (node != this)
{
struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
struct cgraph_edge *edge = node->get_edge (old_stmt);
if (edge)
{
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
@ -722,9 +718,9 @@ cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
node = node->next_sibling_clone;
else
{
while (node != orig && !node->next_sibling_clone)
while (node != this && !node->next_sibling_clone)
node = node->clone_of;
if (node != orig)
if (node != this)
node = node->next_sibling_clone;
}
}
@ -738,27 +734,26 @@ cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
frequencies of the clones. */
void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
struct cgraph_node *callee,
gimple old_stmt,
gimple stmt, gcov_type count,
int freq,
cgraph_inline_failed_t reason)
cgraph_node::create_edge_including_clones (struct cgraph_node *callee,
gimple old_stmt, gimple stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
if (!cgraph_edge (orig, stmt))
if (!get_edge (stmt))
{
edge = cgraph_create_edge (orig, callee, stmt, count, freq);
edge = create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
node = orig->clones;
node = clones;
if (node)
while (node != orig)
while (node != this)
{
struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
struct cgraph_edge *edge = node->get_edge (old_stmt);
/* It is possible that clones already contain the edge while
master didn't. Either we promoted indirect call into direct
@ -766,10 +761,9 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
master where edges has been removed. */
if (edge)
cgraph_set_call_stmt (edge, stmt);
else if (!cgraph_edge (node, stmt))
else if (! node->get_edge (stmt))
{
edge = cgraph_create_edge (node, callee, stmt, count,
freq);
edge = node->create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
@ -779,9 +773,9 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
node = node->next_sibling_clone;
else
{
while (node != orig && !node->next_sibling_clone)
while (node != this && !node->next_sibling_clone)
node = node->clone_of;
if (node != orig)
if (node != this)
node = node->next_sibling_clone;
}
}
@ -793,23 +787,23 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
tree. */
bool
cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
struct cgraph_edge *e, *next;
bool found = false;
if (node == forbidden_node)
if (this == forbidden_node)
{
cgraph_remove_edge (node->callers);
cgraph_remove_edge (callers);
return true;
}
for (e = node->callees; e; e = next)
for (e = callees; e; e = next)
{
next = e->next_callee;
if (!e->inline_failed)
found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
}
cgraph_remove_node (node);
remove ();
return found;
}
@ -835,9 +829,9 @@ update_call_expr (struct cgraph_node *new_version)
/* Create a new cgraph node which is the new version of
OLD_VERSION node. REDIRECT_CALLERS holds the callers
callgraph node. REDIRECT_CALLERS holds the callers
edges which should be redirected to point to
NEW_VERSION. ALL the callees edges of OLD_VERSION
NEW_VERSION. ALL the callees edges of the node
are cloned to the new version node. Return the new
version node.
@ -845,37 +839,34 @@ update_call_expr (struct cgraph_node *new_version)
was copied to prevent duplications of calls that are dead
in the clone. */
struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
tree new_decl,
vec<cgraph_edge_p> redirect_callers,
bitmap bbs_to_copy)
cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
vec<cgraph_edge *> redirect_callers,
bitmap bbs_to_copy)
{
struct cgraph_node *new_version;
struct cgraph_edge *e;
unsigned i;
gcc_assert (old_version);
new_version = cgraph_node::create (new_decl);
new_version = cgraph_create_node (new_decl);
new_version->analyzed = old_version->analyzed;
new_version->definition = old_version->definition;
new_version->local = old_version->local;
new_version->analyzed = analyzed;
new_version->definition = definition;
new_version->local = local;
new_version->externally_visible = false;
new_version->local.local = new_version->definition;
new_version->global = old_version->global;
new_version->rtl = old_version->rtl;
new_version->count = old_version->count;
new_version->global = global;
new_version->rtl = rtl;
new_version->count = count;
for (e = old_version->callees; e; e=e->next_callee)
for (e = callees; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
e->lto_stmt_uid, REG_BR_PROB_BASE,
CGRAPH_FREQ_BASE,
true);
for (e = old_version->indirect_calls; e; e=e->next_callee)
for (e = indirect_calls; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
@ -889,7 +880,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
cgraph_redirect_edge_callee (e, new_version);
}
cgraph_call_node_duplication_hooks (old_version, new_version);
call_duplication_hooks (new_version);
return new_version;
}
@ -904,7 +895,6 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
TREE_MAP is a mapping of tree nodes we want to replace with
new ones (according to results of prior analysis).
OLD_VERSION_NODE is the node that is versioned.
If non-NULL ARGS_TO_SKIP determine function parameters to remove
from new version.
@ -914,24 +904,21 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
Return the new version's cgraph node. */
struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
vec<cgraph_edge_p> redirect_callers,
vec<ipa_replace_map_p, va_gc> *tree_map,
bitmap args_to_skip,
bool skip_return,
bitmap bbs_to_copy,
basic_block new_entry_block,
const char *clone_name)
cgraph_node *
cgraph_node::create_version_clone_with_body
(vec<cgraph_edge *> redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
const char *clone_name)
{
tree old_decl = old_version_node->decl;
tree old_decl = decl;
struct cgraph_node *new_version_node = NULL;
tree new_decl;
if (!tree_versionable_function_p (old_decl))
return NULL;
gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node for the new version. */
if (!args_to_skip && !skip_return)
@ -951,13 +938,12 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
/* Create the new version's call-graph node.
and update the edges of the new node. */
new_version_node =
cgraph_copy_node_for_versioning (old_version_node, new_decl,
redirect_callers, bbs_to_copy);
new_version_node = create_version_clone (new_decl, redirect_callers,
bbs_to_copy);
if (old_version_node->ipa_transforms_to_apply.exists ())
if (ipa_transforms_to_apply.exists ())
new_version_node->ipa_transforms_to_apply
= old_version_node->ipa_transforms_to_apply.copy ();
= ipa_transforms_to_apply.copy ();
/* Copy the OLD_VERSION_NODE function tree to the new version. */
tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
skip_return, bbs_to_copy, new_entry_block);
@ -967,7 +953,7 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
that is not weak also.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
symtab_make_decl_local (new_version_node->decl);
new_version_node->make_decl_local ();
DECL_VIRTUAL_P (new_version_node->decl) = 0;
new_version_node->externally_visible = 0;
new_version_node->local.local = 1;
@ -983,7 +969,7 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
cgraph_call_function_insertion_hooks (new_version_node);
new_version_node->call_function_insertion_hooks ();
return new_version_node;
}
@ -1018,8 +1004,8 @@ cgraph_materialize_clone (struct cgraph_node *node)
node->prev_sibling_clone = NULL;
if (!node->clone_of->analyzed && !node->clone_of->clones)
{
cgraph_release_function_body (node->clone_of);
cgraph_node_remove_callees (node->clone_of);
node->clone_of->release_body ();
node->clone_of->remove_callees ();
node->clone_of->remove_all_references ();
}
node->clone_of = NULL;
@ -1042,7 +1028,7 @@ cgraph_materialize_all_clones (void)
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
verify_cgraph ();
cgraph_node::verify_cgraph_nodes ();
#endif
/* We can also do topological order, but number of iterations should be
@ -1057,7 +1043,7 @@ cgraph_materialize_all_clones (void)
&& !gimple_has_body_p (node->decl))
{
if (!node->clone_of->clone_of)
cgraph_get_body (node->clone_of);
node->clone_of->get_body ();
if (gimple_has_body_p (node->clone_of->decl))
{
if (cgraph_dump_file)
@ -1104,7 +1090,7 @@ cgraph_materialize_all_clones (void)
FOR_EACH_FUNCTION (node)
if (!node->analyzed && node->callees)
{
cgraph_node_remove_callees (node);
node->remove_callees ();
node->remove_all_references ();
}
else
@ -1112,7 +1098,7 @@ cgraph_materialize_all_clones (void)
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
verify_cgraph ();
cgraph_node::verify_cgraph_nodes ();
#endif
symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}

View File

@ -220,7 +220,6 @@ cgraph_node_set cgraph_new_nodes;
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void expand_function (struct cgraph_node *);
static void analyze_function (struct cgraph_node *);
static void handle_alias_pairs (void);
FILE *cgraph_dump_file;
@ -320,7 +319,7 @@ cgraph_process_new_functions (void)
it into reachable functions list. */
cgraph_finalize_function (fndecl, false);
cgraph_call_function_insertion_hooks (node);
node->call_function_insertion_hooks ();
enqueue_node (node);
break;
@ -332,7 +331,7 @@ cgraph_process_new_functions (void)
gimple_register_cfg_hooks ();
if (!node->analyzed)
analyze_function (node);
node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
if (cgraph_state == CGRAPH_STATE_IPA_SSA
&& !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
@ -342,14 +341,14 @@ cgraph_process_new_functions (void)
free_dominance_info (CDI_POST_DOMINATORS);
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
cgraph_call_function_insertion_hooks (node);
node->call_function_insertion_hooks ();
break;
case CGRAPH_STATE_EXPANSION:
/* Functions created during expansion shall be compiled
directly. */
node->process = 0;
cgraph_call_function_insertion_hooks (node);
node->call_function_insertion_hooks ();
expand_function (node);
break;
@ -373,27 +372,27 @@ cgraph_process_new_functions (void)
body for expanding the function but this is difficult to do. */
void
cgraph_reset_node (struct cgraph_node *node)
cgraph_node::reset (void)
{
/* If node->process is set, then we have already begun whole-unit analysis.
/* If process is set, then we have already begun whole-unit analysis.
This is *not* testing for whether we've already emitted the function.
That case can be sort-of legitimately seen with real function redefinition
errors. I would argue that the front end should never present us with
such a case, but don't enforce that for now. */
gcc_assert (!node->process);
gcc_assert (!process);
/* Reset our data structures so we can analyze the function again. */
memset (&node->local, 0, sizeof (node->local));
memset (&node->global, 0, sizeof (node->global));
memset (&node->rtl, 0, sizeof (node->rtl));
node->analyzed = false;
node->definition = false;
node->alias = false;
node->weakref = false;
node->cpp_implicit_alias = false;
memset (&local, 0, sizeof (local));
memset (&global, 0, sizeof (global));
memset (&rtl, 0, sizeof (rtl));
analyzed = false;
definition = false;
alias = false;
weakref = false;
cpp_implicit_alias = false;
cgraph_node_remove_callees (node);
node->remove_all_references ();
remove_callees ();
remove_all_references ();
}
/* Return true when there are references to NODE. */
@ -421,14 +420,14 @@ referred_to_p (symtab_node *node)
void
cgraph_finalize_function (tree decl, bool no_collect)
{
struct cgraph_node *node = cgraph_get_create_node (decl);
struct cgraph_node *node = cgraph_node::get_create (decl);
if (node->definition)
{
/* Nested functions should only be defined once. */
gcc_assert (!DECL_CONTEXT (decl)
|| TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
cgraph_reset_node (node);
node->reset ();
node->local.redefined_extern_inline = true;
}
@ -488,7 +487,7 @@ cgraph_finalize_function (tree decl, bool no_collect)
processing to avoid need the passes to be re-entrant. */
void
cgraph_add_new_function (tree fndecl, bool lowered)
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
gcc::pass_manager *passes = g->get_passes ();
struct cgraph_node *node;
@ -499,7 +498,7 @@ cgraph_add_new_function (tree fndecl, bool lowered)
break;
case CGRAPH_STATE_CONSTRUCTION:
/* Just enqueue function to be processed at nearest occurrence. */
node = cgraph_get_create_node (fndecl);
node = cgraph_node::get_create (fndecl);
if (lowered)
node->lowered = true;
if (!cgraph_new_nodes)
@ -512,7 +511,7 @@ cgraph_add_new_function (tree fndecl, bool lowered)
case CGRAPH_STATE_EXPANSION:
/* Bring the function into finalized state and enqueue for later
analyzing and compilation. */
node = cgraph_get_create_node (fndecl);
node = cgraph_node::get_create (fndecl);
node->local.local = false;
node->definition = true;
node->force_output = true;
@ -538,11 +537,11 @@ cgraph_add_new_function (tree fndecl, bool lowered)
case CGRAPH_STATE_FINISHED:
/* At the very end of compilation we have to do all the work up
to expansion. */
node = cgraph_create_node (fndecl);
node = cgraph_node::create (fndecl);
if (lowered)
node->lowered = true;
node->definition = true;
analyze_function (node);
node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
@ -599,40 +598,39 @@ output_asm_statements (void)
}
/* Analyze the function scheduled to be output. */
static void
analyze_function (struct cgraph_node *node)
void
cgraph_node::analyze (void)
{
tree decl = node->decl;
tree decl = this->decl;
location_t saved_loc = input_location;
input_location = DECL_SOURCE_LOCATION (decl);
if (node->thunk.thunk_p)
if (thunk.thunk_p)
{
cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
NULL, 0, CGRAPH_FREQ_BASE);
if (!expand_thunk (node, false, false))
create_edge (cgraph_node::get (thunk.alias),
NULL, 0, CGRAPH_FREQ_BASE);
if (!expand_thunk (false, false))
{
node->thunk.alias = NULL;
node->analyzed = true;
thunk.alias = NULL;
analyzed = true;
return;
}
node->thunk.alias = NULL;
thunk.alias = NULL;
}
if (node->alias)
symtab_resolve_alias
(node, cgraph_get_node (node->alias_target));
else if (node->dispatcher_function)
if (alias)
resolve_alias (cgraph_node::get (alias_target));
else if (dispatcher_function)
{
/* Generate the dispatcher body of multi-versioned functions. */
struct cgraph_function_version_info *dispatcher_version_info
= get_cgraph_node_version (node);
= function_version ();
if (dispatcher_version_info != NULL
&& (dispatcher_version_info->dispatcher_resolver
== NULL_TREE))
{
tree resolver = NULL_TREE;
gcc_assert (targetm.generate_version_dispatcher_body);
resolver = targetm.generate_version_dispatcher_body (node);
resolver = targetm.generate_version_dispatcher_body (this);
gcc_assert (resolver != NULL_TREE);
}
}
@ -640,7 +638,7 @@ analyze_function (struct cgraph_node *node)
{
push_cfun (DECL_STRUCT_FUNCTION (decl));
assign_assembler_name_if_neeeded (node->decl);
assign_assembler_name_if_neeeded (decl);
/* Make sure to gimplify bodies only once. During analyzing a
function we lower it, which will require gimplified nested
@ -651,11 +649,11 @@ analyze_function (struct cgraph_node *node)
dump_function (TDI_generic, decl);
/* Lower the function. */
if (!node->lowered)
if (!lowered)
{
if (node->nested)
lower_nested_functions (node->decl);
gcc_assert (!node->nested);
if (nested)
lower_nested_functions (decl);
gcc_assert (!nested);
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
@ -664,12 +662,12 @@ analyze_function (struct cgraph_node *node)
free_dominance_info (CDI_DOMINATORS);
compact_blocks ();
bitmap_obstack_release (NULL);
node->lowered = true;
lowered = true;
}
pop_cfun ();
}
node->analyzed = true;
analyzed = true;
input_location = saved_loc;
}
@ -686,11 +684,10 @@ cgraph_process_same_body_aliases (void)
symtab_node *node;
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias && !node->analyzed)
symtab_resolve_alias
(node,
TREE_CODE (node->alias_target) == VAR_DECL
node->resolve_alias
(TREE_CODE (node->alias_target) == VAR_DECL
? (symtab_node *)varpool_node_for_decl (node->alias_target)
: (symtab_node *)cgraph_get_create_node (node->alias_target));
: (symtab_node *)cgraph_node::get_create (node->alias_target));
cpp_implicit_aliases_done = true;
}
@ -748,7 +745,7 @@ process_function_and_variable_attributes (struct cgraph_node *first,
{
tree decl = node->decl;
if (DECL_PRESERVE_P (decl))
cgraph_mark_force_output_node (node);
node->mark_force_output ();
else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
@ -893,8 +890,8 @@ walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
if (targets.length () == 1)
target = targets[0];
else
target = cgraph_get_create_node
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
target = cgraph_node::create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (cgraph_dump_file)
{
@ -957,7 +954,7 @@ analyze_functions (void)
if (cpp_implicit_aliases_done)
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias)
fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
if (optimize && flag_devirtualize)
build_type_inheritance_graph ();
@ -1019,13 +1016,13 @@ analyze_functions (void)
&& !cnode->thunk.thunk_p
&& !cnode->dispatcher_function)
{
cgraph_reset_node (cnode);
cnode->reset ();
cnode->local.redefined_extern_inline = true;
continue;
}
if (!cnode->analyzed)
analyze_function (cnode);
cnode->analyze ();
for (edge = cnode->callees; edge; edge = edge->next_callee)
if (edge->callee->definition)
@ -1050,7 +1047,7 @@ analyze_functions (void)
if (DECL_ABSTRACT_ORIGIN (decl))
{
struct cgraph_node *origin_node
= cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
= cgraph_node::get (DECL_ABSTRACT_ORIGIN (decl));
origin_node->used_as_abstract_origin = true;
}
}
@ -1082,7 +1079,7 @@ analyze_functions (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nInitial ");
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
}
if (cgraph_dump_file)
@ -1097,7 +1094,7 @@ analyze_functions (void)
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", node->name ());
symtab_remove_node (node);
node->remove ();
continue;
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
@ -1107,7 +1104,7 @@ analyze_functions (void)
if (cnode->definition && !gimple_has_body_p (decl)
&& !cnode->alias
&& !cnode->thunk.thunk_p)
cgraph_reset_node (cnode);
cnode->reset ();
gcc_assert (!cnode->definition || cnode->thunk.thunk_p
|| cnode->alias
@ -1123,7 +1120,7 @@ analyze_functions (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nReclaimed ");
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
}
bitmap_obstack_release (NULL);
pointer_set_destroy (reachable_call_targets);
@ -1157,7 +1154,7 @@ handle_alias_pairs (void)
if (!target_node
&& lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
{
symtab_node *node = symtab_get_node (p->decl);
symtab_node *node = symtab_node::get (p->decl);
if (node)
{
node->alias_target = p->target;
@ -1170,7 +1167,7 @@ handle_alias_pairs (void)
else if (!target_node)
{
error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
symtab_node *node = symtab_get_node (p->decl);
symtab_node *node = symtab_node::get (p->decl);
if (node)
node->alias = false;
alias_pairs->unordered_remove (i);
@ -1192,10 +1189,10 @@ handle_alias_pairs (void)
if (TREE_CODE (p->decl) == FUNCTION_DECL
&& target_node && is_a <cgraph_node *> (target_node))
{
struct cgraph_node *src_node = cgraph_get_node (p->decl);
struct cgraph_node *src_node = cgraph_node::get (p->decl);
if (src_node && src_node->definition)
cgraph_reset_node (src_node);
cgraph_create_function_alias (p->decl, target_node->decl);
src_node->reset ();
cgraph_node::create_alias (p->decl, target_node->decl);
alias_pairs->unordered_remove (i);
}
else if (TREE_CODE (p->decl) == VAR_DECL
@ -1252,11 +1249,11 @@ mark_functions_to_output (void)
if (node->same_comdat_group)
{
struct cgraph_node *next;
for (next = cgraph (node->same_comdat_group);
for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
next = cgraph (next->same_comdat_group))
next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if (!next->thunk.thunk_p && !next->alias
&& !symtab_comdat_local_p (next))
&& !next->comdat_local_p ())
next->process = 1;
}
}
@ -1280,7 +1277,7 @@ mark_functions_to_output (void)
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
dump_cgraph_node (stderr, node);
node->debug ();
internal_error ("failed to reclaim unneeded function");
}
#endif
@ -1310,7 +1307,7 @@ mark_functions_to_output (void)
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
dump_cgraph_node (stderr, node);
node->debug ();
internal_error ("failed to reclaim unneeded function in same "
"comdat group");
}
@ -1472,14 +1469,14 @@ thunk_adjust (gimple_stmt_iterator * bsi,
thunks that are not lowered. */
bool
expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimple_thunk)
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
bool this_adjusting = node->thunk.this_adjusting;
HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
bool this_adjusting = thunk.this_adjusting;
HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
HOST_WIDE_INT virtual_value = thunk.virtual_value;
tree virtual_offset = NULL;
tree alias = node->callees->callee->decl;
tree thunk_fndecl = node->decl;
tree alias = callees->callee->decl;
tree thunk_fndecl = decl;
tree a;
@ -1495,7 +1492,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
return false;
if (in_lto_p)
cgraph_get_body (node);
get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
@ -1530,8 +1527,8 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
free_after_compilation (cfun);
set_cfun (NULL);
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
node->thunk.thunk_p = false;
node->analyzed = false;
thunk.thunk_p = false;
analyzed = false;
}
else
{
@ -1548,7 +1545,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
gimple ret;
if (in_lto_p)
cgraph_get_body (node);
get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
@ -1559,7 +1556,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
DECL_IGNORED_P (thunk_fndecl) = 1;
bitmap_obstack_initialize (NULL);
if (node->thunk.virtual_offset_p)
if (thunk.virtual_offset_p)
virtual_offset = size_int (virtual_value);
/* Build the return declaration for the function. */
@ -1617,7 +1614,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
vargs.quick_push (tmp);
}
call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
node->callees->call_stmt = call;
callees->call_stmt = call;
gimple_call_set_from_thunk (call, true);
if (restmp)
{
@ -1697,8 +1694,8 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
/* Since we want to emit the thunk, we explicitly mark its name as
referenced. */
node->thunk.thunk_p = false;
node->lowered = true;
thunk.thunk_p = false;
lowered = true;
bitmap_obstack_release (NULL);
}
current_function_decl = NULL;
@ -1720,7 +1717,7 @@ assemble_thunks_and_aliases (struct cgraph_node *node)
struct cgraph_node *thunk = e->caller;
e = e->next_caller;
expand_thunk (thunk, true, false);
thunk->expand_thunk (true, false);
assemble_thunks_and_aliases (thunk);
}
else
@ -1755,7 +1752,7 @@ expand_function (struct cgraph_node *node)
announce_function (decl);
node->process = 0;
gcc_assert (node->lowered);
cgraph_get_body (node);
node->get_body ();
/* Generate RTL for the body of DECL. */
@ -1819,7 +1816,7 @@ expand_function (struct cgraph_node *node)
gimple_set_body (decl, NULL);
if (DECL_STRUCT_FUNCTION (decl) == 0
&& !cgraph_get_node (decl)->origin)
&& !cgraph_node::get (decl)->origin)
{
/* Stop pointing to the local nodes about to be freed.
But DECL_INITIAL must remain nonzero so we know this
@ -1847,10 +1844,10 @@ expand_function (struct cgraph_node *node)
FIXME: Perhaps thunks should be move before function IFF they are not in comdat
groups. */
assemble_thunks_and_aliases (node);
cgraph_release_function_body (node);
node->release_body ();
/* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
points to the dead function body. */
cgraph_node_remove_callees (node);
node->remove_callees ();
node->remove_all_references ();
}
@ -2135,7 +2132,7 @@ output_weakrefs (void)
? DECL_ASSEMBLER_NAME (node->alias_target)
: node->alias_target);
else if (node->analyzed)
target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
else
{
gcc_unreachable ();
@ -2164,7 +2161,7 @@ compile (void)
return;
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
#endif
timevar_push (TV_CGRAPHOPT);
@ -2200,7 +2197,7 @@ compile (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
dump_symtab (cgraph_dump_file);
symtab_node:: dump_table (cgraph_dump_file);
}
if (post_ipa_mem_report)
{
@ -2214,7 +2211,7 @@ compile (void)
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
#endif
cgraph_materialize_all_clones ();
@ -2222,7 +2219,7 @@ compile (void)
execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);
mark_functions_to_output ();
@ -2272,10 +2269,10 @@ compile (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\nFinal ");
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
if (!seen_error ())
@ -2288,7 +2285,7 @@ compile (void)
|| gimple_has_body_p (node->decl))
{
error_found = true;
dump_cgraph_node (stderr, node);
node->debug ();
}
if (error_found)
internal_error ("nodes with unreleased memory found");
@ -2343,41 +2340,39 @@ finalize_compilation_unit (void)
timevar_pop (TV_CGRAPH);
}
/* Creates a wrapper from SOURCE node to TARGET node. Thunk is used for this
/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
kind of wrapper method. */
void
cgraph_make_wrapper (struct cgraph_node *source, struct cgraph_node *target)
cgraph_node::create_wrapper (struct cgraph_node *target)
{
/* Preserve DECL_RESULT so we get right by reference flag. */
tree decl_result = DECL_RESULT (source->decl);
tree decl_result = DECL_RESULT (decl);
/* Remove the function's body. */
cgraph_release_function_body (source);
cgraph_reset_node (source);
release_body ();
reset ();
DECL_RESULT (source->decl) = decl_result;
DECL_INITIAL (source->decl) = NULL;
allocate_struct_function (source->decl, false);
DECL_RESULT (decl) = decl_result;
DECL_INITIAL (decl) = NULL;
allocate_struct_function (decl, false);
set_cfun (NULL);
/* Turn alias into thunk and expand it into GIMPLE representation. */
source->definition = true;
source->thunk.thunk_p = true;
source->thunk.this_adjusting = false;
definition = true;
thunk.thunk_p = true;
thunk.this_adjusting = false;
struct cgraph_edge *e = cgraph_create_edge (source, target, NULL, 0,
CGRAPH_FREQ_BASE);
struct cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
if (!expand_thunk (source, false, true))
source->analyzed = true;
if (!expand_thunk (false, true))
analyzed = true;
e->call_stmt_cannot_inline_p = true;
/* Inline summary set-up. */
analyze_function (source);
inline_analyze_function (source);
analyze ();
inline_analyze_function (this);
}
#include "gt-cgraphunit.h"

View File

@ -9188,7 +9188,7 @@ ix86_code_end (void)
#endif
if (USE_HIDDEN_LINKONCE)
{
cgraph_create_node (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
cgraph_node::create (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
targetm.asm_out.unique_section (decl, 0);
switch_to_section (get_named_section (decl, NULL, 0));
@ -9554,7 +9554,7 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
&& cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
{
int count = frame->nregs;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
cfun->machine->use_fast_prologue_epilogue_nregs = count;
@ -32028,10 +32028,10 @@ ix86_get_function_versions_dispatcher (void *decl)
gcc_assert (fn != NULL && DECL_FUNCTION_VERSIONED (fn));
node = cgraph_get_node (fn);
node = cgraph_node::get (fn);
gcc_assert (node != NULL);
node_v = get_cgraph_node_version (node);
node_v = node->function_version ();
gcc_assert (node_v != NULL);
if (node_v->dispatcher_resolver != NULL)
@ -32078,11 +32078,11 @@ ix86_get_function_versions_dispatcher (void *decl)
/* Right now, the dispatching is done via ifunc. */
dispatch_decl = make_dispatcher_decl (default_node->decl);
dispatcher_node = cgraph_get_create_node (dispatch_decl);
dispatcher_node = cgraph_node::get_create (dispatch_decl);
gcc_assert (dispatcher_node != NULL);
dispatcher_node->dispatcher_function = 1;
dispatcher_version_info
= insert_new_cgraph_node_version (dispatcher_node);
= dispatcher_node->insert_new_function_version ();
dispatcher_version_info->next = default_version_info;
dispatcher_node->definition = 1;
@ -32191,8 +32191,8 @@ make_resolver_func (const tree default_decl,
push_cfun (DECL_STRUCT_FUNCTION (decl));
*empty_bb = init_lowered_empty_function (decl, false);
cgraph_add_new_function (decl, true);
cgraph_call_function_insertion_hooks (cgraph_get_create_node (decl));
cgraph_node::add_new_function (decl, true);
cgraph_node::get_create (decl)->call_function_insertion_hooks ();
pop_cfun ();
@ -32203,7 +32203,7 @@ make_resolver_func (const tree default_decl,
/* Create the alias for dispatch to resolver here. */
/*cgraph_create_function_alias (dispatch_decl, decl);*/
cgraph_same_body_alias (NULL, dispatch_decl, decl);
cgraph_node::create_same_body_alias (dispatch_decl, decl);
XDELETEVEC (resolver_name);
return decl;
}
@ -32227,7 +32227,7 @@ ix86_generate_version_dispatcher_body (void *node_p)
node = (cgraph_node *)node_p;
node_version_info = get_cgraph_node_version (node);
node_version_info = node->function_version ();
gcc_assert (node->dispatcher_function
&& node_version_info != NULL);

View File

@ -6848,13 +6848,13 @@ mark_versions_used (tree fn)
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
node = cgraph_get_node (fn);
node = cgraph_node::get (fn);
if (node == NULL)
return;
gcc_assert (node->dispatcher_function);
node_v = get_cgraph_node_version (node);
node_v = node->function_version ();
if (node_v == NULL)
return;

View File

@ -1147,7 +1147,7 @@ add_method (tree type, tree method, tree using_decl)
if (DECL_ASSEMBLER_NAME_SET_P (method))
mangle_decl (method);
}
record_function_versions (fn, method);
cgraph_node::record_function_versions (fn, method);
continue;
}
if (DECL_INHERITED_CTOR_BASE (method))

View File

@ -1072,7 +1072,7 @@ decls_match (tree newdecl, tree olddecl)
if (DECL_ASSEMBLER_NAME_SET_P (olddecl))
mangle_decl (olddecl);
}
record_function_versions (olddecl, newdecl);
cgraph_node::record_function_versions (olddecl, newdecl);
return 0;
}
}
@ -2074,10 +2074,11 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
{
struct symtab_node *symbol;
if (TREE_CODE (olddecl) == FUNCTION_DECL)
symbol = cgraph_get_create_node (newdecl);
symbol = cgraph_node::get_create (newdecl);
else
symbol = varpool_node_for_decl (newdecl);
symbol->set_comdat_group (symtab_get_node (olddecl)->get_comdat_group ());
symbol->set_comdat_group (symtab_node::get
(olddecl)->get_comdat_group ());
}
DECL_DEFER_OUTPUT (newdecl) |= DECL_DEFER_OUTPUT (olddecl);
@ -2382,13 +2383,13 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
DECL_FUNCTION_VERSIONED (newdecl) = 1;
/* newdecl will be purged after copying to olddecl and is no longer
a version. */
delete_function_version (newdecl);
cgraph_node::delete_function_version (newdecl);
}
if (TREE_CODE (newdecl) == FUNCTION_DECL)
{
int function_size;
struct symtab_node *snode = symtab_get_node (olddecl);
struct symtab_node *snode = symtab_node::get (olddecl);
function_size = sizeof (struct tree_decl_common);
@ -2450,7 +2451,7 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
if (TREE_CODE (olddecl) == VAR_DECL
&& (TREE_STATIC (olddecl) || TREE_PUBLIC (olddecl) || DECL_EXTERNAL (olddecl)))
snode = symtab_get_node (olddecl);
snode = symtab_node::get (olddecl);
memcpy ((char *) olddecl + sizeof (struct tree_decl_common),
(char *) newdecl + sizeof (struct tree_decl_common),
size - sizeof (struct tree_decl_common)
@ -2502,9 +2503,9 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
struct symtab_node *snode = symtab_get_node (newdecl);
struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
symtab_remove_node (snode);
snode->remove ();
}
ggc_free (newdecl);

View File

@ -1928,7 +1928,7 @@ mark_needed (tree decl)
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
struct cgraph_node *node = cgraph_get_create_node (decl);
struct cgraph_node *node = cgraph_node::get_create (decl);
node->forced_by_abi = true;
}
else if (TREE_CODE (decl) == VAR_DECL)
@ -2055,7 +2055,7 @@ maybe_emit_vtables (tree ctype)
{
current = varpool_node_for_decl (vtbl);
if (last)
symtab_add_to_same_comdat_group (current, last);
current->add_to_same_comdat_group (last);
last = current;
}
}
@ -2125,7 +2125,7 @@ constrain_visibility (tree decl, int visibility, bool tmpl)
if (TREE_CODE (decl) == FUNCTION_DECL
|| TREE_CODE (decl) == VAR_DECL)
{
struct symtab_node *snode = symtab_get_node (decl);
struct symtab_node *snode = symtab_node::get (decl);
if (snode)
snode->set_comdat_group (NULL);
@ -4252,8 +4252,8 @@ handle_tls_init (void)
if (single_init_fn == NULL_TREE)
continue;
cgraph_node *alias
= cgraph_same_body_alias (cgraph_get_create_node (fn),
single_init_fn, fn);
= cgraph_node::get_create (fn)->create_same_body_alias
(single_init_fn, fn);
gcc_assert (alias != NULL);
}
#endif
@ -4521,21 +4521,21 @@ cp_write_global_declarations (void)
{
struct cgraph_node *node, *next;
node = cgraph_get_node (decl);
node = cgraph_node::get (decl);
if (node->cpp_implicit_alias)
node = cgraph_alias_target (node);
node = node->get_alias_target ();
cgraph_for_node_and_aliases (node, clear_decl_external,
NULL, true);
node->call_for_symbol_thunks_and_aliases (clear_decl_external,
NULL, true);
/* If we mark !DECL_EXTERNAL one of the symbols in some comdat
group, we need to mark all symbols in the same comdat group
that way. */
if (node->same_comdat_group)
for (next = cgraph (node->same_comdat_group);
for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
next = cgraph (next->same_comdat_group))
cgraph_for_node_and_aliases (next, clear_decl_external,
NULL, true);
next = dyn_cast<cgraph_node *> (next->same_comdat_group))
next->call_for_symbol_thunks_and_aliases (clear_decl_external,
NULL, true);
}
/* If we're going to need to write this function out, and
@ -4545,7 +4545,7 @@ cp_write_global_declarations (void)
if (!DECL_EXTERNAL (decl)
&& decl_needed_p (decl)
&& !TREE_ASM_WRITTEN (decl)
&& !cgraph_get_node (decl)->definition)
&& !cgraph_node::get (decl)->definition)
{
/* We will output the function; no longer consider it in this
loop. */

View File

@ -1038,9 +1038,8 @@ maybe_add_lambda_conv_op (tree type)
if (DECL_ONE_ONLY (statfn))
{
/* Put the thunk in the same comdat group as the call op. */
symtab_add_to_same_comdat_group
(cgraph_get_create_node (statfn),
cgraph_get_create_node (callop));
cgraph_node::get_create (statfn)->add_to_same_comdat_group
(cgraph_node::get_create (callop));
}
tree body = begin_function_body ();
tree compound_stmt = begin_compound_stmt (0);

View File

@ -3487,8 +3487,8 @@ mangle_decl (const tree decl)
if (TREE_CODE (decl) == FUNCTION_DECL)
{
/* Don't create an alias to an unreferenced function. */
if (struct cgraph_node *n = cgraph_get_node (decl))
cgraph_same_body_alias (n, alias, decl);
if (struct cgraph_node *n = cgraph_node::get (decl))
n->create_same_body_alias (alias, decl);
}
else
varpool_extra_name_alias (alias, decl);

View File

@ -260,9 +260,9 @@ make_alias_for_thunk (tree function)
if (!flag_syntax_only)
{
struct cgraph_node *funcn, *aliasn;
funcn = cgraph_get_node (function);
funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
aliasn = cgraph_same_body_alias (funcn, alias, function);
aliasn = cgraph_node::create_same_body_alias (alias, function);
DECL_ASSEMBLER_NAME (function);
gcc_assert (aliasn != NULL);
}
@ -359,13 +359,13 @@ use_thunk (tree thunk_fndecl, bool emit_p)
tree fn = function;
struct symtab_node *symbol;
if ((symbol = symtab_get_node (function))
if ((symbol = symtab_node::get (function))
&& symbol->alias)
{
if (symbol->analyzed)
fn = symtab_alias_ultimate_target (symtab_get_node (function))->decl;
fn = symtab_node::get (function)->ultimate_alias_target ()->decl;
else
fn = symtab_get_node (function)->alias_target;
fn = symtab_node::get (function)->alias_target;
}
resolve_unique_section (fn, 0, flag_function_sections);
@ -375,8 +375,8 @@ use_thunk (tree thunk_fndecl, bool emit_p)
/* Output the thunk into the same section as function. */
set_decl_section_name (thunk_fndecl, DECL_SECTION_NAME (fn));
symtab_get_node (thunk_fndecl)->implicit_section
= symtab_get_node (fn)->implicit_section;
symtab_node::get (thunk_fndecl)->implicit_section
= symtab_node::get (fn)->implicit_section;
}
}
@ -395,14 +395,13 @@ use_thunk (tree thunk_fndecl, bool emit_p)
a = nreverse (t);
DECL_ARGUMENTS (thunk_fndecl) = a;
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
funcn = cgraph_get_node (function);
funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
thunk_node = cgraph_add_thunk (funcn, thunk_fndecl, function,
this_adjusting, fixed_offset, virtual_value,
virtual_offset, alias);
thunk_node = funcn->create_thunk (thunk_fndecl, function,
this_adjusting, fixed_offset, virtual_value,
virtual_offset, alias);
if (DECL_ONE_ONLY (function))
symtab_add_to_same_comdat_group (thunk_node,
funcn);
thunk_node->add_to_same_comdat_group (funcn);
if (!this_adjusting
|| !targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,

View File

@ -191,7 +191,7 @@ cdtor_comdat_group (tree complete, tree base)
diff_seen = true;
}
grp_name[idx] = '\0';
gcc_assert (diff_seen || symtab_get_node (complete)->alias);
gcc_assert (diff_seen || symtab_node::get (complete)->alias);
return get_identifier (grp_name);
}
@ -287,16 +287,16 @@ maybe_thunk_body (tree fn, bool force)
else if (HAVE_COMDAT_GROUP)
{
tree comdat_group = cdtor_comdat_group (fns[1], fns[0]);
cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[1]),
cgraph_get_create_node (fns[0]));
symtab_add_to_same_comdat_group (symtab_get_node (fn),
symtab_get_node (fns[0]));
cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
cgraph_node::get_create (fns[1])->add_to_same_comdat_group
(cgraph_node::get_create (fns[0]));
symtab_node::get (fn)->add_to_same_comdat_group
(symtab_node::get (fns[0]));
if (fns[2])
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[2]),
symtab_get_node (fns[0]));
cgraph_node::get_create (fns[2])->add_to_same_comdat_group
(symtab_node::get (fns[0]));
TREE_PUBLIC (fn) = false;
DECL_EXTERNAL (fn) = false;
DECL_INTERFACE_KNOWN (fn) = true;
@ -475,7 +475,7 @@ maybe_clone_body (tree fn)
name of fn was corrupted by write_mangled_name by adding *INTERNAL*
to it. By doing so, it also corrupted the comdat group. */
if (DECL_ONE_ONLY (fn))
cgraph_get_create_node (clone)->set_comdat_group (cxx_comdat_group (clone));
cgraph_node::get_create (clone)->set_comdat_group (cxx_comdat_group (clone));
DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
@ -542,8 +542,8 @@ maybe_clone_body (tree fn)
if (can_alias
&& fns[0]
&& idx == 1
&& cgraph_same_body_alias (cgraph_get_create_node (fns[0]),
clone, fns[0]))
&& cgraph_node::get_create (fns[0])->create_same_body_alias
(clone, fns[0]))
{
alias = true;
if (DECL_ONE_ONLY (fns[0]))
@ -552,11 +552,11 @@ maybe_clone_body (tree fn)
into the same, *[CD]5* comdat group instead of
*[CD][12]*. */
comdat_group = cdtor_comdat_group (fns[1], fns[0]);
cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
if (symtab_get_node (clone)->same_comdat_group)
symtab_remove_from_same_comdat_group (symtab_get_node (clone));
symtab_add_to_same_comdat_group (symtab_get_node (clone),
symtab_get_node (fns[0]));
cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
if (symtab_node::get (clone)->same_comdat_group)
symtab_node::get (clone)->remove_from_same_comdat_group ();
symtab_node::get (clone)->add_to_same_comdat_group
(symtab_node::get (fns[0]));
}
}
@ -568,9 +568,8 @@ maybe_clone_body (tree fn)
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
if (comdat_group)
symtab_add_to_same_comdat_group
(cgraph_get_create_node (clone),
symtab_get_node (fns[0]));
cgraph_node::get_create (clone)->add_to_same_comdat_group
(symtab_node::get (fns[0]));
}
else if (alias)
/* No need to populate body. */ ;

View File

@ -4031,7 +4031,7 @@ cp_fix_function_decl_p (tree decl)
&& !DECL_THUNK_P (decl)
&& !DECL_EXTERNAL (decl))
{
struct cgraph_node *node = cgraph_get_node (decl);
struct cgraph_node *node = cgraph_node::get (decl);
/* Don't fix same_body aliases. Although they don't have their own
CFG, they share it with what they alias to. */

View File

@ -1186,7 +1186,7 @@ vtv_generate_init_routine (void)
DECL_STATIC_CONSTRUCTOR (vtv_fndecl) = 0;
gimplify_function_tree (vtv_fndecl);
cgraph_add_new_function (vtv_fndecl, false);
cgraph_node::add_new_function (vtv_fndecl, false);
cgraph_process_new_functions ();
@ -1248,7 +1248,7 @@ vtable_find_or_create_map_decl (tree base_type)
we can find and protect them. */
set_decl_section_name (var_decl, ".vtable_map_vars");
symtab_get_node (var_decl)->implicit_section = true;
symtab_node::get (var_decl)->implicit_section = true;
DECL_INITIAL (var_decl) = initial_value;
comdat_linkage (var_decl);

View File

@ -15371,7 +15371,7 @@ reference_to_unused (tree * tp, int * walk_subtrees,
optimizing and gimplifying the CU by now.
So if *TP has no call graph node associated
to it, it means *TP will not be emitted. */
if (!cgraph_get_node (*tp))
if (!cgraph_node::get (*tp))
return *tp;
}
else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))

View File

@ -1990,15 +1990,14 @@ set_nothrow_function_flags (void)
}
if (crtl->nothrow
&& (cgraph_function_body_availability (cgraph_get_node
(current_function_decl))
&& (cgraph_node::get (current_function_decl)->get_availability ()
>= AVAIL_AVAILABLE))
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *e;
for (e = node->callers; e; e = e->next_caller)
e->can_throw_external = false;
cgraph_set_nothrow_flag (node, true);
node->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Marking function nothrow: %s\n\n",

View File

@ -16037,7 +16037,7 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
struct symtab_node *symbol;
symbol = symtab_get_node (base);
symbol = symtab_node::get (base);
if (symbol)
return symbol->nonzero_address ();
else

View File

@ -4800,7 +4800,7 @@ generate_coarray_init (gfc_namespace * ns __attribute((unused)))
set_cfun (NULL);
if (decl_function_context (fndecl))
(void) cgraph_create_node (fndecl);
(void) cgraph_node::create (fndecl);
else
cgraph_finalize_function (fndecl, true);
@ -5893,7 +5893,7 @@ gfc_generate_function_code (gfc_namespace * ns)
function has already called cgraph_create_node, which also created
the cgraph node for this function. */
if (!has_coarray_vars || gfc_option.coarray != GFC_FCOARRAY_LIB)
(void) cgraph_create_node (fndecl);
(void) cgraph_node::create (fndecl);
}
else
cgraph_finalize_function (fndecl, true);

View File

@ -99,7 +99,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
static objects are defined. */
if (cgraph_function_flags_ready)
return true;
snode = symtab_get_node (decl);
snode = symtab_node::get (decl);
if (!snode || !snode->definition)
return false;
node = dyn_cast <cgraph_node *> (snode);
@ -124,7 +124,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
if (DECL_VISIBILITY_SPECIFIED (decl)
&& DECL_EXTERNAL (decl)
&& DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
&& (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
&& (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
return false;
/* When function is public, we always can introduce new reference.
Exception are the COMDAT functions where introducing a direct
@ -145,7 +145,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
if (!cgraph_function_flags_ready)
return true;
snode = symtab_get_node (decl);
snode = symtab_node::get (decl);
if (!snode
|| ((!snode->definition || DECL_EXTERNAL (decl))
&& (!snode->in_other_partition
@ -201,7 +201,7 @@ canonicalize_constructor_val (tree cval, tree from_decl)
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
cgraph_get_create_node (base);
cgraph_node::get_create (base);
}
/* Fixup types in global initializers. */
if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
@ -1107,8 +1107,8 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
if (dump_file && virtual_method_call_p (callee)
&& !possible_polymorphic_call_target_p
(callee, cgraph_get_node (gimple_call_addr_fndecl
(OBJ_TYPE_REF_EXPR (callee)))))
(callee, cgraph_node::get (gimple_call_addr_fndecl
(OBJ_TYPE_REF_EXPR (callee)))))
{
fprintf (dump_file,
"Type inheritance inconsistent devirtualization of ");
@ -3354,7 +3354,7 @@ gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
cgraph_get_create_node (fn);
cgraph_node::get_create (fn);
return fn;
}

View File

@ -101,12 +101,12 @@ update_call_edge_frequencies (gimple_seq_node first, basic_block bb)
to avoid calling them if we never see any calls. */
if (cfun_node == NULL)
{
cfun_node = cgraph_get_node (current_function_decl);
cfun_node = cgraph_node::get (current_function_decl);
bb_freq = (compute_call_stmt_bb_frequency
(current_function_decl, bb));
}
e = cgraph_edge (cfun_node, n);
e = cfun_node->get_edge (n);
if (e != NULL)
e->frequency = bb_freq;
}

View File

@ -826,7 +826,7 @@ copy_if_shared (tree *tp, void *data)
static void
unshare_body (tree fndecl)
{
struct cgraph_node *cgn = cgraph_get_node (fndecl);
struct cgraph_node *cgn = cgraph_node::get (fndecl);
/* If the language requires deep unsharing, we need a pointer set to make
sure we don't repeatedly unshare subtrees of unshareable nodes. */
struct pointer_set_t *visited
@ -876,7 +876,7 @@ unmark_visited (tree *tp)
static void
unvisit_body (tree fndecl)
{
struct cgraph_node *cgn = cgraph_get_node (fndecl);
struct cgraph_node *cgn = cgraph_node::get (fndecl);
unmark_visited (&DECL_SAVED_TREE (fndecl));
unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
@ -8764,7 +8764,7 @@ gimplify_body (tree fndecl, bool do_parms)
unshare_body (fndecl);
unvisit_body (fndecl);
cgn = cgraph_get_node (fndecl);
cgn = cgraph_node::get (fndecl);
if (cgn && cgn->origin)
nonlocal_vlas = pointer_set_create ();

View File

@ -165,7 +165,7 @@ enqueue_references (symtab_node **first,
for (i = 0; symbol->iterate_reference (i, ref); i++)
{
symtab_node *node = symtab_alias_ultimate_target (ref->referred, NULL);
symtab_node *node = ref->referred->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
@ -182,8 +182,7 @@ enqueue_references (symtab_node **first,
enqueue_references (first, edge->callee);
else
{
symtab_node *node = symtab_alias_ultimate_target (edge->callee,
NULL);
symtab_node *node = edge->callee->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
@ -204,7 +203,7 @@ set_comdat_group (symtab_node *symbol,
gcc_assert (!symbol->get_comdat_group ());
symbol->set_comdat_group (head->get_comdat_group ());
symtab_add_to_same_comdat_group (symbol, head);
symbol->add_to_same_comdat_group (head);
return false;
}
@ -225,7 +224,7 @@ ipa_comdats (void)
ERROR_MARK_NODE as bottom for the propagation. */
FOR_EACH_DEFINED_SYMBOL (symbol)
if (!symtab_real_symbol_p (symbol))
if (!symbol->real_symbol_p ())
;
else if ((group = symbol->get_comdat_group ()) != NULL)
{
@ -248,7 +247,7 @@ ipa_comdats (void)
&& (DECL_STATIC_CONSTRUCTOR (symbol->decl)
|| DECL_STATIC_DESTRUCTOR (symbol->decl))))
{
map.put (symtab_alias_ultimate_target (symbol, NULL), error_mark_node);
map.put (symbol->ultimate_alias_target (), error_mark_node);
/* Mark the symbol so we won't waste time visiting it for dataflow. */
symbol->aux = (symtab_node *) (void *) 1;
@ -316,7 +315,7 @@ ipa_comdats (void)
symbol->aux = NULL;
if (!symbol->get_comdat_group ()
&& !symbol->alias
&& symtab_real_symbol_p (symbol))
&& symbol->real_symbol_p ())
{
tree group = *map.get (symbol);
@ -325,11 +324,12 @@ ipa_comdats (void)
if (dump_file)
{
fprintf (dump_file, "Localizing symbol\n");
dump_symtab_node (dump_file, symbol);
symbol->dump (dump_file);
fprintf (dump_file, "To group: %s\n", IDENTIFIER_POINTER (group));
}
symtab_for_node_and_aliases (symbol, set_comdat_group,
*comdat_head_map.get (group), true);
symbol->call_for_symbol_and_aliases (set_comdat_group,
*comdat_head_map.get (group),
true);
}
}
return 0;

View File

@ -428,7 +428,7 @@ determine_versionability (struct cgraph_node *node)
reason = "alias or thunk";
else if (!node->local.versionable)
reason = "not a tree_versionable_function";
else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
else if (node->get_availability () <= AVAIL_INTERPOSABLE)
reason = "insufficient body availability";
else if (!opt_for_fn (node->decl, optimize)
|| !opt_for_fn (node->decl, flag_ipa_cp))
@ -442,7 +442,7 @@ determine_versionability (struct cgraph_node *node)
}
/* Don't clone decls local to a comdat group; it breaks and for C++
decloned constructors, inlining is always better anyway. */
else if (symtab_comdat_local_p (node))
else if (node->comdat_local_p ())
reason = "comdat-local function";
if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
@ -491,8 +491,8 @@ gather_caller_stats (struct cgraph_node *node, void *data)
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p)
cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
stats, false);
cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
stats, false);
else
{
stats->count_sum += cs->count;
@ -512,7 +512,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
{
struct caller_statistics stats;
gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
gcc_checking_assert (node->has_gimple_body_p ());
if (!flag_ipa_cp_clone)
{
@ -533,7 +533,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
}
init_caller_stats (&stats);
cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
if (inline_summary (node)->self_size < stats.n_calls)
{
@ -698,7 +698,7 @@ initialize_node_lattices (struct cgraph_node *node)
bool disable = false, variable = false;
int i;
gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
gcc_checking_assert (node->has_gimple_body_p ());
if (!node->local.local)
{
/* When cloning is allowed, we can assume that externally visible
@ -897,7 +897,7 @@ ipcp_verify_propagated_values (void)
{
if (dump_file)
{
dump_symtab (dump_file);
symtab_node::dump_table (dump_file);
fprintf (dump_file, "\nIPA lattices after constant "
"propagation, before gcc_unreachable:\n");
print_all_lattices (dump_file, true, false);
@ -1435,10 +1435,10 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
bool ret = false;
int i, args_count, parms_count;
callee = cgraph_function_node (cs->callee, &availability);
callee = cs->callee->function_symbol (&availability);
if (!callee->definition)
return false;
gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
gcc_checking_assert (callee->has_gimple_body_p ());
callee_info = IPA_NODE_REF (callee);
args = IPA_EDGE_REF (cs);
@ -1452,7 +1452,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
of aliases first. */
alias_or_thunk = cs->callee;
while (alias_or_thunk->alias)
alias_or_thunk = cgraph_alias_target (alias_or_thunk);
alias_or_thunk = alias_or_thunk->get_alias_target ();
if (alias_or_thunk->thunk.thunk_p)
{
ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
@ -1468,7 +1468,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
struct ipcp_param_lattices *dest_plats;
dest_plats = ipa_get_parm_lattices (callee_info, i);
if (availability == AVAIL_OVERWRITABLE)
if (availability == AVAIL_INTERPOSABLE)
ret |= set_all_contains_variable (dest_plats);
else
{
@ -1599,7 +1599,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
(ie, cgraph_get_node (target)))
(ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
}
@ -1647,7 +1647,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
}
if (target && !possible_polymorphic_call_target_p (ie,
cgraph_get_node (target)))
cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
@ -1694,10 +1694,10 @@ devirtualization_time_bonus (struct cgraph_node *node,
/* Only bare minimum benefit for clearly un-inlineable targets. */
res += 1;
callee = cgraph_get_node (target);
callee = cgraph_node::get (target);
if (!callee || !callee->definition)
continue;
callee = cgraph_function_node (callee, &avail);
callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
continue;
isummary = inline_summary (callee);
@ -1931,7 +1931,8 @@ estimate_local_effects (struct cgraph_node *node)
int time, size;
init_caller_stats (&stats);
cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
false);
estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
known_aggs_ptrs, &size, &time, &hints);
time -= devirtualization_time_bonus (node, known_csts, known_binfos,
@ -1945,7 +1946,7 @@ estimate_local_effects (struct cgraph_node *node)
"time_benefit: %i\n", size, base_time - time);
if (size <= 0
|| cgraph_will_be_removed_from_program_if_no_direct_calls (node))
|| node->will_be_removed_from_program_if_no_direct_calls_p ())
{
info->do_clone_for_all_contexts = true;
base_time = time;
@ -2207,12 +2208,12 @@ propagate_constants_topo (struct topo_info *topo)
{
unsigned j;
struct cgraph_node *v, *node = topo->order[i];
vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* First, iteratively propagate within the strongly connected component
until all lattices stabilize. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
if (cgraph_function_with_gimple_body_p (v))
if (v->has_gimple_body_p ())
push_node_to_stack (topo, v);
v = pop_node_from_stack (topo);
@ -2231,7 +2232,7 @@ propagate_constants_topo (struct topo_info *topo)
the local effects of the discovered constants and all valid values to
their topological sort. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
if (cgraph_function_with_gimple_body_p (v))
if (v->has_gimple_body_p ())
{
struct cgraph_edge *cs;
@ -2314,7 +2315,7 @@ ipcp_propagate_stage (struct topo_info *topo)
struct ipa_node_params *info = IPA_NODE_REF (node);
determine_versionability (node);
if (cgraph_function_with_gimple_body_p (node))
if (node->has_gimple_body_p ())
{
info->lattices = XCNEWVEC (struct ipcp_param_lattices,
ipa_get_param_count (info));
@ -2407,8 +2408,8 @@ ipcp_discover_new_direct_edges (struct cgraph_node *node,
/* Vector of pointers which for linked lists of clones of an original crgaph
edge. */
static vec<cgraph_edge_p> next_edge_clone;
static vec<cgraph_edge_p> prev_edge_clone;
static vec<cgraph_edge *> next_edge_clone;
static vec<cgraph_edge *> prev_edge_clone;
static inline void
grow_edge_clone_vectors (void)
@ -2481,7 +2482,7 @@ cgraph_edge_brings_value_p (struct cgraph_edge *cs,
struct ipcp_value_source *src)
{
struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
cgraph_node *real_dest = cgraph_function_node (cs->callee);
cgraph_node *real_dest = cs->callee->function_symbol ();
struct ipa_node_params *dst_info = IPA_NODE_REF (real_dest);
if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
@ -2569,11 +2570,11 @@ get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
/* Return a vector of incoming edges that do bring value VAL. It is assumed
their number is known and equal to CALLER_COUNT. */
static vec<cgraph_edge_p>
static vec<cgraph_edge *>
gather_edges_for_value (struct ipcp_value *val, int caller_count)
{
struct ipcp_value_source *src;
vec<cgraph_edge_p> ret;
vec<cgraph_edge *> ret;
ret.create (caller_count);
for (src = val->sources; src; src = src->next)
@ -2657,10 +2658,12 @@ update_profiling_info (struct cgraph_node *orig_node,
return;
init_caller_stats (&stats);
cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
false);
orig_sum = stats.count_sum;
init_caller_stats (&stats);
cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
false);
new_sum = stats.count_sum;
if (orig_node_count < orig_sum + new_sum)
@ -2755,10 +2758,10 @@ static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
vec<tree> known_vals,
struct ipa_agg_replacement_value *aggvals,
vec<cgraph_edge_p> callers)
vec<cgraph_edge *> callers)
{
struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
struct ipa_agg_replacement_value *av;
struct cgraph_node *new_node;
int i, count = ipa_get_param_count (info);
@ -2798,8 +2801,8 @@ create_specialized_node (struct cgraph_node *node,
}
}
new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
args_to_skip, "constprop");
new_node = node->create_virtual_clone (callers, replace_trees,
args_to_skip, "constprop");
ipa_set_node_agg_value_chain (new_node, aggvals);
for (av = aggvals; av; av = av->next)
new_node->maybe_add_reference (av->value, IPA_REF_ADDR, NULL);
@ -2830,7 +2833,7 @@ create_specialized_node (struct cgraph_node *node,
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
vec<tree> known_vals,
vec<cgraph_edge_p> callers)
vec<cgraph_edge *> callers)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
@ -3139,7 +3142,7 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
vec<cgraph_edge_p> callers)
vec<cgraph_edge *> callers)
{
struct ipa_node_params *dest_info = IPA_NODE_REF (node);
struct ipa_agg_replacement_value *res = NULL;
@ -3346,10 +3349,9 @@ perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
while (cs)
{
enum availability availability;
struct cgraph_node *dst = cgraph_function_node (cs->callee,
&availability);
struct cgraph_node *dst = cs->callee->function_symbol (&availability);
if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
&& availability > AVAIL_OVERWRITABLE
&& availability > AVAIL_INTERPOSABLE
&& cgraph_edge_brings_value_p (cs, src))
{
if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
@ -3422,7 +3424,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
struct ipa_agg_replacement_value *aggvals;
int freq_sum, caller_count;
gcov_type count_sum;
vec<cgraph_edge_p> callers;
vec<cgraph_edge *> callers;
vec<tree> kv;
if (val->spec_node)
@ -3542,14 +3544,14 @@ decide_whether_version_node (struct cgraph_node *node)
if (info->do_clone_for_all_contexts)
{
struct cgraph_node *clone;
vec<cgraph_edge_p> callers;
vec<cgraph_edge *> callers;
if (dump_file)
fprintf (dump_file, " - Creating a specialized node of %s/%i "
"for all known contexts.\n", node->name (),
node->order);
callers = collect_callers_of_node (node);
callers = node->collect_callers ();
move_binfos_to_values (known_csts, known_binfos);
clone = create_specialized_node (node, known_csts,
known_aggs_to_agg_replacement_list (known_aggs),
@ -3582,7 +3584,7 @@ spread_undeadness (struct cgraph_node *node)
struct cgraph_node *callee;
struct ipa_node_params *info;
callee = cgraph_function_node (cs->callee, NULL);
callee = cs->callee->function_symbol (NULL);
info = IPA_NODE_REF (callee);
if (info->node_dead)
@ -3604,9 +3606,8 @@ has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p
&& cgraph_for_node_and_aliases (cs->caller,
has_undead_caller_from_outside_scc_p,
NULL, true))
&& cs->caller->call_for_symbol_thunks_and_aliases
(has_undead_caller_from_outside_scc_p, NULL, true))
return true;
else if (!ipa_edge_within_scc (cs)
&& !IPA_NODE_REF (cs->caller)->node_dead)
@ -3623,10 +3624,9 @@ identify_dead_nodes (struct cgraph_node *node)
{
struct cgraph_node *v;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
&& !cgraph_for_node_and_aliases (v,
has_undead_caller_from_outside_scc_p,
NULL, true))
if (v->will_be_removed_from_program_if_no_direct_calls_p ()
&& !v->call_for_symbol_thunks_and_aliases
(has_undead_caller_from_outside_scc_p, NULL, true))
IPA_NODE_REF (v)->node_dead = 1;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
@ -3663,7 +3663,7 @@ ipcp_decision_stage (struct topo_info *topo)
struct cgraph_node *v;
iterate = false;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
if (cgraph_function_with_gimple_body_p (v)
if (v->has_gimple_body_p ()
&& ipcp_versionable_function_p (v))
iterate |= decide_whether_version_node (v);

View File

@ -1269,7 +1269,7 @@ build_type_inheritance_graph (void)
FOR_EACH_SYMBOL (n)
if (is_a <cgraph_node *> (n)
&& DECL_VIRTUAL_P (n->decl)
&& symtab_real_symbol_p (n))
&& n->real_symbol_p ())
get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
true);
@ -1336,7 +1336,7 @@ referenced_from_vtable_p (struct cgraph_node *node)
for (i = 0; node->iterate_referring (i, ref); i++)
if ((ref->use == IPA_REF_ALIAS
&& referenced_from_vtable_p (cgraph (ref->referring)))
&& referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
|| (ref->use == IPA_REF_ADDR
&& TREE_CODE (ref->referring->decl) == VAR_DECL
&& DECL_VIRTUAL_P (ref->referring->decl)))
@ -1382,16 +1382,16 @@ maybe_record_node (vec <cgraph_node *> &nodes,
if (!target)
return;
target_node = cgraph_get_node (target);
target_node = cgraph_node::get (target);
/* Preffer alias target over aliases, so we do not get confused by
fake duplicates. */
if (target_node)
{
alias_target = cgraph_function_or_thunk_node (target_node, &avail);
alias_target = target_node->ultimate_alias_target (&avail);
if (target_node != alias_target
&& avail >= AVAIL_AVAILABLE
&& cgraph_function_body_availability (target_node))
&& target_node->get_availability ())
target_node = alias_target;
}
@ -1417,10 +1417,10 @@ maybe_record_node (vec <cgraph_node *> &nodes,
&& (TREE_PUBLIC (target)
|| DECL_EXTERNAL (target)
|| target_node->definition)
&& symtab_real_symbol_p (target_node))
&& target_node->real_symbol_p ())
{
gcc_assert (!target_node->global.inlined_to);
gcc_assert (symtab_real_symbol_p (target_node));
gcc_assert (target_node->real_symbol_p ());
if (!pointer_set_insert (inserted, target_node->decl))
{
pointer_set_insert (cached_polymorphic_call_targets,
@ -2672,7 +2672,7 @@ possible_polymorphic_call_target_p (tree otr_type,
return true;
targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
for (i = 0; i < targets.length (); i++)
if (symtab_semantically_equivalent_p (n, targets[i]))
if (n->semantically_equivalent_p (targets[i]))
return true;
/* At a moment we allow middle end to dig out new external declarations
@ -2700,7 +2700,7 @@ update_type_inheritance_graph (void)
FOR_EACH_FUNCTION (n)
if (DECL_VIRTUAL_P (n->decl)
&& !n->definition
&& symtab_real_symbol_p (n))
&& n->real_symbol_p ())
get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
true);
timevar_pop (TV_IPA_INHERITANCE);
@ -2827,8 +2827,8 @@ ipa_devirt (void)
struct cgraph_edge *e2;
struct ipa_ref *ref;
cgraph_speculative_call_info (e, e2, e, ref);
if (cgraph_function_or_thunk_node (e2->callee, NULL)
== cgraph_function_or_thunk_node (likely_target, NULL))
if (e2->callee->ultimate_alias_target ()
== likely_target->ultimate_alias_target ())
{
fprintf (dump_file, "We agree with speculation\n\n");
nok++;
@ -2860,7 +2860,7 @@ ipa_devirt (void)
}
/* Don't use an implicitly-declared destructor (c++/58678). */
struct cgraph_node *non_thunk_target
= cgraph_function_node (likely_target);
= likely_target->function_symbol ();
if (DECL_ARTIFICIAL (non_thunk_target->decl)
&& DECL_COMDAT (non_thunk_target->decl))
{
@ -2869,9 +2869,8 @@ ipa_devirt (void)
nartificial++;
continue;
}
if (cgraph_function_body_availability (likely_target)
<= AVAIL_OVERWRITABLE
&& symtab_can_be_discarded (likely_target))
if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
&& likely_target->can_be_discarded_p ())
{
if (dump_file)
fprintf (dump_file, "Target is overwritable\n\n");
@ -2889,11 +2888,10 @@ ipa_devirt (void)
likely_target->name (),
likely_target->order);
}
if (!symtab_can_be_discarded (likely_target))
if (!likely_target->can_be_discarded_p ())
{
cgraph_node *alias;
alias = cgraph (symtab_nonoverwritable_alias
(likely_target));
alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
if (alias)
likely_target = alias;
}

View File

@ -759,11 +759,11 @@ edge_set_predicate (struct cgraph_edge *e, struct predicate *predicate)
struct cgraph_node *callee = !e->inline_failed ? e->callee : NULL;
cgraph_redirect_edge_callee (e,
cgraph_get_create_node
cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE)));
e->inline_failed = CIF_UNREACHABLE;
if (callee)
cgraph_remove_node_and_inline_clones (callee, NULL);
callee->remove_symbol_and_inline_clones ();
}
if (predicate && !true_predicate_p (predicate))
{
@ -884,8 +884,7 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
vec<tree> *known_binfos_ptr,
vec<ipa_agg_jump_function_p> *known_aggs_ptr)
{
struct cgraph_node *callee =
cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
struct inline_summary *info = inline_summary (callee);
vec<tree> known_vals = vNULL;
vec<ipa_agg_jump_function_p> known_aggs = vNULL;
@ -1320,8 +1319,7 @@ dump_inline_edge_summary (FILE *f, int indent, struct cgraph_node *node,
for (edge = node->callees; edge; edge = edge->next_callee)
{
struct inline_edge_summary *es = inline_edge_summary (edge);
struct cgraph_node *callee =
cgraph_function_or_thunk_node (edge->callee, NULL);
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
int i;
fprintf (f,
@ -2598,7 +2596,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
if (is_gimple_call (stmt)
&& !gimple_call_internal_p (stmt))
{
struct cgraph_edge *edge = cgraph_edge (node, stmt);
struct cgraph_edge *edge = node->get_edge (stmt);
struct inline_edge_summary *es = inline_edge_summary (edge);
/* Special case: results of BUILT_IN_CONSTANT_P will be always
@ -2890,7 +2888,7 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
estimate_function_body_sizes (node, early);
for (e = node->callees; e; e = e->next_callee)
if (symtab_comdat_local_p (e->callee))
if (e->callee->comdat_local_p ())
break;
node->calls_comdat_local = (e != NULL);
@ -2914,7 +2912,7 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
static unsigned int
compute_inline_parameters_for_current (void)
{
compute_inline_parameters (cgraph_get_node (current_function_decl), true);
compute_inline_parameters (cgraph_node::get (current_function_decl), true);
return 0;
}
@ -2989,10 +2987,10 @@ estimate_edge_devirt_benefit (struct cgraph_edge *ie,
gcc_checking_assert (*time >= 0);
gcc_checking_assert (*size >= 0);
callee = cgraph_get_node (target);
callee = cgraph_node::get (target);
if (!callee || !callee->definition)
return false;
callee = cgraph_function_node (callee, &avail);
callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
return false;
isummary = inline_summary (callee);
@ -3666,7 +3664,7 @@ do_estimate_edge_time (struct cgraph_edge *edge)
struct inline_edge_summary *es = inline_edge_summary (edge);
int min_size;
callee = cgraph_function_or_thunk_node (edge->callee, NULL);
callee = edge->callee->ultimate_alias_target ();
gcc_checking_assert (edge->inline_failed);
evaluate_properties_for_edge (edge, true,
@ -3730,7 +3728,7 @@ do_estimate_edge_size (struct cgraph_edge *edge)
return size - (size > 0);
}
callee = cgraph_function_or_thunk_node (edge->callee, NULL);
callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
@ -3769,7 +3767,7 @@ do_estimate_edge_hints (struct cgraph_edge *edge)
return hints - 1;
}
callee = cgraph_function_or_thunk_node (edge->callee, NULL);
callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
@ -3863,7 +3861,7 @@ do_estimate_growth (struct cgraph_node *node)
struct growth_data d = { node, 0, false };
struct inline_summary *info = inline_summary (node);
cgraph_for_node_and_aliases (node, do_estimate_growth_1, &d, true);
node->call_for_symbol_thunks_and_aliases (do_estimate_growth_1, &d, true);
/* For self recursive functions the growth estimation really should be
infinity. We don't want to return very large values because the growth
@ -3875,13 +3873,13 @@ do_estimate_growth (struct cgraph_node *node)
;
else
{
if (cgraph_will_be_removed_from_program_if_no_direct_calls (node))
if (node->will_be_removed_from_program_if_no_direct_calls_p ())
d.growth -= info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
else if (DECL_COMDAT (node->decl)
&& cgraph_can_remove_if_no_direct_calls_p (node))
&& node->can_remove_if_no_direct_calls_p ())
d.growth -= (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
+ 50) / 100;
@ -3923,16 +3921,16 @@ growth_likely_positive (struct cgraph_node *node, int edge_growth ATTRIBUTE_UNUS
instead of
cgraph_will_be_removed_from_program_if_no_direct_calls */
if (DECL_EXTERNAL (node->decl)
|| !cgraph_can_remove_if_no_direct_calls_p (node))
|| !node->can_remove_if_no_direct_calls_p ())
return true;
/* If there is cached value, just go ahead. */
if ((int)node_growth_cache.length () > node->uid
&& (ret = node_growth_cache[node->uid]))
return ret > 0;
if (!cgraph_will_be_removed_from_program_if_no_direct_calls (node)
if (!node->will_be_removed_from_program_if_no_direct_calls_p ()
&& (!DECL_COMDAT (node->decl)
|| !cgraph_can_remove_if_no_direct_calls_p (node)))
|| !node->can_remove_if_no_direct_calls_p ()))
return true;
max_callers = inline_summary (node)->size * 4 / edge_growth + 2;
@ -4111,7 +4109,8 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
index = streamer_read_uhwi (&ib);
encoder = file_data->symtab_node_encoder;
node = cgraph (lto_symtab_encoder_deref (encoder, index));
node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
index));
info = inline_summary (node);
info->estimated_stack_size

View File

@ -88,7 +88,7 @@ can_remove_node_now_p_1 (struct cgraph_node *node)
return (!node->address_taken
&& !node->has_aliases_p ()
&& !node->used_as_abstract_origin
&& cgraph_can_remove_if_no_direct_calls_p (node)
&& node->can_remove_if_no_direct_calls_p ()
/* Inlining might enable more devirtualizing, so we want to remove
those only after all devirtualizable virtual calls are processed.
Lacking may edges in callgraph we just preserve them post
@ -114,8 +114,8 @@ can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
items can be removed. */
if (!node->same_comdat_group)
return true;
for (next = cgraph (node->same_comdat_group);
next != node; next = cgraph (next->same_comdat_group))
for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if ((next->callers && next->callers != e)
|| !can_remove_node_now_p_1 (next))
return false;
@ -165,7 +165,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
For now we keep the ohter functions in the group in program until
cgraph_remove_unreachable_functions gets rid of them. */
gcc_assert (!e->callee->global.inlined_to);
symtab_dissolve_same_comdat_group_list (e->callee);
e->callee->dissolve_same_comdat_group_list ();
if (e->callee->definition && !DECL_EXTERNAL (e->callee->decl))
{
if (overall_size)
@ -182,15 +182,17 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
if (freq_scale == -1)
freq_scale = e->frequency;
n = cgraph_clone_node (e->callee, e->callee->decl,
MIN (e->count, e->callee->count), freq_scale,
update_original, vNULL, true, inlining_into,
NULL);
n = e->callee->create_clone (e->callee->decl,
MIN (e->count, e->callee->count),
freq_scale,
update_original, vNULL, true,
inlining_into,
NULL);
cgraph_redirect_edge_callee (e, n);
}
}
else
symtab_dissolve_same_comdat_group_list (e->callee);
e->callee->dissolve_same_comdat_group_list ();
e->callee->global.inlined_to = inlining_into;
@ -222,14 +224,14 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
bool
inline_call (struct cgraph_edge *e, bool update_original,
vec<cgraph_edge_p> *new_edges,
vec<cgraph_edge *> *new_edges,
int *overall_size, bool update_overall_summary,
bool *callee_removed)
{
int old_size = 0, new_size = 0;
struct cgraph_node *to = NULL;
struct cgraph_edge *curr = e;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
bool new_edges_found = false;
#ifdef ENABLE_CHECKING
@ -261,8 +263,8 @@ inline_call (struct cgraph_edge *e, bool update_original,
if (!alias->callers
&& can_remove_node_now_p (alias, e))
{
next_alias = cgraph_alias_target (alias);
cgraph_remove_node (alias);
next_alias = alias->get_alias_target ();
alias->remove ();
if (callee_removed)
*callee_removed = true;
alias = next_alias;
@ -286,11 +288,11 @@ inline_call (struct cgraph_edge *e, bool update_original,
if (callee->calls_comdat_local)
to->calls_comdat_local = true;
else if (to->calls_comdat_local && symtab_comdat_local_p (callee))
else if (to->calls_comdat_local && callee->comdat_local_p ())
{
struct cgraph_edge *se = to->callees;
for (; se; se = se->next_callee)
if (se->inline_failed && symtab_comdat_local_p (se->callee))
if (se->inline_failed && se->callee->comdat_local_p ())
break;
if (se == NULL)
to->calls_comdat_local = false;
@ -336,13 +338,13 @@ save_inline_function_body (struct cgraph_node *node)
fprintf (dump_file, "\nSaving body of %s for later reuse\n",
node->name ());
gcc_assert (node == cgraph_get_node (node->decl));
gcc_assert (node == cgraph_node::get (node->decl));
/* first_clone will be turned into real function. */
first_clone = node->clones;
first_clone->decl = copy_node (node->decl);
first_clone->decl->decl_with_vis.symtab_node = first_clone;
gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
/* Now reshape the clone tree, so all other clones descends from
first_clone. */
@ -404,12 +406,12 @@ save_inline_function_body (struct cgraph_node *node)
Remove it now. */
if (!first_clone->callers)
{
cgraph_remove_node_and_inline_clones (first_clone, NULL);
first_clone->remove_symbol_and_inline_clones ();
first_clone = NULL;
}
#ifdef ENABLE_CHECKING
else
verify_cgraph_node (first_clone);
first_clone->verify ();
#endif
return first_clone;
}

View File

@ -147,7 +147,7 @@ static bool
caller_growth_limits (struct cgraph_edge *e)
{
struct cgraph_node *to = e->caller;
struct cgraph_node *what = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *what = e->callee->ultimate_alias_target ();
int newsize;
int limit = 0;
HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
@ -269,8 +269,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
{
bool inlinable = true;
enum availability avail;
struct cgraph_node *callee
= cgraph_function_or_thunk_node (e->callee, &avail);
cgraph_node *callee = e->callee->ultimate_alias_target (&avail);
tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
tree callee_tree
= callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
@ -302,7 +301,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
inlinable = false;
}
else if (avail <= AVAIL_OVERWRITABLE)
else if (avail <= AVAIL_INTERPOSABLE)
{
e->inline_failed = CIF_OVERWRITABLE;
inlinable = false;
@ -399,8 +398,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
static bool
can_early_inline_edge_p (struct cgraph_edge *e)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* Early inliner might get called at WPA stage when IPA pass adds new
function. In this case we can not really do any of early inlining
because function bodies are missing. */
@ -447,7 +445,7 @@ static bool
want_early_inline_function_p (struct cgraph_edge *e)
{
bool want_inline = true;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
@ -565,7 +563,7 @@ static bool
want_inline_small_function_p (struct cgraph_edge *e, bool report)
{
bool want_inline = true;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
@ -822,22 +820,23 @@ has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
static bool
want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold)
{
struct cgraph_node *function = cgraph_function_or_thunk_node (node, NULL);
struct cgraph_node *function = node->ultimate_alias_target ();
bool has_hot_call = false;
/* Does it have callers? */
if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
return false;
/* Already inlined? */
if (function->global.inlined_to)
return false;
if (cgraph_function_or_thunk_node (node, NULL) != node)
if (node->ultimate_alias_target () != node)
return false;
/* Inlining into all callers would increase size? */
if (estimate_growth (node) > 0)
return false;
/* All inlines must be possible. */
if (cgraph_for_node_and_aliases (node, check_callers, &has_hot_call, true))
if (node->call_for_symbol_thunks_and_aliases
(check_callers, &has_hot_call, true))
return false;
if (!cold && !has_hot_call)
return false;
@ -895,8 +894,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
{
gcov_type badness;
int growth, edge_time;
struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee,
NULL);
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
struct inline_summary *callee_info = inline_summary (callee);
inline_hints hints;
@ -1230,7 +1228,7 @@ update_callee_keys (fibheap_t heap, struct cgraph_node *node,
growth chould have just increased and consequentely badness metric
don't need updating. */
if (e->inline_failed
&& (callee = cgraph_function_or_thunk_node (e->callee, &avail))
&& (callee = e->callee->ultimate_alias_target (&avail))
&& inline_summary (callee)->inlinable
&& avail >= AVAIL_AVAILABLE
&& !bitmap_bit_p (updated_nodes, callee->uid))
@ -1273,8 +1271,8 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
for (e = where->callees; e; e = e->next_callee)
if (e->callee == node
|| (cgraph_function_or_thunk_node (e->callee, &avail) == node
&& avail > AVAIL_OVERWRITABLE))
|| (e->callee->ultimate_alias_target (&avail) == node
&& avail > AVAIL_INTERPOSABLE))
{
/* When profile feedback is available, prioritize by expected number
of calls. */
@ -1295,7 +1293,7 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
static bool
recursive_inlining (struct cgraph_edge *edge,
vec<cgraph_edge_p> *new_edges)
vec<cgraph_edge *> *new_edges)
{
int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
fibheap_t heap;
@ -1359,7 +1357,7 @@ recursive_inlining (struct cgraph_edge *edge,
for (cnode = curr->caller;
cnode->global.inlined_to; cnode = cnode->callers->caller)
if (node->decl
== cgraph_function_or_thunk_node (curr->callee, NULL)->decl)
== curr->callee->ultimate_alias_target ()->decl)
depth++;
if (!want_inline_self_recursive_call_p (curr, node, false, depth))
@ -1383,9 +1381,9 @@ recursive_inlining (struct cgraph_edge *edge,
if (!master_clone)
{
/* We need original clone to copy around. */
master_clone = cgraph_clone_node (node, node->decl,
node->count, CGRAPH_FREQ_BASE,
false, vNULL, true, NULL, NULL);
master_clone = node->create_clone (node->decl, node->count,
CGRAPH_FREQ_BASE, false, vNULL,
true, NULL, NULL);
for (e = master_clone->callees; e; e = e->next_callee)
if (!e->inline_failed)
clone_inlined_nodes (e, true, false, NULL, CGRAPH_FREQ_BASE);
@ -1420,9 +1418,9 @@ recursive_inlining (struct cgraph_edge *edge,
{
next = cgraph_next_function (node);
if (node->global.inlined_to == master_clone)
cgraph_remove_node (node);
node->remove ();
}
cgraph_remove_node (master_clone);
master_clone->remove ();
return true;
}
@ -1445,7 +1443,7 @@ compute_max_insns (int insns)
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge_p> new_edges)
add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge *> new_edges)
{
while (new_edges.length () > 0)
{
@ -1481,7 +1479,7 @@ bool
speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining)
{
enum availability avail;
struct cgraph_node *target = cgraph_function_or_thunk_node (e->callee, &avail);
struct cgraph_node *target = e->callee->ultimate_alias_target (&avail);
struct cgraph_edge *direct, *indirect;
struct ipa_ref *ref;
@ -1561,7 +1559,7 @@ inline_small_functions (void)
fibheap_t edge_heap = fibheap_new ();
bitmap updated_nodes = BITMAP_ALLOC (NULL);
int min_size, max_size;
auto_vec<cgraph_edge_p> new_indirect_edges;
auto_vec<cgraph_edge *> new_indirect_edges;
int initial_size = 0;
struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
struct cgraph_edge_hook_list *edge_removal_hook_holder;
@ -1582,7 +1580,7 @@ inline_small_functions (void)
FOR_EACH_DEFINED_FUNCTION (node)
if (!node->global.inlined_to)
{
if (cgraph_function_with_gimple_body_p (node)
if (node->has_gimple_body_p ()
|| node->thunk.thunk_p)
{
struct inline_summary *info = inline_summary (node);
@ -1714,7 +1712,7 @@ inline_small_functions (void)
continue;
}
callee = cgraph_function_or_thunk_node (edge->callee, NULL);
callee = edge->callee->ultimate_alias_target ();
growth = estimate_edge_growth (edge);
if (dump_file)
{
@ -1880,7 +1878,7 @@ flatten_function (struct cgraph_node *node, bool early)
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *orig_callee;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* We've hit cycle? It is time to give up. */
if (callee->aux)
@ -2242,10 +2240,10 @@ ipa_inline (void)
&& want_inline_function_to_all_callers_p (node, cold))
{
int num_calls = 0;
cgraph_for_node_and_aliases (node, sum_callers,
&num_calls, true);
while (cgraph_for_node_and_aliases (node, inline_to_all_callers,
&num_calls, true))
node->call_for_symbol_thunks_and_aliases (sum_callers, &num_calls,
true);
while (node->call_for_symbol_thunks_and_aliases (inline_to_all_callers,
&num_calls, true))
;
remove_functions = true;
}
@ -2282,7 +2280,7 @@ inline_always_inline_functions (struct cgraph_node *node)
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
continue;
@ -2330,7 +2328,7 @@ early_inline_small_functions (struct cgraph_node *node)
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!inline_summary (callee)->inlinable
|| !e->inline_failed)
continue;
@ -2403,7 +2401,7 @@ public:
unsigned int
pass_early_inline::execute (function *fun)
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *edge;
unsigned int todo = 0;
int iterations = 0;
@ -2422,7 +2420,7 @@ pass_early_inline::execute (function *fun)
return 0;
#ifdef ENABLE_CHECKING
verify_cgraph_node (node);
node->verify ();
#endif
node->remove_all_references ();

View File

@ -237,7 +237,7 @@ void compute_inline_parameters (struct cgraph_node *, bool);
bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
/* In ipa-inline-transform.c */
bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge_p> *, int *, bool,
bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge *> *, int *, bool,
bool *callee_removed = NULL);
unsigned int inline_transform (struct cgraph_node *);
void clone_inlined_nodes (struct cgraph_edge *e, bool, bool, int *,

View File

@ -208,7 +208,7 @@ ipa_profile_generate_summary (void)
counter 2 is total number of executions. */
if (h->hvalue.counters[2])
{
struct cgraph_edge * e = cgraph_edge (node, stmt);
struct cgraph_edge * e = node->get_edge (stmt);
if (e && !e->indirect_unknown_callee)
continue;
e->indirect_info->common_target_id
@ -408,7 +408,8 @@ ipa_propagate_frequency (struct cgraph_node *node)
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Processing frequency %s\n", node->name ());
cgraph_for_node_and_aliases (node, ipa_propagate_frequency_1, &d, true);
node->call_for_symbol_thunks_and_aliases (ipa_propagate_frequency_1, &d,
true);
if ((d.only_called_at_startup && !d.only_called_at_exit)
&& !node->only_called_at_startup)
@ -609,9 +610,8 @@ ipa_profile (void)
fprintf (dump_file,
"Not speculating: call is cold.\n");
}
else if (cgraph_function_body_availability (n2)
<= AVAIL_OVERWRITABLE
&& symtab_can_be_discarded (n2))
else if (n2->get_availability () <= AVAIL_INTERPOSABLE
&& n2->can_be_discarded_p ())
{
nuseless++;
if (dump_file)
@ -625,11 +625,10 @@ ipa_profile (void)
control flow goes to this particular implementation
of N2. Speculate on the local alias to allow inlining.
*/
if (!symtab_can_be_discarded (n2))
if (!n2->can_be_discarded_p ())
{
cgraph_node *alias;
alias = cgraph (symtab_nonoverwritable_alias
(n2));
alias = dyn_cast<cgraph_node *> (n2->noninterposable_alias ());
if (alias)
n2 = alias;
}

View File

@ -89,7 +89,7 @@ struct param_aa_status
struct ipa_bb_info
{
/* Call graph edges going out of this BB. */
vec<cgraph_edge_p> cg_edges;
vec<cgraph_edge *> cg_edges;
/* Alias analysis statuses of each formal parameter at this bb. */
vec<param_aa_status> param_aa_statuses;
};
@ -1979,7 +1979,7 @@ ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
if (callee)
{
cgraph_function_or_thunk_node (callee, NULL);
callee->ultimate_alias_target ();
/* We do not need to bother analyzing calls to unknown functions
unless they may become known during lto/whopr. */
if (!callee->definition && !flag_lto)
@ -2062,7 +2062,7 @@ ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
struct cgraph_edge *cs;
cs = cgraph_edge (node, stmt);
cs = node->get_edge (stmt);
cs->indirect_info->param_index = param_index;
cs->indirect_info->agg_contents = 0;
cs->indirect_info->member_ptr = 0;
@ -2339,7 +2339,7 @@ ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
/* If we previously turned the call into a direct call, there is
no need to analyze. */
struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
struct cgraph_edge *cs = fbi->node->get_edge (call);
if (cs && !cs->indirect_unknown_callee)
return;
if (TREE_CODE (target) == SSA_NAME)
@ -2574,7 +2574,7 @@ ipa_intraprocedural_devirtualization (gimple call)
#ifdef ENABLE_CHECKING
if (fndecl)
gcc_assert (possible_polymorphic_call_target_p
(otr, cgraph_get_node (fndecl)));
(otr, cgraph_node::get (fndecl)));
#endif
return fndecl;
}
@ -2798,14 +2798,14 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
}
target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
callee = cgraph_get_create_node (target);
callee = cgraph_node::get_create (target);
unreachable = true;
}
else
callee = cgraph_get_node (target);
callee = cgraph_node::get (target);
}
else
callee = cgraph_get_node (target);
callee = cgraph_node::get (target);
/* Because may-edges are not explicitely represented and vtable may be external,
we may create the first reference to the object in the unit. */
@ -2828,7 +2828,7 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
ie->callee->order);
return NULL;
}
callee = cgraph_get_create_node (target);
callee = cgraph_node::get_create (target);
}
if (!dbg_cnt (devirt))
@ -2950,7 +2950,7 @@ cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
|| TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
return NULL;
return cgraph_get_node (TREE_OPERAND (cst, 0));
return cgraph_node::get (TREE_OPERAND (cst, 0));
}
@ -3035,7 +3035,7 @@ ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
ie->caller->name (), ie->caller->order);
}
tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
cgraph_get_create_node (new_target);
cgraph_node::get_create (new_target);
return new_target;
}
@ -3072,7 +3072,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
(ie, cgraph_get_node (target)))
(ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
@ -3118,7 +3118,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
if (target)
{
if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
@ -3135,7 +3135,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
struct cgraph_node *node,
vec<cgraph_edge_p> *new_edges)
vec<cgraph_edge *> *new_edges)
{
struct ipa_edge_args *top;
struct cgraph_edge *ie, *next_ie, *new_direct_edge;
@ -3244,7 +3244,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
struct cgraph_node *node,
vec<cgraph_edge_p> *new_edges)
vec<cgraph_edge *> *new_edges)
{
struct cgraph_edge *e;
bool res;
@ -3312,7 +3312,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
if (t && TREE_CODE (t) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
&& (n = cgraph_get_node (TREE_OPERAND (t, 0)))
&& (n = cgraph_node::get (TREE_OPERAND (t, 0)))
&& (ref = new_root->find_reference (n, NULL, 0)))
{
if (dump_file)
@ -3338,7 +3338,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (cst, 0))
== FUNCTION_DECL);
n = cgraph_get_node (TREE_OPERAND (cst, 0));
n = cgraph_node::get (TREE_OPERAND (cst, 0));
if (n)
{
struct cgraph_node *clone;
@ -3399,7 +3399,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
vec<cgraph_edge_p> *new_edges)
vec<cgraph_edge *> *new_edges)
{
bool changed;
/* Do nothing if the preparation phase has not been carried out yet
@ -3653,7 +3653,7 @@ ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
if (cgraph_function_with_gimple_body_p (node))
if (node->has_gimple_body_p ())
ipa_analyze_node (node);
}
@ -3990,7 +3990,7 @@ void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
ipa_parm_adjustment_vec adjustments)
{
struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
vec<tree> vargs;
vec<tree, va_gc> **debug_args = NULL;
gimple new_stmt;
@ -4224,7 +4224,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
cgraph_set_call_stmt (cs, new_stmt);
do
{
ipa_record_stmt_references (current_node, gsi_stmt (gsi));
current_node->record_stmt_references (gsi_stmt (gsi));
gsi_prev (&gsi);
}
while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
@ -4854,7 +4854,7 @@ ipa_prop_write_jump_functions (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
if (cgraph_function_with_gimple_body_p (node)
if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
count++;
}
@ -4866,7 +4866,7 @@ ipa_prop_write_jump_functions (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
if (cgraph_function_with_gimple_body_p (node)
if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
ipa_write_node_info (ob, node);
}
@ -4907,7 +4907,8 @@ ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
node = cgraph (lto_symtab_encoder_deref (encoder, index));
node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
index));
gcc_assert (node->definition);
ipa_read_node_info (&ib_main, node, data_in);
}
@ -5030,7 +5031,7 @@ ipa_prop_write_all_agg_replacement (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
if (cgraph_function_with_gimple_body_p (node)
if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
count++;
}
@ -5041,7 +5042,7 @@ ipa_prop_write_all_agg_replacement (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
if (cgraph_function_with_gimple_body_p (node)
if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
write_agg_replacement_chain (ob, node);
}
@ -5083,7 +5084,8 @@ read_replacements_section (struct lto_file_decl_data *file_data,
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
node = cgraph (lto_symtab_encoder_deref (encoder, index));
node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
index));
gcc_assert (node->definition);
read_agg_replacement_chain (&ib_main, node, data_in);
}

View File

@ -576,7 +576,7 @@ ipa_get_agg_replacements_for_node (struct cgraph_node *node)
/* Function formal parameters related computations. */
void ipa_initialize_node_params (struct cgraph_node *node);
bool ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
vec<cgraph_edge_p> *new_edges);
vec<cgraph_edge *> *new_edges);
/* Indirect edge and binfo processing. */
tree ipa_get_indirect_edge_target (struct cgraph_edge *ie,

View File

@ -737,7 +737,7 @@ analyze_function (struct cgraph_node *fn, bool ipa)
l->can_throw = false;
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
flags_from_decl_or_type (fn->decl),
cgraph_node_cannot_return (fn));
fn->cannot_return_p ());
if (fn->thunk.thunk_p || fn->alias)
{
@ -840,14 +840,14 @@ end:
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
if (node->get_availability () < AVAIL_INTERPOSABLE)
return;
/* There are some shared nodes, in particular the initializers on
static declarations. We do not need to scan them more than once
since all we would be interested in are the addressof
operations. */
visited_nodes = pointer_set_create ();
if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
if (node->get_availability () > AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
@ -920,12 +920,12 @@ pure_const_generate_summary (void)
/* Process all of the functions.
We process AVAIL_OVERWRITABLE functions. We can not use the results
We process AVAIL_INTERPOSABLE functions. We can not use the results
by default, but the info can be used at LTO with -fwhole-program or
when function got cloned and the clone is AVAILABLE. */
FOR_EACH_DEFINED_FUNCTION (node)
if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
if (node->get_availability () >= AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
@ -1025,7 +1025,8 @@ pure_const_read_summary (void)
fs = XCNEW (struct funct_state_d);
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
node = cgraph (lto_symtab_encoder_deref (encoder, index));
node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
index));
set_function_state (node, fs);
/* Note that the flags must be read in the opposite
@ -1088,7 +1089,7 @@ self_recursive_p (struct cgraph_node *node)
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
if (cgraph_function_node (e->callee, NULL) == node)
if (e->callee->function_symbol () == node)
return true;
return false;
}
@ -1110,7 +1111,7 @@ propagate_pure_const (void)
order_pos = ipa_reduced_postorder (order, true, false, NULL);
if (dump_file)
{
dump_cgraph (dump_file);
cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced", order, order_pos);
}
@ -1155,7 +1156,7 @@ propagate_pure_const (void)
break;
/* For overwritable nodes we can not assume anything. */
if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
if (w->get_availability () == AVAIL_INTERPOSABLE)
{
worse_state (&pure_const_state, &looping,
w_l->state_previously_known,
@ -1182,7 +1183,7 @@ propagate_pure_const (void)
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
struct cgraph_node *y = e->callee->function_symbol (&avail);
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
@ -1193,7 +1194,7 @@ propagate_pure_const (void)
e->callee->name (),
e->callee->order);
}
if (avail > AVAIL_OVERWRITABLE)
if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
if (dump_file && (dump_flags & TDF_DETAILS))
@ -1344,7 +1345,7 @@ propagate_pure_const (void)
this_looping ? "looping " : "",
w->name ());
}
cgraph_set_const_flag (w, true, this_looping);
w->set_const_flag (true, this_looping);
break;
case IPA_PURE:
@ -1356,7 +1357,7 @@ propagate_pure_const (void)
this_looping ? "looping " : "",
w->name ());
}
cgraph_set_pure_flag (w, true, this_looping);
w->set_pure_flag (true, this_looping);
break;
default:
@ -1388,7 +1389,7 @@ propagate_nothrow (void)
order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
if (dump_file)
{
dump_cgraph (dump_file);
cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
}
@ -1412,7 +1413,7 @@ propagate_nothrow (void)
funct_state w_l = get_function_state (w);
if (w_l->can_throw
|| cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
|| w->get_availability () == AVAIL_INTERPOSABLE)
can_throw = true;
if (can_throw)
@ -1421,9 +1422,9 @@ propagate_nothrow (void)
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
struct cgraph_node *y = e->callee->function_symbol (&avail);
if (avail > AVAIL_OVERWRITABLE)
if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
@ -1459,7 +1460,7 @@ propagate_nothrow (void)
be different. */
if (!w->global.inlined_to)
{
cgraph_set_nothrow_flag (w, true);
w->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
w->name ());
@ -1569,7 +1570,7 @@ skip_function_for_local_pure_const (struct cgraph_node *node)
fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
return true;
}
if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
if (node->get_availability () <= AVAIL_INTERPOSABLE)
{
if (dump_file)
fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
@ -1619,7 +1620,7 @@ pass_local_pure_const::execute (function *fun)
bool skip;
struct cgraph_node *node;
node = cgraph_get_node (current_function_decl);
node = cgraph_node::get (current_function_decl);
skip = skip_function_for_local_pure_const (node);
if (!warn_suggest_attribute_const
&& !warn_suggest_attribute_pure
@ -1653,7 +1654,7 @@ pass_local_pure_const::execute (function *fun)
warn_function_const (current_function_decl, !l->looping);
if (!skip)
{
cgraph_set_const_flag (node, true, l->looping);
node->set_const_flag (true, l->looping);
changed = true;
}
if (dump_file)
@ -1666,7 +1667,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
cgraph_set_const_flag (node, true, false);
node->set_const_flag (true, false);
changed = true;
}
if (dump_file)
@ -1680,7 +1681,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
cgraph_set_pure_flag (node, true, l->looping);
node->set_pure_flag (true, l->looping);
changed = true;
}
warn_function_pure (current_function_decl, !l->looping);
@ -1694,7 +1695,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
cgraph_set_pure_flag (node, true, false);
node->set_pure_flag (true, false);
changed = true;
}
if (dump_file)
@ -1708,7 +1709,7 @@ pass_local_pure_const::execute (function *fun)
}
if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
{
cgraph_set_nothrow_flag (node, true);
node->set_nothrow_flag (true);
changed = true;
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",

View File

@ -87,7 +87,7 @@ ipa_ref::remove_reference ()
bool
ipa_ref::cannot_lead_to_return ()
{
return cgraph_node_cannot_return (dyn_cast <cgraph_node *> (referring));
return dyn_cast <cgraph_node *> (referring)->cannot_return_p ();
}
/* Return reference list this reference is in. */

View File

@ -179,7 +179,7 @@ bitmap
ipa_reference_get_not_read_global (struct cgraph_node *fn)
{
ipa_reference_optimization_summary_t info =
get_reference_optimization_summary (cgraph_function_node (fn, NULL));
get_reference_optimization_summary (fn->function_symbol (NULL));
if (info)
return info->statics_not_read;
else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
@ -355,14 +355,14 @@ propagate_bits (ipa_reference_global_vars_info_t x_global, struct cgraph_node *x
e = e->next_callee)
{
enum availability avail;
struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
struct cgraph_node *y = e->callee->function_symbol (&avail);
if (!y)
continue;
/* Only look into nodes we can propagate something. */
int flags = flags_from_decl_or_type (y->decl);
if (avail > AVAIL_OVERWRITABLE
|| (avail == AVAIL_OVERWRITABLE && (flags & ECF_LEAF)))
if (avail > AVAIL_INTERPOSABLE
|| (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF)))
{
if (get_reference_vars_info (y))
{
@ -479,7 +479,7 @@ analyze_function (struct cgraph_node *fn)
}
}
if (cgraph_node_cannot_return (fn))
if (fn->cannot_return_p ())
bitmap_clear (local->statics_written);
}
@ -550,7 +550,7 @@ generate_summary (void)
if (dump_file)
FOR_EACH_DEFINED_FUNCTION (node)
if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
if (node->get_availability () >= AVAIL_INTERPOSABLE)
{
ipa_reference_local_vars_info_t l;
unsigned int index;
@ -587,12 +587,11 @@ read_write_all_from_decl (struct cgraph_node *node,
tree decl = node->decl;
int flags = flags_from_decl_or_type (decl);
if ((flags & ECF_LEAF)
&& cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
&& node->get_availability () <= AVAIL_INTERPOSABLE)
;
else if (flags & ECF_CONST)
;
else if ((flags & ECF_PURE)
|| cgraph_node_cannot_return (node))
else if ((flags & ECF_PURE) || node->cannot_return_p ())
{
read_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
@ -621,7 +620,7 @@ get_read_write_all_from_node (struct cgraph_node *node,
struct cgraph_edge *e, *ie;
/* When function is overwritable, we can not assume anything. */
if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
if (node->get_availability () <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (node, read_all, write_all);
for (e = node->callees;
@ -629,9 +628,9 @@ get_read_write_all_from_node (struct cgraph_node *node,
e = e->next_callee)
{
enum availability avail;
struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
struct cgraph_node *callee = e->callee->function_symbol (&avail);
gcc_checking_assert (callee);
if (avail <= AVAIL_OVERWRITABLE)
if (avail <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (callee, read_all, write_all);
}
@ -666,7 +665,7 @@ propagate (void)
int i;
if (dump_file)
dump_cgraph (dump_file);
cgraph_node::dump_cgraph (dump_file);
ipa_discover_readonly_nonaddressable_vars ();
generate_summary ();
@ -702,7 +701,7 @@ propagate (void)
fprintf (dump_file, "Starting cycle with %s/%i\n",
node->asm_name (), node->order);
vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* If any node in a cycle is read_all or write_all, they all are. */
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
@ -742,7 +741,7 @@ propagate (void)
read_all = union_static_var_sets (node_g->statics_read,
w_l->statics_read);
if (!(flags & ECF_PURE)
&& !cgraph_node_cannot_return (w))
&& !w->cannot_return_p ())
write_all = union_static_var_sets (node_g->statics_written,
w_l->statics_written);
}
@ -778,7 +777,7 @@ propagate (void)
ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
ipa_reference_global_vars_info_t node_g = &node_info->global;
vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
@ -810,7 +809,7 @@ propagate (void)
node_info = get_reference_vars_info (node);
if (!node->alias
&& (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE
&& (node->get_availability () > AVAIL_INTERPOSABLE
|| (flags_from_decl_or_type (node->decl) & ECF_LEAF)))
{
node_g = &node_info->global;
@ -1055,7 +1054,8 @@ ipa_reference_read_optimization_summary (void)
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
node = cgraph (lto_symtab_encoder_deref (encoder, index));
node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref
(encoder, index));
info = XCNEW (struct ipa_reference_optimization_summary_d);
set_reference_optimization_summary (node, info);
info->statics_not_read = BITMAP_ALLOC (&optimization_summary_obstack);

View File

@ -1088,7 +1088,7 @@ split_function (struct split_point *split_point)
bitmap args_to_skip;
tree parm;
int num = 0;
struct cgraph_node *node, *cur_node = cgraph_get_node (current_function_decl);
cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
basic_block return_bb = find_return_bb ();
basic_block call_bb;
gimple_stmt_iterator gsi;
@ -1230,12 +1230,9 @@ split_function (struct split_point *split_point)
/* Now create the actual clone. */
rebuild_cgraph_edges ();
node = cgraph_function_versioning (cur_node, vNULL,
NULL,
args_to_skip,
!split_part_return_p,
split_point->split_bbs,
split_point->entry_bb, "part");
node = cur_node->create_version_clone_with_body
(vNULL, NULL, args_to_skip, !split_part_return_p, split_point->split_bbs,
split_point->entry_bb, "part");
/* Let's take a time profile for splitted function. */
node->tp_first_run = cur_node->tp_first_run + 1;
@ -1251,7 +1248,7 @@ split_function (struct split_point *split_point)
/* If the original function is declared inline, there is no point in issuing
a warning for the non-inlinable part. */
DECL_NO_INLINE_WARNING_P (node->decl) = 1;
cgraph_node_remove_callees (cur_node);
cur_node->remove_callees ();
cur_node->remove_all_references ();
if (!split_part_return_p)
TREE_THIS_VOLATILE (node->decl) = 1;
@ -1512,7 +1509,7 @@ execute_split_functions (void)
basic_block bb;
int overall_time = 0, overall_size = 0;
int todo = 0;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (flags_from_decl_or_type (current_function_decl)
& (ECF_NORETURN|ECF_MALLOC))

View File

@ -55,12 +55,12 @@ ipa_print_order (FILE* out,
fprintf (out, "\n\n ordered call graph: %s\n", note);
for (i = count - 1; i >= 0; i--)
dump_cgraph_node (out, order[i]);
order[i]->dump (out);
fprintf (out, "\n");
fflush (out);
}
struct searchc_env {
struct cgraph_node **stack;
int stack_size;
@ -103,14 +103,14 @@ searchc (struct searchc_env* env, struct cgraph_node *v,
{
struct ipa_dfs_info * w_info;
enum availability avail;
struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);
struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);
if (!w || (ignore_edge && ignore_edge (edge)))
continue;
if (w->aux
&& (avail > AVAIL_OVERWRITABLE
|| (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
&& (avail > AVAIL_INTERPOSABLE
|| (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
{
w_info = (struct ipa_dfs_info *) w->aux;
if (w_info->new_node)
@ -184,11 +184,11 @@ ipa_reduced_postorder (struct cgraph_node **order,
FOR_EACH_DEFINED_FUNCTION (node)
{
enum availability avail = cgraph_function_body_availability (node);
enum availability avail = node->get_availability ();
if (avail > AVAIL_OVERWRITABLE
if (avail > AVAIL_INTERPOSABLE
|| (allow_overwritable
&& (avail == AVAIL_OVERWRITABLE)))
&& (avail == AVAIL_INTERPOSABLE)))
{
/* Reuse the info if it is already there. */
struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
@ -240,10 +240,10 @@ ipa_free_postorder_info (void)
/* Get the set of nodes for the cycle in the reduced call graph starting
from NODE. */
vec<cgraph_node_ptr>
vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
vec<cgraph_node_ptr> v = vNULL;
vec<cgraph_node *> v = vNULL;
struct ipa_dfs_info *node_dfs_info;
while (node)
{
@ -262,7 +262,7 @@ ipa_edge_within_scc (struct cgraph_edge *cs)
{
struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
struct ipa_dfs_info *callee_dfs;
struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
struct cgraph_node *callee = cs->callee->function_symbol ();
callee_dfs = (struct ipa_dfs_info *) callee->aux;
return (caller_dfs
@ -307,7 +307,7 @@ ipa_reverse_postorder (struct cgraph_node **order)
|| (!node->address_taken
&& !node->global.inlined_to
&& !node->alias && !node->thunk.thunk_p
&& !cgraph_only_called_directly_p (node))))
&& !node->only_called_directly_p ())))
{
stack_size = 0;
stack[stack_size].node = node;
@ -329,7 +329,7 @@ ipa_reverse_postorder (struct cgraph_node **order)
functions to non-always-inline functions. */
if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
&& !DECL_DISREGARD_INLINE_LIMITS
(cgraph_function_node (edge->callee, NULL)->decl))
(edge->callee->function_symbol ()->decl))
node2 = NULL;
}
for (; stack[stack_size].node->iterate_referring (
@ -712,8 +712,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
gcc_assert (!*slot);
*slot = state;
}
cgraph_get_body (src);
cgraph_get_body (dst);
src->get_body ();
dst->get_body ();
srccfun = DECL_STRUCT_FUNCTION (src->decl);
dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
if (n_basic_blocks_for_fn (srccfun)
@ -814,7 +814,7 @@ ipa_merge_profiles (struct cgraph_node *dst,
(dst->decl,
gimple_bb (e->call_stmt));
}
cgraph_release_function_body (src);
src->release_body ();
inline_update_overall_summary (dst);
}
/* TODO: if there is no match, we can scale up. */
@ -826,9 +826,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
bool
recursive_call_p (tree func, tree dest)
{
struct cgraph_node *dest_node = cgraph_get_create_node (dest);
struct cgraph_node *cnode = cgraph_get_create_node (func);
struct cgraph_node *dest_node = cgraph_node::get_create (dest);
struct cgraph_node *cnode = cgraph_node::get_create (func);
return symtab_semantically_equivalent_p (dest_node,
cnode);
return dest_node->semantically_equivalent_p (cnode);
}

View File

@ -55,7 +55,7 @@ void ipa_print_order (FILE*, const char *, struct cgraph_node**, int);
int ipa_reduced_postorder (struct cgraph_node **, bool, bool,
bool (*ignore_edge) (struct cgraph_edge *));
void ipa_free_postorder_info (void);
vec<cgraph_node_ptr> ipa_get_nodes_in_cycle (struct cgraph_node *);
vec<cgraph_node *> ipa_get_nodes_in_cycle (struct cgraph_node *);
bool ipa_edge_within_scc (struct cgraph_edge *);
int ipa_reverse_postorder (struct cgraph_node **);
tree get_base_var (tree);

View File

@ -86,11 +86,11 @@ along with GCC; see the file COPYING3. If not see
/* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
static bool
cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
bool
cgraph_node::non_local_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
/* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
return !(cgraph_only_called_directly_or_aliased_p (node)
return !(node->only_called_directly_or_aliased_p ()
&& !node->has_aliases_p ()
&& node->definition
&& !DECL_EXTERNAL (node->decl)
@ -102,27 +102,28 @@ cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED
/* Return true when function can be marked local. */
bool
cgraph_local_node_p (struct cgraph_node *node)
cgraph_node::local_p (void)
{
struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
cgraph_node *n = ultimate_alias_target ();
/* FIXME: thunks can be considered local, but we need prevent i386
from attempting to change calling convention of them. */
if (n->thunk.thunk_p)
return false;
return !cgraph_for_node_and_aliases (n,
cgraph_non_local_node_p_1, NULL, true);
return !n->call_for_symbol_thunks_and_aliases (cgraph_node::non_local_p,
NULL, true);
}
/* Return true when there is a reference to node and it is not vtable. */
bool
address_taken_from_non_vtable_p (symtab_node *node)
symtab_node::address_taken_from_non_vtable_p (void)
{
int i;
struct ipa_ref *ref = NULL;
for (i = 0; node->iterate_referring (i, ref); i++)
for (i = 0; iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ADDR)
{
varpool_node *node;
@ -150,7 +151,7 @@ comdat_can_be_unshared_p_1 (symtab_node *node)
&& (TREE_CODE (node->decl) != FUNCTION_DECL
|| (!DECL_CXX_CONSTRUCTOR_P (node->decl)
&& !DECL_CXX_DESTRUCTOR_P (node->decl)))
&& address_taken_from_non_vtable_p (node))
&& node->address_taken_from_non_vtable_p ())
return false;
/* If the symbol is used in some weird way, better to not touch it. */
@ -223,7 +224,7 @@ cgraph_externally_visible_p (struct cgraph_node *node,
return true;
/* If linker counts on us, we must preserve the function. */
if (symtab_used_from_object_file_p (node))
if (node->used_from_object_file_p ())
return true;
if (DECL_PRESERVE_P (node->decl))
return true;
@ -274,7 +275,7 @@ varpool_externally_visible_p (varpool_node *vnode)
return false;
/* If linker counts on us, we must preserve the function. */
if (symtab_used_from_object_file_p (vnode))
if (vnode->used_from_object_file_p ())
return true;
if (DECL_HARD_REGISTER (vnode->decl))
@ -295,7 +296,7 @@ varpool_externally_visible_p (varpool_node *vnode)
Even if the linker clams the symbol is unused, never bring internal
symbols that are declared by user as used or externally visible.
This is needed for i.e. references from asm statements. */
if (symtab_used_from_object_file_p (vnode))
if (vnode->used_from_object_file_p ())
return true;
if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
return false;
@ -340,9 +341,9 @@ varpool_externally_visible_p (varpool_node *vnode)
bool
can_replace_by_local_alias (symtab_node *node)
{
return (symtab_node_availability (node) > AVAIL_OVERWRITABLE
return (node->get_availability () > AVAIL_INTERPOSABLE
&& !decl_binds_to_current_def_p (node->decl)
&& !symtab_can_be_discarded (node));
&& !node->can_be_discarded_p ());
}
/* Return true if we can replace refernece to NODE by local alias
@ -366,8 +367,8 @@ update_vtable_references (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNU
if (TREE_CODE (*tp) == VAR_DECL
|| TREE_CODE (*tp) == FUNCTION_DECL)
{
if (can_replace_by_local_alias_in_vtable (symtab_get_node (*tp)))
*tp = symtab_nonoverwritable_alias (symtab_get_node (*tp))->decl;
if (can_replace_by_local_alias_in_vtable (symtab_node::get (*tp)))
*tp = symtab_node::get (*tp)->noninterposable_alias ()->decl;
*walk_subtrees = 0;
}
else if (IS_TYPE_OR_DECL_P (*tp))
@ -416,7 +417,7 @@ update_visibility_by_resolution_info (symtab_node * node)
DECL_WEAK (node->decl) = false;
if (!define)
DECL_EXTERNAL (node->decl) = true;
symtab_dissolve_same_comdat_group_list (node);
node->dissolve_same_comdat_group_list ();
}
/* Decide on visibility of all symbols. */
@ -473,7 +474,7 @@ function_and_variable_visibility (bool whole_program)
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
#endif
symtab_dissolve_same_comdat_group_list (node);
node->dissolve_same_comdat_group_list ();
}
gcc_assert ((!DECL_WEAK (node->decl)
&& !DECL_COMDAT (node->decl))
@ -514,7 +515,7 @@ function_and_variable_visibility (bool whole_program)
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
symtab_make_decl_local (next->decl);
next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
@ -524,13 +525,13 @@ function_and_variable_visibility (bool whole_program)
in the group and they will all be made local. We need to
dissolve the group at once so that the predicate does not
segfault though. */
symtab_dissolve_same_comdat_group_list (node);
node->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (node->decl))
node->set_comdat_group (NULL);
if (DECL_COMDAT (node->decl) && !node->alias)
node->set_section (NULL);
symtab_make_decl_local (node->decl);
node->make_decl_local ();
}
if (node->thunk.thunk_p
@ -538,7 +539,7 @@ function_and_variable_visibility (bool whole_program)
{
struct cgraph_node *decl_node = node;
decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
decl_node = decl_node->callees->callee->function_symbol ();
/* Thunks have the same visibility as function they are attached to.
Make sure the C++ front end set this up properly. */
@ -546,7 +547,7 @@ function_and_variable_visibility (bool whole_program)
{
gcc_checking_assert (DECL_COMDAT (node->decl)
== DECL_COMDAT (decl_node->decl));
gcc_checking_assert (symtab_in_same_comdat_p (node, decl_node));
gcc_checking_assert (node->in_same_comdat_group_p (decl_node));
gcc_checking_assert (node->same_comdat_group);
}
node->forced_by_abi = decl_node->forced_by_abi;
@ -558,18 +559,19 @@ function_and_variable_visibility (bool whole_program)
}
FOR_EACH_DEFINED_FUNCTION (node)
{
node->local.local |= cgraph_local_node_p (node);
node->local.local |= node->local_p ();
/* If we know that function can not be overwritten by a different semantics
and moreover its section can not be discarded, replace all direct calls
by calls to an nonoverwritable alias. This make dynamic linking
by calls to an noninterposable alias. This make dynamic linking
cheaper and enable more optimization.
TODO: We can also update virtual tables. */
if (node->callers
&& can_replace_by_local_alias (node))
{
struct cgraph_node *alias = cgraph (symtab_nonoverwritable_alias (node));
cgraph_node *alias = dyn_cast<cgraph_node *>
(node->noninterposable_alias ());
if (alias && alias != node)
{
@ -650,19 +652,19 @@ function_and_variable_visibility (bool whole_program)
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
symtab_make_decl_local (next->decl);
next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
&& TREE_PUBLIC (next->decl));
}
symtab_dissolve_same_comdat_group_list (vnode);
vnode->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (vnode->decl))
vnode->set_comdat_group (NULL);
if (DECL_COMDAT (vnode->decl) && !vnode->alias)
vnode->set_section (NULL);
symtab_make_decl_local (vnode->decl);
vnode->make_decl_local ();
vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
}
update_visibility_by_resolution_info (vnode);

View File

@ -193,13 +193,13 @@ walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
if (targets.length () == 1)
target = targets[0];
else
target = cgraph_get_create_node
target = cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (dump_enabled_p ())
{
location_t locus = gimple_location_safe (edge->call_stmt);
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
location_t locus = gimple_location (edge->call_stmt);
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
"devirtualizing call in %s/%i to %s/%i\n",
edge->caller->name (), edge->caller->order,
target->name (),
@ -301,7 +301,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (node->definition
&& !node->global.inlined_to
&& !node->in_other_partition
&& !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
&& !node->can_remove_if_no_direct_calls_and_refs_p ())
{
gcc_assert (!node->global.inlined_to);
pointer_set_insert (reachable, node);
@ -338,7 +338,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
&& DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
= cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
= cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
origin_node->used_as_abstract_origin = true;
enqueue_node (origin_node, &first, reachable);
}
@ -352,7 +352,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
for (next = node->same_comdat_group;
next != node;
next = next->same_comdat_group)
if (!symtab_comdat_local_p (next)
if (!next->comdat_local_p ()
&& !pointer_set_insert (reachable, next))
enqueue_node (next, &first, reachable);
}
@ -394,10 +394,8 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (DECL_EXTERNAL (e->callee->decl)
&& e->callee->alias
&& before_inlining_p)
{
pointer_set_insert (reachable,
cgraph_function_node (e->callee));
}
pointer_set_insert (reachable,
e->callee->function_symbol ());
pointer_set_insert (reachable, e->callee);
}
enqueue_node (e->callee, &first, reachable);
@ -460,14 +458,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
if (file)
fprintf (file, " %s/%i", node->name (), node->order);
cgraph_remove_node (node);
node->remove ();
changed = true;
}
/* If node is unreachable, remove its body. */
else if (!pointer_set_contains (reachable, node))
{
if (!pointer_set_contains (body_needed_for_clonning, node->decl))
cgraph_release_function_body (node);
node->release_body ();
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
if (node->definition)
@ -489,14 +487,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
DECL_ATTRIBUTES (node->decl));
if (!node->in_other_partition)
node->local.local = false;
cgraph_node_remove_callees (node);
symtab_remove_from_same_comdat_group (node);
node->remove_callees ();
node->remove_from_same_comdat_group ();
node->remove_all_references ();
changed = true;
}
}
else
gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
gcc_assert (node->clone_of || !node->has_gimple_body_p ()
|| in_lto_p || DECL_RESULT (node->decl));
}
@ -529,7 +527,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
if (file)
fprintf (file, " %s/%i", vnode->name (), vnode->order);
varpool_remove_node (vnode);
vnode->remove ();
changed = true;
}
else if (!pointer_set_contains (reachable, vnode))
@ -546,7 +544,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
vnode->analyzed = false;
vnode->aux = NULL;
symtab_remove_from_same_comdat_group (vnode);
vnode->remove_from_same_comdat_group ();
/* Keep body if it may be useful for constant folding. */
if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
@ -570,13 +568,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (node->address_taken
&& !node->used_from_other_partition)
{
if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
if (!node->call_for_symbol_thunks_and_aliases
(has_addr_references_p, NULL, true))
{
if (file)
fprintf (file, " %s", node->name ());
node->address_taken = false;
changed = true;
if (cgraph_local_node_p (node))
if (node->local_p ())
{
node->local.local = true;
if (file)
@ -588,7 +587,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
fprintf (file, "\n");
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
#endif
/* If we removed something, perhaps profile could be improved. */
@ -630,8 +629,8 @@ process_references (varpool_node *vnode,
*written = true;
break;
case IPA_REF_ALIAS:
process_references (varpool (ref->referring), written, address_taken,
read, explicit_refs);
process_references (dyn_cast<varpool_node *> (ref->referring), written,
address_taken, read, explicit_refs);
break;
}
}
@ -839,7 +838,7 @@ cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
gimplify_function_tree (decl);
cgraph_add_new_function (decl, false);
cgraph_node::add_new_function (decl, false);
set_cfun (NULL);
current_function_decl = NULL;
@ -875,7 +874,7 @@ record_cdtor_fn (struct cgraph_node *node)
static_ctors.safe_push (node->decl);
if (DECL_STATIC_DESTRUCTOR (node->decl))
static_dtors.safe_push (node->decl);
node = cgraph_get_node (node->decl);
node = cgraph_node::get (node->decl);
DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
@ -1147,9 +1146,7 @@ propagate_single_user (varpool_node *vnode, cgraph_node *function,
function = meet (function, varpool_alias_target (vnode), single_user_map);
/* Check all users and see if they correspond to a single function. */
for (i = 0;
vnode->iterate_referring (i, ref)
&& function != BOTTOM; i++)
for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
{
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
if (cnode)
@ -1215,8 +1212,7 @@ ipa_single_use (void)
single_user_map.put (var, user);
/* Enqueue all aliases for re-processing. */
for (i = 0;
var->iterate_referring (i, ref); i++)
for (i = 0; var->iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ALIAS
&& !ref->referring->aux)
{
@ -1224,8 +1220,7 @@ ipa_single_use (void)
first = dyn_cast <varpool_node *> (ref->referring);
}
/* Enqueue all users for re-processing. */
for (i = 0;
var->iterate_reference (i, ref); i++)
for (i = 0; var->iterate_reference (i, ref); i++)
if (!ref->referred->aux
&& ref->referred->definition
&& is_a <varpool_node *> (ref->referred))

View File

@ -1906,7 +1906,7 @@ java_mark_decl_local (tree decl)
/* Double check that we didn't pass the function to the callgraph early. */
if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_node (decl);
struct cgraph_node *node = cgraph_node::get (decl);
gcc_assert (!node || !node->definition);
}
#endif

View File

@ -423,7 +423,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
translation units into SET during IPA-inlining. We make them as
local static nodes to prevent clashes with other local statics. */
if (boundary_p && node->analyzed
&& symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
&& node->get_partitioning_class () == SYMBOL_PARTITION)
{
/* Inline clones can not be part of boundary.
gcc_assert (!node->global.inlined_to);
@ -523,7 +523,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
bp_pack_value (&bp, node->implicit_section, 1);
bp_pack_value (&bp, node->address_taken, 1);
bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
&& symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION
&& node->get_partitioning_class () == SYMBOL_PARTITION
&& (reachable_from_other_partition_p (node, encoder)
|| referenced_from_other_partition_p (node, encoder)), 1);
bp_pack_value (&bp, node->lowered, 1);
@ -600,7 +600,7 @@ lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
/* Constant pool initializers can be de-unified into individual ltrans units.
FIXME: Alternatively at -Os we may want to avoid generating for them the local
labels and share them across LTRANS partitions. */
if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
if (node->get_partitioning_class () != SYMBOL_PARTITION)
{
bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
bp_pack_value (&bp, 0, 1); /* in_other_partition. */
@ -837,7 +837,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
if (DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
= cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->decl));
= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
add_node_to (encoder, origin_node, true);
}
}
@ -960,8 +960,7 @@ output_symtab (void)
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
lto_output_node (ob, cnode, encoder);
else
lto_output_varpool_node (ob, varpool (node), encoder);
lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
}
/* Go over the nodes in SET again to write edges. */
@ -1130,18 +1129,18 @@ input_node (struct lto_file_decl_data *file_data,
if (clone_ref != LCC_NOT_FOUND)
{
node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
0, CGRAPH_FREQ_BASE, false,
vNULL, false, NULL, NULL);
node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
0, CGRAPH_FREQ_BASE, false,
vNULL, false, NULL, NULL);
}
else
{
/* Declaration of functions can be already merged with a declaration
from other input file. We keep cgraph unmerged until after streaming
of ipa passes is done. Alays forcingly create a fresh node. */
node = cgraph_create_empty_node ();
node = cgraph_node::create_empty ();
node->decl = fn_decl;
symtab_register_node (node);
node->register_symbol ();
}
node->order = order;
@ -1185,7 +1184,7 @@ input_node (struct lto_file_decl_data *file_data,
input_overwrite_node (file_data, node, tag, &bp);
/* Store a reference for now, and fix up later to be a pointer. */
node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
if (group)
{
@ -1245,7 +1244,7 @@ input_varpool_node (struct lto_file_decl_data *file_data,
of ipa passes is done. Alays forcingly create a fresh node. */
node = varpool_create_empty_node ();
node->decl = var_decl;
symtab_register_node (node);
node->register_symbol ();
node->order = order;
if (order >= symtab_order)
@ -1339,13 +1338,13 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
struct bitpack_d bp;
int ecf_flags = 0;
caller = cgraph (nodes[streamer_read_hwi (ib)]);
caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (caller == NULL || caller->decl == NULL_TREE)
internal_error ("bytecode stream: no caller found while reading edge");
if (!indirect)
{
callee = cgraph (nodes[streamer_read_hwi (ib)]);
callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (callee == NULL || callee->decl == NULL_TREE)
internal_error ("bytecode stream: no callee found while reading edge");
}
@ -1360,9 +1359,9 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
freq = (int) bp_unpack_var_len_unsigned (&bp);
if (indirect)
edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
edge = caller->create_indirect_edge (NULL, 0, count, freq);
else
edge = cgraph_create_edge (caller, callee, NULL, count, freq);
edge = caller->create_edge (callee, NULL, count, freq);
edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
edge->speculative = bp_unpack_value (&bp, 1);
@ -1450,7 +1449,8 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
dyn_cast<cgraph_node *> (node)->global.inlined_to
= dyn_cast<cgraph_node *> (nodes[ref]);
else
cnode->global.inlined_to = NULL;
}
@ -1917,7 +1917,7 @@ input_cgraph_opt_section (struct lto_file_decl_data *file_data,
for (i = 0; i < count; i++)
{
int ref = streamer_read_uhwi (&ib_main);
input_node_opt_summary (cgraph (nodes[ref]),
input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
&ib_main, data_in);
}
lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,

View File

@ -936,9 +936,9 @@ input_function (tree fn_decl, struct data_in *data_in,
gimple_register_cfg_hooks ();
node = cgraph_get_node (fn_decl);
node = cgraph_node::get (fn_decl);
if (!node)
node = cgraph_create_node (fn_decl);
node = cgraph_node::create (fn_decl);
input_struct_function_base (fn, data_in, ib);
input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);

View File

@ -2318,8 +2318,8 @@ write_symbol (struct streamer_tree_cache_d *cache,
gcc_assert (alias || TREE_CODE (t) != VAR_DECL
|| varpool_get_node (t)->definition);
gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
|| (cgraph_get_node (t)
&& cgraph_get_node (t)->definition));
|| (cgraph_node::get (t)
&& cgraph_node::get (t)->definition));
}
/* Imitate what default_elf_asm_output_external do.
@ -2377,7 +2377,7 @@ bool
output_symbol_p (symtab_node *node)
{
struct cgraph_node *cnode;
if (!symtab_real_symbol_p (node))
if (!node->real_symbol_p ())
return false;
/* We keep external functions in symtab for sake of inlining
and devirtualization. We do not want to see them in symbol table as

View File

@ -1075,14 +1075,14 @@ lsei_node (lto_symtab_encoder_iterator lsei)
static inline struct cgraph_node *
lsei_cgraph_node (lto_symtab_encoder_iterator lsei)
{
return cgraph (lsei.encoder->nodes[lsei.index].node);
return dyn_cast<cgraph_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the node pointed to by LSI. */
static inline varpool_node *
lsei_varpool_node (lto_symtab_encoder_iterator lsei)
{
return varpool (lsei.encoder->nodes[lsei.index].node);
return dyn_cast<varpool_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the cgraph node corresponding to REF using ENCODER. */

View File

@ -90,13 +90,14 @@ add_references_to_partition (ltrans_partition part, symtab_node *node)
/* Add all duplicated references to the partition. */
for (i = 0; node->iterate_reference (i, ref); i++)
if (symtab_get_symbol_partitioning_class (ref->referred) == SYMBOL_DUPLICATE)
if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, ref->referred);
/* References to a readonly variable may be constant foled into its value.
Recursively look into the initializers of the constant variable and add
references, too. */
else if (is_a <varpool_node *> (ref->referred)
&& varpool_ctor_useable_for_folding_p (varpool (ref->referred))
&& varpool_ctor_useable_for_folding_p
(dyn_cast <varpool_node *> (ref->referred))
&& !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
{
if (!part->initializers_visited)
@ -112,7 +113,7 @@ add_references_to_partition (ltrans_partition part, symtab_node *node)
static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
enum symbol_partitioning_class c = symtab_get_symbol_partitioning_class (node);
enum symbol_partitioning_class c = node->get_partitioning_class ();
struct ipa_ref *ref;
symtab_node *node1;
@ -155,7 +156,7 @@ add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
for (e = cnode->callees; e; e = e->next_callee)
if (!e->inline_failed)
add_symbol_to_partition_1 (part, e->callee);
else if (symtab_get_symbol_partitioning_class (e->callee) == SYMBOL_DUPLICATE)
else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, e->callee);
/* Add all thunks associated with the function. */
@ -196,7 +197,7 @@ contained_in_symbol (symtab_node *node)
return node;
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
cnode = cgraph_function_node (cnode, NULL);
cnode = cnode->function_symbol ();
if (cnode->global.inlined_to)
cnode = cnode->global.inlined_to;
return cnode;
@ -215,7 +216,7 @@ add_symbol_to_partition (ltrans_partition part, symtab_node *node)
symtab_node *node1;
/* Verify that we do not try to duplicate something that can not be. */
gcc_checking_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| !symbol_partitioned_p (node));
while ((node1 = contained_in_symbol (node)) != node)
@ -228,7 +229,7 @@ add_symbol_to_partition (ltrans_partition part, symtab_node *node)
Be lax about comdats; they may or may not be duplicated and we may
end up in need to duplicate keyed comdat because it has unkeyed alias. */
gcc_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| DECL_COMDAT (node->decl)
|| !symbol_partitioned_p (node));
@ -276,7 +277,7 @@ lto_1_to_1_map (void)
FOR_EACH_SYMBOL (node)
{
if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
@ -328,7 +329,7 @@ lto_max_map (void)
FOR_EACH_SYMBOL (node)
{
if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
partition = new_partition (node->asm_name ());
@ -438,7 +439,7 @@ lto_balanced_map (int n_lto_partitions)
gcc_assert (!vnode->aux);
FOR_EACH_DEFINED_FUNCTION (node)
if (symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
if (node->get_partitioning_class () == SYMBOL_PARTITION)
{
order[n_nodes++] = node;
if (!node->alias)
@ -459,13 +460,13 @@ lto_balanced_map (int n_lto_partitions)
if (!flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
n_varpool_nodes++;
varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);
n_varpool_nodes = 0;
FOR_EACH_VARIABLE (vnode)
if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
varpool_order[n_varpool_nodes++] = vnode;
qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
varpool_node_cmp);
@ -585,7 +586,7 @@ lto_balanced_map (int n_lto_partitions)
if (!vnode->definition)
continue;
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
&& symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
&& vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
@ -622,7 +623,7 @@ lto_balanced_map (int n_lto_partitions)
number of symbols promoted to hidden. */
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
&& !varpool_can_remove_if_no_refs (vnode)
&& symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
&& vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
@ -714,7 +715,7 @@ lto_balanced_map (int n_lto_partitions)
if (flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION
if (vnode->get_partitioning_class () == SYMBOL_PARTITION
&& !symbol_partitioned_p (vnode))
add_symbol_to_partition (partition, vnode);
}
@ -814,7 +815,7 @@ may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
if (!cnode)
return false;
if (symtab_real_symbol_p (node))
if (node->real_symbol_p ())
return false;
return (!encoder
|| (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
@ -841,7 +842,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
external symbols (i.e. those not defined). Remove this test
once this is fixed. */
|| DECL_EXTERNAL (node->decl)
|| !symtab_real_symbol_p (node))
|| !node->real_symbol_p ())
&& !may_need_named_section_p (encoder, node))
return;
@ -850,7 +851,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
same name as external or public symbol.) */
for (s = symtab_node_for_asm (name);
s; s = s->next_sharing_asm_name)
if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
&& s->decl != node->decl
&& (!encoder
|| lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
@ -868,7 +869,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
mangled name. */
for (s = symtab_node_for_asm (name); s;)
if (!s->externally_visible
&& ((symtab_real_symbol_p (s)
&& ((s->real_symbol_p ()
&& !DECL_EXTERNAL (node->decl)
&& !TREE_PUBLIC (node->decl))
|| may_need_named_section_p (encoder, s))
@ -924,7 +925,7 @@ lto_promote_cross_file_statics (void)
|| lto_symtab_encoder_in_partition_p (encoder, node)
/* ... or if we do not partition it. This mean that it will
appear in every partition refernecing it. */
|| symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
|| node->get_partitioning_class () != SYMBOL_PARTITION)
continue;
promote_symbol (node);

View File

@ -59,13 +59,13 @@ lto_cgraph_replace_node (struct cgraph_node *node,
/* Merge node flags. */
if (node->force_output)
cgraph_mark_force_output_node (prevailing_node);
prevailing_node->mark_force_output ();
if (node->forced_by_abi)
prevailing_node->forced_by_abi = true;
if (node->address_taken)
{
gcc_assert (!prevailing_node->global.inlined_to);
cgraph_mark_address_taken_node (prevailing_node);
prevailing_node->mark_address_taken ();
}
/* Redirect all incoming edges. */
@ -90,10 +90,10 @@ lto_cgraph_replace_node (struct cgraph_node *node,
lto_free_function_in_decl_state_for_node (node);
if (node->decl != prevailing_node->decl)
cgraph_release_function_body (node);
node->release_body ();
/* Finally remove the replaced node. */
cgraph_remove_node (node);
node->remove ();
}
/* Replace the cgraph node NODE with PREVAILING_NODE in the cgraph, merging
@ -126,7 +126,7 @@ lto_varpool_replace_node (varpool_node *vnode,
tls_model_names [prevailing_node->tls_model]);
}
/* Finally remove the replaced node. */
varpool_remove_node (vnode);
vnode->remove ();
}
/* Merge two variable or function symbol table entries PREVAILING and ENTRY.
@ -261,7 +261,7 @@ lto_symtab_symbol_p (symtab_node *e)
{
if (!TREE_PUBLIC (e->decl) && !DECL_EXTERNAL (e->decl))
return false;
return symtab_real_symbol_p (e);
return e->real_symbol_p ();
}
/* Return true if the symtab entry E can be the prevailing one. */
@ -445,7 +445,7 @@ lto_symtab_merge_decls_1 (symtab_node *first)
first->asm_name ());
for (e = first; e; e = e->next_sharing_asm_name)
if (TREE_PUBLIC (e->decl))
dump_symtab_node (cgraph_dump_file, e);
e->dump (cgraph_dump_file);
}
/* Compute the symbol resolutions. This is a no-op when using the
@ -539,7 +539,7 @@ lto_symtab_merge_decls_1 (symtab_node *first)
{
fprintf (cgraph_dump_file, "After resolution:\n");
for (e = prevailing; e; e = e->next_sharing_asm_name)
dump_symtab_node (cgraph_dump_file, e);
e->dump (cgraph_dump_file);
}
}
@ -577,9 +577,9 @@ lto_symtab_merge_symbols_1 (symtab_node *prevailing)
continue;
cgraph_node *ce = dyn_cast <cgraph_node *> (e);
if (ce && !DECL_BUILT_IN (e->decl))
lto_cgraph_replace_node (ce, cgraph (prevailing));
lto_cgraph_replace_node (ce, dyn_cast<cgraph_node *> (prevailing));
if (varpool_node *ve = dyn_cast <varpool_node *> (e))
lto_varpool_replace_node (ve, varpool (prevailing));
lto_varpool_replace_node (ve, dyn_cast<varpool_node *> (prevailing));
}
return;
@ -620,7 +620,7 @@ lto_symtab_merge_symbols (void)
symtab_node *tgt = symtab_node_for_asm (node->alias_target);
gcc_assert (node->weakref);
if (tgt)
symtab_resolve_alias (node, tgt);
node->resolve_alias (tgt);
}
node->aux = NULL;
@ -632,7 +632,7 @@ lto_symtab_merge_symbols (void)
possible that tree merging unified the declaration. We
do not want duplicate entries in symbol table. */
if (cnode && DECL_BUILT_IN (node->decl)
&& (cnode2 = cgraph_get_node (node->decl))
&& (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
lto_cgraph_replace_node (cnode2, cnode);
@ -641,7 +641,7 @@ lto_symtab_merge_symbols (void)
nodes if tree merging occured. */
if ((vnode = dyn_cast <varpool_node *> (node))
&& DECL_HARD_REGISTER (vnode->decl)
&& (node2 = symtab_get_node (vnode->decl))
&& (node2 = symtab_node::get (vnode->decl))
&& node2 != node)
lto_varpool_replace_node (dyn_cast <varpool_node *> (node2),
vnode);
@ -650,9 +650,9 @@ lto_symtab_merge_symbols (void)
/* Abstract functions may have duplicated cgraph nodes attached;
remove them. */
else if (cnode && DECL_ABSTRACT (cnode->decl)
&& (cnode2 = cgraph_get_node (node->decl))
&& (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
cgraph_remove_node (cnode2);
cnode2->remove ();
node->decl->decl_with_vis.symtab_node = node;
}

View File

@ -205,7 +205,7 @@ lto_materialize_function (struct cgraph_node *node)
decl = node->decl;
/* Read in functions with body (analyzed nodes)
and also functions that are needed to produce virtual clones. */
if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
if ((node->has_gimple_body_p () && node->analyzed)
|| node->used_as_abstract_origin
|| has_analyzed_clone_p (node))
{
@ -3014,7 +3014,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
/* Store resolutions into the symbol table. */
FOR_EACH_SYMBOL (snode)
if (symtab_real_symbol_p (snode)
if (snode->real_symbol_p ()
&& snode->lto_file_data
&& snode->lto_file_data->resolution_map
&& (res = pointer_map_contains (snode->lto_file_data->resolution_map,
@ -3082,7 +3082,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Before merging:\n");
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
}
lto_symtab_merge_symbols ();
/* Removal of unreacable symbols is needed to make verify_symtab to pass;
@ -3240,7 +3240,7 @@ do_whole_program_analysis (void)
cgraph_function_flags_ready = true;
if (cgraph_dump_file)
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
bitmap_obstack_initialize (NULL);
cgraph_state = CGRAPH_STATE_IPA_SSA;
@ -3250,10 +3250,10 @@ do_whole_program_analysis (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
dump_symtab (cgraph_dump_file);
symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
verify_symtab ();
symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);

View File

@ -4650,16 +4650,14 @@ mark_referenced_methods (void)
chain = CLASS_CLS_METHODS (impent->imp_context);
while (chain)
{
cgraph_mark_force_output_node (
cgraph_get_create_node (METHOD_DEFINITION (chain)));
cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
chain = CLASS_NST_METHODS (impent->imp_context);
while (chain)
{
cgraph_mark_force_output_node (
cgraph_get_create_node (METHOD_DEFINITION (chain)));
cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
}

View File

@ -1349,7 +1349,7 @@ new_omp_context (gimple stmt, omp_context *outer_ctx)
{
ctx->cb.src_fn = current_function_decl;
ctx->cb.dst_fn = current_function_decl;
ctx->cb.src_node = cgraph_get_node (current_function_decl);
ctx->cb.src_node = cgraph_node::get (current_function_decl);
gcc_checking_assert (ctx->cb.src_node);
ctx->cb.dst_node = ctx->cb.src_node;
ctx->cb.src_cfun = cfun;
@ -1397,7 +1397,7 @@ finalize_task_copyfn (gimple task_stmt)
pop_cfun ();
/* Inform the callgraph about the new function. */
cgraph_add_new_function (child_fn, false);
cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
@ -4903,7 +4903,7 @@ expand_omp_taskreg (struct omp_region *region)
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
cgraph_add_new_function (child_fn, true);
cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
@ -8021,7 +8021,7 @@ expand_omp_target (struct omp_region *region)
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
cgraph_add_new_function (child_fn, true);
cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
@ -9279,7 +9279,7 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
memset (&tcctx, '\0', sizeof (tcctx));
tcctx.cb.src_fn = ctx->cb.src_fn;
tcctx.cb.dst_fn = child_fn;
tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
gcc_checking_assert (tcctx.cb.src_node);
tcctx.cb.dst_node = tcctx.cb.src_node;
tcctx.cb.src_cfun = ctx->cb.src_cfun;
@ -11038,11 +11038,12 @@ simd_clone_create (struct cgraph_node *old_node)
struct cgraph_node *new_node;
if (old_node->definition)
{
if (!cgraph_function_with_gimple_body_p (old_node))
if (!old_node->has_gimple_body_p ())
return NULL;
cgraph_get_body (old_node);
new_node = cgraph_function_versioning (old_node, vNULL, NULL, NULL,
false, NULL, NULL, "simdclone");
old_node->get_body ();
new_node = old_node->create_version_clone_with_body (vNULL, NULL, NULL,
false, NULL, NULL,
"simdclone");
}
else
{
@ -11053,9 +11054,8 @@ simd_clone_create (struct cgraph_node *old_node)
SET_DECL_RTL (new_decl, NULL);
DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
DECL_STATIC_DESTRUCTOR (new_decl) = 0;
new_node
= cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
cgraph_call_function_insertion_hooks (new_node);
new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
new_node->call_function_insertion_hooks ();
}
if (new_node == NULL)
return new_node;
@ -11734,8 +11734,8 @@ simd_clone_adjust (struct cgraph_node *node)
entry_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
int freq = compute_call_stmt_bb_frequency (current_function_decl,
entry_bb);
cgraph_create_edge (node, cgraph_get_create_node (fn),
call, entry_bb->count, freq);
node->create_edge (cgraph_node::get_create (fn),
call, entry_bb->count, freq);
imm_use_iterator iter;
use_operand_p use_p;

View File

@ -1080,7 +1080,7 @@ is_pass_explicitly_enabled_or_disabled (opt_pass *pass,
if (!slot)
return false;
cgraph_uid = func ? cgraph_get_node (func)->uid : 0;
cgraph_uid = func ? cgraph_node::get (func)->uid : 0;
if (func && DECL_ASSEMBLER_NAME_SET_P (func))
aname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (func));
@ -1488,7 +1488,7 @@ do_per_function (void (*callback) (function *, void *data), void *data)
keep the array visible to garbage collector to avoid reading collected
out nodes. */
static int nnodes;
static GTY ((length ("nnodes"))) cgraph_node_ptr *order;
static GTY ((length ("nnodes"))) cgraph_node **order;
/* If we are in IPA mode (i.e., current_function_decl is NULL), call
function CALLBACK for every function in the call graph. Otherwise,
@ -1504,7 +1504,7 @@ do_per_function_toporder (void (*callback) (function *, void *data), void *data)
else
{
gcc_assert (!order);
order = ggc_vec_alloc<cgraph_node_ptr> (cgraph_n_nodes);
order = ggc_vec_alloc<cgraph_node *> (cgraph_n_nodes);
nnodes = ipa_reverse_postorder (order);
for (i = nnodes - 1; i >= 0; i--)
order[i]->process = 1;
@ -1515,7 +1515,7 @@ do_per_function_toporder (void (*callback) (function *, void *data), void *data)
/* Allow possibly removed nodes to be garbage collected. */
order[i] = NULL;
node->process = 0;
if (cgraph_function_with_gimple_body_p (node))
if (node->has_gimple_body_p ())
callback (DECL_STRUCT_FUNCTION (node->decl), data);
}
}
@ -1818,7 +1818,7 @@ execute_todo (unsigned int flags)
if ((flags & TODO_dump_symtab) && dump_file && !current_function_decl)
{
gcc_assert (!cfun);
dump_symtab (dump_file);
symtab_node::dump_table (dump_file);
/* Flush the file. If verification fails, we won't be able to
close the file before aborting. */
fflush (dump_file);
@ -2015,7 +2015,7 @@ execute_all_ipa_transforms (void)
struct cgraph_node *node;
if (!cfun)
return;
node = cgraph_get_node (current_function_decl);
node = cgraph_node::get (current_function_decl);
if (node->ipa_transforms_to_apply.exists ())
{
@ -2102,13 +2102,13 @@ execute_one_pass (opt_pass *pass)
bool applied = false;
FOR_EACH_DEFINED_FUNCTION (node)
if (node->analyzed
&& cgraph_function_with_gimple_body_p (node)
&& node->has_gimple_body_p ()
&& (!node->clone_of || node->decl != node->clone_of->decl))
{
if (!node->global.inlined_to
&& node->ipa_transforms_to_apply.exists ())
{
cgraph_get_body (node);
node->get_body ();
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
execute_all_ipa_transforms ();
rebuild_cgraph_edges ();
@ -2320,7 +2320,7 @@ ipa_write_summaries (void)
{
struct cgraph_node *node = order[i];
if (cgraph_function_with_gimple_body_p (node))
if (node->has_gimple_body_p ())
{
/* When streaming out references to statements as part of some IPA
pass summary, the statements need to have uids assigned and the
@ -2648,13 +2648,13 @@ bool
function_called_by_processed_nodes_p (void)
{
struct cgraph_edge *e;
for (e = cgraph_get_node (current_function_decl)->callers;
for (e = cgraph_node::get (current_function_decl)->callers;
e;
e = e->next_caller)
{
if (e->caller->decl == current_function_decl)
continue;
if (!cgraph_function_with_gimple_body_p (e->caller))
if (!e->caller->has_gimple_body_p ())
continue;
if (TREE_ASM_WRITTEN (e->caller->decl))
continue;
@ -2664,7 +2664,7 @@ function_called_by_processed_nodes_p (void)
if (dump_file && e)
{
fprintf (dump_file, "Already processed call to:\n");
dump_cgraph_node (dump_file, e->caller);
e->caller->dump (dump_file);
}
return e != NULL;
}

View File

@ -113,7 +113,7 @@ static const struct predictor_info predictor_info[]= {
static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
struct cgraph_node *node = cgraph_get_node (fun->decl);
struct cgraph_node *node = cgraph_node::get (fun->decl);
if (!profile_info || !flag_branch_probabilities)
{
if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
@ -275,7 +275,7 @@ probably_never_executed (struct function *fun,
return true;
}
if ((!profile_info || !flag_branch_probabilities)
&& (cgraph_get_node (fun->decl)->frequency
&& (cgraph_node::get (fun->decl)->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED))
return true;
return false;
@ -299,14 +299,14 @@ probably_never_executed_edge_p (struct function *fun, edge e)
return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}
/* Return true if NODE should be optimized for size. */
/* Return true if function should be optimized for size. */
bool
cgraph_optimize_for_size_p (struct cgraph_node *node)
cgraph_node::optimize_for_size_p (void)
{
if (optimize_size)
return true;
if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return true;
else
return false;
@ -321,7 +321,9 @@ optimize_function_for_size_p (struct function *fun)
return true;
if (!fun || !fun->decl)
return false;
return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
cgraph_node *n = cgraph_node::get (fun->decl);
return n && n->optimize_for_size_p ();
}
/* Return true when current function should always be optimized for speed. */
@ -2983,7 +2985,7 @@ void
compute_function_frequency (void)
{
basic_block bb;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
|| MAIN_NAME_P (DECL_NAME (current_function_decl)))

View File

@ -918,9 +918,8 @@ compute_value_histograms (histogram_values values, unsigned cfg_checksum,
the corresponding call graph node. */
if (hist->type == HIST_TYPE_TIME_PROFILE)
{
node = cgraph_get_node (hist->fun->decl);
node->tp_first_run = hist->hvalue.counters[0];
node = cgraph_node::get (hist->fun->decl);
node->tp_first_run = hist->hvalue.counters[0];
if (dump_file)
fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);

File diff suppressed because it is too large Load Diff

View File

@ -2369,7 +2369,7 @@ expand_call_tm (struct tm_region *region,
return false;
}
node = cgraph_get_node (fn_decl);
node = cgraph_node::get (fn_decl);
/* All calls should have cgraph here. */
if (!node)
{
@ -2389,7 +2389,7 @@ expand_call_tm (struct tm_region *region,
{
gimple_call_set_fndecl (stmt, repl);
update_stmt (stmt);
node = cgraph_create_node (repl);
node = cgraph_node::create (repl);
node->local.tm_may_enter_irr = false;
return expand_call_tm (region, gsi);
}
@ -4032,7 +4032,7 @@ struct tm_ipa_cg_data
bool want_irr_scan_normal;
};
typedef vec<cgraph_node_ptr> cgraph_node_queue;
typedef vec<cgraph_node *> cgraph_node_queue;
/* Return the ipa data associated with NODE, allocating zeroed memory
if necessary. TRAVERSE_ALIASES is true if we must traverse aliases
@ -4044,7 +4044,7 @@ get_cg_data (struct cgraph_node **node, bool traverse_aliases)
struct tm_ipa_cg_data *d;
if (traverse_aliases && (*node)->alias)
*node = cgraph_alias_target (*node);
*node = (*node)->get_alias_target ();
d = (struct tm_ipa_cg_data *) (*node)->aux;
@ -4128,7 +4128,7 @@ ipa_tm_scan_calls_block (cgraph_node_queue *callees_p,
if (find_tm_replacement_function (fndecl))
continue;
node = cgraph_get_node (fndecl);
node = cgraph_node::get (fndecl);
gcc_assert (node != NULL);
d = get_cg_data (&node, true);
@ -4295,7 +4295,7 @@ ipa_tm_scan_irr_block (basic_block bb)
if (find_tm_replacement_function (fn))
break;
node = cgraph_get_node (fn);
node = cgraph_node::get (fn);
d = get_cg_data (&node, true);
/* Return true if irrevocable, but above all, believe
@ -4468,7 +4468,7 @@ ipa_tm_decrement_clone_counts (basic_block bb, bool for_clone)
if (find_tm_replacement_function (fndecl))
continue;
tnode = cgraph_get_node (fndecl);
tnode = cgraph_node::get (fndecl);
d = get_cg_data (&tnode, true);
pcallers = (for_clone ? &d->tm_callers_clone
@ -4610,7 +4610,7 @@ ipa_tm_mayenterirr_function (struct cgraph_node *node)
/* If we aren't seeing the final version of the function we don't
know what it will contain at runtime. */
if (cgraph_function_body_availability (node) < AVAIL_AVAILABLE)
if (node->get_availability () < AVAIL_AVAILABLE)
return true;
/* If the function must go irrevocable, then of course true. */
@ -4631,7 +4631,7 @@ ipa_tm_mayenterirr_function (struct cgraph_node *node)
result in one of the bits above being set so that we will not
have to recurse next time. */
if (node->alias)
return ipa_tm_mayenterirr_function (cgraph_get_node (node->thunk.alias));
return ipa_tm_mayenterirr_function (cgraph_node::get (node->thunk.alias));
/* What remains is unmarked local functions without items that force
the function to go irrevocable. */
@ -4789,7 +4789,7 @@ tm_mangle (tree old_asm_id)
static inline void
ipa_tm_mark_force_output_node (struct cgraph_node *node)
{
cgraph_mark_force_output_node (node);
node->mark_force_output ();
node->analyzed = true;
}
@ -4845,7 +4845,7 @@ ipa_tm_create_version_alias (struct cgraph_node *node, void *data)
if (DECL_ONE_ONLY (new_decl))
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (decl_comdat_group_id (old_decl)));
new_node = cgraph_same_body_alias (NULL, new_decl, info->new_decl);
new_node = cgraph_node::create_same_body_alias (new_decl, info->new_decl);
new_node->tm_clone = true;
new_node->externally_visible = info->old_node->externally_visible;
/* ?? Do not traverse aliases here. */
@ -4886,14 +4886,14 @@ ipa_tm_create_version (struct cgraph_node *old_node)
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (DECL_COMDAT_GROUP (old_decl)));
gcc_assert (!old_node->ipa_transforms_to_apply.exists ());
new_node = cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
new_node->local.local = false;
new_node->externally_visible = old_node->externally_visible;
new_node->lowered = true;
new_node->tm_clone = 1;
get_cg_data (&old_node, true)->clone = new_node;
if (cgraph_function_body_availability (old_node) >= AVAIL_OVERWRITABLE)
if (old_node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* Remap extern inline to static inline. */
/* ??? Is it worth trying to use make_decl_one_only? */
@ -4911,7 +4911,7 @@ ipa_tm_create_version (struct cgraph_node *old_node)
record_tm_clone_pair (old_decl, new_decl);
cgraph_call_function_insertion_hooks (new_node);
new_node->call_function_insertion_hooks ();
if (old_node->force_output
|| old_node->ref_list.first_referring ())
ipa_tm_mark_force_output_node (new_node);
@ -4923,8 +4923,8 @@ ipa_tm_create_version (struct cgraph_node *old_node)
struct create_version_alias_info data;
data.old_node = old_node;
data.new_decl = new_decl;
cgraph_for_node_and_aliases (old_node, ipa_tm_create_version_alias,
&data, true);
old_node->call_for_symbol_thunks_and_aliases (ipa_tm_create_version_alias,
&data, true);
}
}
@ -4946,12 +4946,11 @@ ipa_tm_insert_irr_call (struct cgraph_node *node, struct tm_region *region,
gsi = gsi_after_labels (bb);
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
cgraph_create_edge (node,
cgraph_get_create_node
(builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
g, 0,
compute_call_stmt_bb_frequency (node->decl,
gimple_bb (g)));
node->create_edge (cgraph_node::get_create
(builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
g, 0,
compute_call_stmt_bb_frequency (node->decl,
gimple_bb (g)));
}
/* Construct a call to TM_GETTMCLONE and insert it before GSI. */
@ -4976,9 +4975,9 @@ ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
technically taking the address of the original function and
its clone. Explain this so inlining will know this function
is needed. */
cgraph_mark_address_taken_node (cgraph_get_node (fndecl));
cgraph_node::get (fndecl)->mark_address_taken () ;
if (clone)
cgraph_mark_address_taken_node (cgraph_get_node (clone));
cgraph_node::get (clone)->mark_address_taken ();
}
safe = is_tm_safe (TREE_TYPE (old_fn));
@ -4999,9 +4998,9 @@ ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
gsi_insert_before (gsi, g, GSI_SAME_STMT);
cgraph_create_edge (node, cgraph_get_create_node (gettm_fn), g, 0,
compute_call_stmt_bb_frequency (node->decl,
gimple_bb (g)));
node->create_edge (cgraph_node::get_create (gettm_fn), g, 0,
compute_call_stmt_bb_frequency (node->decl,
gimple_bb (g)));
/* Cast return value from tm_gettmclone* into appropriate function
pointer. */
@ -5057,7 +5056,7 @@ ipa_tm_transform_calls_redirect (struct cgraph_node *node,
{
gimple stmt = gsi_stmt (*gsi);
struct cgraph_node *new_node;
struct cgraph_edge *e = cgraph_edge (node, stmt);
struct cgraph_edge *e = node->get_edge (stmt);
tree fndecl = gimple_call_fndecl (stmt);
/* For indirect calls, pass the address through the runtime. */
@ -5087,7 +5086,7 @@ ipa_tm_transform_calls_redirect (struct cgraph_node *node,
fndecl = find_tm_replacement_function (fndecl);
if (fndecl)
{
new_node = cgraph_get_create_node (fndecl);
new_node = cgraph_node::get_create (fndecl);
/* ??? Mark all transaction_wrap functions tm_may_enter_irr.
@ -5292,7 +5291,7 @@ ipa_tm_execute (void)
unsigned int i;
#ifdef ENABLE_CHECKING
verify_cgraph ();
cgraph_node::verify_cgraph_nodes ();
#endif
bitmap_obstack_initialize (&tm_obstack);
@ -5301,7 +5300,7 @@ ipa_tm_execute (void)
/* For all local functions marked tm_callable, queue them. */
FOR_EACH_DEFINED_FUNCTION (node)
if (is_tm_callable (node->decl)
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
&& node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
@ -5310,7 +5309,7 @@ ipa_tm_execute (void)
/* For all local reachable functions... */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
&& node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* ... marked tm_pure, record that fact for the runtime by
indicating that the pure function is its own tm_callable.
@ -5350,7 +5349,7 @@ ipa_tm_execute (void)
for (i = 0; i < tm_callees.length (); ++i)
{
node = tm_callees[i];
a = cgraph_function_body_availability (node);
a = node->get_availability ();
d = get_cg_data (&node, true);
/* Put it in the worklist so we can scan the function later
@ -5365,7 +5364,7 @@ ipa_tm_execute (void)
else if (a <= AVAIL_NOT_AVAILABLE
&& !is_tm_safe_or_pure (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
else if (a >= AVAIL_OVERWRITABLE)
else if (a >= AVAIL_INTERPOSABLE)
{
if (!tree_versionable_function_p (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
@ -5375,7 +5374,7 @@ ipa_tm_execute (void)
we need not scan the callees now, as the base will do. */
if (node->alias)
{
node = cgraph_get_node (node->thunk.alias);
node = cgraph_node::get (node->thunk.alias);
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
continue;
@ -5461,7 +5460,7 @@ ipa_tm_execute (void)
/* Propagate back to referring aliases as well. */
FOR_EACH_ALIAS (node, ref)
{
caller = cgraph (ref->referring);
caller = dyn_cast<cgraph_node *> (ref->referring);
if (!caller->local.tm_may_enter_irr)
{
/* ?? Do not traverse aliases here. */
@ -5475,7 +5474,7 @@ ipa_tm_execute (void)
other functions. */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
&& node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (is_tm_safe (node->decl))
@ -5495,7 +5494,7 @@ ipa_tm_execute (void)
if (node->cpp_implicit_alias)
continue;
a = cgraph_function_body_availability (node);
a = node->get_availability ();
d = get_cg_data (&node, true);
if (a <= AVAIL_NOT_AVAILABLE)
@ -5523,7 +5522,7 @@ ipa_tm_execute (void)
}
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
&& node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (d->all_tm_regions)
@ -5540,7 +5539,7 @@ ipa_tm_execute (void)
node->aux = NULL;
#ifdef ENABLE_CHECKING
verify_cgraph ();
cgraph_node::verify_cgraph_nodes ();
#endif
return 0;

View File

@ -8449,11 +8449,11 @@ execute_fixup_cfg (void)
edge_iterator ei;
count_scale
= GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
= GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
cgraph_get_node (current_function_decl)->count;
cgraph_node::get (current_function_decl)->count;
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
count_scale);

View File

@ -2691,7 +2691,7 @@ tree_could_trap_p (tree expr)
struct cgraph_node *node;
if (!DECL_EXTERNAL (expr))
return false;
node = cgraph_function_node (cgraph_get_node (expr), NULL);
node = cgraph_node::get (expr)->function_symbol ();
if (node && node->in_other_partition)
return false;
return true;

View File

@ -71,7 +71,7 @@ along with GCC; see the file COPYING3. If not see
the index of a TLS variable equals the index of its control variable in
the other vector. */
static varpool_node_set tls_vars;
static vec<varpool_node_ptr> control_vars;
static vec<varpool_node *> control_vars;
/* For the current basic block, an SSA_NAME that has computed the address
of the TLS variable at the corresponding index. */
@ -448,8 +448,7 @@ gen_emutls_addr (tree decl, struct lower_emutls_data *d)
gimple_seq_add_stmt (&d->seq, x);
cgraph_create_edge (d->cfun_node, d->builtin_node, x,
d->bb->count, d->bb_freq);
d->cfun_node->create_edge (d->builtin_node, x, d->bb->count, d->bb_freq);
/* We may be adding a new reference to a new variable to the function.
This means we have to play with the ipa-reference web. */
@ -632,7 +631,7 @@ lower_emutls_function_body (struct cgraph_node *node)
d.builtin_decl = builtin_decl_explicit (BUILT_IN_EMUTLS_GET_ADDRESS);
/* This is where we introduce the declaration to the IL and so we have to
create a node for it. */
d.builtin_node = cgraph_get_create_node (d.builtin_decl);
d.builtin_node = cgraph_node::get_create (d.builtin_decl);
FOR_EACH_BB_FN (d.bb, cfun)
{

View File

@ -1799,7 +1799,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
switch (id->transform_call_graph_edges)
{
case CB_CGE_DUPLICATE:
edge = cgraph_edge (id->src_node, orig_stmt);
edge = id->src_node->get_edge (orig_stmt);
if (edge)
{
int edge_freq = edge->frequency;
@ -1862,13 +1862,13 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
break;
case CB_CGE_MOVE_CLONES:
cgraph_set_call_stmt_including_clones (id->dst_node,
orig_stmt, stmt);
edge = cgraph_edge (id->dst_node, stmt);
id->dst_node->set_call_stmt_including_clones (orig_stmt,
stmt);
edge = id->dst_node->get_edge (stmt);
break;
case CB_CGE_MOVE:
edge = cgraph_edge (id->dst_node, orig_stmt);
edge = id->dst_node->get_edge (orig_stmt);
if (edge)
cgraph_set_call_stmt (edge, stmt);
break;
@ -1885,7 +1885,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
&& id->dst_node->definition
&& (fn = gimple_call_fndecl (stmt)) != NULL)
{
struct cgraph_node *dest = cgraph_get_node (fn);
struct cgraph_node *dest = cgraph_node::get (fn);
/* We have a missing edge in the callgraph.  This can happen
when previous inlining turned an indirect call into a
@ -1898,13 +1898,13 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
|| !id->src_node->definition
|| !id->dst_node->definition);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
cgraph_create_edge_including_clones
(id->dst_node, dest, orig_stmt, stmt, bb->count,
id->dst_node->create_edge_including_clones
(dest, orig_stmt, stmt, bb->count,
compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
cgraph_create_edge (id->dst_node, dest, stmt,
id->dst_node->create_edge (dest, stmt,
bb->count,
compute_call_stmt_bb_frequency
(id->dst_node->decl,
@ -2430,7 +2430,7 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
{
if (is_gimple_call (gsi_stmt (si)))
{
struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
if (edge)
cgraph_redirect_edge_call_stmt_to_callee (edge);
}
@ -3889,7 +3889,7 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
/* Do not special case builtins where we see the body.
This just confuses the inliner. */
struct cgraph_node *node;
if (!(node = cgraph_get_node (decl))
if (!(node = cgraph_node::get (decl))
|| node->definition)
;
/* For builtins that are likely expanded to nothing or
@ -4159,7 +4159,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
if (gimple_code (stmt) != GIMPLE_CALL)
goto egress;
cg_edge = cgraph_edge (id->dst_node, stmt);
cg_edge = id->dst_node->get_edge (stmt);
gcc_checking_assert (cg_edge);
/* First, see if we can figure out what function is being called.
If we cannot, then there is no hope of inlining the function. */
@ -4227,11 +4227,11 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
goto egress;
}
fn = cg_edge->callee->decl;
cgraph_get_body (cg_edge->callee);
cg_edge->callee->get_body ();
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->dst_node->decl)
verify_cgraph_node (cg_edge->callee);
cg_edge->callee->verify ();
#endif
/* We will be inlining this callee. */
@ -4494,7 +4494,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
(*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
/* Update callgraph if needed. */
cgraph_remove_node (cg_edge->callee);
cg_edge->callee->remove ();
id->block = NULL_TREE;
successfully_inlined = TRUE;
@ -4629,7 +4629,7 @@ optimize_inline_calls (tree fn)
/* Clear out ID. */
memset (&id, 0, sizeof (id));
id.src_node = id.dst_node = cgraph_get_node (fn);
id.src_node = id.dst_node = cgraph_node::get (fn);
gcc_assert (id.dst_node->definition);
id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
@ -4668,7 +4668,7 @@ optimize_inline_calls (tree fn)
{
struct cgraph_edge *e;
verify_cgraph_node (id.dst_node);
id.dst_node->verify ();
/* Double check that we inlined everything we are supposed to inline. */
for (e = id.dst_node->callees; e; e = e->next_callee)
@ -4691,7 +4691,7 @@ optimize_inline_calls (tree fn)
delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
verify_cgraph_node (id.dst_node);
id.dst_node->verify ();
#endif
/* It would be nice to check SSA/CFG/statement consistency here, but it is
@ -5221,10 +5221,10 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
id->dst_node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
&&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
&&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
@ -5234,10 +5234,10 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
&& (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
@ -5316,7 +5316,7 @@ update_clone_info (copy_body_data * id)
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
vec<ipa_replace_map_p, va_gc> *tree_map,
vec<ipa_replace_map *, va_gc> *tree_map,
bool update_clones, bitmap args_to_skip,
bool skip_return, bitmap blocks_to_copy,
basic_block new_entry)
@ -5335,9 +5335,9 @@ tree_function_versioning (tree old_decl, tree new_decl,
&& TREE_CODE (new_decl) == FUNCTION_DECL);
DECL_POSSIBLY_INLINED (old_decl) = 1;
old_version_node = cgraph_get_node (old_decl);
old_version_node = cgraph_node::get (old_decl);
gcc_checking_assert (old_version_node);
new_version_node = cgraph_get_node (new_decl);
new_version_node = cgraph_node::get (new_decl);
gcc_checking_assert (new_version_node);
/* Copy over debug args. */

View File

@ -706,7 +706,7 @@ walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
struct cgraph_node *cgn = cgraph_get_node (fndecl);
struct cgraph_node *cgn = cgraph_node::get (fndecl);
tree arg;
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
@ -2901,13 +2901,13 @@ finalize_nesting_tree (struct nesting_info *root)
static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
struct cgraph_node *node = cgraph_get_node (root->context);
struct cgraph_node *node = cgraph_node::get (root->context);
/* For nested functions update the cgraph to reflect unnesting.
We also delay finalizing of these functions up to this point. */
if (node->origin)
{
cgraph_unnest_node (node);
node->unnest ();
cgraph_finalize_function (root->context, true);
}
}
@ -2961,7 +2961,7 @@ lower_nested_functions (tree fndecl)
struct nesting_info *root;
/* If there are no nested functions, there's nothing to do. */
cgn = cgraph_get_node (fndecl);
cgn = cgraph_node::get (fndecl);
if (!cgn->nested)
return;

View File

@ -3456,7 +3456,7 @@ void
dump_function_header (FILE *dump_file, tree fdecl, int flags)
{
const char *dname, *aname;
struct cgraph_node *node = cgraph_get_node (fdecl);
struct cgraph_node *node = cgraph_node::get (fdecl);
struct function *fun = DECL_STRUCT_FUNCTION (fdecl);
dname = lang_hooks.decl_printable_name (fdecl, 2);

View File

@ -427,12 +427,12 @@ gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
void
gimple_gen_ic_func_profiler (void)
{
struct cgraph_node * c_node = cgraph_get_node (current_function_decl);
struct cgraph_node * c_node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
gimple stmt1, stmt2;
tree tree_uid, cur_func, void0;
if (cgraph_only_called_directly_p (c_node))
if (c_node->only_called_directly_p ())
return;
gimple_init_edge_profiler ();
@ -451,7 +451,7 @@ gimple_gen_ic_func_profiler (void)
true, NULL_TREE,
true, GSI_SAME_STMT);
tree_uid = build_int_cst
(gcov_type_node, cgraph_get_node (current_function_decl)->profile_id);
(gcov_type_node, cgraph_node::get (current_function_decl)->profile_id);
/* Workaround for binutils bug 14342. Once it is fixed, remove lto path. */
if (flag_lto)
{
@ -615,8 +615,8 @@ tree_profiling (void)
if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
continue;
cgraph_set_const_flag (node, false, false);
cgraph_set_pure_flag (node, false, false);
node->set_const_flag (false, false);
node->set_pure_flag (false, false);
}
/* Update call statements and rebuild the cgraph. */

View File

@ -4886,8 +4886,8 @@ convert_callers (struct cgraph_node *node, tree old_decl,
{
basic_block this_block;
cgraph_for_node_and_aliases (node, convert_callers_for_node,
&adjustments, false);
node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
&adjustments, false);
if (!encountered_recursive_call)
return;
@ -4932,10 +4932,10 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
/* This must be done after rebuilding cgraph edges for node above.
Otherwise any recursive calls to node that are recorded in
redirect_callers will be corrupted. */
vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
new_node = cgraph_function_versioning (node, redirect_callers,
NULL,
NULL, false, NULL, NULL, "isra");
vec<cgraph_edge *> redirect_callers = node->collect_callers ();
new_node = node->create_version_clone_with_body (redirect_callers, NULL,
NULL, false, NULL, NULL,
"isra");
redirect_callers.release ();
push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
@ -4943,7 +4943,7 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
cfg_changed = ipa_sra_modify_function_body (adjustments);
sra_ipa_reset_debug_stmts (adjustments);
convert_callers (new_node, node->decl, adjustments);
cgraph_make_node_local (new_node);
new_node->make_local ();
return cfg_changed;
}
@ -4964,7 +4964,7 @@ has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
if (!cgraph_node_can_be_local_p (node))
if (!node->can_be_local_p ())
{
if (dump_file)
fprintf (dump_file, "Function not local to this compilation unit.\n");
@ -5008,7 +5008,7 @@ ipa_sra_preliminary_function_checks (struct cgraph_node *node)
return false;
}
if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
{
if (dump_file)
fprintf (dump_file,
@ -5042,7 +5042,7 @@ ipa_sra_preliminary_function_checks (struct cgraph_node *node)
static unsigned int
ipa_early_sra (void)
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
ipa_parm_adjustment_vec adjustments;
int ret = 0;
@ -5059,9 +5059,8 @@ ipa_early_sra (void)
goto simple_out;
}
if (cgraph_for_node_and_aliases (node,
some_callers_have_mismatched_arguments_p,
NULL, true))
if (node->call_for_symbol_thunks_and_aliases
(some_callers_have_mismatched_arguments_p, NULL, true))
{
if (dump_file)
fprintf (dump_file, "There are callers with insufficient number of "

View File

@ -1710,7 +1710,7 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
struct cgraph_node *node = cgraph_get_node (callee);
struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_read;
/* FIXME: Callee can be an OMP builtin that does not have a call graph
@ -2078,7 +2078,7 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
struct cgraph_node *node = cgraph_get_node (callee);
struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_written;
if (node

View File

@ -2956,7 +2956,7 @@ computation_cost (tree expr, bool speed)
unsigned cost;
/* Avoid using hard regs in ways which may be unsupported. */
int regno = LAST_VIRTUAL_REGISTER + 1;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
enum node_frequency real_frequency = node->frequency;
node->frequency = NODE_FREQUENCY_NORMAL;

View File

@ -4371,7 +4371,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
cgraph_get_node (fn)->name ());
cgraph_node::get (fn)->name ());
}
gimple_call_set_fndecl (stmt, fn);
gimple_set_modified (stmt, true);

View File

@ -7115,7 +7115,7 @@ ipa_pta_execute (void)
if (dump_file && (dump_flags & TDF_DETAILS))
{
dump_symtab (dump_file);
symtab_node::dump_table (dump_file);
fprintf (dump_file, "\n");
}
@ -7126,15 +7126,16 @@ ipa_pta_execute (void)
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
if (!node->has_gimple_body_p () || node->clone_of)
continue;
cgraph_get_body (node);
node->get_body ();
gcc_assert (!node->clone_of);
vi = create_function_info_for (node->decl,
alias_get_name (node->decl));
cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
node->call_for_symbol_thunks_and_aliases
(associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
@ -7161,7 +7162,7 @@ ipa_pta_execute (void)
basic_block bb;
/* Nodes without a body are not interesting. */
if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
if (!node->has_gimple_body_p () || node->clone_of)
continue;
if (dump_file)
@ -7265,7 +7266,7 @@ ipa_pta_execute (void)
basic_block bb;
/* Nodes without a body are not interesting. */
if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
if (!node->has_gimple_body_p () || node->clone_of)
continue;
fn = DECL_STRUCT_FUNCTION (node->decl);

View File

@ -3221,7 +3221,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
tree fndecl = gimple_call_fndecl (stmt), op;
if (fndecl != NULL_TREE)
{
struct cgraph_node *node = cgraph_get_node (fndecl);
struct cgraph_node *node = cgraph_node::get (fndecl);
if (node != NULL && node->simd_clones != NULL)
{
unsigned int j, n = gimple_call_num_args (stmt);
@ -5690,10 +5690,10 @@ vect_can_force_dr_alignment_p (const_tree decl, unsigned int alignment)
/* When compiling partition, be sure the symbol is not output by other
partition. */
snode = symtab_get_node (decl);
snode = symtab_node::get (decl);
if (flag_ltrans
&& (snode->in_other_partition
|| symtab_get_symbol_partitioning_class (snode) == SYMBOL_DUPLICATE))
|| snode->get_partitioning_class () == SYMBOL_DUPLICATE))
return false;
}
@ -5707,13 +5707,13 @@ vect_can_force_dr_alignment_p (const_tree decl, unsigned int alignment)
software projects. */
if (TREE_STATIC (decl)
&& DECL_SECTION_NAME (decl) != NULL
&& !symtab_get_node (decl)->implicit_section)
&& !symtab_node::get (decl)->implicit_section)
return false;
/* If symbol is an alias, we need to check that target is OK. */
if (TREE_STATIC (decl))
{
tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
if (target != decl)
{
if (DECL_PRESERVE_P (target))

View File

@ -2643,7 +2643,7 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
if (fndecl == NULL_TREE)
return false;
struct cgraph_node *node = cgraph_get_node (fndecl);
struct cgraph_node *node = cgraph_node::get (fndecl);
if (node == NULL || node->simd_clones == NULL)
return false;
@ -2726,7 +2726,7 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
unsigned int badness = 0;
struct cgraph_node *bestn = NULL;
if (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info))
bestn = cgraph_get_node (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
else
for (struct cgraph_node *n = node->simd_clones; n != NULL;
n = n->simdclone->next_clone)

View File

@ -704,7 +704,7 @@ increase_alignment (void)
DECL_USER_ALIGN (decl) = 1;
if (TREE_STATIC (decl))
{
tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
DECL_ALIGN (target) = TYPE_ALIGN (vectype);
DECL_USER_ALIGN (target) = 1;
}

View File

@ -603,7 +603,7 @@ decl_assembler_name (tree decl)
tree
decl_comdat_group (const_tree node)
{
struct symtab_node *snode = symtab_get_node (node);
struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group ();
@ -613,7 +613,7 @@ decl_comdat_group (const_tree node)
tree
decl_comdat_group_id (const_tree node)
{
struct symtab_node *snode = symtab_get_node (node);
struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group_id ();
@ -624,7 +624,7 @@ decl_comdat_group_id (const_tree node)
const char *
decl_section_name (const_tree node)
{
struct symtab_node *snode = symtab_get_node (node);
struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_section ();
@ -639,14 +639,14 @@ set_decl_section_name (tree node, const char *value)
if (value == NULL)
{
snode = symtab_get_node (node);
snode = symtab_node::get (node);
if (!snode)
return;
}
else if (TREE_CODE (node) == VAR_DECL)
snode = varpool_node_for_decl (node);
else
snode = cgraph_get_create_node (node);
snode = cgraph_node::get_create (node);
snode->set_section (value);
}
@ -5062,7 +5062,7 @@ need_assembler_name_p (tree decl)
return false;
/* Functions represented in the callgraph need an assembler name. */
if (cgraph_get_node (decl) != NULL)
if (cgraph_node::get (decl) != NULL)
return true;
/* Unused and not public functions don't need an assembler name. */
@ -5105,11 +5105,11 @@ free_lang_data_in_decl (tree decl)
if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node;
if (!(node = cgraph_get_node (decl))
if (!(node = cgraph_node::get (decl))
|| (!node->definition && !node->clones))
{
if (node)
cgraph_release_function_body (node);
node->release_body ();
else
{
release_function_body (decl);
@ -6488,7 +6488,7 @@ tree_decl_map_hash (const void *item)
priority_type
decl_init_priority_lookup (tree decl)
{
symtab_node *snode = symtab_get_node (decl);
symtab_node *snode = symtab_node::get (decl);
if (!snode)
return DEFAULT_INIT_PRIORITY;
@ -6501,7 +6501,7 @@ decl_init_priority_lookup (tree decl)
priority_type
decl_fini_priority_lookup (tree decl)
{
cgraph_node *node = cgraph_get_node (decl);
cgraph_node *node = cgraph_node::get (decl);
if (!node)
return DEFAULT_INIT_PRIORITY;
@ -6518,14 +6518,14 @@ decl_init_priority_insert (tree decl, priority_type priority)
if (priority == DEFAULT_INIT_PRIORITY)
{
snode = symtab_get_node (decl);
snode = symtab_node::get (decl);
if (!snode)
return;
}
else if (TREE_CODE (decl) == VAR_DECL)
snode = varpool_node_for_decl (decl);
else
snode = cgraph_get_create_node (decl);
snode = cgraph_node::get_create (decl);
snode->set_init_priority (priority);
}
@ -6538,12 +6538,12 @@ decl_fini_priority_insert (tree decl, priority_type priority)
if (priority == DEFAULT_INIT_PRIORITY)
{
node = cgraph_get_node (decl);
node = cgraph_node::get (decl);
if (!node)
return;
}
else
node = cgraph_get_create_node (decl);
node = cgraph_node::get_create (decl);
node->set_fini_priority (priority);
}

View File

@ -1223,8 +1223,8 @@ init_node_map (bool local)
cgraph_node_map = pointer_map_create ();
FOR_EACH_DEFINED_FUNCTION (n)
if (cgraph_function_with_gimple_body_p (n)
&& !cgraph_only_called_directly_p (n))
if (n->has_gimple_body_p ()
&& !n->only_called_directly_p ())
{
void **val;
if (local)

View File

@ -440,8 +440,8 @@ resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
{
targetm.asm_out.unique_section (decl, reloc);
if (DECL_SECTION_NAME (decl))
symtab_for_node_and_aliases (symtab_get_node (decl),
set_implicit_section, NULL, true);
symtab_node::get (decl)->call_for_symbol_and_aliases
(set_implicit_section, NULL, true);
}
}
@ -521,7 +521,7 @@ get_named_text_section (tree decl,
buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
return get_named_section (decl, buffer, 0);
}
else if (symtab_get_node (decl)->implicit_section)
else if (symtab_node::get (decl)->implicit_section)
{
const char *name;
@ -550,7 +550,7 @@ default_function_section (tree decl, enum node_frequency freq,
/* Old GNU linkers have buggy --gc-section support, which sometimes
results in .gcc_except_table* sections being garbage collected. */
if (decl
&& symtab_get_node (decl)->implicit_section)
&& symtab_node::get (decl)->implicit_section)
return NULL;
#endif
@ -606,7 +606,7 @@ function_section_1 (tree decl, bool force_cold)
if (decl)
{
struct cgraph_node *node = cgraph_get_node (decl);
struct cgraph_node *node = cgraph_node::get (decl);
if (node)
{
@ -1092,9 +1092,9 @@ get_variable_section (tree decl, bool prefer_noswitch_p)
{
addr_space_t as = ADDR_SPACE_GENERIC;
int reloc;
symtab_node *snode = symtab_get_node (decl);
symtab_node *snode = symtab_node::get (decl);
if (snode)
decl = symtab_alias_ultimate_target (snode)->decl;
decl = snode->ultimate_alias_target ()->decl;
if (TREE_TYPE (decl) != error_mark_node)
as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
@ -1210,7 +1210,7 @@ use_blocks_for_decl_p (tree decl)
/* If this decl is an alias, then we don't want to emit a
definition. */
if (TREE_CODE (decl) == VAR_DECL
&& (snode = symtab_get_node (decl)) != NULL
&& (snode = symtab_node::get (decl)) != NULL
&& snode->alias)
return false;
@ -1600,7 +1600,7 @@ decide_function_section (tree decl)
if (DECL_SECTION_NAME (decl))
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
struct cgraph_node *node = cgraph_node::get (current_function_decl);
/* Calls to function_section rely on first_function_block_is_cold
being accurate. */
first_function_block_is_cold = (node
@ -2387,10 +2387,10 @@ mark_decl_referenced (tree decl)
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
struct cgraph_node *node = cgraph_get_create_node (decl);
struct cgraph_node *node = cgraph_node::get_create (decl);
if (!DECL_EXTERNAL (decl)
&& !node->definition)
cgraph_mark_force_output_node (node);
node->mark_force_output ();
}
else if (TREE_CODE (decl) == VAR_DECL)
{
@ -5632,7 +5632,7 @@ assemble_alias (tree decl, tree target)
/* Allow aliases to aliases. */
if (TREE_CODE (decl) == FUNCTION_DECL)
cgraph_get_create_node (decl)->alias = true;
cgraph_node::get_create (decl)->alias = true;
else
varpool_node_for_decl (decl)->alias = true;
@ -5728,8 +5728,8 @@ dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
tree src = p->from;
tree dst = p->to;
struct cgraph_node *src_n = cgraph_get_node (src);
struct cgraph_node *dst_n = cgraph_get_node (dst);
struct cgraph_node *src_n = cgraph_node::get (src);
struct cgraph_node *dst_n = cgraph_node::get (dst);
/* The function ipa_tm_create_version() marks the clone as needed if
the original function was needed. But we also mark the clone as
@ -5880,7 +5880,7 @@ make_decl_one_only (tree decl, tree comdat_group)
if (TREE_CODE (decl) == VAR_DECL)
symbol = varpool_node_for_decl (decl);
else
symbol = cgraph_get_create_node (decl);
symbol = cgraph_node::get_create (decl);
if (SUPPORTS_ONE_ONLY)
{
@ -6701,7 +6701,7 @@ default_binds_local_p_1 (const_tree exp, int shlib)
}
else if (TREE_CODE (exp) == FUNCTION_DECL && TREE_PUBLIC (exp))
{
struct cgraph_node *node = cgraph_get_node (exp);
struct cgraph_node *node = cgraph_node::get (exp);
if (node
&& (resolution_local_p (node->resolution) || node->in_other_partition))
resolved_locally = true;
@ -6792,7 +6792,7 @@ decl_binds_to_current_def_p (const_tree decl)
}
else if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_node (decl);
struct cgraph_node *node = cgraph_node::get (decl);
if (node
&& node->resolution != LDPR_UNKNOWN)
return resolution_to_local_definition_p (node->resolution);
@ -7042,10 +7042,10 @@ place_block_symbol (rtx symbol)
struct symtab_node *snode;
decl = SYMBOL_REF_DECL (symbol);
snode = symtab_get_node (decl);
snode = symtab_node::get (decl);
if (snode->alias)
{
rtx target = DECL_RTL (symtab_alias_ultimate_target (snode)->decl);
rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
place_block_symbol (target);
SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);

View File

@ -158,26 +158,27 @@ varpool_node_for_decl (tree decl)
node = varpool_create_empty_node ();
node->decl = decl;
symtab_register_node (node);
node->register_symbol ();
return node;
}
/* Remove node from the varpool. */
/* Remove variable from symbol table. */
void
varpool_remove_node (varpool_node *node)
varpool_node::remove (void)
{
varpool_call_node_removal_hooks (node);
symtab_unregister_node (node);
varpool_call_node_removal_hooks (this);
unregister ();
/* When streaming we can have multiple nodes associated with decl. */
if (cgraph_state == CGRAPH_LTO_STREAMING)
;
/* Keep constructor when it may be used for folding. We remove
references to external variables before final compilation. */
else if (DECL_INITIAL (node->decl) && DECL_INITIAL (node->decl) != error_mark_node
&& !varpool_ctor_useable_for_folding_p (node))
varpool_remove_initializer (node);
ggc_free (node);
else if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node
&& !varpool_ctor_useable_for_folding_p (this))
varpool_remove_initializer (this);
ggc_free (this);
}
/* Renove node initializer when it is no longer needed. */
@ -200,32 +201,32 @@ varpool_remove_initializer (varpool_node *node)
/* Dump given cgraph node. */
void
dump_varpool_node (FILE *f, varpool_node *node)
varpool_node::dump (FILE *f)
{
dump_symtab_base (f, node);
dump_base (f);
fprintf (f, " Availability: %s\n",
cgraph_function_flags_ready
? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
? cgraph_availability_names[cgraph_variable_initializer_availability (this)]
: "not-ready");
fprintf (f, " Varpool flags:");
if (DECL_INITIAL (node->decl))
if (DECL_INITIAL (decl))
fprintf (f, " initialized");
if (node->output)
if (output)
fprintf (f, " output");
if (node->used_by_single_function)
if (used_by_single_function)
fprintf (f, " used-by-single-function");
if (TREE_READONLY (node->decl))
if (TREE_READONLY (decl))
fprintf (f, " read-only");
if (varpool_ctor_useable_for_folding_p (node))
if (varpool_ctor_useable_for_folding_p (this))
fprintf (f, " const-value-known");
if (node->writeonly)
if (writeonly)
fprintf (f, " write-only");
if (node->tls_model)
fprintf (f, " %s", tls_model_names [node->tls_model]);
if (tls_model)
fprintf (f, " %s", tls_model_names [tls_model]);
fprintf (f, "\n");
}
/* Dump the variable pool. */
/* Dump the variable pool to F. */
void
dump_varpool (FILE *f)
{
@ -233,7 +234,7 @@ dump_varpool (FILE *f)
fprintf (f, "variable pool:\n\n");
FOR_EACH_VARIABLE (node)
dump_varpool_node (f, node);
node->dump (f);
}
/* Dump the variable pool to stderr. */
@ -459,7 +460,7 @@ cgraph_variable_initializer_availability (varpool_node *node)
used to share template instantiations in C++. */
if (decl_replaceable_p (node->decl)
|| DECL_EXTERNAL (node->decl))
return AVAIL_OVERWRITABLE;
return AVAIL_INTERPOSABLE;
return AVAIL_AVAILABLE;
}
@ -479,8 +480,7 @@ varpool_analyze_node (varpool_node *node)
align_variable (decl, 0);
}
if (node->alias)
symtab_resolve_alias
(node, varpool_get_node (node->alias_target));
node->resolve_alias (varpool_get_node (node->alias_target));
else if (DECL_INITIAL (decl))
record_references_in_initializer (decl, node->analyzed);
node->analyzed = true;
@ -607,7 +607,7 @@ varpool_remove_unreferenced_decls (void)
next = next->same_comdat_group)
{
varpool_node *vnext = dyn_cast <varpool_node *> (next);
if (vnext && vnext->analyzed && !symtab_comdat_local_p (next))
if (vnext && vnext->analyzed && !next->comdat_local_p ())
enqueue_node (vnext, &first);
}
}
@ -636,7 +636,7 @@ varpool_remove_unreferenced_decls (void)
if (pointer_set_contains (referenced, node))
varpool_remove_initializer (node);
else
varpool_remove_node (node);
node->remove ();
}
}
pointer_set_destroy (referenced);
@ -745,8 +745,7 @@ varpool_extra_name_alias (tree alias, tree decl)
This is unfortunate because they are not going through the
standard channels. Ensure they get output. */
if (cpp_implicit_aliases_done)
symtab_resolve_alias (alias_node,
varpool_node_for_decl (decl));
alias_node->resolve_alias (varpool_node_for_decl (decl));
return alias_node;
}
@ -769,7 +768,7 @@ varpool_for_node_and_aliases (varpool_node *node,
{
varpool_node *alias = dyn_cast <varpool_node *> (ref->referring);
if (include_overwritable
|| cgraph_variable_initializer_availability (alias) > AVAIL_OVERWRITABLE)
|| cgraph_variable_initializer_availability (alias) > AVAIL_INTERPOSABLE)
if (varpool_for_node_and_aliases (alias, callback, data,
include_overwritable))
return true;