except.c: Convert prototypes to ISO C90.
* except.c: Convert prototypes to ISO C90.
* except.h: Likewise.
* emit-rtl.c: Likewise.
* et-forest.c: Likewise.
* et-forest.h: Likewise.
* except.c: Likewise.
* explow.c: Likewise.
* expmed.c: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.

From-SVN: r68674
parent 7080f73594
commit 502b832280
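The change is purely mechanical: in every touched file, old-style (K&R) definitions and PARAMS-wrapped declarations are replaced by ISO C90 prototypes. The sketch below is a minimal, self-contained illustration of that pattern; the function names and the local stand-in for the PARAMS macro are invented for the example, not taken from this patch.

#include <stdio.h>

/* Stand-in for the compatibility macro the patch removes: on ISO
   compilers it expands to its argument list, on pre-ISO compilers
   it expands to ().  */
#define PARAMS(args) args

/* Before: PARAMS-wrapped prototype and K&R-style definition.  */
static int scaled_sum_old PARAMS ((int, int));

static int
scaled_sum_old (a, b)
     int a;
     int b;
{
  return a + b;
}

/* After: plain ISO C90 prototype and definition, which is all the
   conversion changes.  */
static int scaled_sum_new (int a, int b);

static int
scaled_sum_new (int a, int b)
{
  return a + b;
}

int
main (void)
{
  printf ("%d %d\n", scaled_sum_old (2, 3), scaled_sum_new (2, 3));
  return 0;
}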
@@ -14,6 +14,16 @@
	* dwarf2out.c: Likewise.
	* dwarf2out.h: Likewise.
	* dwarfout.c: Likewise.
	* except.c: Likewise.
	* except.h: Likewise.
	* emit-rtl.c: Likewise.
	* et-forest.c: Likewise.
	* et-forest.h: Likewise.
	* except.c: Likewise.
	* explow.c: Likewise.
	* expmed.c: Likewise.
	* expr.c: Likewise.
	* expr.h: Likewise.

2003-06-29  Kazu Hirata  <kazu@cs.umass.edu>

 641  gcc/emit-rtl.c  (diff suppressed because it is too large)
 105  gcc/et-forest.c
@ -1,6 +1,6 @@
|
||||
/* ET-trees datastructure implementation.
|
||||
Contributed by Pavel Nejedly
|
||||
Copyright (C) 2002 Free Software Foundation, Inc.
|
||||
Copyright (C) 2002, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of the libiberty library.
|
||||
Libiberty is free software; you can redistribute it and/or
|
||||
@ -16,7 +16,7 @@ Library General Public License for more details.
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with libiberty; see the file COPYING.LIB. If
|
||||
not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
Boston, MA 02111-1307, USA.
|
||||
Boston, MA 02111-1307, USA.
|
||||
|
||||
The ET-forest structure is described in:
|
||||
D. D. Sleator and R. E. Tarjan. A data structure for dynamic trees.
|
||||
@ -42,7 +42,7 @@ struct et_forest
|
||||
alloc_pool occur_pool;
|
||||
};
|
||||
|
||||
/* Single occurrence of node in ET-forest.
|
||||
/* Single occurrence of node in ET-forest.
|
||||
A single node may have multiple occurrences.
|
||||
*/
|
||||
struct et_forest_occurrence
|
||||
@ -75,18 +75,17 @@ struct et_forest_node
|
||||
};
|
||||
|
||||
|
||||
static et_forest_occurrence_t splay PARAMS ((et_forest_occurrence_t));
|
||||
static void remove_all_occurrences PARAMS ((et_forest_t, et_forest_node_t));
|
||||
static inline et_forest_occurrence_t find_leftmost_node
|
||||
PARAMS ((et_forest_occurrence_t));
|
||||
static inline et_forest_occurrence_t find_rightmost_node
|
||||
PARAMS ((et_forest_occurrence_t));
|
||||
static int calculate_value PARAMS ((et_forest_occurrence_t));
|
||||
static et_forest_occurrence_t splay (et_forest_occurrence_t);
|
||||
static void remove_all_occurrences (et_forest_t, et_forest_node_t);
|
||||
static inline et_forest_occurrence_t find_leftmost_node
|
||||
(et_forest_occurrence_t);
|
||||
static inline et_forest_occurrence_t find_rightmost_node
|
||||
(et_forest_occurrence_t);
|
||||
static int calculate_value (et_forest_occurrence_t);
|
||||
|
||||
/* Return leftmost node present in the tree roted by OCC. */
|
||||
static inline et_forest_occurrence_t
|
||||
find_leftmost_node (occ)
|
||||
et_forest_occurrence_t occ;
|
||||
find_leftmost_node (et_forest_occurrence_t occ)
|
||||
{
|
||||
while (occ->left)
|
||||
occ = occ->left;
|
||||
@ -96,8 +95,7 @@ find_leftmost_node (occ)
|
||||
|
||||
/* Return rightmost node present in the tree roted by OCC. */
|
||||
static inline et_forest_occurrence_t
|
||||
find_rightmost_node (occ)
|
||||
et_forest_occurrence_t occ;
|
||||
find_rightmost_node (et_forest_occurrence_t occ)
|
||||
{
|
||||
while (occ->right)
|
||||
occ = occ->right;
|
||||
@ -107,8 +105,7 @@ find_rightmost_node (occ)
|
||||
|
||||
/* Operation splay for splay tree structure representing occurrences. */
|
||||
static et_forest_occurrence_t
|
||||
splay (node)
|
||||
et_forest_occurrence_t node;
|
||||
splay (et_forest_occurrence_t node)
|
||||
{
|
||||
et_forest_occurrence_t parent;
|
||||
et_forest_occurrence_t grandparent;
|
||||
@ -276,7 +273,7 @@ splay (node)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
/* parent == root. */
|
||||
@ -286,7 +283,7 @@ splay (node)
|
||||
{
|
||||
et_forest_occurrence_t node1;
|
||||
int count1;
|
||||
|
||||
|
||||
node1 = node->right;
|
||||
count1 = node->count_right;
|
||||
|
||||
@ -306,13 +303,13 @@ splay (node)
|
||||
else
|
||||
node->parent->right = node;
|
||||
}
|
||||
}
|
||||
else
|
||||
}
|
||||
else
|
||||
{
|
||||
/* node == parent->right. */
|
||||
et_forest_occurrence_t node1;
|
||||
int count1;
|
||||
|
||||
|
||||
node1 = node->left;
|
||||
count1 = node->count_left;
|
||||
|
||||
@ -339,9 +336,7 @@ splay (node)
|
||||
|
||||
/* Remove all occurrences of the given node before destroying the node. */
|
||||
static void
|
||||
remove_all_occurrences (forest, forest_node)
|
||||
et_forest_t forest;
|
||||
et_forest_node_t forest_node;
|
||||
remove_all_occurrences (et_forest_t forest, et_forest_node_t forest_node)
|
||||
{
|
||||
et_forest_occurrence_t first = forest_node->first;
|
||||
et_forest_occurrence_t last = forest_node->last;
|
||||
@ -352,7 +347,7 @@ remove_all_occurrences (forest, forest_node)
|
||||
if (first->left)
|
||||
first->left->parent = 0;
|
||||
if (first->right)
|
||||
first->right->parent = 0;
|
||||
first->right->parent = 0;
|
||||
|
||||
if (last != first)
|
||||
{
|
||||
@ -416,8 +411,7 @@ remove_all_occurrences (forest, forest_node)
|
||||
|
||||
/* Calculate ET value of the given node. */
|
||||
static inline int
|
||||
calculate_value (node)
|
||||
et_forest_occurrence_t node;
|
||||
calculate_value (et_forest_occurrence_t node)
|
||||
{
|
||||
int value = node->count_left;
|
||||
|
||||
@ -437,7 +431,7 @@ calculate_value (node)
|
||||
|
||||
/* Create ET-forest structure. */
|
||||
et_forest_t
|
||||
et_forest_create ()
|
||||
et_forest_create (void)
|
||||
{
|
||||
et_forest_t forest = xmalloc (sizeof (struct et_forest));
|
||||
|
||||
@ -450,9 +444,8 @@ et_forest_create ()
|
||||
|
||||
|
||||
/* Deallocate the structure. */
|
||||
void
|
||||
et_forest_delete (forest)
|
||||
et_forest_t forest;
|
||||
void
|
||||
et_forest_delete (et_forest_t forest)
|
||||
{
|
||||
if (forest->nnodes)
|
||||
abort ();
|
||||
@ -464,9 +457,7 @@ et_forest_delete (forest)
|
||||
/* Create new node with VALUE and return the edge.
|
||||
Return NULL when memory allocation failed. */
|
||||
et_forest_node_t
|
||||
et_forest_add_node (forest, value)
|
||||
et_forest_t forest;
|
||||
void *value;
|
||||
et_forest_add_node (et_forest_t forest, void *value)
|
||||
{
|
||||
/* Create node with one occurrence. */
|
||||
et_forest_node_t node;
|
||||
@ -489,10 +480,8 @@ et_forest_add_node (forest, value)
|
||||
/* Add new edge to the tree, return 1 if successful.
|
||||
0 indicates that creation of the edge will close the cycle in graph. */
|
||||
int
|
||||
et_forest_add_edge (forest, parent_node, child_node)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t parent_node;
|
||||
et_forest_node_t child_node;
|
||||
et_forest_add_edge (et_forest_t forest ATTRIBUTE_UNUSED,
|
||||
et_forest_node_t parent_node, et_forest_node_t child_node)
|
||||
{
|
||||
et_forest_occurrence_t new_occ, parent_occ, child_occ;
|
||||
|
||||
@ -510,7 +499,7 @@ et_forest_add_edge (forest, parent_node, child_node)
|
||||
|
||||
if (child_occ->left)
|
||||
abort (); /* child must be root of its containing tree. */
|
||||
|
||||
|
||||
new_occ = pool_alloc (forest->occur_pool);
|
||||
|
||||
new_occ->node = parent_node;
|
||||
@ -534,9 +523,7 @@ et_forest_add_edge (forest, parent_node, child_node)
|
||||
|
||||
/* Remove NODE from the tree and all connected edges. */
|
||||
void
|
||||
et_forest_remove_node (forest, node)
|
||||
et_forest_t forest;
|
||||
et_forest_node_t node;
|
||||
et_forest_remove_node (et_forest_t forest, et_forest_node_t node)
|
||||
{
|
||||
remove_all_occurrences (forest, node);
|
||||
forest->nnodes--;
|
||||
@ -547,10 +534,9 @@ et_forest_remove_node (forest, node)
|
||||
/* Remove edge from the tree, return 1 if successful,
|
||||
0 indicates nonexisting edge. */
|
||||
int
|
||||
et_forest_remove_edge (forest, parent_node, child_node)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t parent_node;
|
||||
et_forest_node_t child_node;
|
||||
et_forest_remove_edge (et_forest_t forest ATTRIBUTE_UNUSED,
|
||||
et_forest_node_t parent_node,
|
||||
et_forest_node_t child_node)
|
||||
{
|
||||
et_forest_occurrence_t parent_pre_occ, parent_post_occ;
|
||||
|
||||
@ -565,7 +551,7 @@ et_forest_remove_edge (forest, parent_node, child_node)
|
||||
|
||||
splay (parent_pre_occ);
|
||||
parent_pre_occ->right->parent = 0;
|
||||
|
||||
|
||||
parent_post_occ = parent_pre_occ->next;
|
||||
splay (parent_post_occ);
|
||||
|
||||
@ -587,9 +573,7 @@ et_forest_remove_edge (forest, parent_node, child_node)
|
||||
|
||||
/* Return the parent of the NODE if any, NULL otherwise. */
|
||||
et_forest_node_t
|
||||
et_forest_parent (forest, node)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t node;
|
||||
et_forest_parent (et_forest_t forest ATTRIBUTE_UNUSED, et_forest_node_t node)
|
||||
{
|
||||
splay (node->first);
|
||||
|
||||
@ -603,17 +587,15 @@ et_forest_parent (forest, node)
|
||||
/* Return nearest common ancestor of NODE1 and NODE2.
|
||||
Return NULL of they are in different trees. */
|
||||
et_forest_node_t
|
||||
et_forest_common_ancestor (forest, node1, node2)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t node1;
|
||||
et_forest_node_t node2;
|
||||
et_forest_common_ancestor (et_forest_t forest ATTRIBUTE_UNUSED,
|
||||
et_forest_node_t node1, et_forest_node_t node2)
|
||||
{
|
||||
int value1, value2, max_value;
|
||||
et_forest_node_t ancestor;
|
||||
|
||||
if (node1 == node2)
|
||||
return node1;
|
||||
|
||||
|
||||
if (! node1 || ! node2)
|
||||
abort ();
|
||||
|
||||
@ -636,7 +618,7 @@ et_forest_common_ancestor (forest, node1, node2)
|
||||
ancestor = node2;
|
||||
max_value = value1;
|
||||
}
|
||||
|
||||
|
||||
while (calculate_value (ancestor->last) < max_value)
|
||||
{
|
||||
/* Find parent node. */
|
||||
@ -649,9 +631,8 @@ et_forest_common_ancestor (forest, node1, node2)
|
||||
|
||||
/* Return the value pointer of node set during it's creation. */
|
||||
void *
|
||||
et_forest_node_value (forest, node)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t node;
|
||||
et_forest_node_value (et_forest_t forest ATTRIBUTE_UNUSED,
|
||||
et_forest_node_t node)
|
||||
{
|
||||
/* Alloc threading NULL as a special node of the forest. */
|
||||
if (!node)
|
||||
@ -662,10 +643,8 @@ et_forest_node_value (forest, node)
|
||||
/* Find all sons of NODE and store them into ARRAY allocated by the caller.
|
||||
Return number of nodes found. */
|
||||
int
|
||||
et_forest_enumerate_sons (forest, node, array)
|
||||
et_forest_t forest ATTRIBUTE_UNUSED;
|
||||
et_forest_node_t node;
|
||||
et_forest_node_t *array;
|
||||
et_forest_enumerate_sons (et_forest_t forest ATTRIBUTE_UNUSED,
|
||||
et_forest_node_t node, et_forest_node_t *array)
|
||||
{
|
||||
int n = 0;
|
||||
et_forest_occurrence_t occ = node->first, stop = node->last, occ1;
|
||||
|
gcc/et-forest.h

@@ -1,5 +1,5 @@
/* Et-forest data structure implementation.
   Copyright (C) 2002 Free Software Foundation, Inc.
/* Et-forest data structure implementation.
   Copyright (C) 2002, 2003 Free Software Foundation, Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -15,32 +15,32 @@ You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */

/* This package implements ET forest data structure. Each tree in
/* This package implements ET forest data structure. Each tree in
   the structure maintains a tree structure and offers logarithmic time
   for tree operations (insertion and removal of nodes and edges) and
   poly-logarithmic time for nearest common ancestor.

   ET tree strores its structue as a sequence of symbols obtained

   ET tree strores its structue as a sequence of symbols obtained
   by dfs(root)

   dfs (node)
   dfs (node)
   {
     s = node;
     for each child c of node do
       s = concat (s, c, node);
     return s;
   }

   For example for tree

           1
         / | \
        2  3  4
      / |
     4  5

   the sequence is 1 2 4 2 5 3 1 3 1 4 1.

   The sequence is stored in a sligtly modified splay tree.
   In order to support various types of node values, a hashtable
   is used to convert node values to the internal representation. */
@@ -58,23 +58,23 @@ extern "C" {
typedef struct et_forest *et_forest_t;
typedef struct et_forest_node *et_forest_node_t;

extern et_forest_t et_forest_create PARAMS ((void));
extern et_forest_t et_forest_create (void);

extern void et_forest_delete PARAMS ((et_forest_t));
extern void et_forest_delete (et_forest_t);

extern et_forest_node_t et_forest_add_node PARAMS ((et_forest_t, void *));
extern int et_forest_add_edge PARAMS ((et_forest_t, et_forest_node_t,
                                       et_forest_node_t));
extern void et_forest_remove_node PARAMS ((et_forest_t, et_forest_node_t));
extern int et_forest_remove_edge PARAMS ((et_forest_t, et_forest_node_t,
                                          et_forest_node_t));
extern et_forest_node_t et_forest_parent PARAMS ((et_forest_t, et_forest_node_t));
extern et_forest_node_t et_forest_common_ancestor PARAMS ((et_forest_t,
                                                           et_forest_node_t,
                                                           et_forest_node_t));
extern void * et_forest_node_value PARAMS ((et_forest_t, et_forest_node_t));
extern int et_forest_enumerate_sons PARAMS ((et_forest_t, et_forest_node_t,
                                             et_forest_node_t *));
extern et_forest_node_t et_forest_add_node (et_forest_t, void *);
extern int et_forest_add_edge (et_forest_t, et_forest_node_t,
                               et_forest_node_t);
extern void et_forest_remove_node (et_forest_t, et_forest_node_t);
extern int et_forest_remove_edge (et_forest_t, et_forest_node_t,
                                  et_forest_node_t);
extern et_forest_node_t et_forest_parent (et_forest_t, et_forest_node_t);
extern et_forest_node_t et_forest_common_ancestor (et_forest_t,
                                                    et_forest_node_t,
                                                    et_forest_node_t);
extern void * et_forest_node_value (et_forest_t, et_forest_node_t);
extern int et_forest_enumerate_sons (et_forest_t, et_forest_node_t,
                                     et_forest_node_t *);

#ifdef __cplusplus
}
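The comment block in et-forest.h above describes how an ET tree linearizes a rooted tree into the sequence produced by its dfs () pseudocode. The following self-contained sketch reproduces that traversal on a small hard-coded tree; the node numbering and array encoding are invented for the illustration and are not the et_forest_t representation used by et-forest.c.

#include <stdio.h>

/* A tiny fixed tree: node 1 has children 2 and 3; node 2 has child 4.
   first_child[n] is n's first child (0 = none); next_sibling[n] is
   the next child of n's parent (0 = none).  */
#define N_NODES 5                     /* nodes are numbered 1..4 */
static const int first_child[N_NODES]  = { 0, 2, 4, 0, 0 };
static const int next_sibling[N_NODES] = { 0, 0, 3, 0, 0 };

/* Emit the Euler-tour ("ET") sequence: the node itself, then the
   node again after each child's subtree, as in the header comment.  */
static void
dfs (int node)
{
  int c;

  printf (" %d", node);
  for (c = first_child[node]; c != 0; c = next_sibling[c])
    {
      dfs (c);
      printf (" %d", node);
    }
}

int
main (void)
{
  dfs (1);                            /* prints: 1 2 4 2 1 3 1 */
  printf ("\n");
  return 0;
}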
 397  gcc/except.c
@ -1,6 +1,6 @@
|
||||
/* Implements exception handling.
|
||||
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
|
||||
1999, 2000, 2001, 2002 Free Software Foundation, Inc.
|
||||
1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
|
||||
Contributed by Mike Stump <mrs@cygnus.com>.
|
||||
|
||||
This file is part of GCC.
|
||||
@ -86,13 +86,13 @@ int flag_non_call_exceptions;
|
||||
|
||||
/* Protect cleanup actions with must-not-throw regions, with a call
|
||||
to the given failure handler. */
|
||||
tree (*lang_protect_cleanup_actions) PARAMS ((void));
|
||||
tree (*lang_protect_cleanup_actions) (void);
|
||||
|
||||
/* Return true if type A catches type B. */
|
||||
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
|
||||
int (*lang_eh_type_covers) (tree a, tree b);
|
||||
|
||||
/* Map a type to a runtime object to match type. */
|
||||
tree (*lang_eh_runtime_type) PARAMS ((tree));
|
||||
tree (*lang_eh_runtime_type) (tree);
|
||||
|
||||
/* A hash table of label to region number. */
|
||||
|
||||
@ -241,7 +241,7 @@ struct eh_status GTY(())
|
||||
|
||||
htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
|
||||
|
||||
struct call_site_record * GTY ((length ("%h.call_site_data_used")))
|
||||
struct call_site_record * GTY ((length ("%h.call_site_data_used")))
|
||||
call_site_data;
|
||||
int call_site_data_used;
|
||||
int call_site_data_size;
|
||||
@ -255,61 +255,50 @@ struct eh_status GTY(())
|
||||
};
|
||||
|
||||
|
||||
static int t2r_eq PARAMS ((const void *,
|
||||
const void *));
|
||||
static hashval_t t2r_hash PARAMS ((const void *));
|
||||
static void add_type_for_runtime PARAMS ((tree));
|
||||
static tree lookup_type_for_runtime PARAMS ((tree));
|
||||
static int t2r_eq (const void *, const void *);
|
||||
static hashval_t t2r_hash (const void *);
|
||||
static void add_type_for_runtime (tree);
|
||||
static tree lookup_type_for_runtime (tree);
|
||||
|
||||
static struct eh_region *expand_eh_region_end PARAMS ((void));
|
||||
static struct eh_region *expand_eh_region_end (void);
|
||||
|
||||
static rtx get_exception_filter PARAMS ((struct function *));
|
||||
static rtx get_exception_filter (struct function *);
|
||||
|
||||
static void collect_eh_region_array PARAMS ((void));
|
||||
static void resolve_fixup_regions PARAMS ((void));
|
||||
static void remove_fixup_regions PARAMS ((void));
|
||||
static void remove_unreachable_regions PARAMS ((rtx));
|
||||
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
|
||||
static void collect_eh_region_array (void);
|
||||
static void resolve_fixup_regions (void);
|
||||
static void remove_fixup_regions (void);
|
||||
static void remove_unreachable_regions (rtx);
|
||||
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
|
||||
|
||||
static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
|
||||
struct inline_remap *));
|
||||
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
|
||||
struct eh_region **));
|
||||
static int ttypes_filter_eq PARAMS ((const void *,
|
||||
const void *));
|
||||
static hashval_t ttypes_filter_hash PARAMS ((const void *));
|
||||
static int ehspec_filter_eq PARAMS ((const void *,
|
||||
const void *));
|
||||
static hashval_t ehspec_filter_hash PARAMS ((const void *));
|
||||
static int add_ttypes_entry PARAMS ((htab_t, tree));
|
||||
static int add_ehspec_entry PARAMS ((htab_t, htab_t,
|
||||
tree));
|
||||
static void assign_filter_values PARAMS ((void));
|
||||
static void build_post_landing_pads PARAMS ((void));
|
||||
static void connect_post_landing_pads PARAMS ((void));
|
||||
static void dw2_build_landing_pads PARAMS ((void));
|
||||
static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
|
||||
struct inline_remap *);
|
||||
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
|
||||
static int ttypes_filter_eq (const void *, const void *);
|
||||
static hashval_t ttypes_filter_hash (const void *);
|
||||
static int ehspec_filter_eq (const void *, const void *);
|
||||
static hashval_t ehspec_filter_hash (const void *);
|
||||
static int add_ttypes_entry (htab_t, tree);
|
||||
static int add_ehspec_entry (htab_t, htab_t, tree);
|
||||
static void assign_filter_values (void);
|
||||
static void build_post_landing_pads (void);
|
||||
static void connect_post_landing_pads (void);
|
||||
static void dw2_build_landing_pads (void);
|
||||
|
||||
struct sjlj_lp_info;
|
||||
static bool sjlj_find_directly_reachable_regions
|
||||
PARAMS ((struct sjlj_lp_info *));
|
||||
static void sjlj_assign_call_site_values
|
||||
PARAMS ((rtx, struct sjlj_lp_info *));
|
||||
static void sjlj_mark_call_sites
|
||||
PARAMS ((struct sjlj_lp_info *));
|
||||
static void sjlj_emit_function_enter PARAMS ((rtx));
|
||||
static void sjlj_emit_function_exit PARAMS ((void));
|
||||
static void sjlj_emit_dispatch_table
|
||||
PARAMS ((rtx, struct sjlj_lp_info *));
|
||||
static void sjlj_build_landing_pads PARAMS ((void));
|
||||
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
|
||||
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
|
||||
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
|
||||
static void sjlj_emit_function_enter (rtx);
|
||||
static void sjlj_emit_function_exit (void);
|
||||
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
|
||||
static void sjlj_build_landing_pads (void);
|
||||
|
||||
static hashval_t ehl_hash PARAMS ((const void *));
|
||||
static int ehl_eq PARAMS ((const void *,
|
||||
const void *));
|
||||
static void add_ehl_entry PARAMS ((rtx,
|
||||
struct eh_region *));
|
||||
static void remove_exception_handler_label PARAMS ((rtx));
|
||||
static void remove_eh_handler PARAMS ((struct eh_region *));
|
||||
static int for_each_eh_label_1 PARAMS ((void **, void *));
|
||||
static hashval_t ehl_hash (const void *);
|
||||
static int ehl_eq (const void *, const void *);
|
||||
static void add_ehl_entry (rtx, struct eh_region *);
|
||||
static void remove_exception_handler_label (rtx);
|
||||
static void remove_eh_handler (struct eh_region *);
|
||||
static int for_each_eh_label_1 (void **, void *);
|
||||
|
||||
struct reachable_info;
|
||||
|
||||
@ -326,30 +315,26 @@ enum reachable_code
|
||||
RNL_BLOCKED
|
||||
};
|
||||
|
||||
static int check_handled PARAMS ((tree, tree));
|
||||
static void add_reachable_handler
|
||||
PARAMS ((struct reachable_info *, struct eh_region *,
|
||||
struct eh_region *));
|
||||
static enum reachable_code reachable_next_level
|
||||
PARAMS ((struct eh_region *, tree, struct reachable_info *));
|
||||
static int check_handled (tree, tree);
|
||||
static void add_reachable_handler (struct reachable_info *,
|
||||
struct eh_region *, struct eh_region *);
|
||||
static enum reachable_code reachable_next_level (struct eh_region *, tree,
|
||||
struct reachable_info *);
|
||||
|
||||
static int action_record_eq PARAMS ((const void *,
|
||||
const void *));
|
||||
static hashval_t action_record_hash PARAMS ((const void *));
|
||||
static int add_action_record PARAMS ((htab_t, int, int));
|
||||
static int collect_one_action_chain PARAMS ((htab_t,
|
||||
struct eh_region *));
|
||||
static int add_call_site PARAMS ((rtx, int));
|
||||
static int action_record_eq (const void *, const void *);
|
||||
static hashval_t action_record_hash (const void *);
|
||||
static int add_action_record (htab_t, int, int);
|
||||
static int collect_one_action_chain (htab_t, struct eh_region *);
|
||||
static int add_call_site (rtx, int);
|
||||
|
||||
static void push_uleb128 PARAMS ((varray_type *,
|
||||
unsigned int));
|
||||
static void push_sleb128 PARAMS ((varray_type *, int));
|
||||
static void push_uleb128 (varray_type *, unsigned int);
|
||||
static void push_sleb128 (varray_type *, int);
|
||||
#ifndef HAVE_AS_LEB128
|
||||
static int dw2_size_of_call_site_table PARAMS ((void));
|
||||
static int sjlj_size_of_call_site_table PARAMS ((void));
|
||||
static int dw2_size_of_call_site_table (void);
|
||||
static int sjlj_size_of_call_site_table (void);
|
||||
#endif
|
||||
static void dw2_output_call_site_table PARAMS ((void));
|
||||
static void sjlj_output_call_site_table PARAMS ((void));
|
||||
static void dw2_output_call_site_table (void);
|
||||
static void sjlj_output_call_site_table (void);
|
||||
|
||||
|
||||
/* Routine to see if exception handling is turned on.
|
||||
@ -360,8 +345,7 @@ static void sjlj_output_call_site_table PARAMS ((void));
|
||||
compiler tries to use any exception-specific functions. */
|
||||
|
||||
int
|
||||
doing_eh (do_warn)
|
||||
int do_warn;
|
||||
doing_eh (int do_warn)
|
||||
{
|
||||
if (! flag_exceptions)
|
||||
{
|
||||
@ -378,7 +362,7 @@ doing_eh (do_warn)
|
||||
|
||||
|
||||
void
|
||||
init_eh ()
|
||||
init_eh (void)
|
||||
{
|
||||
if (! flag_exceptions)
|
||||
return;
|
||||
@ -472,9 +456,9 @@ init_eh ()
|
||||
}
|
||||
|
||||
void
|
||||
init_eh_for_function ()
|
||||
init_eh_for_function (void)
|
||||
{
|
||||
cfun->eh = (struct eh_status *)
|
||||
cfun->eh = (struct eh_status *)
|
||||
ggc_alloc_cleared (sizeof (struct eh_status));
|
||||
}
|
||||
|
||||
@ -483,7 +467,7 @@ init_eh_for_function ()
|
||||
expand_eh_region_end is invoked. */
|
||||
|
||||
void
|
||||
expand_eh_region_start ()
|
||||
expand_eh_region_start (void)
|
||||
{
|
||||
struct eh_region *new_region;
|
||||
struct eh_region *cur_region;
|
||||
@ -517,7 +501,7 @@ expand_eh_region_start ()
|
||||
/* Common code to end a region. Returns the region just ended. */
|
||||
|
||||
static struct eh_region *
|
||||
expand_eh_region_end ()
|
||||
expand_eh_region_end (void)
|
||||
{
|
||||
struct eh_region *cur_region = cfun->eh->cur_region;
|
||||
rtx note;
|
||||
@ -536,8 +520,7 @@ expand_eh_region_end ()
|
||||
expression to expand for the cleanup. */
|
||||
|
||||
void
|
||||
expand_eh_region_end_cleanup (handler)
|
||||
tree handler;
|
||||
expand_eh_region_end_cleanup (tree handler)
|
||||
{
|
||||
struct eh_region *region;
|
||||
tree protect_cleanup_actions;
|
||||
@ -603,7 +586,7 @@ expand_eh_region_end_cleanup (handler)
|
||||
for subsequent calls to expand_start_catch. */
|
||||
|
||||
void
|
||||
expand_start_all_catch ()
|
||||
expand_start_all_catch (void)
|
||||
{
|
||||
struct eh_region *region;
|
||||
|
||||
@ -626,8 +609,7 @@ expand_start_all_catch ()
|
||||
is useful e.g. for Ada. */
|
||||
|
||||
void
|
||||
expand_start_catch (type_or_list)
|
||||
tree type_or_list;
|
||||
expand_start_catch (tree type_or_list)
|
||||
{
|
||||
struct eh_region *t, *c, *l;
|
||||
tree type_list;
|
||||
@ -674,7 +656,7 @@ expand_start_catch (type_or_list)
|
||||
/* End a catch clause. Control will resume after the try/catch block. */
|
||||
|
||||
void
|
||||
expand_end_catch ()
|
||||
expand_end_catch (void)
|
||||
{
|
||||
struct eh_region *try_region;
|
||||
|
||||
@ -690,7 +672,7 @@ expand_end_catch ()
|
||||
/* End a sequence of catch handlers for a try block. */
|
||||
|
||||
void
|
||||
expand_end_all_catch ()
|
||||
expand_end_all_catch (void)
|
||||
{
|
||||
struct eh_region *try_region;
|
||||
|
||||
@ -712,8 +694,7 @@ expand_end_all_catch ()
|
||||
rethrowing satisfies the "filter" of the catch type. */
|
||||
|
||||
void
|
||||
expand_eh_region_end_allowed (allowed, failure)
|
||||
tree allowed, failure;
|
||||
expand_eh_region_end_allowed (tree allowed, tree failure)
|
||||
{
|
||||
struct eh_region *region;
|
||||
rtx around_label;
|
||||
@ -755,8 +736,7 @@ expand_eh_region_end_allowed (allowed, failure)
|
||||
the C++ LSDA. */
|
||||
|
||||
void
|
||||
expand_eh_region_end_must_not_throw (failure)
|
||||
tree failure;
|
||||
expand_eh_region_end_must_not_throw (tree failure)
|
||||
{
|
||||
struct eh_region *region;
|
||||
rtx around_label;
|
||||
@ -786,8 +766,7 @@ expand_eh_region_end_must_not_throw (failure)
|
||||
is being thrown. */
|
||||
|
||||
void
|
||||
expand_eh_region_end_throw (type)
|
||||
tree type;
|
||||
expand_eh_region_end_throw (tree type)
|
||||
{
|
||||
struct eh_region *region;
|
||||
|
||||
@ -810,8 +789,7 @@ expand_eh_region_end_throw (type)
|
||||
the proper notion of "enclosing" in convert_from_eh_region_ranges. */
|
||||
|
||||
void
|
||||
expand_eh_region_end_fixup (handler)
|
||||
tree handler;
|
||||
expand_eh_region_end_fixup (tree handler)
|
||||
{
|
||||
struct eh_region *fixup;
|
||||
|
||||
@ -827,7 +805,7 @@ expand_eh_region_end_fixup (handler)
|
||||
call to a function which itself may contain a throw. */
|
||||
|
||||
void
|
||||
note_eh_region_may_contain_throw ()
|
||||
note_eh_region_may_contain_throw (void)
|
||||
{
|
||||
struct eh_region *region;
|
||||
|
||||
@ -843,8 +821,7 @@ note_eh_region_may_contain_throw ()
|
||||
within a handler. */
|
||||
|
||||
rtx
|
||||
get_exception_pointer (fun)
|
||||
struct function *fun;
|
||||
get_exception_pointer (struct function *fun)
|
||||
{
|
||||
rtx exc_ptr = fun->eh->exc_ptr;
|
||||
if (fun == cfun && ! exc_ptr)
|
||||
@ -859,8 +836,7 @@ get_exception_pointer (fun)
|
||||
within a handler. */
|
||||
|
||||
static rtx
|
||||
get_exception_filter (fun)
|
||||
struct function *fun;
|
||||
get_exception_filter (struct function *fun)
|
||||
{
|
||||
rtx filter = fun->eh->filter;
|
||||
if (fun == cfun && ! filter)
|
||||
@ -878,7 +854,7 @@ get_exception_filter (fun)
|
||||
without having to realloc memory. */
|
||||
|
||||
static void
|
||||
collect_eh_region_array ()
|
||||
collect_eh_region_array (void)
|
||||
{
|
||||
struct eh_region **array, *i;
|
||||
|
||||
@ -914,7 +890,7 @@ collect_eh_region_array ()
|
||||
}
|
||||
|
||||
static void
|
||||
resolve_fixup_regions ()
|
||||
resolve_fixup_regions (void)
|
||||
{
|
||||
int i, j, n = cfun->eh->last_region_number;
|
||||
|
||||
@ -944,7 +920,7 @@ resolve_fixup_regions ()
|
||||
we can shuffle pointers and remove them from the tree. */
|
||||
|
||||
static void
|
||||
remove_fixup_regions ()
|
||||
remove_fixup_regions (void)
|
||||
{
|
||||
int i;
|
||||
rtx insn, note;
|
||||
@ -1014,8 +990,7 @@ remove_fixup_regions ()
|
||||
/* Remove all regions whose labels are not reachable from insns. */
|
||||
|
||||
static void
|
||||
remove_unreachable_regions (insns)
|
||||
rtx insns;
|
||||
remove_unreachable_regions (rtx insns)
|
||||
{
|
||||
int i, *uid_region_num;
|
||||
bool *reachable;
|
||||
@ -1078,10 +1053,7 @@ remove_unreachable_regions (insns)
|
||||
can_throw instruction in the region. */
|
||||
|
||||
static void
|
||||
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
|
||||
rtx *pinsns;
|
||||
int *orig_sp;
|
||||
int cur;
|
||||
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
|
||||
{
|
||||
int *sp = orig_sp;
|
||||
rtx insn, next;
|
||||
@ -1162,7 +1134,7 @@ convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
|
||||
}
|
||||
|
||||
void
|
||||
convert_from_eh_region_ranges ()
|
||||
convert_from_eh_region_ranges (void)
|
||||
{
|
||||
int *stack;
|
||||
rtx insns;
|
||||
@ -1180,9 +1152,7 @@ convert_from_eh_region_ranges ()
|
||||
}
|
||||
|
||||
static void
|
||||
add_ehl_entry (label, region)
|
||||
rtx label;
|
||||
struct eh_region *region;
|
||||
add_ehl_entry (rtx label, struct eh_region *region)
|
||||
{
|
||||
struct ehl_map_entry **slot, *entry;
|
||||
|
||||
@ -1206,7 +1176,7 @@ add_ehl_entry (label, region)
|
||||
}
|
||||
|
||||
void
|
||||
find_exception_handler_labels ()
|
||||
find_exception_handler_labels (void)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1247,7 +1217,7 @@ find_exception_handler_labels ()
|
||||
}
|
||||
|
||||
bool
|
||||
current_function_has_exception_handlers ()
|
||||
current_function_has_exception_handlers (void)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1265,9 +1235,7 @@ current_function_has_exception_handlers ()
|
||||
}
|
||||
|
||||
static struct eh_region *
|
||||
duplicate_eh_region_1 (o, map)
|
||||
struct eh_region *o;
|
||||
struct inline_remap *map;
|
||||
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
|
||||
{
|
||||
struct eh_region *n
|
||||
= (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));
|
||||
@ -1316,9 +1284,7 @@ duplicate_eh_region_1 (o, map)
|
||||
}
|
||||
|
||||
static void
|
||||
duplicate_eh_region_2 (o, n_array)
|
||||
struct eh_region *o;
|
||||
struct eh_region **n_array;
|
||||
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
|
||||
{
|
||||
struct eh_region *n = n_array[o->region_number];
|
||||
|
||||
@ -1349,9 +1315,7 @@ duplicate_eh_region_2 (o, n_array)
|
||||
}
|
||||
|
||||
int
|
||||
duplicate_eh_regions (ifun, map)
|
||||
struct function *ifun;
|
||||
struct inline_remap *map;
|
||||
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
|
||||
{
|
||||
int ifun_last_region_number = ifun->eh->last_region_number;
|
||||
struct eh_region **n_array, *root, *cur;
|
||||
@ -1417,9 +1381,7 @@ duplicate_eh_regions (ifun, map)
|
||||
|
||||
|
||||
static int
|
||||
t2r_eq (pentry, pdata)
|
||||
const void *pentry;
|
||||
const void *pdata;
|
||||
t2r_eq (const void *pentry, const void *pdata)
|
||||
{
|
||||
tree entry = (tree) pentry;
|
||||
tree data = (tree) pdata;
|
||||
@ -1428,16 +1390,14 @@ t2r_eq (pentry, pdata)
|
||||
}
|
||||
|
||||
static hashval_t
|
||||
t2r_hash (pentry)
|
||||
const void *pentry;
|
||||
t2r_hash (const void *pentry)
|
||||
{
|
||||
tree entry = (tree) pentry;
|
||||
return TYPE_HASH (TREE_PURPOSE (entry));
|
||||
}
|
||||
|
||||
static void
|
||||
add_type_for_runtime (type)
|
||||
tree type;
|
||||
add_type_for_runtime (tree type)
|
||||
{
|
||||
tree *slot;
|
||||
|
||||
@ -1451,8 +1411,7 @@ add_type_for_runtime (type)
|
||||
}
|
||||
|
||||
static tree
|
||||
lookup_type_for_runtime (type)
|
||||
tree type;
|
||||
lookup_type_for_runtime (tree type)
|
||||
{
|
||||
tree *slot;
|
||||
|
||||
@ -1476,9 +1435,7 @@ struct ttypes_filter GTY(())
|
||||
(a tree) for a @TTypes type node we are thinking about adding. */
|
||||
|
||||
static int
|
||||
ttypes_filter_eq (pentry, pdata)
|
||||
const void *pentry;
|
||||
const void *pdata;
|
||||
ttypes_filter_eq (const void *pentry, const void *pdata)
|
||||
{
|
||||
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
||||
tree data = (tree) pdata;
|
||||
@ -1487,8 +1444,7 @@ ttypes_filter_eq (pentry, pdata)
|
||||
}
|
||||
|
||||
static hashval_t
|
||||
ttypes_filter_hash (pentry)
|
||||
const void *pentry;
|
||||
ttypes_filter_hash (const void *pentry)
|
||||
{
|
||||
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
||||
return TYPE_HASH (entry->t);
|
||||
@ -1500,9 +1456,7 @@ ttypes_filter_hash (pentry)
|
||||
should put these in some canonical order. */
|
||||
|
||||
static int
|
||||
ehspec_filter_eq (pentry, pdata)
|
||||
const void *pentry;
|
||||
const void *pdata;
|
||||
ehspec_filter_eq (const void *pentry, const void *pdata)
|
||||
{
|
||||
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
||||
const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
|
||||
@ -1513,8 +1467,7 @@ ehspec_filter_eq (pentry, pdata)
|
||||
/* Hash function for exception specification lists. */
|
||||
|
||||
static hashval_t
|
||||
ehspec_filter_hash (pentry)
|
||||
const void *pentry;
|
||||
ehspec_filter_hash (const void *pentry)
|
||||
{
|
||||
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
||||
hashval_t h = 0;
|
||||
@ -1529,9 +1482,7 @@ ehspec_filter_hash (pentry)
|
||||
up the search. Return the filter value to be used. */
|
||||
|
||||
static int
|
||||
add_ttypes_entry (ttypes_hash, type)
|
||||
htab_t ttypes_hash;
|
||||
tree type;
|
||||
add_ttypes_entry (htab_t ttypes_hash, tree type)
|
||||
{
|
||||
struct ttypes_filter **slot, *n;
|
||||
|
||||
@ -1557,10 +1508,7 @@ add_ttypes_entry (ttypes_hash, type)
|
||||
to speed up the search. Return the filter value to be used. */
|
||||
|
||||
static int
|
||||
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
|
||||
htab_t ehspec_hash;
|
||||
htab_t ttypes_hash;
|
||||
tree list;
|
||||
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
|
||||
{
|
||||
struct ttypes_filter **slot, *n;
|
||||
struct ttypes_filter dummy;
|
||||
@ -1595,7 +1543,7 @@ add_ehspec_entry (ehspec_hash, ttypes_hash, list)
|
||||
the same filter value, which saves table space. */
|
||||
|
||||
static void
|
||||
assign_filter_values ()
|
||||
assign_filter_values (void)
|
||||
{
|
||||
int i;
|
||||
htab_t ttypes, ehspec;
|
||||
@ -1667,7 +1615,7 @@ assign_filter_values ()
|
||||
landing pads. */
|
||||
|
||||
static void
|
||||
build_post_landing_pads ()
|
||||
build_post_landing_pads (void)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1783,7 +1731,7 @@ build_post_landing_pads ()
|
||||
_Unwind_Resume otherwise. */
|
||||
|
||||
static void
|
||||
connect_post_landing_pads ()
|
||||
connect_post_landing_pads (void)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1824,7 +1772,7 @@ connect_post_landing_pads ()
|
||||
|
||||
|
||||
static void
|
||||
dw2_build_landing_pads ()
|
||||
dw2_build_landing_pads (void)
|
||||
{
|
||||
int i;
|
||||
unsigned int j;
|
||||
@ -1907,8 +1855,7 @@ struct sjlj_lp_info
|
||||
};
|
||||
|
||||
static bool
|
||||
sjlj_find_directly_reachable_regions (lp_info)
|
||||
struct sjlj_lp_info *lp_info;
|
||||
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
|
||||
{
|
||||
rtx insn;
|
||||
bool found_one = false;
|
||||
@ -1956,9 +1903,7 @@ sjlj_find_directly_reachable_regions (lp_info)
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_assign_call_site_values (dispatch_label, lp_info)
|
||||
rtx dispatch_label;
|
||||
struct sjlj_lp_info *lp_info;
|
||||
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
|
||||
{
|
||||
htab_t ar_hash;
|
||||
int i, index;
|
||||
@ -2020,8 +1965,7 @@ sjlj_assign_call_site_values (dispatch_label, lp_info)
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_mark_call_sites (lp_info)
|
||||
struct sjlj_lp_info *lp_info;
|
||||
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
|
||||
{
|
||||
int last_call_site = -2;
|
||||
rtx insn, mem;
|
||||
@ -2085,8 +2029,7 @@ sjlj_mark_call_sites (lp_info)
|
||||
/* Construct the SjLj_Function_Context. */
|
||||
|
||||
static void
|
||||
sjlj_emit_function_enter (dispatch_label)
|
||||
rtx dispatch_label;
|
||||
sjlj_emit_function_enter (rtx dispatch_label)
|
||||
{
|
||||
rtx fn_begin, fc, mem, seq;
|
||||
|
||||
@ -2151,14 +2094,13 @@ sjlj_emit_function_enter (dispatch_label)
|
||||
the call to unwind_sjlj_unregister_libfunc if needed. */
|
||||
|
||||
void
|
||||
sjlj_emit_function_exit_after (after)
|
||||
rtx after;
|
||||
sjlj_emit_function_exit_after (rtx after)
|
||||
{
|
||||
cfun->eh->sjlj_exit_after = after;
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_emit_function_exit ()
|
||||
sjlj_emit_function_exit (void)
|
||||
{
|
||||
rtx seq;
|
||||
|
||||
@ -2178,9 +2120,7 @@ sjlj_emit_function_exit ()
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_emit_dispatch_table (dispatch_label, lp_info)
|
||||
rtx dispatch_label;
|
||||
struct sjlj_lp_info *lp_info;
|
||||
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
|
||||
{
|
||||
int i, first_reachable;
|
||||
rtx mem, dispatch, seq, fc;
|
||||
@ -2243,7 +2183,7 @@ sjlj_emit_dispatch_table (dispatch_label, lp_info)
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_build_landing_pads ()
|
||||
sjlj_build_landing_pads (void)
|
||||
{
|
||||
struct sjlj_lp_info *lp_info;
|
||||
|
||||
@ -2271,7 +2211,7 @@ sjlj_build_landing_pads ()
|
||||
}
|
||||
|
||||
void
|
||||
finish_eh_generation ()
|
||||
finish_eh_generation (void)
|
||||
{
|
||||
/* Nothing to do if no regions created. */
|
||||
if (cfun->eh->region_tree == NULL)
|
||||
@ -2313,8 +2253,7 @@ finish_eh_generation ()
|
||||
}
|
||||
|
||||
static hashval_t
|
||||
ehl_hash (pentry)
|
||||
const void *pentry;
|
||||
ehl_hash (const void *pentry)
|
||||
{
|
||||
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
|
||||
|
||||
@ -2324,9 +2263,7 @@ ehl_hash (pentry)
|
||||
}
|
||||
|
||||
static int
|
||||
ehl_eq (pentry, pdata)
|
||||
const void *pentry;
|
||||
const void *pdata;
|
||||
ehl_eq (const void *pentry, const void *pdata)
|
||||
{
|
||||
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
|
||||
struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
|
||||
@ -2339,8 +2276,7 @@ ehl_eq (pentry, pdata)
|
||||
/* Remove LABEL from exception_handler_label_map. */
|
||||
|
||||
static void
|
||||
remove_exception_handler_label (label)
|
||||
rtx label;
|
||||
remove_exception_handler_label (rtx label)
|
||||
{
|
||||
struct ehl_map_entry **slot, tmp;
|
||||
|
||||
@ -2361,8 +2297,7 @@ remove_exception_handler_label (label)
|
||||
/* Splice REGION from the region tree etc. */
|
||||
|
||||
static void
|
||||
remove_eh_handler (region)
|
||||
struct eh_region *region;
|
||||
remove_eh_handler (struct eh_region *region)
|
||||
{
|
||||
struct eh_region **pp, **pp_start, *p, *outer, *inner;
|
||||
rtx lab;
|
||||
@ -2451,8 +2386,7 @@ remove_eh_handler (region)
|
||||
delete the region. */
|
||||
|
||||
void
|
||||
maybe_remove_eh_handler (label)
|
||||
rtx label;
|
||||
maybe_remove_eh_handler (rtx label)
|
||||
{
|
||||
struct ehl_map_entry **slot, tmp;
|
||||
struct eh_region *region;
|
||||
@ -2490,20 +2424,17 @@ maybe_remove_eh_handler (label)
|
||||
loop hackery; should not be used by new code. */
|
||||
|
||||
void
|
||||
for_each_eh_label (callback)
|
||||
void (*callback) PARAMS ((rtx));
|
||||
for_each_eh_label (void (*callback) (rtx))
|
||||
{
|
||||
htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
|
||||
(void *)callback);
|
||||
}
|
||||
|
||||
static int
|
||||
for_each_eh_label_1 (pentry, data)
|
||||
void **pentry;
|
||||
void *data;
|
||||
for_each_eh_label_1 (void **pentry, void *data)
|
||||
{
|
||||
struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
|
||||
void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
|
||||
void (*callback) (rtx) = (void (*) (rtx)) data;
|
||||
|
||||
(*callback) (entry->label);
|
||||
return 1;
|
||||
@ -2523,8 +2454,7 @@ struct reachable_info GTY(())
|
||||
base class of TYPE, is in HANDLED. */
|
||||
|
||||
static int
|
||||
check_handled (handled, type)
|
||||
tree handled, type;
|
||||
check_handled (tree handled, tree type)
|
||||
{
|
||||
tree t;
|
||||
|
||||
@ -2554,10 +2484,7 @@ check_handled (handled, type)
|
||||
LP_REGION contains the landing pad; REGION is the handler. */
|
||||
|
||||
static void
|
||||
add_reachable_handler (info, lp_region, region)
|
||||
struct reachable_info *info;
|
||||
struct eh_region *lp_region;
|
||||
struct eh_region *region;
|
||||
add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
|
||||
{
|
||||
if (! info)
|
||||
return;
|
||||
@ -2577,10 +2504,8 @@ add_reachable_handler (info, lp_region, region)
|
||||
and caught/allowed type information between invocations. */
|
||||
|
||||
static enum reachable_code
|
||||
reachable_next_level (region, type_thrown, info)
|
||||
struct eh_region *region;
|
||||
tree type_thrown;
|
||||
struct reachable_info *info;
|
||||
reachable_next_level (struct eh_region *region, tree type_thrown,
|
||||
struct reachable_info *info)
|
||||
{
|
||||
switch (region->type)
|
||||
{
|
||||
@ -2741,8 +2666,7 @@ reachable_next_level (region, type_thrown, info)
|
||||
reached by a given insn. */
|
||||
|
||||
rtx
|
||||
reachable_handlers (insn)
|
||||
rtx insn;
|
||||
reachable_handlers (rtx insn)
|
||||
{
|
||||
struct reachable_info info;
|
||||
struct eh_region *region;
|
||||
@ -2793,7 +2717,7 @@ reachable_handlers (insn)
|
||||
else
|
||||
region = region->outer;
|
||||
}
|
||||
|
||||
|
||||
return info.handlers;
|
||||
}
|
||||
|
||||
@ -2801,8 +2725,7 @@ reachable_handlers (insn)
|
||||
within the function. */
|
||||
|
||||
bool
|
||||
can_throw_internal (insn)
|
||||
rtx insn;
|
||||
can_throw_internal (rtx insn)
|
||||
{
|
||||
struct eh_region *region;
|
||||
tree type_thrown;
|
||||
@ -2862,8 +2785,7 @@ can_throw_internal (insn)
|
||||
visible outside the function. */
|
||||
|
||||
bool
|
||||
can_throw_external (insn)
|
||||
rtx insn;
|
||||
can_throw_external (rtx insn)
|
||||
{
|
||||
struct eh_region *region;
|
||||
tree type_thrown;
|
||||
@ -2926,10 +2848,10 @@ can_throw_external (insn)
|
||||
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
|
||||
|
||||
void
|
||||
set_nothrow_function_flags ()
|
||||
set_nothrow_function_flags (void)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
|
||||
current_function_nothrow = 1;
|
||||
|
||||
/* Assume cfun->all_throwers_are_sibcalls until we encounter
|
||||
@ -2942,7 +2864,7 @@ set_nothrow_function_flags ()
|
||||
|
||||
if (! flag_exceptions)
|
||||
return;
|
||||
|
||||
|
||||
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
||||
if (can_throw_external (insn))
|
||||
{
|
||||
@ -2976,7 +2898,7 @@ set_nothrow_function_flags ()
|
||||
On the SPARC, this means flushing the register windows. */
|
||||
|
||||
void
|
||||
expand_builtin_unwind_init ()
|
||||
expand_builtin_unwind_init (void)
|
||||
{
|
||||
/* Set this so all the registers get saved in our frame; we need to be
|
||||
able to copy the saved values for any registers from frames we unwind. */
|
||||
@ -2988,8 +2910,7 @@ expand_builtin_unwind_init ()
|
||||
}
|
||||
|
||||
rtx
|
||||
expand_builtin_eh_return_data_regno (arglist)
|
||||
tree arglist;
|
||||
expand_builtin_eh_return_data_regno (tree arglist)
|
||||
{
|
||||
tree which = TREE_VALUE (arglist);
|
||||
unsigned HOST_WIDE_INT iwhich;
|
||||
@ -3018,8 +2939,7 @@ expand_builtin_eh_return_data_regno (arglist)
|
||||
return the actual address encoded in that value. */
|
||||
|
||||
rtx
|
||||
expand_builtin_extract_return_addr (addr_tree)
|
||||
tree addr_tree;
|
||||
expand_builtin_extract_return_addr (tree addr_tree)
|
||||
{
|
||||
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
|
||||
|
||||
@ -3051,8 +2971,7 @@ expand_builtin_extract_return_addr (addr_tree)
|
||||
stack slot so the epilogue will return to that address. */
|
||||
|
||||
rtx
|
||||
expand_builtin_frob_return_addr (addr_tree)
|
||||
tree addr_tree;
|
||||
expand_builtin_frob_return_addr (tree addr_tree)
|
||||
{
|
||||
rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
|
||||
|
||||
@ -3073,9 +2992,8 @@ expand_builtin_frob_return_addr (addr_tree)
|
||||
exception handler. */
|
||||
|
||||
void
|
||||
expand_builtin_eh_return (stackadj_tree, handler_tree)
|
||||
tree stackadj_tree ATTRIBUTE_UNUSED;
|
||||
tree handler_tree;
|
||||
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
|
||||
tree handler_tree)
|
||||
{
|
||||
rtx tmp;
|
||||
|
||||
@ -3107,7 +3025,7 @@ expand_builtin_eh_return (stackadj_tree, handler_tree)
|
||||
}
|
||||
|
||||
void
|
||||
expand_eh_return ()
|
||||
expand_eh_return (void)
|
||||
{
|
||||
rtx around_label;
|
||||
|
||||
@ -3165,9 +3083,7 @@ struct action_record
|
||||
};
|
||||
|
||||
static int
|
||||
action_record_eq (pentry, pdata)
|
||||
const void *pentry;
|
||||
const void *pdata;
|
||||
action_record_eq (const void *pentry, const void *pdata)
|
||||
{
|
||||
const struct action_record *entry = (const struct action_record *) pentry;
|
||||
const struct action_record *data = (const struct action_record *) pdata;
|
||||
@ -3175,17 +3091,14 @@ action_record_eq (pentry, pdata)
|
||||
}
|
||||
|
||||
static hashval_t
|
||||
action_record_hash (pentry)
|
||||
const void *pentry;
|
||||
action_record_hash (const void *pentry)
|
||||
{
|
||||
const struct action_record *entry = (const struct action_record *) pentry;
|
||||
return entry->next * 1009 + entry->filter;
|
||||
}
|
||||
|
||||
static int
|
||||
add_action_record (ar_hash, filter, next)
|
||||
htab_t ar_hash;
|
||||
int filter, next;
|
||||
add_action_record (htab_t ar_hash, int filter, int next)
|
||||
{
|
||||
struct action_record **slot, *new, tmp;
|
||||
|
||||
@ -3216,9 +3129,7 @@ add_action_record (ar_hash, filter, next)
|
||||
}
|
||||
|
||||
static int
|
||||
collect_one_action_chain (ar_hash, region)
|
||||
htab_t ar_hash;
|
||||
struct eh_region *region;
|
||||
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
|
||||
{
|
||||
struct eh_region *c;
|
||||
int next;
|
||||
@ -3319,9 +3230,7 @@ collect_one_action_chain (ar_hash, region)
|
||||
}
|
||||
|
||||
static int
|
||||
add_call_site (landing_pad, action)
|
||||
rtx landing_pad;
|
||||
int action;
|
||||
add_call_site (rtx landing_pad, int action)
|
||||
{
|
||||
struct call_site_record *data = cfun->eh->call_site_data;
|
||||
int used = cfun->eh->call_site_data_used;
|
||||
@ -3349,7 +3258,7 @@ add_call_site (landing_pad, action)
|
||||
instead to call site entries. */
|
||||
|
||||
void
|
||||
convert_to_eh_region_ranges ()
|
||||
convert_to_eh_region_ranges (void)
|
||||
{
|
||||
rtx insn, iter, note;
|
||||
htab_t ar_hash;
|
||||
@ -3473,9 +3382,7 @@ convert_to_eh_region_ranges ()
|
||||
|
||||
|
||||
static void
|
||||
push_uleb128 (data_area, value)
|
||||
varray_type *data_area;
|
||||
unsigned int value;
|
||||
push_uleb128 (varray_type *data_area, unsigned int value)
|
||||
{
|
||||
do
|
||||
{
|
||||
@ -3489,9 +3396,7 @@ push_uleb128 (data_area, value)
|
||||
}
|
||||
|
||||
static void
|
||||
push_sleb128 (data_area, value)
|
||||
varray_type *data_area;
|
||||
int value;
|
||||
push_sleb128 (varray_type *data_area, int value)
|
||||
{
|
||||
unsigned char byte;
|
||||
int more;
|
||||
@ -3512,7 +3417,7 @@ push_sleb128 (data_area, value)
|
||||
|
||||
#ifndef HAVE_AS_LEB128
|
||||
static int
|
||||
dw2_size_of_call_site_table ()
|
||||
dw2_size_of_call_site_table (void)
|
||||
{
|
||||
int n = cfun->eh->call_site_data_used;
|
||||
int size = n * (4 + 4 + 4);
|
||||
@ -3528,7 +3433,7 @@ dw2_size_of_call_site_table ()
|
||||
}
|
||||
|
||||
static int
|
||||
sjlj_size_of_call_site_table ()
|
||||
sjlj_size_of_call_site_table (void)
|
||||
{
|
||||
int n = cfun->eh->call_site_data_used;
|
||||
int size = 0;
|
||||
@ -3546,7 +3451,7 @@ sjlj_size_of_call_site_table ()
|
||||
#endif
|
||||
|
||||
static void
|
||||
dw2_output_call_site_table ()
|
||||
dw2_output_call_site_table (void)
|
||||
{
|
||||
const char *const function_start_lab
|
||||
= IDENTIFIER_POINTER (current_function_func_begin_label);
|
||||
@ -3598,7 +3503,7 @@ dw2_output_call_site_table ()
|
||||
}
|
||||
|
||||
static void
|
||||
sjlj_output_call_site_table ()
|
||||
sjlj_output_call_site_table (void)
|
||||
{
|
||||
int n = cfun->eh->call_site_data_used;
|
||||
int i;
|
||||
@ -3619,7 +3524,7 @@ sjlj_output_call_site_table ()
|
||||
table. */
|
||||
|
||||
void
|
||||
default_exception_section ()
|
||||
default_exception_section (void)
|
||||
{
|
||||
if (targetm.have_named_sections)
|
||||
{
|
||||
@ -3643,7 +3548,7 @@ default_exception_section ()
|
||||
}
|
||||
|
||||
void
|
||||
output_function_exception_table ()
|
||||
output_function_exception_table (void)
|
||||
{
|
||||
int tt_format, cs_format, lp_format, i, n;
|
||||
#ifdef HAVE_AS_LEB128
|
||||
|
  83  gcc/except.h
@ -1,5 +1,5 @@
|
||||
/* Exception Handling interface routines.
|
||||
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002
|
||||
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
|
||||
Free Software Foundation, Inc.
|
||||
Contributed by Mike Stump <mrs@cygnus.com>.
|
||||
|
||||
@ -33,95 +33,94 @@ struct eh_status;
|
||||
struct eh_region;
|
||||
|
||||
/* Test: is exception handling turned on? */
|
||||
extern int doing_eh PARAMS ((int));
|
||||
extern int doing_eh (int);
|
||||
|
||||
/* Start an exception handling region. All instructions emitted after
|
||||
this point are considered to be part of the region until an
|
||||
expand_eh_region_end variant is invoked. */
|
||||
extern void expand_eh_region_start PARAMS ((void));
|
||||
extern void expand_eh_region_start (void);
|
||||
|
||||
/* End an exception handling region for a cleanup. HANDLER is an
|
||||
expression to expand for the cleanup. */
|
||||
extern void expand_eh_region_end_cleanup PARAMS ((tree));
|
||||
extern void expand_eh_region_end_cleanup (tree);
|
||||
|
||||
/* End an exception handling region for a try block, and prepares
|
||||
for subsequent calls to expand_start_catch. */
|
||||
extern void expand_start_all_catch PARAMS ((void));
|
||||
extern void expand_start_all_catch (void);
|
||||
|
||||
/* Begin a catch clause. TYPE is an object to be matched by the
|
||||
runtime, or a list of such objects, or null if this is a catch-all
|
||||
clause. */
|
||||
extern void expand_start_catch PARAMS ((tree));
|
||||
extern void expand_start_catch (tree);
|
||||
|
||||
/* End a catch clause. Control will resume after the try/catch block. */
|
||||
extern void expand_end_catch PARAMS ((void));
|
||||
extern void expand_end_catch (void);
|
||||
|
||||
/* End a sequence of catch handlers for a try block. */
|
||||
extern void expand_end_all_catch PARAMS ((void));
|
||||
extern void expand_end_all_catch (void);
|
||||
|
||||
/* End an exception region for an exception type filter. ALLOWED is a
|
||||
TREE_LIST of TREE_VALUE objects to be matched by the runtime.
|
||||
FAILURE is a function to invoke if a mismatch occurs. */
|
||||
extern void expand_eh_region_end_allowed PARAMS ((tree, tree));
|
||||
extern void expand_eh_region_end_allowed (tree, tree);
|
||||
|
||||
/* End an exception region for a must-not-throw filter. FAILURE is a
|
||||
function to invoke if an uncaught exception propagates this far. */
|
||||
extern void expand_eh_region_end_must_not_throw PARAMS ((tree));
|
||||
extern void expand_eh_region_end_must_not_throw (tree);
|
||||
|
||||
/* End an exception region for a throw. No handling goes on here,
|
||||
but it's the easiest way for the front-end to indicate what type
|
||||
is being thrown. */
|
||||
extern void expand_eh_region_end_throw PARAMS ((tree));
|
||||
extern void expand_eh_region_end_throw (tree);
|
||||
|
||||
/* End a fixup region. Within this region the cleanups for the immediately
|
||||
enclosing region are _not_ run. This is used for goto cleanup to avoid
|
||||
destroying an object twice. */
|
||||
extern void expand_eh_region_end_fixup PARAMS ((tree));
|
||||
extern void expand_eh_region_end_fixup (tree);
|
||||
|
||||
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */
extern void note_eh_region_may_contain_throw PARAMS ((void));
extern void note_eh_region_may_contain_throw (void);

/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */
extern void for_each_eh_label PARAMS ((void (*) (rtx)));
extern void for_each_eh_label (void (*) (rtx));

/* Determine if the given INSN can throw an exception.  */
extern bool can_throw_internal PARAMS ((rtx));
extern bool can_throw_external PARAMS ((rtx));
extern bool can_throw_internal (rtx);
extern bool can_throw_external (rtx);

/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */
extern void set_nothrow_function_flags PARAMS ((void));
extern void set_nothrow_function_flags (void);

/* After initial rtl generation, call back to finish generating
   exception support code.  */
extern void finish_eh_generation PARAMS ((void));
extern void finish_eh_generation (void);

extern void init_eh PARAMS ((void));
extern void init_eh_for_function PARAMS ((void));
extern void init_eh (void);
extern void init_eh_for_function (void);

extern rtx reachable_handlers PARAMS ((rtx));
extern void maybe_remove_eh_handler PARAMS ((rtx));
extern rtx reachable_handlers (rtx);
extern void maybe_remove_eh_handler (rtx);

extern void convert_from_eh_region_ranges PARAMS ((void));
extern void convert_to_eh_region_ranges PARAMS ((void));
extern void find_exception_handler_labels PARAMS ((void));
extern bool current_function_has_exception_handlers PARAMS ((void));
extern void output_function_exception_table PARAMS ((void));
extern void convert_from_eh_region_ranges (void);
extern void convert_to_eh_region_ranges (void);
extern void find_exception_handler_labels (void);
extern bool current_function_has_exception_handlers (void);
extern void output_function_exception_table (void);

extern void expand_builtin_unwind_init PARAMS ((void));
extern rtx expand_builtin_eh_return_data_regno PARAMS ((tree));
extern rtx expand_builtin_extract_return_addr PARAMS ((tree));
extern void expand_builtin_init_dwarf_reg_sizes PARAMS ((tree));
extern rtx expand_builtin_frob_return_addr PARAMS ((tree));
extern rtx expand_builtin_dwarf_sp_column PARAMS ((void));
extern void expand_builtin_eh_return PARAMS ((tree, tree));
extern void expand_eh_return PARAMS ((void));
extern rtx get_exception_pointer PARAMS ((struct function *));
extern int duplicate_eh_regions PARAMS ((struct function *,
					 struct inline_remap *));
extern void expand_builtin_unwind_init (void);
extern rtx expand_builtin_eh_return_data_regno (tree);
extern rtx expand_builtin_extract_return_addr (tree);
extern void expand_builtin_init_dwarf_reg_sizes (tree);
extern rtx expand_builtin_frob_return_addr (tree);
extern rtx expand_builtin_dwarf_sp_column (void);
extern void expand_builtin_eh_return (tree, tree);
extern void expand_eh_return (void);
extern rtx get_exception_pointer (struct function *);
extern int duplicate_eh_regions (struct function *, struct inline_remap *);

extern void sjlj_emit_function_exit_after PARAMS ((rtx));
extern void sjlj_emit_function_exit_after (rtx);


/* If non-NULL, this is a function that returns an expression to be
@@ -130,13 +129,13 @@ extern void sjlj_emit_function_exit_after PARAMS ((rtx));
   during stack unwinding is required to result in a call to
   `std::terminate', so the C++ version of this function returns a
   CALL_EXPR for `std::terminate'.  */
extern tree (*lang_protect_cleanup_actions) PARAMS ((void));
extern tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
extern int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
extern int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
extern tree (*lang_eh_runtime_type) PARAMS ((tree));
extern tree (*lang_eh_runtime_type) (tree);


/* Just because the user configured --with-sjlj-exceptions=no doesn't
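Every hunk in this commit applies the same mechanical change: the PARAMS compatibility wrapper is dropped from declarations, and old-style (K&R) definitions are rewritten with ISO C90 prototypes. A minimal before/after sketch of that pattern follows; scale_value is an illustrative name, not an identifier from this patch, and the PARAMS definition shown is roughly what ansidecl.h supplies on an ISO compiler.

/* On an ISO compiler the old compatibility macro expands to its argument list.  */
#define PARAMS(args) args

#if 0
/* Before: declaration wrapped in PARAMS, definition written K&R style.  */
extern long scale_value PARAMS ((long, int));

long
scale_value (v, shift)
     long v;
     int shift;
{
  return v << shift;
}
#else
/* After: an ISO C90 prototype in both the declaration and the definition.  */
extern long scale_value (long, int);

long
scale_value (long v, int shift)
{
  return v << shift;
}
#endif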
135 gcc/explow.c
@@ -1,6 +1,6 @@
/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

@@ -38,16 +38,14 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "recog.h"
#include "langhooks.h"

static rtx break_out_memory_refs PARAMS ((rtx));
static void emit_stack_probe PARAMS ((rtx));
static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

@@ -78,9 +76,7 @@ trunc_int_for_mode (c, mode)
   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     rtx x;
     HOST_WIDE_INT c;
plus_constant_wide (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
@@ -206,9 +202,7 @@ plus_constant_wide (x, c)
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;
@@ -244,8 +238,7 @@ eliminate_constant_term (x, constptr)
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
expr_size (tree exp)
{
  tree size = (*lang_hooks.expr_size) (exp);

@@ -259,8 +252,7 @@ expr_size (exp)
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (exp)
     tree exp;
int_expr_size (tree exp)
{
  tree t = (*lang_hooks.expr_size) (exp);

@@ -292,8 +284,7 @@ int_expr_size (exp)
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     rtx x;
break_out_memory_refs (rtx x)
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
@@ -321,9 +312,7 @@ break_out_memory_refs (x)
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
convert_memory_address (enum machine_mode to_mode, rtx x)
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;
@@ -409,8 +398,7 @@ convert_memory_address (to_mode, x)
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     rtx x;
copy_all_regs (rtx x)
{
  if (GET_CODE (x) == REG)
    {
@@ -439,9 +427,7 @@ copy_all_regs (x)
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     rtx x;
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

@@ -569,9 +555,7 @@ memory_address (mode, x)
/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
memory_address_noforce (enum machine_mode mode, rtx x)
{
  int ambient_force_addr = flag_force_addr;
  rtx val;
@@ -586,8 +570,7 @@ memory_address_noforce (mode, x)
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
validize_mem (rtx ref)
{
  if (GET_CODE (ref) != MEM)
    return ref;
@@ -604,9 +587,7 @@ validize_mem (ref)
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
maybe_set_unchanging (rtx ref, tree t)
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
@@ -635,8 +616,7 @@ maybe_set_unchanging (ref, t)
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
stabilize (rtx x)
{
  if (GET_CODE (x) != MEM
      || ! rtx_unstable_p (XEXP (x, 0)))
@@ -649,8 +629,7 @@ stabilize (x)
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

@@ -669,8 +648,7 @@ copy_to_reg (x)
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}
@@ -679,9 +657,7 @@ copy_addr_to_reg (x)
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

@@ -706,9 +682,7 @@ copy_to_mode_reg (mode, x)
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

@@ -749,8 +723,7 @@ force_reg (mode, x)
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
force_not_mem (rtx x)
{
  rtx temp;

@@ -767,9 +740,7 @@ force_not_mem (x)
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

@@ -789,11 +760,8 @@ copy_to_suggested_reg (x, target, mode)
   FOR_CALL is nonzero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
promote_mode (tree type, enum machine_mode mode, int *punsignedp,
	      int for_call ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;
@@ -832,8 +800,7 @@ promote_mode (type, mode, punsignedp, for_call)
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
adjust_stack (rtx adjust)
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);
@@ -863,8 +830,7 @@ adjust_stack (adjust)
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
anti_adjust_stack (rtx adjust)
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);
@@ -894,8 +860,7 @@ anti_adjust_stack (adjust)
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

@@ -933,14 +898,11 @@ round_push (size)
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
@@ -1016,13 +978,10 @@ emit_stack_save (save_level, psave, after)
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
@@ -1056,7 +1015,7 @@ emit_stack_restore (save_level, sa, after)
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
@@ -1083,8 +1042,7 @@ emit_stack_restore (save_level, sa, after)
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
optimize_save_area_alloca (rtx insns)
{
  rtx insn;

@@ -1168,10 +1126,7 @@ optimize_save_area_alloca (insns)
   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
@@ -1416,8 +1371,7 @@ allocate_dynamic_stack_space (size, target, known_align)
static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}
@@ -1425,8 +1379,7 @@ set_stack_check_libfunc (libfunc)
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

@@ -1451,9 +1404,7 @@ emit_stack_probe (address)
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
@@ -1585,10 +1536,8 @@ probe_stack_range (first, size)
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
hard_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

@@ -1631,8 +1580,7 @@ hard_function_value (valtype, func, outgoing)
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}
@@ -1643,8 +1591,7 @@ hard_libcall_value (mode)
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

239 gcc/expmed.c
@ -37,24 +37,20 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
#include "recog.h"
|
||||
#include "langhooks.h"
|
||||
|
||||
static void store_fixed_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, rtx));
|
||||
static void store_split_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, rtx));
|
||||
static rtx extract_fixed_bit_field PARAMS ((enum machine_mode, rtx,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
rtx, int));
|
||||
static rtx mask_rtx PARAMS ((enum machine_mode, int,
|
||||
int, int));
|
||||
static rtx lshift_value PARAMS ((enum machine_mode, rtx,
|
||||
int, int));
|
||||
static rtx extract_split_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, int));
|
||||
static void do_cmp_and_jump PARAMS ((rtx, rtx, enum rtx_code,
|
||||
enum machine_mode, rtx));
|
||||
static void store_fixed_bit_field (rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, rtx);
|
||||
static void store_split_bit_field (rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, rtx);
|
||||
static rtx extract_fixed_bit_field (enum machine_mode, rtx,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, rtx, int);
|
||||
static rtx mask_rtx (enum machine_mode, int, int, int);
|
||||
static rtx lshift_value (enum machine_mode, rtx, int, int);
|
||||
static rtx extract_split_bit_field (rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, int);
|
||||
static void do_cmp_and_jump (rtx, rtx, enum rtx_code, enum machine_mode, rtx);
|
||||
|
||||
/* Nonzero means divides or modulus operations are relatively cheap for
|
||||
powers of two, so don't use branches; emit the operation instead.
|
||||
@ -104,7 +100,7 @@ static int mul_widen_cost[NUM_MACHINE_MODES];
|
||||
static int mul_highpart_cost[NUM_MACHINE_MODES];
|
||||
|
||||
void
|
||||
init_expmed ()
|
||||
init_expmed (void)
|
||||
{
|
||||
rtx reg, shift_insn, shiftadd_insn, shiftsub_insn;
|
||||
int dummy;
|
||||
@ -207,9 +203,7 @@ init_expmed ()
|
||||
useful if X is a CONST_INT. */
|
||||
|
||||
rtx
|
||||
negate_rtx (mode, x)
|
||||
enum machine_mode mode;
|
||||
rtx x;
|
||||
negate_rtx (enum machine_mode mode, rtx x)
|
||||
{
|
||||
rtx result = simplify_unary_operation (NEG, mode, x, mode);
|
||||
|
||||
@ -224,9 +218,7 @@ negate_rtx (mode, x)
|
||||
is false; else the mode of the specified operand. If OPNO is -1,
|
||||
all the caller cares about is whether the insn is available. */
|
||||
enum machine_mode
|
||||
mode_for_extraction (pattern, opno)
|
||||
enum extraction_pattern pattern;
|
||||
int opno;
|
||||
mode_for_extraction (enum extraction_pattern pattern, int opno)
|
||||
{
|
||||
const struct insn_data *data;
|
||||
|
||||
@ -287,13 +279,9 @@ mode_for_extraction (pattern, opno)
|
||||
else, we use the mode of operand 3. */
|
||||
|
||||
rtx
|
||||
store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, total_size)
|
||||
rtx str_rtx;
|
||||
unsigned HOST_WIDE_INT bitsize;
|
||||
unsigned HOST_WIDE_INT bitnum;
|
||||
enum machine_mode fieldmode;
|
||||
rtx value;
|
||||
HOST_WIDE_INT total_size;
|
||||
store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitnum, enum machine_mode fieldmode,
|
||||
rtx value, HOST_WIDE_INT total_size)
|
||||
{
|
||||
unsigned int unit
|
||||
= (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
|
||||
@ -702,10 +690,9 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, total_size)
|
||||
Note that protect_from_queue has already been done on OP0 and VALUE. */
|
||||
|
||||
static void
|
||||
store_fixed_bit_field (op0, offset, bitsize, bitpos, value)
|
||||
rtx op0;
|
||||
unsigned HOST_WIDE_INT offset, bitsize, bitpos;
|
||||
rtx value;
|
||||
store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
|
||||
unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitpos, rtx value)
|
||||
{
|
||||
enum machine_mode mode;
|
||||
unsigned int total_bits = BITS_PER_WORD;
|
||||
@ -868,10 +855,8 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value)
|
||||
This does not yet handle fields wider than BITS_PER_WORD. */
|
||||
|
||||
static void
|
||||
store_split_bit_field (op0, bitsize, bitpos, value)
|
||||
rtx op0;
|
||||
unsigned HOST_WIDE_INT bitsize, bitpos;
|
||||
rtx value;
|
||||
store_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitpos, rtx value)
|
||||
{
|
||||
unsigned int unit;
|
||||
unsigned int bitsdone = 0;
|
||||
@ -1004,15 +989,10 @@ store_split_bit_field (op0, bitsize, bitpos, value)
|
||||
if they are equally easy. */
|
||||
|
||||
rtx
|
||||
extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
|
||||
target, mode, tmode, total_size)
|
||||
rtx str_rtx;
|
||||
unsigned HOST_WIDE_INT bitsize;
|
||||
unsigned HOST_WIDE_INT bitnum;
|
||||
int unsignedp;
|
||||
rtx target;
|
||||
enum machine_mode mode, tmode;
|
||||
HOST_WIDE_INT total_size;
|
||||
extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitnum, int unsignedp, rtx target,
|
||||
enum machine_mode mode, enum machine_mode tmode,
|
||||
HOST_WIDE_INT total_size)
|
||||
{
|
||||
unsigned int unit
|
||||
= (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
|
||||
@ -1560,12 +1540,11 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
|
||||
If TARGET is not used, create a pseudo-reg of mode TMODE for the value. */
|
||||
|
||||
static rtx
|
||||
extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
|
||||
target, unsignedp)
|
||||
enum machine_mode tmode;
|
||||
rtx op0, target;
|
||||
unsigned HOST_WIDE_INT offset, bitsize, bitpos;
|
||||
int unsignedp;
|
||||
extract_fixed_bit_field (enum machine_mode tmode, rtx op0,
|
||||
unsigned HOST_WIDE_INT offset,
|
||||
unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitpos, rtx target,
|
||||
int unsignedp)
|
||||
{
|
||||
unsigned int total_bits = BITS_PER_WORD;
|
||||
enum machine_mode mode;
|
||||
@ -1692,9 +1671,7 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
|
||||
BITSIZE+BITPOS is too small for MODE. */
|
||||
|
||||
static rtx
|
||||
mask_rtx (mode, bitpos, bitsize, complement)
|
||||
enum machine_mode mode;
|
||||
int bitpos, bitsize, complement;
|
||||
mask_rtx (enum machine_mode mode, int bitpos, int bitsize, int complement)
|
||||
{
|
||||
HOST_WIDE_INT masklow, maskhigh;
|
||||
|
||||
@ -1735,10 +1712,7 @@ mask_rtx (mode, bitpos, bitsize, complement)
|
||||
VALUE truncated to BITSIZE bits and then shifted left BITPOS bits. */
|
||||
|
||||
static rtx
|
||||
lshift_value (mode, value, bitpos, bitsize)
|
||||
enum machine_mode mode;
|
||||
rtx value;
|
||||
int bitpos, bitsize;
|
||||
lshift_value (enum machine_mode mode, rtx value, int bitpos, int bitsize)
|
||||
{
|
||||
unsigned HOST_WIDE_INT v = INTVAL (value);
|
||||
HOST_WIDE_INT low, high;
|
||||
@ -1768,10 +1742,8 @@ lshift_value (mode, value, bitpos, bitsize)
|
||||
UNSIGNEDP is 1 if should zero-extend the contents; else sign-extend. */
|
||||
|
||||
static rtx
|
||||
extract_split_bit_field (op0, bitsize, bitpos, unsignedp)
|
||||
rtx op0;
|
||||
unsigned HOST_WIDE_INT bitsize, bitpos;
|
||||
int unsignedp;
|
||||
extract_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
||||
unsigned HOST_WIDE_INT bitpos, int unsignedp)
|
||||
{
|
||||
unsigned int unit;
|
||||
unsigned int bitsdone = 0;
|
||||
@ -1870,8 +1842,7 @@ extract_split_bit_field (op0, bitsize, bitpos, unsignedp)
|
||||
/* Add INC into TARGET. */
|
||||
|
||||
void
|
||||
expand_inc (target, inc)
|
||||
rtx target, inc;
|
||||
expand_inc (rtx target, rtx inc)
|
||||
{
|
||||
rtx value = expand_binop (GET_MODE (target), add_optab,
|
||||
target, inc,
|
||||
@ -1883,8 +1854,7 @@ expand_inc (target, inc)
|
||||
/* Subtract DEC from TARGET. */
|
||||
|
||||
void
|
||||
expand_dec (target, dec)
|
||||
rtx target, dec;
|
||||
expand_dec (rtx target, rtx dec)
|
||||
{
|
||||
rtx value = expand_binop (GET_MODE (target), sub_optab,
|
||||
target, dec,
|
||||
@ -1901,13 +1871,8 @@ expand_dec (target, dec)
|
||||
Return the rtx for where the value is. */
|
||||
|
||||
rtx
|
||||
expand_shift (code, mode, shifted, amount, target, unsignedp)
|
||||
enum tree_code code;
|
||||
enum machine_mode mode;
|
||||
rtx shifted;
|
||||
tree amount;
|
||||
rtx target;
|
||||
int unsignedp;
|
||||
expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
|
||||
tree amount, rtx target, int unsignedp)
|
||||
{
|
||||
rtx op1, temp = 0;
|
||||
int left = (code == LSHIFT_EXPR || code == LROTATE_EXPR);
|
||||
@ -2079,25 +2044,19 @@ struct algorithm
|
||||
char log[MAX_BITS_PER_WORD];
|
||||
};
|
||||
|
||||
static void synth_mult PARAMS ((struct algorithm *,
|
||||
unsigned HOST_WIDE_INT,
|
||||
int));
|
||||
static unsigned HOST_WIDE_INT choose_multiplier PARAMS ((unsigned HOST_WIDE_INT,
|
||||
int, int,
|
||||
unsigned HOST_WIDE_INT *,
|
||||
int *, int *));
|
||||
static unsigned HOST_WIDE_INT invert_mod2n PARAMS ((unsigned HOST_WIDE_INT,
|
||||
int));
|
||||
static void synth_mult (struct algorithm *, unsigned HOST_WIDE_INT, int);
|
||||
static unsigned HOST_WIDE_INT choose_multiplier (unsigned HOST_WIDE_INT, int,
|
||||
int, unsigned HOST_WIDE_INT *,
|
||||
int *, int *);
|
||||
static unsigned HOST_WIDE_INT invert_mod2n (unsigned HOST_WIDE_INT, int);
|
||||
/* Compute and return the best algorithm for multiplying by T.
|
||||
The algorithm must cost less than cost_limit
|
||||
If retval.cost >= COST_LIMIT, no algorithm was found and all
|
||||
other field of the returned struct are undefined. */
|
||||
|
||||
static void
|
||||
synth_mult (alg_out, t, cost_limit)
|
||||
struct algorithm *alg_out;
|
||||
unsigned HOST_WIDE_INT t;
|
||||
int cost_limit;
|
||||
synth_mult (struct algorithm *alg_out, unsigned HOST_WIDE_INT t,
|
||||
int cost_limit)
|
||||
{
|
||||
int m;
|
||||
struct algorithm *alg_in, *best_alg;
|
||||
@ -2341,10 +2300,7 @@ synth_mult (alg_out, t, cost_limit)
|
||||
you should swap the two operands if OP0 would be constant. */
|
||||
|
||||
rtx
|
||||
expand_mult (mode, op0, op1, target, unsignedp)
|
||||
enum machine_mode mode;
|
||||
rtx op0, op1, target;
|
||||
int unsignedp;
|
||||
expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target, int unsignedp)
|
||||
{
|
||||
rtx const_op1 = op1;
|
||||
|
||||
@ -2535,9 +2491,9 @@ expand_mult (mode, op0, op1, target, unsignedp)
|
||||
|
||||
insn = get_last_insn ();
|
||||
set_unique_reg_note (insn,
|
||||
REG_EQUAL,
|
||||
REG_EQUAL,
|
||||
gen_rtx_MULT (nmode, tem,
|
||||
GEN_INT (val_so_far)));
|
||||
GEN_INT (val_so_far)));
|
||||
}
|
||||
|
||||
if (variant == negate_variant)
|
||||
@ -2573,8 +2529,7 @@ expand_mult (mode, op0, op1, target, unsignedp)
|
||||
/* Return the smallest n such that 2**n >= X. */
|
||||
|
||||
int
|
||||
ceil_log2 (x)
|
||||
unsigned HOST_WIDE_INT x;
|
||||
ceil_log2 (unsigned HOST_WIDE_INT x)
|
||||
{
|
||||
return floor_log2 (x - 1) + 1;
|
||||
}
|
||||
@ -2597,13 +2552,9 @@ ceil_log2 (x)
|
||||
|
||||
static
|
||||
unsigned HOST_WIDE_INT
|
||||
choose_multiplier (d, n, precision, multiplier_ptr, post_shift_ptr, lgup_ptr)
|
||||
unsigned HOST_WIDE_INT d;
|
||||
int n;
|
||||
int precision;
|
||||
unsigned HOST_WIDE_INT *multiplier_ptr;
|
||||
int *post_shift_ptr;
|
||||
int *lgup_ptr;
|
||||
choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision,
|
||||
unsigned HOST_WIDE_INT *multiplier_ptr,
|
||||
int *post_shift_ptr, int *lgup_ptr)
|
||||
{
|
||||
HOST_WIDE_INT mhigh_hi, mlow_hi;
|
||||
unsigned HOST_WIDE_INT mhigh_lo, mlow_lo;
|
||||
@ -2694,9 +2645,7 @@ choose_multiplier (d, n, precision, multiplier_ptr, post_shift_ptr, lgup_ptr)
|
||||
congruent to 1 (mod 2**N). */
|
||||
|
||||
static unsigned HOST_WIDE_INT
|
||||
invert_mod2n (x, n)
|
||||
unsigned HOST_WIDE_INT x;
|
||||
int n;
|
||||
invert_mod2n (unsigned HOST_WIDE_INT x, int n)
|
||||
{
|
||||
/* Solve x*y == 1 (mod 2^n), where x is odd. Return y. */
|
||||
|
||||
@ -2731,10 +2680,8 @@ invert_mod2n (x, n)
|
||||
MODE is the mode of operation. */
|
||||
|
||||
rtx
|
||||
expand_mult_highpart_adjust (mode, adj_operand, op0, op1, target, unsignedp)
|
||||
enum machine_mode mode;
|
||||
rtx adj_operand, op0, op1, target;
|
||||
int unsignedp;
|
||||
expand_mult_highpart_adjust (enum machine_mode mode, rtx adj_operand, rtx op0,
|
||||
rtx op1, rtx target, int unsignedp)
|
||||
{
|
||||
rtx tem;
|
||||
enum rtx_code adj_code = unsignedp ? PLUS : MINUS;
|
||||
@ -2768,12 +2715,9 @@ expand_mult_highpart_adjust (mode, adj_operand, op0, op1, target, unsignedp)
|
||||
MAX_COST is the total allowed cost for the expanded RTL. */
|
||||
|
||||
rtx
|
||||
expand_mult_highpart (mode, op0, cnst1, target, unsignedp, max_cost)
|
||||
enum machine_mode mode;
|
||||
rtx op0, target;
|
||||
unsigned HOST_WIDE_INT cnst1;
|
||||
int unsignedp;
|
||||
int max_cost;
|
||||
expand_mult_highpart (enum machine_mode mode, rtx op0,
|
||||
unsigned HOST_WIDE_INT cnst1, rtx target,
|
||||
int unsignedp, int max_cost)
|
||||
{
|
||||
enum machine_mode wider_mode = GET_MODE_WIDER_MODE (mode);
|
||||
optab mul_highpart_optab;
|
||||
@ -2945,12 +2889,8 @@ expand_mult_highpart (mode, op0, cnst1, target, unsignedp, max_cost)
|
||||
#define EXACT_POWER_OF_2_OR_ZERO_P(x) (((x) & ((x) - 1)) == 0)
|
||||
|
||||
rtx
|
||||
expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
int rem_flag;
|
||||
enum tree_code code;
|
||||
enum machine_mode mode;
|
||||
rtx op0, op1, target;
|
||||
int unsignedp;
|
||||
expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
|
||||
rtx op0, rtx op1, rtx target, int unsignedp)
|
||||
{
|
||||
enum machine_mode compute_mode;
|
||||
rtx tquotient;
|
||||
@ -3266,7 +3206,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
&& (set = single_set (insn)) != 0
|
||||
&& SET_DEST (set) == quotient)
|
||||
set_unique_reg_note (insn,
|
||||
REG_EQUAL,
|
||||
REG_EQUAL,
|
||||
gen_rtx_UDIV (compute_mode, op0, op1));
|
||||
}
|
||||
else /* TRUNC_DIV, signed */
|
||||
@ -3354,7 +3294,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
&& abs_d < ((unsigned HOST_WIDE_INT) 1
|
||||
<< (HOST_BITS_PER_WIDE_INT - 1)))
|
||||
set_unique_reg_note (insn,
|
||||
REG_EQUAL,
|
||||
REG_EQUAL,
|
||||
gen_rtx_DIV (compute_mode,
|
||||
op0,
|
||||
GEN_INT
|
||||
@ -3445,7 +3385,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
&& (set = single_set (insn)) != 0
|
||||
&& SET_DEST (set) == quotient)
|
||||
set_unique_reg_note (insn,
|
||||
REG_EQUAL,
|
||||
REG_EQUAL,
|
||||
gen_rtx_DIV (compute_mode, op0, op1));
|
||||
}
|
||||
break;
|
||||
@ -3865,7 +3805,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
|
||||
insn = get_last_insn ();
|
||||
set_unique_reg_note (insn,
|
||||
REG_EQUAL,
|
||||
REG_EQUAL,
|
||||
gen_rtx_fmt_ee (unsignedp ? UDIV : DIV,
|
||||
compute_mode,
|
||||
op0, op1));
|
||||
@ -4044,9 +3984,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
|
||||
generated by loop.c. */
|
||||
|
||||
tree
|
||||
make_tree (type, x)
|
||||
tree type;
|
||||
rtx x;
|
||||
make_tree (tree type, rtx x)
|
||||
{
|
||||
tree t;
|
||||
|
||||
@ -4177,10 +4115,7 @@ make_tree (type, x)
|
||||
UNSIGNEDP is nonzero to do unsigned multiplication. */
|
||||
|
||||
bool
|
||||
const_mult_add_overflow_p (x, mult, add, mode, unsignedp)
|
||||
rtx x, mult, add;
|
||||
enum machine_mode mode;
|
||||
int unsignedp;
|
||||
const_mult_add_overflow_p (rtx x, rtx mult, rtx add, enum machine_mode mode, int unsignedp)
|
||||
{
|
||||
tree type, mult_type, add_type, result;
|
||||
|
||||
@ -4216,10 +4151,8 @@ const_mult_add_overflow_p (x, mult, add, mode, unsignedp)
|
||||
This may emit insns. */
|
||||
|
||||
rtx
|
||||
expand_mult_add (x, target, mult, add, mode, unsignedp)
|
||||
rtx x, target, mult, add;
|
||||
enum machine_mode mode;
|
||||
int unsignedp;
|
||||
expand_mult_add (rtx x, rtx target, rtx mult, rtx add, enum machine_mode mode,
|
||||
int unsignedp)
|
||||
{
|
||||
tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
|
||||
tree add_type = (GET_MODE (add) == VOIDmode
|
||||
@ -4240,9 +4173,7 @@ expand_mult_add (x, target, mult, add, mode, unsignedp)
|
||||
If TARGET is 0, a pseudo-register or constant is returned. */
|
||||
|
||||
rtx
|
||||
expand_and (mode, op0, op1, target)
|
||||
enum machine_mode mode;
|
||||
rtx op0, op1, target;
|
||||
expand_and (enum machine_mode mode, rtx op0, rtx op1, rtx target)
|
||||
{
|
||||
rtx tem = 0;
|
||||
|
||||
@ -4274,13 +4205,8 @@ expand_and (mode, op0, op1, target)
|
||||
"raw" out of the scc insn. */
|
||||
|
||||
rtx
|
||||
emit_store_flag (target, code, op0, op1, mode, unsignedp, normalizep)
|
||||
rtx target;
|
||||
enum rtx_code code;
|
||||
rtx op0, op1;
|
||||
enum machine_mode mode;
|
||||
int unsignedp;
|
||||
int normalizep;
|
||||
emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
|
||||
enum machine_mode mode, int unsignedp, int normalizep)
|
||||
{
|
||||
rtx subtarget;
|
||||
enum insn_code icode;
|
||||
@ -4714,13 +4640,8 @@ emit_store_flag (target, code, op0, op1, mode, unsignedp, normalizep)
|
||||
/* Like emit_store_flag, but always succeeds. */
|
||||
|
||||
rtx
|
||||
emit_store_flag_force (target, code, op0, op1, mode, unsignedp, normalizep)
|
||||
rtx target;
|
||||
enum rtx_code code;
|
||||
rtx op0, op1;
|
||||
enum machine_mode mode;
|
||||
int unsignedp;
|
||||
int normalizep;
|
||||
emit_store_flag_force (rtx target, enum rtx_code code, rtx op0, rtx op1,
|
||||
enum machine_mode mode, int unsignedp, int normalizep)
|
||||
{
|
||||
rtx tem, label;
|
||||
|
||||
@ -4759,10 +4680,8 @@ emit_store_flag_force (target, code, op0, op1, mode, unsignedp, normalizep)
|
||||
be handled if needed). */
|
||||
|
||||
static void
|
||||
do_cmp_and_jump (arg1, arg2, op, mode, label)
|
||||
rtx arg1, arg2, label;
|
||||
enum rtx_code op;
|
||||
enum machine_mode mode;
|
||||
do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, enum machine_mode mode,
|
||||
rtx label)
|
||||
{
|
||||
/* If this mode is an integer too wide to compare properly,
|
||||
compare word by word. Rely on cse to optimize constant cases. */
|
||||
456 gcc/expr.c
@ -125,60 +125,52 @@ struct store_by_pieces
|
||||
int explicit_inc_to;
|
||||
unsigned HOST_WIDE_INT len;
|
||||
HOST_WIDE_INT offset;
|
||||
rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
|
||||
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
|
||||
void *constfundata;
|
||||
int reverse;
|
||||
};
|
||||
|
||||
static rtx enqueue_insn PARAMS ((rtx, rtx));
|
||||
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
|
||||
PARAMS ((unsigned HOST_WIDE_INT,
|
||||
unsigned int));
|
||||
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
|
||||
struct move_by_pieces *));
|
||||
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
|
||||
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
|
||||
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
|
||||
static tree emit_block_move_libcall_fn PARAMS ((int));
|
||||
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
|
||||
static rtx clear_by_pieces_1 PARAMS ((void *, HOST_WIDE_INT,
|
||||
enum machine_mode));
|
||||
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned int));
|
||||
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
|
||||
unsigned int));
|
||||
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
|
||||
enum machine_mode,
|
||||
struct store_by_pieces *));
|
||||
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
|
||||
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
|
||||
static tree clear_storage_libcall_fn PARAMS ((int));
|
||||
static rtx compress_float_constant PARAMS ((rtx, rtx));
|
||||
static rtx get_subtarget PARAMS ((rtx));
|
||||
static int is_zeros_p PARAMS ((tree));
|
||||
static int mostly_zeros_p PARAMS ((tree));
|
||||
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
HOST_WIDE_INT, enum machine_mode,
|
||||
tree, tree, int, int));
|
||||
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
|
||||
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
|
||||
HOST_WIDE_INT, enum machine_mode,
|
||||
tree, enum machine_mode, int, tree,
|
||||
int));
|
||||
static rtx var_rtx PARAMS ((tree));
|
||||
static rtx enqueue_insn (rtx, rtx);
|
||||
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
|
||||
unsigned int);
|
||||
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
|
||||
struct move_by_pieces *);
|
||||
static bool block_move_libcall_safe_for_call_parm (void);
|
||||
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
|
||||
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
|
||||
static tree emit_block_move_libcall_fn (int);
|
||||
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
|
||||
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
|
||||
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
|
||||
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
|
||||
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
|
||||
struct store_by_pieces *);
|
||||
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
|
||||
static rtx clear_storage_via_libcall (rtx, rtx);
|
||||
static tree clear_storage_libcall_fn (int);
|
||||
static rtx compress_float_constant (rtx, rtx);
|
||||
static rtx get_subtarget (rtx);
|
||||
static int is_zeros_p (tree);
|
||||
static int mostly_zeros_p (tree);
|
||||
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
|
||||
HOST_WIDE_INT, enum machine_mode,
|
||||
tree, tree, int, int);
|
||||
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
|
||||
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
|
||||
tree, enum machine_mode, int, tree, int);
|
||||
static rtx var_rtx (tree);
|
||||
|
||||
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
|
||||
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
|
||||
tree));
|
||||
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
|
||||
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
|
||||
|
||||
static int is_aligning_offset PARAMS ((tree, tree));
|
||||
static rtx expand_increment PARAMS ((tree, int, int));
|
||||
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
|
||||
static int is_aligning_offset (tree, tree);
|
||||
static rtx expand_increment (tree, int, int);
|
||||
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
|
||||
#ifdef PUSH_ROUNDING
|
||||
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
|
||||
static void emit_single_push_insn (enum machine_mode, rtx, tree);
|
||||
#endif
|
||||
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
|
||||
static rtx const_vector_from_tree PARAMS ((tree));
|
||||
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
|
||||
static rtx const_vector_from_tree (tree);
|
||||
|
||||
/* Record for each mode whether we can move a register directly to or
|
||||
from an object of that mode in memory. If we can't, we won't try
|
||||
@ -252,7 +244,7 @@ enum insn_code clrstr_optab[NUM_MACHINE_MODES];
|
||||
directly in memory and to initialize the block move optab. */
|
||||
|
||||
void
|
||||
init_expr_once ()
|
||||
init_expr_once (void)
|
||||
{
|
||||
rtx insn, pat;
|
||||
enum machine_mode mode;
|
||||
@ -345,7 +337,7 @@ init_expr_once ()
|
||||
/* This is run at the start of compiling a function. */
|
||||
|
||||
void
|
||||
init_expr ()
|
||||
init_expr (void)
|
||||
{
|
||||
cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
|
||||
|
||||
@ -361,7 +353,7 @@ init_expr ()
|
||||
/* Small sanity check that the queue is empty at the end of a function. */
|
||||
|
||||
void
|
||||
finish_expr_for_function ()
|
||||
finish_expr_for_function (void)
|
||||
{
|
||||
if (pending_chain)
|
||||
abort ();
|
||||
@ -378,8 +370,7 @@ finish_expr_for_function ()
|
||||
where you want to guarantee the pre-incrementation value of VAR. */
|
||||
|
||||
static rtx
|
||||
enqueue_insn (var, body)
|
||||
rtx var, body;
|
||||
enqueue_insn (rtx var, rtx body)
|
||||
{
|
||||
pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
|
||||
body, pending_chain);
|
||||
@ -402,9 +393,7 @@ enqueue_insn (var, body)
|
||||
If the queue is flushed in between, incorrect code will result. */
|
||||
|
||||
rtx
|
||||
protect_from_queue (x, modify)
|
||||
rtx x;
|
||||
int modify;
|
||||
protect_from_queue (rtx x, int modify)
|
||||
{
|
||||
RTX_CODE code = GET_CODE (x);
|
||||
|
||||
@ -488,8 +477,7 @@ protect_from_queue (x, modify)
|
||||
since memory addresses generally contain only those. */
|
||||
|
||||
int
|
||||
queued_subexp_p (x)
|
||||
rtx x;
|
||||
queued_subexp_p (rtx x)
|
||||
{
|
||||
enum rtx_code code = GET_CODE (x);
|
||||
switch (code)
|
||||
@ -511,7 +499,7 @@ queued_subexp_p (x)
|
||||
/* Perform all the pending incrementations. */
|
||||
|
||||
void
|
||||
emit_queue ()
|
||||
emit_queue (void)
|
||||
{
|
||||
rtx p;
|
||||
while ((p = pending_chain))
|
||||
@ -551,9 +539,7 @@ emit_queue ()
|
||||
This causes zero-extension instead of sign-extension. */
|
||||
|
||||
void
|
||||
convert_move (to, from, unsignedp)
|
||||
rtx to, from;
|
||||
int unsignedp;
|
||||
convert_move (rtx to, rtx from, int unsignedp)
|
||||
{
|
||||
enum machine_mode to_mode = GET_MODE (to);
|
||||
enum machine_mode from_mode = GET_MODE (from);
|
||||
@ -1332,10 +1318,7 @@ convert_move (to, from, unsignedp)
|
||||
except when putting X into an insn (in which case convert_move does it). */
|
||||
|
||||
rtx
|
||||
convert_to_mode (mode, x, unsignedp)
|
||||
enum machine_mode mode;
|
||||
rtx x;
|
||||
int unsignedp;
|
||||
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
|
||||
{
|
||||
return convert_modes (mode, VOIDmode, x, unsignedp);
|
||||
}
|
||||
@ -1354,10 +1337,7 @@ convert_to_mode (mode, x, unsignedp)
|
||||
except when putting X into an insn (in which case convert_move does it). */
|
||||
|
||||
rtx
|
||||
convert_modes (mode, oldmode, x, unsignedp)
|
||||
enum machine_mode mode, oldmode;
|
||||
rtx x;
|
||||
int unsignedp;
|
||||
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
|
||||
{
|
||||
rtx temp;
|
||||
|
||||
@ -1468,9 +1448,8 @@ convert_modes (mode, oldmode, x, unsignedp)
|
||||
succeed. */
|
||||
|
||||
int
|
||||
can_move_by_pieces (len, align)
|
||||
unsigned HOST_WIDE_INT len;
|
||||
unsigned int align ATTRIBUTE_UNUSED;
|
||||
can_move_by_pieces (unsigned HOST_WIDE_INT len,
|
||||
unsigned int align ATTRIBUTE_UNUSED)
|
||||
{
|
||||
return MOVE_BY_PIECES_P (len, align);
|
||||
}
|
||||
@ -1489,11 +1468,8 @@ can_move_by_pieces (len, align)
|
||||
stpcpy. */
|
||||
|
||||
rtx
|
||||
move_by_pieces (to, from, len, align, endp)
|
||||
rtx to, from;
|
||||
unsigned HOST_WIDE_INT len;
|
||||
unsigned int align;
|
||||
int endp;
|
||||
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
|
||||
unsigned int align, int endp)
|
||||
{
|
||||
struct move_by_pieces data;
|
||||
rtx to_addr, from_addr = XEXP (from, 0);
|
||||
@ -1643,9 +1619,7 @@ move_by_pieces (to, from, len, align, endp)
|
||||
ALIGN (in bits) is maximum alignment we can assume. */
|
||||
|
||||
static unsigned HOST_WIDE_INT
|
||||
move_by_pieces_ninsns (l, align)
|
||||
unsigned HOST_WIDE_INT l;
|
||||
unsigned int align;
|
||||
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
|
||||
{
|
||||
unsigned HOST_WIDE_INT n_insns = 0;
|
||||
unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
|
||||
@ -1684,10 +1658,8 @@ move_by_pieces_ninsns (l, align)
|
||||
to make a move insn for that mode. DATA has all the other info. */
|
||||
|
||||
static void
|
||||
move_by_pieces_1 (genfun, mode, data)
|
||||
rtx (*genfun) PARAMS ((rtx, ...));
|
||||
enum machine_mode mode;
|
||||
struct move_by_pieces *data;
|
||||
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
|
||||
struct move_by_pieces *data)
|
||||
{
|
||||
unsigned int size = GET_MODE_SIZE (mode);
|
||||
rtx to1 = NULL_RTX, from1;
|
||||
@ -1755,9 +1727,7 @@ move_by_pieces_1 (genfun, mode, data)
|
||||
0 otherwise. */
|
||||
|
||||
rtx
|
||||
emit_block_move (x, y, size, method)
|
||||
rtx x, y, size;
|
||||
enum block_op_methods method;
|
||||
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
|
||||
{
|
||||
bool may_use_call;
|
||||
rtx retval = 0;
|
||||
@ -1828,12 +1798,12 @@ emit_block_move (x, y, size, method)
|
||||
return retval;
|
||||
}
|
||||
|
||||
/* A subroutine of emit_block_move. Returns true if calling the
|
||||
/* A subroutine of emit_block_move. Returns true if calling the
|
||||
block move libcall will not clobber any parameters which may have
|
||||
already been placed on the stack. */
|
||||
|
||||
static bool
|
||||
block_move_libcall_safe_for_call_parm ()
|
||||
block_move_libcall_safe_for_call_parm (void)
|
||||
{
|
||||
if (PUSH_ARGS)
|
||||
return true;
|
||||
@ -1887,13 +1857,11 @@ block_move_libcall_safe_for_call_parm ()
|
||||
}
|
||||
}
|
||||
|
||||
/* A subroutine of emit_block_move. Expand a movstr pattern;
|
||||
/* A subroutine of emit_block_move. Expand a movstr pattern;
|
||||
return true if successful. */
|
||||
|
||||
static bool
|
||||
emit_block_move_via_movstr (x, y, size, align)
|
||||
rtx x, y, size;
|
||||
unsigned int align;
|
||||
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
|
||||
{
|
||||
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
|
||||
enum machine_mode mode;
|
||||
@ -1961,8 +1929,7 @@ emit_block_move_via_movstr (x, y, size, align)
|
||||
Return the return value from memcpy, 0 otherwise. */
|
||||
|
||||
static rtx
|
||||
emit_block_move_via_libcall (dst, src, size)
|
||||
rtx dst, src, size;
|
||||
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
|
||||
{
|
||||
rtx dst_addr, src_addr;
|
||||
tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
|
||||
@ -2063,8 +2030,7 @@ emit_block_move_via_libcall (dst, src, size)
|
||||
static GTY(()) tree block_move_fn;
|
||||
|
||||
void
|
||||
init_block_move_fn (asmspec)
|
||||
const char *asmspec;
|
||||
init_block_move_fn (const char *asmspec)
|
||||
{
|
||||
if (!block_move_fn)
|
||||
{
|
||||
@ -2102,8 +2068,7 @@ init_block_move_fn (asmspec)
|
||||
}
|
||||
|
||||
static tree
|
||||
emit_block_move_libcall_fn (for_call)
|
||||
int for_call;
|
||||
emit_block_move_libcall_fn (int for_call)
|
||||
{
|
||||
static bool emitted_extern;
|
||||
|
||||
@ -2125,9 +2090,8 @@ emit_block_move_libcall_fn (for_call)
|
||||
/* ??? It'd be nice to copy in hunks larger than QImode. */
|
||||
|
||||
static void
|
||||
emit_block_move_via_loop (x, y, size, align)
|
||||
rtx x, y, size;
|
||||
unsigned int align ATTRIBUTE_UNUSED;
|
||||
emit_block_move_via_loop (rtx x, rtx y, rtx size,
|
||||
unsigned int align ATTRIBUTE_UNUSED)
|
||||
{
|
||||
rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
|
||||
enum machine_mode iter_mode;
|
||||
@ -2177,11 +2141,7 @@ emit_block_move_via_loop (x, y, size, align)
|
||||
The number of registers to be filled is NREGS. */
|
||||
|
||||
void
|
||||
move_block_to_reg (regno, x, nregs, mode)
|
||||
int regno;
|
||||
rtx x;
|
||||
int nregs;
|
||||
enum machine_mode mode;
|
||||
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
|
||||
{
|
||||
int i;
|
||||
#ifdef HAVE_load_multiple
|
||||
@ -2221,10 +2181,7 @@ move_block_to_reg (regno, x, nregs, mode)
|
||||
The number of registers to be filled is NREGS. */
|
||||
|
||||
void
|
||||
move_block_from_reg (regno, x, nregs)
|
||||
int regno;
|
||||
rtx x;
|
||||
int nregs;
|
||||
move_block_from_reg (int regno, rtx x, int nregs)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -2266,8 +2223,7 @@ move_block_from_reg (regno, x, nregs)
|
||||
The new set has the same modes as the original set. */
|
||||
|
||||
rtx
|
||||
gen_group_rtx (orig)
|
||||
rtx orig;
|
||||
gen_group_rtx (rtx orig)
|
||||
{
|
||||
int i, length;
|
||||
rtx *tmps;
|
||||
@ -2306,9 +2262,7 @@ gen_group_rtx (orig)
|
||||
would be needed. */
|
||||
|
||||
void
|
||||
emit_group_load (dst, orig_src, ssize)
|
||||
rtx dst, orig_src;
|
||||
int ssize;
|
||||
emit_group_load (rtx dst, rtx orig_src, int ssize)
|
||||
{
|
||||
rtx *tmps, src;
|
||||
int start, i;
|
||||
@ -2433,8 +2387,7 @@ emit_group_load (dst, orig_src, ssize)
|
||||
non-consecutive groups of registers, each represented by a PARALLEL. */
|
||||
|
||||
void
|
||||
emit_group_move (dst, src)
|
||||
rtx dst, src;
|
||||
emit_group_move (rtx dst, rtx src)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -2454,9 +2407,7 @@ emit_group_move (dst, src)
|
||||
block DST, or -1 if not known. */
|
||||
|
||||
void
|
||||
emit_group_store (orig_dst, src, ssize)
|
||||
rtx orig_dst, src;
|
||||
int ssize;
|
||||
emit_group_store (rtx orig_dst, rtx src, int ssize)
|
||||
{
|
||||
rtx *tmps, dst;
|
||||
int start, i;
|
||||
@ -2581,10 +2532,7 @@ emit_group_store (orig_dst, src, ssize)
|
||||
in registers regardless of the structure's alignment. */
|
||||
|
||||
rtx
|
||||
copy_blkmode_from_reg (tgtblk, srcreg, type)
|
||||
rtx tgtblk;
|
||||
rtx srcreg;
|
||||
tree type;
|
||||
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
|
||||
{
|
||||
unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
|
||||
rtx src = NULL, dst = NULL;
|
||||
@ -2655,8 +2603,7 @@ copy_blkmode_from_reg (tgtblk, srcreg, type)
|
||||
to by CALL_FUSAGE. REG must denote a hard register. */
|
||||
|
||||
void
|
||||
use_reg (call_fusage, reg)
|
||||
rtx *call_fusage, reg;
|
||||
use_reg (rtx *call_fusage, rtx reg)
|
||||
{
|
||||
if (GET_CODE (reg) != REG
|
||||
|| REGNO (reg) >= FIRST_PSEUDO_REGISTER)
|
||||
@ -2671,10 +2618,7 @@ use_reg (call_fusage, reg)
|
||||
starting at REGNO. All of these registers must be hard registers. */
|
||||
|
||||
void
|
||||
use_regs (call_fusage, regno, nregs)
|
||||
rtx *call_fusage;
|
||||
int regno;
|
||||
int nregs;
|
||||
use_regs (rtx *call_fusage, int regno, int nregs)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -2690,9 +2634,7 @@ use_regs (call_fusage, regno, nregs)
|
||||
non-contiguous locations. The Irix 6 ABI has examples of this. */
|
||||
|
||||
void
|
||||
use_group_regs (call_fusage, regs)
|
||||
rtx *call_fusage;
|
||||
rtx regs;
|
||||
use_group_regs (rtx *call_fusage, rtx regs)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -2716,11 +2658,9 @@ use_group_regs (call_fusage, regs)
|
||||
call to store_by_pieces should succeed. */
|
||||
|
||||
int
|
||||
can_store_by_pieces (len, constfun, constfundata, align)
|
||||
unsigned HOST_WIDE_INT len;
|
||||
rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
|
||||
void *constfundata;
|
||||
unsigned int align;
|
||||
can_store_by_pieces (unsigned HOST_WIDE_INT len,
|
||||
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
|
||||
void *constfundata, unsigned int align)
|
||||
{
|
||||
unsigned HOST_WIDE_INT max_size, l;
|
||||
HOST_WIDE_INT offset = 0;
|
||||
@ -2798,13 +2738,9 @@ can_store_by_pieces (len, constfun, constfundata, align)
|
||||
stpcpy. */
|
||||
|
||||
rtx
|
||||
store_by_pieces (to, len, constfun, constfundata, align, endp)
|
||||
rtx to;
|
||||
unsigned HOST_WIDE_INT len;
|
||||
rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
|
||||
void *constfundata;
|
||||
unsigned int align;
|
||||
int endp;
|
||||
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
|
||||
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
|
||||
void *constfundata, unsigned int align, int endp)
|
||||
{
|
||||
struct store_by_pieces data;
|
||||
|
||||
@ -2852,10 +2788,7 @@ store_by_pieces (to, len, constfun, constfundata, align, endp)
|
||||
before calling. ALIGN is maximum alignment we can assume. */
|
||||
|
||||
static void
|
||||
clear_by_pieces (to, len, align)
|
||||
rtx to;
|
||||
unsigned HOST_WIDE_INT len;
|
||||
unsigned int align;
|
||||
clear_by_pieces (rtx to, long unsigned int len, unsigned int align)
|
||||
{
|
||||
struct store_by_pieces data;
|
||||
|
||||
@ -2870,10 +2803,9 @@ clear_by_pieces (to, len, align)
|
||||
Return const0_rtx unconditionally. */
|
||||
|
||||
static rtx
|
||||
clear_by_pieces_1 (data, offset, mode)
|
||||
void *data ATTRIBUTE_UNUSED;
|
||||
HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
|
||||
enum machine_mode mode ATTRIBUTE_UNUSED;
|
||||
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
|
||||
HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
|
||||
enum machine_mode mode ATTRIBUTE_UNUSED)
|
||||
{
|
||||
return const0_rtx;
|
||||
}
|
||||
@ -2884,9 +2816,8 @@ clear_by_pieces_1 (data, offset, mode)
|
||||
before calling. ALIGN is maximum alignment we can assume. */
|
||||
|
||||
static void
|
||||
store_by_pieces_1 (data, align)
|
||||
struct store_by_pieces *data;
|
||||
unsigned int align;
|
||||
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
|
||||
unsigned int align ATTRIBUTE_UNUSED)
|
||||
{
|
||||
rtx to_addr = XEXP (data->to, 0);
|
||||
unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
|
||||
@ -2970,10 +2901,8 @@ store_by_pieces_1 (data, align)
|
||||
to make a move insn for that mode. DATA has all the other info. */
|
||||
|
||||
static void
|
||||
store_by_pieces_2 (genfun, mode, data)
|
||||
rtx (*genfun) PARAMS ((rtx, ...));
|
||||
enum machine_mode mode;
|
||||
struct store_by_pieces *data;
|
||||
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
|
||||
struct store_by_pieces *data)
|
||||
{
|
||||
unsigned int size = GET_MODE_SIZE (mode);
|
||||
rtx to1, cst;
|
||||
@ -3010,9 +2939,7 @@ store_by_pieces_2 (genfun, mode, data)
|
||||
its length in bytes. */
|
||||
|
||||
rtx
|
||||
clear_storage (object, size)
|
||||
rtx object;
|
||||
rtx size;
|
||||
clear_storage (rtx object, rtx size)
|
||||
{
|
||||
rtx retval = 0;
|
||||
unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
|
||||
@ -3045,9 +2972,7 @@ clear_storage (object, size)
|
||||
return true if successful. */
|
||||
|
||||
static bool
|
||||
clear_storage_via_clrstr (object, size, align)
|
||||
rtx object, size;
|
||||
unsigned int align;
|
||||
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
|
||||
{
|
||||
/* Try the most limited insn first, because there's no point
|
||||
including more than one in the machine description unless
|
||||
@ -3103,8 +3028,7 @@ clear_storage_via_clrstr (object, size, align)
|
||||
Return the return value of memset, 0 otherwise. */
|
||||
|
||||
static rtx
|
||||
clear_storage_via_libcall (object, size)
|
||||
rtx object, size;
|
||||
clear_storage_via_libcall (rtx object, rtx size)
|
||||
{
|
||||
tree call_expr, arg_list, fn, object_tree, size_tree;
|
||||
enum machine_mode size_mode;
|
||||
@ -3184,8 +3108,7 @@ clear_storage_via_libcall (object, size)
|
||||
static GTY(()) tree block_clear_fn;
|
||||
|
||||
void
|
||||
init_block_clear_fn (asmspec)
|
||||
const char *asmspec;
|
||||
init_block_clear_fn (const char *asmspec)
|
||||
{
|
||||
if (!block_clear_fn)
|
||||
{
|
||||
@ -3222,8 +3145,7 @@ init_block_clear_fn (asmspec)
|
||||
}
|
||||
|
||||
static tree
|
||||
clear_storage_libcall_fn (for_call)
|
||||
int for_call;
|
||||
clear_storage_libcall_fn (int for_call)
|
||||
{
|
||||
static bool emitted_extern;
|
||||
|
||||
@ -3248,8 +3170,7 @@ clear_storage_libcall_fn (for_call)
|
||||
Return the last instruction emitted. */
|
||||
|
||||
rtx
|
||||
emit_move_insn (x, y)
|
||||
rtx x, y;
|
||||
emit_move_insn (rtx x, rtx y)
|
||||
{
|
||||
enum machine_mode mode = GET_MODE (x);
|
||||
rtx y_cst = NULL_RTX;
|
||||
@ -3319,8 +3240,7 @@ emit_move_insn (x, y)
|
||||
are basically valid. */
|
||||
|
||||
rtx
|
||||
emit_move_insn_1 (x, y)
|
||||
rtx x, y;
|
||||
emit_move_insn_1 (rtx x, rtx y)
|
||||
{
|
||||
enum machine_mode mode = GET_MODE (x);
|
||||
enum machine_mode submode;
|
||||
@ -3541,7 +3461,7 @@ emit_move_insn_1 (x, y)
|
||||
x = gen_lowpart (tmode, x);
|
||||
y = gen_lowpart (tmode, y);
|
||||
}
|
||||
|
||||
|
||||
insn_code = mov_optab->handlers[(int) tmode].insn_code;
|
||||
return emit_insn (GEN_FCN (insn_code) (x, y));
|
||||
}
|
||||
@ -3662,8 +3582,7 @@ emit_move_insn_1 (x, y)
|
||||
move as an extension. */
|
||||
|
||||
static rtx
|
||||
compress_float_constant (x, y)
|
||||
rtx x, y;
|
||||
compress_float_constant (rtx x, rtx y)
|
||||
{
|
||||
enum machine_mode dstmode = GET_MODE (x);
|
||||
enum machine_mode orig_srcmode = GET_MODE (y);
|
||||
@ -3726,9 +3645,7 @@ compress_float_constant (x, y)
|
||||
otherwise, the padding comes at high addresses. */
|
||||
|
||||
rtx
|
||||
push_block (size, extra, below)
|
||||
rtx size;
|
||||
int extra, below;
|
||||
push_block (rtx size, int extra, int below)
|
||||
{
|
||||
rtx temp;
|
||||
|
||||
@ -3777,10 +3694,7 @@ push_block (size, extra, below)
|
||||
/* Emit single push insn. */
|
||||
|
||||
static void
|
||||
emit_single_push_insn (mode, x, type)
|
||||
rtx x;
|
||||
enum machine_mode mode;
|
||||
tree type;
|
||||
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
|
||||
{
|
||||
rtx dest_addr;
|
||||
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
|
||||
@ -3900,21 +3814,10 @@ emit_single_push_insn (mode, x, type)
|
||||
of bytes required. */
|
||||
|
||||
void
|
||||
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
|
||||
args_addr, args_so_far, reg_parm_stack_space,
|
||||
alignment_pad)
|
||||
rtx x;
|
||||
enum machine_mode mode;
|
||||
tree type;
|
||||
rtx size;
|
||||
unsigned int align;
|
||||
int partial;
|
||||
rtx reg;
|
||||
int extra;
|
||||
rtx args_addr;
|
||||
rtx args_so_far;
|
||||
int reg_parm_stack_space;
|
||||
rtx alignment_pad;
|
||||
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
|
||||
unsigned int align, int partial, rtx reg, int extra,
|
||||
rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
|
||||
rtx alignment_pad)
|
||||
{
|
||||
rtx xinner;
|
||||
enum direction stack_direction
|
||||
@ -4185,8 +4088,7 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
|
||||
operations. */
|
||||
|
||||
static rtx
|
||||
get_subtarget (x)
|
||||
rtx x;
|
||||
get_subtarget (rtx x)
|
||||
{
|
||||
return ((x == 0
|
||||
/* Only registers can be subtargets. */
|
||||
@ -4213,10 +4115,8 @@ get_subtarget (x)
|
||||
We now use WANT_VALUE to decide whether to do this. */
|
||||
|
||||
rtx
|
||||
expand_assignment (to, from, want_value, suggest_reg)
|
||||
tree to, from;
|
||||
int want_value;
|
||||
int suggest_reg ATTRIBUTE_UNUSED;
|
||||
expand_assignment (tree to, tree from, int want_value,
|
||||
int suggest_reg ATTRIBUTE_UNUSED)
|
||||
{
|
||||
rtx to_rtx = 0;
|
||||
rtx result;
|
||||
@ -4488,10 +4388,7 @@ expand_assignment (to, from, want_value, suggest_reg)
|
||||
stack, and block moves may need to be treated specially. */
|
||||
|
||||
rtx
|
||||
store_expr (exp, target, want_value)
|
||||
tree exp;
|
||||
rtx target;
|
||||
int want_value;
|
||||
store_expr (tree exp, rtx target, int want_value)
|
||||
{
|
||||
rtx temp;
|
||||
int dont_return_target = 0;
|
||||
@ -4631,8 +4528,8 @@ store_expr (exp, target, want_value)
|
||||
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
|
||||
|
||||
/* If TEMP is a MEM and we want a result value, make the access
|
||||
now so it gets done only once. Strictly speaking, this is
|
||||
only necessary if the MEM is volatile, or if the address
|
||||
now so it gets done only once. Strictly speaking, this is
|
||||
only necessary if the MEM is volatile, or if the address
|
||||
overlaps TARGET. But not performing the load twice also
|
||||
reduces the amount of rtl we generate and then have to CSE. */
|
||||
if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
|
||||
@ -4851,8 +4748,7 @@ store_expr (exp, target, want_value)
|
||||
/* Return 1 if EXP just contains zeros. */
|
||||
|
||||
static int
|
||||
is_zeros_p (exp)
|
||||
tree exp;
|
||||
is_zeros_p (tree exp)
|
||||
{
|
||||
tree elt;
|
||||
|
||||
@ -4899,8 +4795,7 @@ is_zeros_p (exp)
|
||||
/* Return 1 if EXP contains mostly (3/4) zeros. */
|
||||
|
||||
static int
|
||||
mostly_zeros_p (exp)
|
||||
tree exp;
|
||||
mostly_zeros_p (tree exp)
|
||||
{
|
||||
if (TREE_CODE (exp) == CONSTRUCTOR)
|
||||
{
|
||||
@ -4941,15 +4836,9 @@ mostly_zeros_p (exp)
|
||||
clear a substructure if the outer structure has already been cleared. */
|
||||
|
||||
static void
|
||||
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
|
||||
alias_set)
|
||||
rtx target;
|
||||
unsigned HOST_WIDE_INT bitsize;
|
||||
HOST_WIDE_INT bitpos;
|
||||
enum machine_mode mode;
|
||||
tree exp, type;
|
||||
int cleared;
|
||||
int alias_set;
|
||||
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
|
||||
HOST_WIDE_INT bitpos, enum machine_mode mode,
|
||||
tree exp, tree type, int cleared, int alias_set)
|
||||
{
|
||||
if (TREE_CODE (exp) == CONSTRUCTOR
|
||||
&& bitpos % BITS_PER_UNIT == 0
|
||||
@ -4991,11 +4880,7 @@ store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
|
||||
which has been packed to exclude padding bits. */
|
||||
|
||||
static void
|
||||
store_constructor (exp, target, cleared, size)
|
||||
tree exp;
|
||||
rtx target;
|
||||
int cleared;
|
||||
HOST_WIDE_INT size;
|
||||
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
|
||||
{
|
||||
tree type = TREE_TYPE (exp);
|
||||
#ifdef WORD_REGISTER_OPERATIONS
|
||||
@ -5638,17 +5523,9 @@ store_constructor (exp, target, cleared, size)
|
||||
reference to the containing structure. */
|
||||
|
||||
static rtx
|
||||
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
|
||||
alias_set)
|
||||
rtx target;
|
||||
HOST_WIDE_INT bitsize;
|
||||
HOST_WIDE_INT bitpos;
|
||||
enum machine_mode mode;
|
||||
tree exp;
|
||||
enum machine_mode value_mode;
|
||||
int unsignedp;
|
||||
tree type;
|
||||
int alias_set;
|
||||
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
|
||||
enum machine_mode mode, tree exp, enum machine_mode value_mode,
|
||||
int unsignedp, tree type, int alias_set)
|
||||
{
|
||||
HOST_WIDE_INT width_mask = 0;
|
||||
|
||||
@ -5717,7 +5594,7 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
|
||||
&& ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
|
||||
|| bitpos % GET_MODE_ALIGNMENT (mode))
|
||||
&& SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
|
||||
|| (bitpos % BITS_PER_UNIT != 0)))
|
||||
|| (bitpos % BITS_PER_UNIT != 0)))
|
||||
/* If the RHS and field are a constant size and the size of the
|
||||
RHS isn't the same size as the bitfield, we must use bitfield
|
||||
operations. */
|
||||
@ -5852,15 +5729,10 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
|
||||
this case, but the address of the object can be found. */
|
||||
|
||||
tree
|
||||
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
|
||||
punsignedp, pvolatilep)
|
||||
tree exp;
|
||||
HOST_WIDE_INT *pbitsize;
|
||||
HOST_WIDE_INT *pbitpos;
|
||||
tree *poffset;
|
||||
enum machine_mode *pmode;
|
||||
int *punsignedp;
|
||||
int *pvolatilep;
|
||||
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
HOST_WIDE_INT *pbitpos, tree *poffset,
|
||||
enum machine_mode *pmode, int *punsignedp,
|
||||
int *pvolatilep)
|
||||
{
|
||||
tree size_tree = 0;
|
||||
enum machine_mode mode = VOIDmode;
|
||||
@ -6019,8 +5891,7 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
|
||||
/* Return 1 if T is an expression that get_inner_reference handles. */
|
||||
|
||||
int
|
||||
handled_component_p (t)
|
||||
tree t;
|
||||
handled_component_p (tree t)
|
||||
{
|
||||
switch (TREE_CODE (t))
|
||||
{
|
||||
@ -6050,8 +5921,7 @@ handled_component_p (t)
|
||||
The returned value may be a REG, SUBREG, MEM or constant. */
|
||||
|
||||
rtx
|
||||
force_operand (value, target)
|
||||
rtx value, target;
|
||||
force_operand (rtx value, rtx target)
|
||||
{
|
||||
rtx op1, op2;
|
||||
/* Use subtarget as the target for operand 0 of a binary operation. */
|
||||
@ -6182,10 +6052,7 @@ force_operand (value, target)
|
||||
searches for optimization opportunities. */
|
||||
|
||||
int
|
||||
safe_from_p (x, exp, top_p)
|
||||
rtx x;
|
||||
tree exp;
|
||||
int top_p;
|
||||
safe_from_p (rtx x, tree exp, int top_p)
|
||||
{
|
||||
rtx exp_rtl = 0;
|
||||
int i, nops;
|
||||
@ -6426,8 +6293,7 @@ safe_from_p (x, exp, top_p)
|
||||
variable or parameter; else return 0. */
|
||||
|
||||
static rtx
|
||||
var_rtx (exp)
|
||||
tree exp;
|
||||
var_rtx (tree exp)
|
||||
{
|
||||
STRIP_NOPS (exp);
|
||||
switch (TREE_CODE (exp))
|
||||
@ -6443,8 +6309,7 @@ var_rtx (exp)
|
||||
#ifdef MAX_INTEGER_COMPUTATION_MODE
|
||||
|
||||
void
|
||||
check_max_integer_computation_mode (exp)
|
||||
tree exp;
|
||||
check_max_integer_computation_mode (tree exp)
|
||||
{
|
||||
enum tree_code code;
|
||||
enum machine_mode mode;
|
||||
@ -6499,8 +6364,7 @@ check_max_integer_computation_mode (exp)
|
||||
This is used in updating alignment of MEMs in array references. */
|
||||
|
||||
static unsigned HOST_WIDE_INT
|
||||
highest_pow2_factor (exp)
|
||||
tree exp;
|
||||
highest_pow2_factor (tree exp)
|
||||
{
|
||||
unsigned HOST_WIDE_INT c0, c1;
|
||||
|
||||
@ -6569,9 +6433,7 @@ highest_pow2_factor (exp)
|
||||
of the alignment of TYPE. */
|
||||
|
||||
static unsigned HOST_WIDE_INT
|
||||
highest_pow2_factor_for_type (type, exp)
|
||||
tree type;
|
||||
tree exp;
|
||||
highest_pow2_factor_for_type (tree type, tree exp)
|
||||
{
|
||||
unsigned HOST_WIDE_INT type_align, factor;
|
||||
|
||||
@ -6589,9 +6451,7 @@ highest_pow2_factor_for_type (type, exp)
|
||||
the placeholder list at which the object is found is placed. */
|
||||
|
||||
tree
|
||||
find_placeholder (exp, plist)
|
||||
tree exp;
|
||||
tree *plist;
|
||||
find_placeholder (tree exp, tree *plist)
|
||||
{
|
||||
tree type = TREE_TYPE (exp);
|
||||
tree placeholder_expr;
|
||||
@ -6689,11 +6549,7 @@ find_placeholder (exp, plist)
|
||||
emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
|
||||
|
||||
rtx
|
||||
expand_expr (exp, target, tmode, modifier)
|
||||
tree exp;
|
||||
rtx target;
|
||||
enum machine_mode tmode;
|
||||
enum expand_modifier modifier;
|
||||
expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
|
||||
{
|
||||
rtx op0, op1, temp;
|
||||
tree type = TREE_TYPE (exp);
|
||||
@ -7593,7 +7449,7 @@ expand_expr (exp, target, tmode, modifier)
|
||||
forcing the SAVE_EXPR into memory. */
|
||||
if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
|
||||
{
|
||||
put_var_into_stack (TREE_OPERAND (exp, 0),
|
||||
put_var_into_stack (TREE_OPERAND (exp, 0),
|
||||
/*rescan=*/true);
|
||||
op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
|
||||
}
|
||||
@ -9693,9 +9549,7 @@ expand_expr (exp, target, tmode, modifier)
|
||||
aligned more than BIGGEST_ALIGNMENT. */
|
||||
|
||||
static int
|
||||
is_aligning_offset (offset, exp)
|
||||
tree offset;
|
||||
tree exp;
|
||||
is_aligning_offset (tree offset, tree exp)
|
||||
{
|
||||
/* Strip off any conversions and WITH_RECORD_EXPR nodes. */
|
||||
while (TREE_CODE (offset) == NON_LVALUE_EXPR
|
||||
@ -9744,9 +9598,7 @@ is_aligning_offset (offset, exp)
|
||||
offset will be `sizetype'. */
|
||||
|
||||
tree
|
||||
string_constant (arg, ptr_offset)
|
||||
tree arg;
|
||||
tree *ptr_offset;
|
||||
string_constant (tree arg, tree *ptr_offset)
|
||||
{
|
||||
STRIP_NOPS (arg);
|
||||
|
||||
@ -9786,9 +9638,7 @@ string_constant (arg, ptr_offset)
|
||||
POST is 1 for postinc/decrements and 0 for preinc/decrements. */
|
||||
|
||||
static rtx
|
||||
expand_increment (exp, post, ignore)
|
||||
tree exp;
|
||||
int post, ignore;
|
||||
expand_increment (tree exp, int post, int ignore)
|
||||
{
|
||||
rtx op0, op1;
|
||||
rtx temp, value;
|
||||
@ -10007,11 +9857,7 @@ expand_increment (exp, post, ignore)
|
||||
set/jump/set sequence. */
|
||||
|
||||
static rtx
|
||||
do_store_flag (exp, target, mode, only_cheap)
|
||||
tree exp;
|
||||
rtx target;
|
||||
enum machine_mode mode;
|
||||
int only_cheap;
|
||||
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
|
||||
{
|
||||
enum rtx_code code;
|
||||
tree arg0, arg1, type;
|
||||
@ -10302,7 +10148,7 @@ do_store_flag (exp, target, mode, only_cheap)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
case_values_threshold (void)
{
return CASE_VALUES_THRESHOLD;
}
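The empty-parameter-list case above is worth a note: in C, "()" leaves the arguments unspecified, whereas "(void)" is a real prototype that takes no arguments, so the rewrite makes the no-argument contract checkable. A sketch with a hypothetical declaration, not part of this change:

    /* Before: no prototype, so a call such as get_limit (42)
       would not be diagnosed.  */
    unsigned int get_limit ();

    /* After: a genuine prototype; passing any argument is now
       a compile-time error.  */
    unsigned int get_limit (void);
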
@ -10310,11 +10156,8 @@ case_values_threshold ()
|
||||
/* Attempt to generate a casesi instruction. Returns 1 if successful,
|
||||
0 otherwise (i.e. if there is no casesi instruction). */
|
||||
int
|
||||
try_casesi (index_type, index_expr, minval, range,
|
||||
table_label, default_label)
|
||||
tree index_type, index_expr, minval, range;
|
||||
rtx table_label ATTRIBUTE_UNUSED;
|
||||
rtx default_label;
|
||||
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
|
||||
rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
|
||||
{
|
||||
enum machine_mode index_mode = SImode;
|
||||
int index_bits = GET_MODE_BITSIZE (index_mode);
|
||||
@ -10401,9 +10244,8 @@ try_casesi (index_type, index_expr, minval, range,
|
||||
index value is out of range. */
|
||||
|
||||
static void
|
||||
do_tablejump (index, mode, range, table_label, default_label)
|
||||
rtx index, range, table_label, default_label;
|
||||
enum machine_mode mode;
|
||||
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
|
||||
rtx default_label)
|
||||
{
|
||||
rtx temp, vector;
|
||||
|
||||
@ -10467,10 +10309,8 @@ do_tablejump (index, mode, range, table_label, default_label)
|
||||
}
|
||||
|
||||
int
|
||||
try_tablejump (index_type, index_expr, minval, range,
|
||||
table_label, default_label)
|
||||
tree index_type, index_expr, minval, range;
|
||||
rtx table_label, default_label;
|
||||
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
|
||||
rtx table_label, rtx default_label)
|
||||
{
|
||||
rtx index;
|
||||
|
||||
@ -10500,8 +10340,7 @@ try_tablejump (index_type, index_expr, minval, range,
|
||||
vector mode, but we can emulate with narrower modes. */
|
||||
|
||||
int
|
||||
vector_mode_valid_p (mode)
|
||||
enum machine_mode mode;
|
||||
vector_mode_valid_p (enum machine_mode mode)
|
||||
{
|
||||
enum mode_class class = GET_MODE_CLASS (mode);
|
||||
enum machine_mode innermode;
|
||||
@ -10527,8 +10366,7 @@ vector_mode_valid_p (mode)
|
||||
|
||||
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
|
||||
static rtx
|
||||
const_vector_from_tree (exp)
|
||||
tree exp;
|
||||
const_vector_from_tree (tree exp)
|
||||
{
|
||||
rtvec v;
|
||||
int units, i;
|
||||
|
343
gcc/expr.h
343
gcc/expr.h
@ -186,7 +186,7 @@ tree split_complex_values (tree);
#endif

/* Nonzero if we do not know how to pass TYPE solely in registers. */
extern bool default_must_pass_in_stack PARAMS((enum machine_mode, tree));
extern bool default_must_pass_in_stack (enum machine_mode, tree);
#ifndef MUST_PASS_IN_STACK
#define MUST_PASS_IN_STACK(MODE,TYPE) default_must_pass_in_stack(MODE, TYPE)
#endif
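The header changes drop the PARAMS wrapper from ansidecl.h, which exists so the same declaration can be compiled by pre-ANSI compilers. Roughly how the wrapper behaves, simplified, with a hypothetical lookup_entry declaration as the example:

    /* Simplified sketch of the PARAMS macro.  */
    #ifdef __STDC__
    #define PARAMS(args) args      /* ANSI: keep the prototype.  */
    #else
    #define PARAMS(args) ()        /* K&R: drop it entirely.  */
    #endif

    /* Before: the double parentheses pass the whole parameter list
       to the macro as a single argument.  */
    extern int lookup_entry PARAMS ((const char *, int));

    /* After: a plain ISO C90 prototype, no wrapper needed.  */
    extern int lookup_entry (const char *, int);
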
@ -274,82 +274,80 @@ enum optab_methods
|
||||
/* Generate code for a simple binary or unary operation. "Simple" in
|
||||
this case means "can be unambiguously described by a (mode, code)
|
||||
pair and mapped to a single optab." */
|
||||
extern rtx expand_simple_binop PARAMS ((enum machine_mode, enum rtx_code, rtx,
|
||||
rtx, rtx, int, enum optab_methods));
|
||||
extern rtx expand_simple_unop PARAMS ((enum machine_mode, enum rtx_code,
|
||||
rtx, rtx, int));
|
||||
extern rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx,
|
||||
rtx, rtx, int, enum optab_methods);
|
||||
extern rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx,
|
||||
int);
|
||||
|
||||
/* Report whether the machine description contains an insn which can
|
||||
perform the operation described by CODE and MODE. */
|
||||
extern int have_insn_for PARAMS ((enum rtx_code, enum machine_mode));
|
||||
extern int have_insn_for (enum rtx_code, enum machine_mode);
|
||||
|
||||
/* Emit code to make a call to a constant function or a library call. */
|
||||
extern void emit_libcall_block PARAMS ((rtx, rtx, rtx, rtx));
|
||||
extern void emit_libcall_block (rtx, rtx, rtx, rtx);
|
||||
|
||||
/* Create but don't emit one rtl instruction to perform certain operations.
|
||||
Modes must match; operands must meet the operation's predicates.
|
||||
Likewise for subtraction and for just copying.
|
||||
These do not call protect_from_queue; caller must do so. */
|
||||
extern rtx gen_add2_insn PARAMS ((rtx, rtx));
|
||||
extern rtx gen_add3_insn PARAMS ((rtx, rtx, rtx));
|
||||
extern rtx gen_sub2_insn PARAMS ((rtx, rtx));
|
||||
extern rtx gen_sub3_insn PARAMS ((rtx, rtx, rtx));
|
||||
extern rtx gen_move_insn PARAMS ((rtx, rtx));
|
||||
extern int have_add2_insn PARAMS ((rtx, rtx));
|
||||
extern int have_sub2_insn PARAMS ((rtx, rtx));
|
||||
extern rtx gen_add2_insn (rtx, rtx);
|
||||
extern rtx gen_add3_insn (rtx, rtx, rtx);
|
||||
extern rtx gen_sub2_insn (rtx, rtx);
|
||||
extern rtx gen_sub3_insn (rtx, rtx, rtx);
|
||||
extern rtx gen_move_insn (rtx, rtx);
|
||||
extern int have_add2_insn (rtx, rtx);
|
||||
extern int have_sub2_insn (rtx, rtx);
|
||||
|
||||
/* Emit a pair of rtl insns to compare two rtx's and to jump
|
||||
to a label if the comparison is true. */
|
||||
extern void emit_cmp_and_jump_insns PARAMS ((rtx, rtx, enum rtx_code, rtx,
|
||||
enum machine_mode, int, rtx));
|
||||
extern void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx,
|
||||
enum machine_mode, int, rtx);
|
||||
|
||||
/* Generate code to indirectly jump to a location given in the rtx LOC. */
|
||||
extern void emit_indirect_jump PARAMS ((rtx));
|
||||
extern void emit_indirect_jump (rtx);
|
||||
|
||||
#ifdef HAVE_conditional_move
|
||||
/* Emit a conditional move operation. */
|
||||
rtx emit_conditional_move PARAMS ((rtx, enum rtx_code, rtx, rtx,
|
||||
enum machine_mode, rtx, rtx,
|
||||
enum machine_mode, int));
|
||||
rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
||||
rtx, rtx, enum machine_mode, int);
|
||||
|
||||
/* Return nonzero if the conditional move is supported. */
|
||||
int can_conditionally_move_p PARAMS ((enum machine_mode mode));
|
||||
int can_conditionally_move_p (enum machine_mode mode);
|
||||
|
||||
#endif
|
||||
rtx emit_conditional_add PARAMS ((rtx, enum rtx_code, rtx, rtx,
|
||||
enum machine_mode, rtx, rtx,
|
||||
enum machine_mode, int));
|
||||
rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
||||
rtx, rtx, enum machine_mode, int);
|
||||
|
||||
|
||||
/* Functions from expmed.c: */
|
||||
|
||||
/* Arguments MODE, RTX: return an rtx for the negation of that value.
|
||||
May emit insns. */
|
||||
extern rtx negate_rtx PARAMS ((enum machine_mode, rtx));
|
||||
extern rtx negate_rtx (enum machine_mode, rtx);
|
||||
|
||||
/* Expand a logical AND operation. */
|
||||
extern rtx expand_and PARAMS ((enum machine_mode, rtx, rtx, rtx));
|
||||
extern rtx expand_and (enum machine_mode, rtx, rtx, rtx);
|
||||
|
||||
/* Emit a store-flag operation. */
|
||||
extern rtx emit_store_flag PARAMS ((rtx, enum rtx_code, rtx, rtx,
|
||||
enum machine_mode, int, int));
|
||||
extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
||||
int, int);
|
||||
|
||||
/* Like emit_store_flag, but always succeeds. */
|
||||
extern rtx emit_store_flag_force PARAMS ((rtx, enum rtx_code, rtx, rtx,
|
||||
enum machine_mode, int, int));
|
||||
extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
|
||||
enum machine_mode, int, int);
|
||||
|
||||
/* Functions from loop.c: */
|
||||
|
||||
/* Given an insn and condition, return a canonical description of
|
||||
the test being made. */
|
||||
extern rtx canonicalize_condition PARAMS ((rtx, rtx, int, rtx *, rtx));
|
||||
extern rtx canonicalize_condition (rtx, rtx, int, rtx *, rtx);
|
||||
|
||||
/* Given a JUMP_INSN, return a canonical description of the test
|
||||
being made. */
|
||||
extern rtx get_condition PARAMS ((rtx, rtx *));
|
||||
extern rtx get_condition (rtx, rtx *);
|
||||
|
||||
/* Generate a conditional trap instruction. */
|
||||
extern rtx gen_cond_trap PARAMS ((enum rtx_code, rtx, rtx, rtx));
|
||||
extern rtx gen_cond_trap (enum rtx_code, rtx, rtx, rtx);
|
||||
|
||||
/* Functions from builtins.c: */
|
||||
extern rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int);
|
||||
@ -374,34 +372,33 @@ extern int can_address_p (tree);
|
||||
|
||||
/* This is run once per compilation to set up which modes can be used
|
||||
directly in memory and to initialize the block move optab. */
|
||||
extern void init_expr_once PARAMS ((void));
|
||||
extern void init_expr_once (void);
|
||||
|
||||
/* This is run at the start of compiling a function. */
|
||||
extern void init_expr PARAMS ((void));
|
||||
extern void init_expr (void);
|
||||
|
||||
/* This is run at the end of compiling a function. */
|
||||
extern void finish_expr_for_function PARAMS ((void));
|
||||
extern void finish_expr_for_function (void);
|
||||
|
||||
/* Use protect_from_queue to convert a QUEUED expression
|
||||
into something that you can put immediately into an instruction. */
|
||||
extern rtx protect_from_queue PARAMS ((rtx, int));
|
||||
extern rtx protect_from_queue (rtx, int);
|
||||
|
||||
/* Perform all the pending incrementations. */
|
||||
extern void emit_queue PARAMS ((void));
|
||||
extern void emit_queue (void);
|
||||
|
||||
/* Tell if something has a queued subexpression. */
|
||||
extern int queued_subexp_p PARAMS ((rtx));
|
||||
extern int queued_subexp_p (rtx);
|
||||
|
||||
/* Emit some rtl insns to move data between rtx's, converting machine modes.
|
||||
Both modes must be floating or both fixed. */
|
||||
extern void convert_move PARAMS ((rtx, rtx, int));
|
||||
extern void convert_move (rtx, rtx, int);
|
||||
|
||||
/* Convert an rtx to specified machine mode and return the result. */
|
||||
extern rtx convert_to_mode PARAMS ((enum machine_mode, rtx, int));
|
||||
extern rtx convert_to_mode (enum machine_mode, rtx, int);
|
||||
|
||||
/* Convert an rtx to MODE from OLDMODE and return the result. */
|
||||
extern rtx convert_modes PARAMS ((enum machine_mode, enum machine_mode,
|
||||
rtx, int));
|
||||
extern rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int);
|
||||
|
||||
/* Emit code to move a block Y to a block X. */
|
||||
|
||||
@ -412,108 +409,106 @@ enum block_op_methods
|
||||
BLOCK_OP_CALL_PARM
|
||||
};
|
||||
|
||||
extern void init_block_move_fn PARAMS ((const char *));
|
||||
extern void init_block_clear_fn PARAMS ((const char *));
|
||||
extern void init_block_move_fn (const char *);
|
||||
extern void init_block_clear_fn (const char *);
|
||||
|
||||
extern rtx emit_block_move PARAMS ((rtx, rtx, rtx, enum block_op_methods));
|
||||
extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
|
||||
|
||||
/* Copy all or part of a value X into registers starting at REGNO.
|
||||
The number of registers to be filled is NREGS. */
|
||||
extern void move_block_to_reg PARAMS ((int, rtx, int, enum machine_mode));
|
||||
extern void move_block_to_reg (int, rtx, int, enum machine_mode);
|
||||
|
||||
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
|
||||
The number of registers to be filled is NREGS. */
|
||||
extern void move_block_from_reg PARAMS ((int, rtx, int));
|
||||
extern void move_block_from_reg (int, rtx, int);
|
||||
|
||||
/* Generate a non-consecutive group of registers represented by a PARALLEL. */
|
||||
extern rtx gen_group_rtx PARAMS ((rtx));
|
||||
extern rtx gen_group_rtx (rtx);
|
||||
|
||||
/* Load a BLKmode value into non-consecutive registers represented by a
|
||||
PARALLEL. */
|
||||
extern void emit_group_load PARAMS ((rtx, rtx, int));
|
||||
extern void emit_group_load (rtx, rtx, int);
|
||||
|
||||
/* Move a non-consecutive group of registers represented by a PARALLEL into
|
||||
a non-consecutive group of registers represented by a PARALLEL. */
|
||||
extern void emit_group_move PARAMS ((rtx, rtx));
|
||||
extern void emit_group_move (rtx, rtx);
|
||||
|
||||
/* Store a BLKmode value from non-consecutive registers represented by a
|
||||
PARALLEL. */
|
||||
extern void emit_group_store PARAMS ((rtx, rtx, int));
|
||||
extern void emit_group_store (rtx, rtx, int);
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* Copy BLKmode object from a set of registers. */
|
||||
extern rtx copy_blkmode_from_reg PARAMS ((rtx, rtx, tree));
|
||||
extern rtx copy_blkmode_from_reg (rtx, rtx, tree);
|
||||
#endif
|
||||
|
||||
/* Mark REG as holding a parameter for the next CALL_INSN. */
|
||||
extern void use_reg PARAMS ((rtx *, rtx));
|
||||
extern void use_reg (rtx *, rtx);
|
||||
|
||||
/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
|
||||
for the next CALL_INSN. */
|
||||
extern void use_regs PARAMS ((rtx *, int, int));
|
||||
extern void use_regs (rtx *, int, int);
|
||||
|
||||
/* Mark a PARALLEL as holding a parameter for the next CALL_INSN. */
|
||||
extern void use_group_regs PARAMS ((rtx *, rtx));
|
||||
extern void use_group_regs (rtx *, rtx);
|
||||
|
||||
/* Write zeros through the storage of OBJECT.
|
||||
If OBJECT has BLKmode, SIZE is its length in bytes. */
|
||||
extern rtx clear_storage PARAMS ((rtx, rtx));
|
||||
extern rtx clear_storage (rtx, rtx);
|
||||
|
||||
/* Determine whether the LEN bytes can be moved by using several move
|
||||
instructions. Return nonzero if a call to move_by_pieces should
|
||||
succeed. */
|
||||
extern int can_move_by_pieces PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
|
||||
extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);
|
||||
|
||||
/* Return nonzero if it is desirable to store LEN bytes generated by
CONSTFUN with several move instructions by store_by_pieces
function. CONSTFUNDATA is a pointer which will be passed as argument
in every CONSTFUN call.
ALIGN is maximum alignment we can assume. */
extern int can_store_by_pieces PARAMS ((unsigned HOST_WIDE_INT,
rtx (*) (void *, HOST_WIDE_INT,
enum machine_mode),
void *, unsigned int));
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
rtx (*) (void *, HOST_WIDE_INT,
enum machine_mode),
void *, unsigned int);
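The only slightly awkward declarations are ones like can_store_by_pieces and store_by_pieces, whose parameters are themselves function pointers. Only the outer PARAMS wrapper goes away; the pointer's own parameter list was already written in prototype form and is left alone. A sketch with a hypothetical walk_bytes declaration mirroring the types used above:

    /* Before: the callback type sits inside the PARAMS wrapper.  */
    extern int walk_bytes PARAMS ((unsigned HOST_WIDE_INT,
                                   rtx (*) (void *, HOST_WIDE_INT,
                                            enum machine_mode),
                                   void *));

    /* After: only the outer wrapper is removed; the inner
       function-pointer declarator is unchanged.  */
    extern int walk_bytes (unsigned HOST_WIDE_INT,
                           rtx (*) (void *, HOST_WIDE_INT,
                                    enum machine_mode),
                           void *);
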
/* Generate several move instructions to store LEN bytes generated by
|
||||
CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
|
||||
pointer which will be passed as argument in every CONSTFUN call.
|
||||
ALIGN is maximum alignment we can assume.
|
||||
Returns TO + LEN. */
|
||||
extern rtx store_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
rtx (*) (void *, HOST_WIDE_INT,
|
||||
enum machine_mode),
|
||||
void *, unsigned int, int));
|
||||
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
|
||||
rtx (*) (void *, HOST_WIDE_INT, enum machine_mode),
|
||||
void *, unsigned int, int);
|
||||
|
||||
/* Emit insns to set X from Y. */
|
||||
extern rtx emit_move_insn PARAMS ((rtx, rtx));
|
||||
extern rtx emit_move_insn (rtx, rtx);
|
||||
|
||||
/* Emit insns to set X from Y, with no frills. */
|
||||
extern rtx emit_move_insn_1 PARAMS ((rtx, rtx));
|
||||
extern rtx emit_move_insn_1 (rtx, rtx);
|
||||
|
||||
/* Push a block of length SIZE (perhaps variable)
|
||||
and return an rtx to address the beginning of the block. */
|
||||
extern rtx push_block PARAMS ((rtx, int, int));
|
||||
extern rtx push_block (rtx, int, int);
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* Generate code to push something onto the stack, given its mode and type. */
|
||||
extern void emit_push_insn PARAMS ((rtx, enum machine_mode, tree, rtx,
|
||||
unsigned int, int, rtx, int, rtx, rtx,
|
||||
int, rtx));
|
||||
extern void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int,
|
||||
int, rtx, int, rtx, rtx, int, rtx);
|
||||
|
||||
/* Expand an assignment that stores the value of FROM into TO. */
|
||||
extern rtx expand_assignment PARAMS ((tree, tree, int, int));
|
||||
extern rtx expand_assignment (tree, tree, int, int);
|
||||
|
||||
/* Generate code for computing expression EXP,
|
||||
and storing the value into TARGET.
|
||||
If SUGGEST_REG is nonzero, copy the value through a register
|
||||
and return that register, if that is possible. */
|
||||
extern rtx store_expr PARAMS ((tree, rtx, int));
|
||||
extern rtx store_expr (tree, rtx, int);
|
||||
#endif
|
||||
|
||||
/* Given an rtx that may include add and multiply operations,
|
||||
generate them as insns and return a pseudo-reg containing the value.
|
||||
Useful after calling expand_expr with 1 as sum_ok. */
|
||||
extern rtx force_operand PARAMS ((rtx, rtx));
|
||||
extern rtx force_operand (rtx, rtx);
|
||||
|
||||
/* Return an object on the placeholder list that matches EXP, a
|
||||
PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
|
||||
@ -522,137 +517,134 @@ extern rtx force_operand PARAMS ((rtx, rtx));
|
||||
a location which initially points to a starting location in the
|
||||
placeholder list (zero means start of the list) and where a pointer into
|
||||
the placeholder list at which the object is found is placed. */
|
||||
extern tree find_placeholder PARAMS ((tree, tree *));
|
||||
extern tree find_placeholder (tree, tree *);
|
||||
|
||||
/* Generate code for computing expression EXP.
|
||||
An rtx for the computed value is returned. The value is never null.
|
||||
In the case of a void EXP, const0_rtx is returned. */
|
||||
extern rtx expand_expr PARAMS ((tree, rtx, enum machine_mode,
|
||||
enum expand_modifier));
|
||||
extern rtx expand_expr (tree, rtx, enum machine_mode, enum expand_modifier);
|
||||
|
||||
/* At the start of a function, record that we have no previously-pushed
|
||||
arguments waiting to be popped. */
|
||||
extern void init_pending_stack_adjust PARAMS ((void));
|
||||
extern void init_pending_stack_adjust (void);
|
||||
|
||||
/* When exiting from function, if safe, clear out any pending stack adjust
|
||||
so the adjustment won't get done. */
|
||||
extern void clear_pending_stack_adjust PARAMS ((void));
|
||||
extern void clear_pending_stack_adjust (void);
|
||||
|
||||
/* Pop any previously-pushed arguments that have not been popped yet. */
|
||||
extern void do_pending_stack_adjust PARAMS ((void));
|
||||
extern void do_pending_stack_adjust (void);
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* Return the tree node and offset if a given argument corresponds to
|
||||
a string constant. */
|
||||
extern tree string_constant PARAMS ((tree, tree *));
|
||||
extern tree string_constant (tree, tree *);
|
||||
|
||||
/* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
|
||||
extern void jumpifnot PARAMS ((tree, rtx));
|
||||
extern void jumpifnot (tree, rtx);
|
||||
|
||||
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
|
||||
extern void jumpif PARAMS ((tree, rtx));
|
||||
extern void jumpif (tree, rtx);
|
||||
|
||||
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
|
||||
the result is zero, or IF_TRUE_LABEL if the result is one. */
|
||||
extern void do_jump PARAMS ((tree, rtx, rtx));
|
||||
extern void do_jump (tree, rtx, rtx);
|
||||
#endif
|
||||
|
||||
/* Generate rtl to compare two rtx's, will call emit_cmp_insn. */
|
||||
extern rtx compare_from_rtx PARAMS ((rtx, rtx, enum rtx_code, int,
|
||||
enum machine_mode, rtx));
|
||||
extern void do_compare_rtx_and_jump PARAMS ((rtx, rtx, enum rtx_code, int,
|
||||
enum machine_mode, rtx,
|
||||
rtx, rtx));
|
||||
extern rtx compare_from_rtx (rtx, rtx, enum rtx_code, int, enum machine_mode,
|
||||
rtx);
|
||||
extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
|
||||
enum machine_mode, rtx, rtx, rtx);
|
||||
|
||||
/* Two different ways of generating switch statements. */
|
||||
extern int try_casesi PARAMS ((tree, tree, tree, tree, rtx, rtx));
|
||||
extern int try_tablejump PARAMS ((tree, tree, tree, tree, rtx, rtx));
|
||||
extern int try_casesi (tree, tree, tree, tree, rtx, rtx);
|
||||
extern int try_tablejump (tree, tree, tree, tree, rtx, rtx);
|
||||
|
||||
/* Smallest number of adjacent cases before we use a jump table.
|
||||
XXX Should be a target hook. */
|
||||
extern unsigned int case_values_threshold PARAMS ((void));
|
||||
extern unsigned int case_values_threshold (void);
|
||||
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* rtl.h and tree.h were included. */
|
||||
/* Return an rtx for the size in bytes of the value of an expr. */
|
||||
extern rtx expr_size PARAMS ((tree));
|
||||
extern rtx expr_size (tree);
|
||||
|
||||
/* Return a wide integer for the size in bytes of the value of EXP, or -1
|
||||
if the size can vary or is larger than an integer. */
|
||||
extern HOST_WIDE_INT int_expr_size PARAMS ((tree));
|
||||
extern HOST_WIDE_INT int_expr_size (tree);
|
||||
|
||||
extern rtx lookup_static_chain PARAMS ((tree));
|
||||
extern rtx lookup_static_chain (tree);
|
||||
|
||||
/* Convert a stack slot address ADDR valid in function FNDECL
|
||||
into an address valid in this function (using a static chain). */
|
||||
extern rtx fix_lexical_addr PARAMS ((rtx, tree));
|
||||
extern rtx fix_lexical_addr (rtx, tree);
|
||||
|
||||
/* Return the address of the trampoline for entering nested fn FUNCTION. */
|
||||
extern rtx trampoline_address PARAMS ((tree));
|
||||
extern rtx trampoline_address (tree);
|
||||
|
||||
/* Return an rtx that refers to the value returned by a function
|
||||
in its original home. This becomes invalid if any more code is emitted. */
|
||||
extern rtx hard_function_value PARAMS ((tree, tree, int));
|
||||
extern rtx hard_function_value (tree, tree, int);
|
||||
|
||||
extern rtx prepare_call_address PARAMS ((rtx, tree, rtx *, int, int));
|
||||
extern rtx prepare_call_address (rtx, tree, rtx *, int, int);
|
||||
|
||||
extern rtx expand_call PARAMS ((tree, rtx, int));
|
||||
extern rtx expand_call (tree, rtx, int);
|
||||
|
||||
extern rtx expand_shift PARAMS ((enum tree_code, enum machine_mode, rtx, tree,
|
||||
rtx, int));
|
||||
extern rtx expand_divmod PARAMS ((int, enum tree_code, enum machine_mode, rtx,
|
||||
rtx, rtx, int));
|
||||
extern void locate_and_pad_parm PARAMS ((enum machine_mode, tree, int, int,
|
||||
tree, struct args_size *,
|
||||
struct locate_and_pad_arg_data *));
|
||||
extern rtx expand_inline_function PARAMS ((tree, tree, rtx, int, tree, rtx));
|
||||
extern rtx expand_shift (enum tree_code, enum machine_mode, rtx, tree, rtx,
|
||||
int);
|
||||
extern rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx,
|
||||
rtx, int);
|
||||
extern void locate_and_pad_parm (enum machine_mode, tree, int, int, tree,
|
||||
struct args_size *,
|
||||
struct locate_and_pad_arg_data *);
|
||||
extern rtx expand_inline_function (tree, tree, rtx, int, tree, rtx);
|
||||
|
||||
/* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary. */
|
||||
extern rtx label_rtx PARAMS ((tree));
|
||||
extern rtx label_rtx (tree);
|
||||
|
||||
/* As label_rtx, but additionally the label is placed on the forced label
|
||||
list of its containing function (i.e. it is treated as reachable even
|
||||
if how is not obvious). */
|
||||
extern rtx force_label_rtx PARAMS ((tree));
|
||||
extern rtx force_label_rtx (tree);
|
||||
#endif
|
||||
|
||||
/* Indicate how an input argument register was promoted. */
|
||||
extern rtx promoted_input_arg PARAMS ((unsigned int, enum machine_mode *,
|
||||
int *));
|
||||
extern rtx promoted_input_arg (unsigned int, enum machine_mode *, int *);
|
||||
|
||||
/* Return an rtx like arg but sans any constant terms.
|
||||
Returns the original rtx if it has no constant terms.
|
||||
The constant terms are added and stored via a second arg. */
|
||||
extern rtx eliminate_constant_term PARAMS ((rtx, rtx *));
|
||||
extern rtx eliminate_constant_term (rtx, rtx *);
|
||||
|
||||
/* Convert arg to a valid memory address for specified machine mode,
|
||||
by emitting insns to perform arithmetic if nec. */
|
||||
extern rtx memory_address PARAMS ((enum machine_mode, rtx));
|
||||
extern rtx memory_address (enum machine_mode, rtx);
|
||||
|
||||
/* Like `memory_address' but pretent `flag_force_addr' is 0. */
|
||||
extern rtx memory_address_noforce PARAMS ((enum machine_mode, rtx));
|
||||
extern rtx memory_address_noforce (enum machine_mode, rtx);
|
||||
|
||||
/* Set the alias set of MEM to SET. */
|
||||
extern void set_mem_alias_set PARAMS ((rtx, HOST_WIDE_INT));
|
||||
extern void set_mem_alias_set (rtx, HOST_WIDE_INT);
|
||||
|
||||
/* Set the alignment of MEM to ALIGN bits. */
|
||||
extern void set_mem_align PARAMS ((rtx, unsigned int));
|
||||
extern void set_mem_align (rtx, unsigned int);
|
||||
|
||||
/* Set the expr for MEM to EXPR. */
|
||||
extern void set_mem_expr PARAMS ((rtx, tree));
|
||||
extern void set_mem_expr (rtx, tree);
|
||||
|
||||
/* Set the offset for MEM to OFFSET. */
|
||||
extern void set_mem_offset PARAMS ((rtx, rtx));
|
||||
extern void set_mem_offset (rtx, rtx);
|
||||
|
||||
/* Set the size for MEM to SIZE. */
|
||||
extern void set_mem_size PARAMS ((rtx, rtx));
|
||||
extern void set_mem_size (rtx, rtx);
|
||||
|
||||
/* Return a memory reference like MEMREF, but with its mode changed
|
||||
to MODE and its address changed to ADDR.
|
||||
(VOIDmode means don't change the mode.
|
||||
NULL for ADDR means don't change the address.) */
|
||||
extern rtx change_address PARAMS ((rtx, enum machine_mode, rtx));
|
||||
extern rtx change_address (rtx, enum machine_mode, rtx);
|
||||
|
||||
/* Return a memory reference like MEMREF, but with its mode changed
|
||||
to MODE and its address offset by OFFSET bytes. */
|
||||
@ -673,120 +665,117 @@ extern rtx change_address PARAMS ((rtx, enum machine_mode, rtx));
|
||||
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
|
||||
adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)
|
||||
|
||||
extern rtx adjust_address_1 PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT,
|
||||
int, int));
|
||||
extern rtx adjust_automodify_address_1 PARAMS ((rtx, enum machine_mode,
|
||||
rtx, HOST_WIDE_INT, int));
|
||||
extern rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int);
|
||||
extern rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx,
|
||||
HOST_WIDE_INT, int);
|
||||
|
||||
/* Return a memory reference like MEMREF, but whose address is changed by
|
||||
adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
|
||||
known to be in OFFSET (possibly 1). */
|
||||
extern rtx offset_address PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT));
|
||||
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);
|
||||
|
||||
/* Return a memory reference like MEMREF, but with its address changed to
|
||||
ADDR. The caller is asserting that the actual piece of memory pointed
|
||||
to is the same, just the form of the address is being changed, such as
|
||||
by putting something into a register. */
|
||||
extern rtx replace_equiv_address PARAMS ((rtx, rtx));
|
||||
extern rtx replace_equiv_address (rtx, rtx);
|
||||
|
||||
/* Likewise, but the reference is not required to be valid. */
|
||||
extern rtx replace_equiv_address_nv PARAMS ((rtx, rtx));
|
||||
extern rtx replace_equiv_address_nv (rtx, rtx);
|
||||
|
||||
/* Return a memory reference like MEMREF, but with its mode widened to
|
||||
MODE and adjusted by OFFSET. */
|
||||
extern rtx widen_memory_access PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT));
|
||||
extern rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT);
|
||||
|
||||
/* Return a memory reference like MEMREF, but which is known to have a
|
||||
valid address. */
|
||||
extern rtx validize_mem PARAMS ((rtx));
|
||||
extern rtx validize_mem (rtx);
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* Given REF, either a MEM or a REG, and T, either the type of X or
|
||||
the expression corresponding to REF, set RTX_UNCHANGING_P if
|
||||
appropriate. */
|
||||
extern void maybe_set_unchanging PARAMS ((rtx, tree));
|
||||
extern void maybe_set_unchanging (rtx, tree);
|
||||
|
||||
/* Given REF, a MEM, and T, either the type of X or the expression
|
||||
corresponding to REF, set the memory attributes. OBJECTP is nonzero
|
||||
if we are making a new object of this type. */
|
||||
extern void set_mem_attributes PARAMS ((rtx, tree, int));
|
||||
extern void set_mem_attributes (rtx, tree, int);
|
||||
|
||||
/* Similar, except that BITPOS has not yet been applied to REF, so if
|
||||
we alter MEM_OFFSET according to T then we should subtract BITPOS
|
||||
expecting that it'll be added back in later. */
|
||||
extern void set_mem_attributes_minus_bitpos PARAMS ((rtx, tree, int,
|
||||
HOST_WIDE_INT));
|
||||
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT);
|
||||
#endif
|
||||
|
||||
/* Assemble the static constant template for function entry trampolines. */
|
||||
extern rtx assemble_trampoline_template PARAMS ((void));
|
||||
extern rtx assemble_trampoline_template (void);
|
||||
|
||||
/* Given rtx, return new rtx whose address won't be affected by
|
||||
any side effects. It has been copied to a new temporary reg. */
|
||||
extern rtx stabilize PARAMS ((rtx));
|
||||
extern rtx stabilize (rtx);
|
||||
|
||||
/* Given an rtx, copy all regs it refers to into new temps
|
||||
and return a modified copy that refers to the new temps. */
|
||||
extern rtx copy_all_regs PARAMS ((rtx));
|
||||
extern rtx copy_all_regs (rtx);
|
||||
|
||||
/* Copy given rtx to a new temp reg and return that. */
|
||||
extern rtx copy_to_reg PARAMS ((rtx));
|
||||
extern rtx copy_to_reg (rtx);
|
||||
|
||||
/* Like copy_to_reg but always make the reg Pmode. */
|
||||
extern rtx copy_addr_to_reg PARAMS ((rtx));
|
||||
extern rtx copy_addr_to_reg (rtx);
|
||||
|
||||
/* Like copy_to_reg but always make the reg the specified mode MODE. */
|
||||
extern rtx copy_to_mode_reg PARAMS ((enum machine_mode, rtx));
|
||||
extern rtx copy_to_mode_reg (enum machine_mode, rtx);
|
||||
|
||||
/* Copy given rtx to given temp reg and return that. */
|
||||
extern rtx copy_to_suggested_reg PARAMS ((rtx, rtx, enum machine_mode));
|
||||
extern rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode);
|
||||
|
||||
/* Copy a value to a register if it isn't already a register.
|
||||
Args are mode (in case value is a constant) and the value. */
|
||||
extern rtx force_reg PARAMS ((enum machine_mode, rtx));
|
||||
extern rtx force_reg (enum machine_mode, rtx);
|
||||
|
||||
/* Return given rtx, copied into a new temp reg if it was in memory. */
|
||||
extern rtx force_not_mem PARAMS ((rtx));
|
||||
extern rtx force_not_mem (rtx);
|
||||
|
||||
#ifdef TREE_CODE
|
||||
/* Return mode and signedness to use when object is promoted. */
|
||||
extern enum machine_mode promote_mode PARAMS ((tree, enum machine_mode,
|
||||
int *, int));
|
||||
extern enum machine_mode promote_mode (tree, enum machine_mode, int *, int);
|
||||
#endif
|
||||
|
||||
/* Remove some bytes from the stack. An rtx says how many. */
|
||||
extern void adjust_stack PARAMS ((rtx));
|
||||
extern void adjust_stack (rtx);
|
||||
|
||||
/* Add some bytes to the stack. An rtx says how many. */
|
||||
extern void anti_adjust_stack PARAMS ((rtx));
|
||||
extern void anti_adjust_stack (rtx);
|
||||
|
||||
/* This enum is used for the following two functions. */
|
||||
enum save_level {SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL};
|
||||
|
||||
/* Save the stack pointer at the specified level. */
|
||||
extern void emit_stack_save PARAMS ((enum save_level, rtx *, rtx));
|
||||
extern void emit_stack_save (enum save_level, rtx *, rtx);
|
||||
|
||||
/* Restore the stack pointer from a save area of the specified level. */
|
||||
extern void emit_stack_restore PARAMS ((enum save_level, rtx, rtx));
|
||||
extern void emit_stack_restore (enum save_level, rtx, rtx);
|
||||
|
||||
/* Allocate some space on the stack dynamically and return its address. An rtx
|
||||
says how many bytes. */
|
||||
extern rtx allocate_dynamic_stack_space PARAMS ((rtx, rtx, int));
|
||||
extern rtx allocate_dynamic_stack_space (rtx, rtx, int);
|
||||
|
||||
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
|
||||
FIRST is a constant and size is a Pmode RTX. These are offsets from the
|
||||
current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
|
||||
subtract from the stack. If SIZE is constant, this is done
|
||||
with a fixed number of probes. Otherwise, we must make a loop. */
|
||||
extern void probe_stack_range PARAMS ((HOST_WIDE_INT, rtx));
|
||||
extern void probe_stack_range (HOST_WIDE_INT, rtx);
|
||||
|
||||
/* Return an rtx that refers to the value returned by a library call
|
||||
in its original home. This becomes invalid if any more code is emitted. */
|
||||
extern rtx hard_libcall_value PARAMS ((enum machine_mode));
|
||||
extern rtx hard_libcall_value (enum machine_mode);
|
||||
|
||||
/* Given an rtx, return an rtx for a value rounded up to a multiple
|
||||
of STACK_BOUNDARY / BITS_PER_UNIT. */
|
||||
extern rtx round_push PARAMS ((rtx));
|
||||
extern rtx round_push (rtx);
|
||||
|
||||
/* Return the mode desired by operand N of a particular bitfield
|
||||
insert/extract insn, or MAX_MACHINE_MODE if no such insn is
|
||||
@ -794,41 +783,39 @@ extern rtx round_push PARAMS ((rtx));
|
||||
|
||||
enum extraction_pattern { EP_insv, EP_extv, EP_extzv };
|
||||
extern enum machine_mode
|
||||
mode_for_extraction PARAMS ((enum extraction_pattern, int));
|
||||
mode_for_extraction (enum extraction_pattern, int);
|
||||
|
||||
extern rtx store_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT,
|
||||
enum machine_mode, rtx, HOST_WIDE_INT));
|
||||
extern rtx extract_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, int, rtx,
|
||||
enum machine_mode, enum machine_mode,
|
||||
HOST_WIDE_INT));
|
||||
extern rtx expand_mult PARAMS ((enum machine_mode, rtx, rtx, rtx, int));
|
||||
extern bool const_mult_add_overflow_p PARAMS ((rtx, rtx, rtx, enum machine_mode, int));
|
||||
extern rtx expand_mult_add PARAMS ((rtx, rtx, rtx, rtx,enum machine_mode, int));
|
||||
extern rtx expand_mult_highpart_adjust PARAMS ((enum machine_mode, rtx, rtx, rtx, rtx, int));
|
||||
extern rtx store_bit_field (rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, enum machine_mode, rtx,
|
||||
HOST_WIDE_INT);
|
||||
extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
|
||||
unsigned HOST_WIDE_INT, int, rtx,
|
||||
enum machine_mode, enum machine_mode,
|
||||
HOST_WIDE_INT);
|
||||
extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
|
||||
extern bool const_mult_add_overflow_p (rtx, rtx, rtx, enum machine_mode, int);
|
||||
extern rtx expand_mult_add (rtx, rtx, rtx, rtx,enum machine_mode, int);
|
||||
extern rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int);
|
||||
|
||||
extern rtx assemble_static_space PARAMS ((unsigned HOST_WIDE_INT));
|
||||
extern int safe_from_p PARAMS ((rtx, tree, int));
|
||||
extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
|
||||
extern int safe_from_p (rtx, tree, int);
|
||||
|
||||
/* Call this once to initialize the contents of the optabs
|
||||
appropriately for the current target machine. */
|
||||
extern void init_optabs PARAMS ((void));
|
||||
extern void init_all_optabs PARAMS ((void));
|
||||
extern void init_optabs (void);
|
||||
extern void init_all_optabs (void);
|
||||
|
||||
/* Call this to initialize an optab function entry. */
|
||||
extern rtx init_one_libfunc PARAMS ((const char *));
|
||||
extern rtx init_one_libfunc (const char *);
|
||||
|
||||
extern void do_jump_by_parts_equality_rtx PARAMS ((rtx, rtx, rtx));
|
||||
extern void do_jump_by_parts_greater_rtx PARAMS ((enum machine_mode,
|
||||
int, rtx, rtx, rtx,
|
||||
rtx));
|
||||
extern void do_jump_by_parts_equality_rtx (rtx, rtx, rtx);
|
||||
extern void do_jump_by_parts_greater_rtx (enum machine_mode, int, rtx, rtx,
|
||||
rtx, rtx);
|
||||
|
||||
#ifdef TREE_CODE /* Don't lose if tree.h not included. */
extern void mark_seen_cases PARAMS ((tree, unsigned char *,
HOST_WIDE_INT, int));
extern void mark_seen_cases (tree, unsigned char *, HOST_WIDE_INT, int);
#endif

extern int vector_mode_valid_p PARAMS ((enum machine_mode));
extern int vector_mode_valid_p (enum machine_mode);

extern tree placeholder_list;