Add memory reuse to virtual operands in the operand scanner.

2006-12-18  Andrew MacLeod  <amacleod@redhat.com>

	* tree-ssa-operands.h (struct vdef_optype_d): Rename to voptype_d.
	(struct vuse_optype_d): Delete.
	(SSA_OPERAND_MEMORY_SIZE): Delete.
	(struct ssa_operand_memory_d): Change mem array to size one.
	(NUM_VOP_FREE_BUCKETS): Define.
	(free_vuses, free_vdefs): Replace with vop_free_buckets array.
	(vdef_ops, vuse_ops, struct ssa_operand_iterator_d): Use voptype_d type.
	* tree-pretty-print.c (dump_vops): Use voptype_d type.
	* tree-ssa-operands.c (vop_free_bucket_size): New.  Number of operands
	which fit into a chunk of memory from a specific bucket.
	(vop_free_bucket_index): New.  Find correct size memory bucket.
	(init_vop_buckets): New.  Initialize VOP free memory buckets.
	(add_vop_to_freelist): New.  Add a VOP to the correct free list.
	(ssa_operand_mem_size): New.  Current size of an operand memory chunk.
	(init_ssa_operands): Initialize operand memory and free lists.
	(fini_ssa_operands): Remove references to free_vuses and free_vdefs.
	(ssa_operand_alloc): Use graduated size memory allocation.
	(APPEND_OP_AFTER, MOVE_HEAD_AFTER, MOVE_HEAD_TO_FREELIST, 
	INITIALIZE_USE): Remove.
	(alloc_vop): New.  Allocate a virtual operand.
	(alloc_vdef, alloc_vuse): Delete.
	(add_def_op, add_use_op): Directly set up pointers.
	(add_vop): New.  Add a virtual operand.
	(add_vuse_op, add_vdef_op): Call add_vop.
	(realloc_vop): New.  Reallocate a virtual operand.
	(realloc_vdef, realloc_vuse): Call realloc_vop.
	(finalize_ssa_def_ops): Delete.  Move content to finalize_ssa_defs.
	(finalize_ssa_defs): Optimize for common case, remove code based on
	sorted pointers which was a waste of time.
	(finalize_ssa_use_ops): Delete.  Move content to finalize_ssa_uses.
	(finalize_ssa_uses): Update last pointer.
	(finalize_ssa_vdef_ops): Delete.  Move content to finalize_ssa_vdefs.
	(finalize_ssa_vdefs, finalize_ssa_vuse_ops): Use voptype_d and
	directly manipulate pointers.
	(copy_virtual_operands): Use voptype_d, and no need to update pointers.

From-SVN: r120009

gcc/tree-pretty-print.c

@ -2692,8 +2692,8 @@ newline_and_indent (pretty_printer *buffer, int spc)
static void
dump_vops (pretty_printer *buffer, tree stmt, int spc, int flags)
{
struct vdef_optype_d *vdefs;
struct vuse_optype_d *vuses;
struct voptype_d *vdefs;
struct voptype_d *vuses;
int i, n;
if (!ssa_operands_active () || !stmt_references_memory_p (stmt))

gcc/tree-ssa-operands.c

@ -265,6 +265,95 @@ ssa_operands_active (void)
}
/* VOPs are of variable size, so the free list maps "free buckets" to the
following table:
bucket # operands
------ ----------
0 1
1 2
...
15 16
16 17-24
17 25-32
18 33-40
...
29 121-128
Any VOPs larger than this are simply added to the largest bucket when they
are freed. */
/* Return the number of operands used in bucket BUCKET. */
static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
if (bucket < 16)
return bucket + 1;
return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
beyond the end of the bucket table, return -1. */
static inline int
vop_free_bucket_index (int num)
{
gcc_assert (num > 0);
/* Sizes 1 through 16 use buckets 0-15. */
if (num <= 16)
return num - 1;
/* Buckets 16 - 45 represent 17 through 256 in 8 unit chunks. */
if (num < 256)
return 14 + (num - 1) / 8;
return -1;
}
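
Taken together, the two helpers above implement a small size-class scheme: exact-fit buckets for 1 through 16 operands, then 8-operand-wide classes up to the end of the table, with oversized VOPs clamped onto the largest bucket when they are freed (see add_vop_to_freelist below). The following standalone C sketch mirrors just the arithmetic of the two mappings to show the round trip for a couple of sizes; the function names echo the ones above, but nothing here depends on GCC internals.

#include <assert.h>
#include <stdio.h>

/* Mirror of vop_free_bucket_index: map an operand count to a free-list
   bucket, or -1 when the count is beyond the table.  */
static int
bucket_index (int num)
{
  if (num <= 16)
    return num - 1;              /* exact-fit buckets 0..15 */
  if (num < 256)
    return 14 + (num - 1) / 8;   /* 8-wide classes from bucket 16 on */
  return -1;
}

/* Mirror of vop_free_bucket_size: largest operand count a bucket holds.  */
static int
bucket_size (int bucket)
{
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}

int
main (void)
{
  /* A 3-operand VOP gets an exact-fit bucket.  */
  assert (bucket_index (3) == 2 && bucket_size (2) == 3);
  /* A 20-operand VOP lands in bucket 16, which holds up to 24 operands,
     so a later request in the 17-24 range can reuse its memory.  */
  assert (bucket_index (20) == 16 && bucket_size (16) == 24);
  printf ("bucket mapping checks pass\n");
  return 0;
}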
/* Initialize the VOP free buckets. */
static inline void
init_vop_buckets (void)
{
int x;
for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket. */
static inline void
add_vop_to_freelist (voptype_p ptr)
{
int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
/* Too large, use the largest bucket so it's not a complete throwaway. */
if (bucket == -1)
bucket = NUM_VOP_FREE_BUCKETS - 1;
ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
time more operand space is required. The final value is the amount that is
allocated every time after that.
#define OP_SIZE_INIT 0
#define OP_SIZE_1 30
#define OP_SIZE_2 110
#define OP_SIZE_3 511
/* Current size of the operand memory buffer. */
static unsigned int ssa_operand_mem_size;
/* Initialize the operand cache routines. */
void
@ -283,9 +372,11 @@ init_ssa_operands (void)
gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
gimple_ssa_operands (cfun)->operand_memory_index = ssa_operand_mem_size;
gimple_ssa_operands (cfun)->ops_active = true;
memset (&clobber_stats, 0, sizeof (clobber_stats));
init_vop_buckets ();
ssa_operand_mem_size = OP_SIZE_INIT;
}
@ -315,8 +406,6 @@ fini_ssa_operands (void)
gimple_ssa_operands (cfun)->free_defs = NULL;
gimple_ssa_operands (cfun)->free_uses = NULL;
gimple_ssa_operands (cfun)->free_vuses = NULL;
gimple_ssa_operands (cfun)->free_vdefs = NULL;
while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
{
@ -362,13 +451,32 @@ ssa_operand_alloc (unsigned size)
{
char *ptr;
gcc_assert (size <= SSA_OPERAND_MEMORY_SIZE);
if (gimple_ssa_operands (cfun)->operand_memory_index + size
>= SSA_OPERAND_MEMORY_SIZE)
>= ssa_operand_mem_size)
{
struct ssa_operand_memory_d *ptr;
ptr = GGC_NEW (struct ssa_operand_memory_d);
if (ssa_operand_mem_size == OP_SIZE_INIT)
ssa_operand_mem_size = OP_SIZE_1 * sizeof (struct voptype_d);
else
if (ssa_operand_mem_size == OP_SIZE_1 * sizeof (struct voptype_d))
ssa_operand_mem_size = OP_SIZE_2 * sizeof (struct voptype_d);
else
ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);
/* Go right to the maximum size if the request is too large. */
if (size > ssa_operand_mem_size)
ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);
/* Fail if there is not enough space. If there are this many operands
required, first make sure there isn't a different problem causing this
many operands. If the decision is that this is OK, then we can
specially allocate a buffer just for this request. */
gcc_assert (size <= ssa_operand_mem_size);
ptr = (struct ssa_operand_memory_d *)
ggc_alloc (sizeof (struct ssa_operand_memory_d)
+ ssa_operand_mem_size - 1);
ptr->next = gimple_ssa_operands (cfun)->operand_memory;
gimple_ssa_operands (cfun)->operand_memory = ptr;
gimple_ssa_operands (cfun)->operand_memory_index = 0;
@ -380,6 +488,8 @@ ssa_operand_alloc (unsigned size)
}
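
The sizing ladder above gives each function a cheap start: the first chunk holds OP_SIZE_1 (30) voptype_d records, the next 110, and every chunk after that 511, jumping straight to the largest size if a single request would not otherwise fit. Here is a hedged standalone sketch of just that decision; VOP_UNIT stands in for sizeof (struct voptype_d), whose real value depends on the host, so the printed byte counts are purely illustrative.

#include <stdio.h>

#define OP_SIZE_INIT 0
#define OP_SIZE_1    30
#define OP_SIZE_2    110
#define OP_SIZE_3    511

/* Stand-in for sizeof (struct voptype_d); illustrative only.  */
#define VOP_UNIT     24

/* Return the size of the next chunk to allocate, mirroring the if/else
   ladder in ssa_operand_alloc above.  REQUEST lets an oversized
   allocation jump straight to the maximum chunk size.  */
static unsigned int
next_chunk_size (unsigned int current, unsigned int request)
{
  unsigned int size;

  if (current == OP_SIZE_INIT)
    size = OP_SIZE_1 * VOP_UNIT;
  else if (current == OP_SIZE_1 * VOP_UNIT)
    size = OP_SIZE_2 * VOP_UNIT;
  else
    size = OP_SIZE_3 * VOP_UNIT;

  if (request > size)
    size = OP_SIZE_3 * VOP_UNIT;
  return size;
}

int
main (void)
{
  unsigned int size = OP_SIZE_INIT;

  /* Small functions never grow past the first, modest chunk; larger
     ones step up to the 110- and then the 511-record chunks.  */
  size = next_chunk_size (size, 64);
  printf ("first chunk:  %u bytes\n", size);
  size = next_chunk_size (size, 64);
  printf ("second chunk: %u bytes\n", size);
  size = next_chunk_size (size, 64);
  printf ("later chunks: %u bytes\n", size);
  return 0;
}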
/* Allocate a DEF operand. */
static inline struct def_optype_d *
alloc_def (void)
{
@ -392,11 +502,13 @@ alloc_def (void)
}
else
ret = (struct def_optype_d *)
ssa_operand_alloc (sizeof (struct def_optype_d));
ssa_operand_alloc (sizeof (struct def_optype_d));
return ret;
}
/* Allocate a USE operand. */
static inline struct use_optype_d *
alloc_use (void)
{
@ -408,50 +520,40 @@ alloc_use (void)
= gimple_ssa_operands (cfun)->free_uses->next;
}
else
ret = (struct use_optype_d *)ssa_operand_alloc (sizeof (struct use_optype_d));
ret = (struct use_optype_d *)
ssa_operand_alloc (sizeof (struct use_optype_d));
return ret;
}
/* Allocate a vop with NUM elements. */
static inline struct vdef_optype_d *
alloc_vdef (int num)
static inline struct voptype_d *
alloc_vop (int num)
{
struct vdef_optype_d *ret;
/* Eliminate free list for the moment. */
#if 0
if (free_vdefs)
struct voptype_d *ret = NULL;
int alloc_size = 0;
int bucket = vop_free_bucket_index (num);
if (bucket != -1)
{
ret = free_vdefs;
free_vdefs = free_vdefs->next;
/* If there is a free operand, use it. */
if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
{
ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
}
else
alloc_size = vop_free_bucket_size(bucket);
}
else
#endif
ret = (struct vdef_optype_d *)ssa_operand_alloc (
sizeof (struct vdef_optype_d) + (num - 1) * sizeof (vuse_element_t));
VUSE_VECT_NUM_ELEM (ret->usev) = num;
return ret;
}
alloc_size = num;
if (alloc_size > 0)
ret = (struct voptype_d *)ssa_operand_alloc (
sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
static inline struct vuse_optype_d *
alloc_vuse (int num)
{
struct vuse_optype_d *ret;
/* No free list for the moment. */
#if 0
if (free_vuses)
{
ret = free_vuses;
free_vuses = free_vuses->next;
}
else
#endif
ret = (struct vuse_optype_d *)ssa_operand_alloc (
sizeof (struct vuse_optype_d) + (num - 1) * sizeof (vuse_element_t));
VUSE_VECT_NUM_ELEM (ret->usev) = num;
return ret;
}
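
alloc_vop sizes each virtual operand for exactly the slots it needs using the common C idiom of a one-element trailing array that is over-allocated at creation time; the ssa_operand_memory_d chunk now uses the same trick for its mem[1] field (see the header changes below). A minimal sketch of the idiom with made-up types, just to show how the byte count is formed; elem_t and struct vop are stand-ins, not GCC's types.

#include <stdlib.h>

/* Hypothetical element standing in for vuse_element_t.  */
typedef struct { void *use; } elem_t;

/* Hypothetical operand node standing in for struct voptype_d.  */
struct vop
{
  struct vop *next;
  int num_elem;
  elem_t usev[1];   /* really NUM elements; over-allocated below */
};

static struct vop *
alloc_vop_sketch (int num)
{
  /* One element is already inside the struct, so add room for the
     remaining num - 1 elements.  */
  size_t bytes = sizeof (struct vop) + (num - 1) * sizeof (elem_t);
  struct vop *p = (struct vop *) calloc (1, bytes);

  if (p != NULL)
    p->num_elem = num;
  return p;
}

int
main (void)
{
  struct vop *v = alloc_vop_sketch (4);   /* room for 4 use slots */
  free (v);
  return 0;
}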
@ -472,92 +574,72 @@ set_virtual_use_link (use_operand_p ptr, tree stmt)
link_imm_use (ptr, *(ptr->use));
}
/* Appends ELT after TO, and moves the TO pointer to ELT. */
#define APPEND_OP_AFTER(ELT, TO) \
do \
{ \
(TO)->next = (ELT); \
(TO) = (ELT); \
} while (0)
/* Appends head of list FROM after TO, and move both pointers
to their successors. */
#define MOVE_HEAD_AFTER(FROM, TO) \
do \
{ \
APPEND_OP_AFTER (FROM, TO); \
(FROM) = (FROM)->next; \
} while (0)
/* Moves OP to appropriate freelist. OP is set to its successor. */
#define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
do \
{ \
TYPE##_optype_p next = (OP)->next; \
(OP)->next \
= gimple_ssa_operands (cfun)->free_##TYPE##s; \
gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
(OP) = next; \
} while (0)
/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
of immediate uses. STMT is the current statement. */
#define INITIALIZE_USE(USE_PTR, VAL, STMT) \
do \
{ \
(USE_PTR)->use = (VAL); \
link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
} while (0)
/* Adds OP to the list of defs after LAST, and moves
LAST to the new element. */
/* Adds OP to the list of defs after LAST. */
static inline def_optype_p
add_def_op (tree *op, def_optype_p *last)
add_def_op (tree *op, def_optype_p last)
{
def_optype_p new;
new = alloc_def ();
DEF_OP_PTR (new) = op;
APPEND_OP_AFTER (new, *last);
last->next = new;
new->next = NULL;
return new;
}
/* Adds OP to the list of uses of statement STMT after LAST, and moves
LAST to the new element. */
/* Adds OP to the list of uses of statement STMT after LAST. */
static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p *last)
add_use_op (tree stmt, tree *op, use_optype_p last)
{
use_optype_p new;
new = alloc_use ();
INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
APPEND_OP_AFTER (new, *last);
USE_OP_PTR (new)->use = op;
link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
last->next = new;
new->next = NULL;
return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized to OP
and are linked into the immediate uses for STMT. The new vop is appended
after PREV. */
static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
voptype_p new;
int x;
new = alloc_vop (num);
for (x = 0; x < num; x++)
{
VUSE_OP_PTR (new, x)->prev = NULL;
SET_VUSE_OP (new, x, op);
VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
}
if (prev)
prev->next = new;
new->next = NULL;
return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST. */
static inline vuse_optype_p
add_vuse_op (tree stmt, tree op, int num, vuse_optype_p *last)
static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
vuse_optype_p new;
int x;
new = alloc_vuse (num);
for (x = 0; x < num; x++)
{
SET_VUSE_OP (new, x, op);
INITIALIZE_USE (VUSE_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
}
APPEND_OP_AFTER (new, *last);
voptype_p new = add_vop (stmt, op, num, last);
VDEF_RESULT (new) = NULL_TREE;
return new;
}
@ -565,95 +647,28 @@ add_vuse_op (tree stmt, tree op, int num, vuse_optype_p *last)
/* Adds OP to the list of vdefs of statement STMT after LAST. */
static inline vdef_optype_p
add_vdef_op (tree stmt, tree op, int num, vdef_optype_p *last)
static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
int x;
vdef_optype_p new;
new = alloc_vdef (num);
voptype_p new = add_vop (stmt, op, num, last);
VDEF_RESULT (new) = op;
for (x = 0; x < num; x++)
{
SET_VDEF_OP (new, x, op);
INITIALIZE_USE (VDEF_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
}
APPEND_OP_AFTER (new, *last);
return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
is the head of the operand list it belongs to. */
struct vdef_optype_d *
realloc_vdef (struct vdef_optype_d *ptr, int num_elem)
static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root)
{
int x, lim;
tree val, stmt;
struct vdef_optype_d *ret, *tmp;
tree stmt, val;
struct voptype_d *ret, *tmp;
if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
return ptr;
val = VDEF_RESULT (ptr);
if (TREE_CODE (val) == SSA_NAME)
val = SSA_NAME_VAR (val);
stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
/* Delink all the existing uses. */
for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
{
use_operand_p use_p = VDEF_OP_PTR (ptr, x);
delink_imm_use (use_p);
}
/* If we want less space, simply use this one, and shrink the size. */
if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
{
VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
return ptr;
}
/* It is growing. Allocate a new one and replace the old one. */
tmp = ptr;
ret = add_vdef_op (stmt, val, num_elem, &ptr);
ret->next = NULL;
ptr = tmp;
lim = VUSE_VECT_NUM_ELEM (ptr->usev);
memset (ptr, 0,
sizeof (struct vdef_optype_d) + sizeof (vuse_element_t) * (lim- 1));
/* Now simply remove the old one. */
if (VDEF_OPS (stmt) == ptr)
{
VDEF_OPS (stmt) = ret;
return ret;
}
else
for (tmp = VDEF_OPS (stmt);
tmp != NULL && tmp->next != ptr;
tmp = tmp->next)
{
tmp->next = ret;
return ret;
}
/* The pointer passed in isn't in STMT's VDEF lists. */
gcc_unreachable ();
}
struct vuse_optype_d *
realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
{
int x, lim;
tree val, stmt;
struct vuse_optype_d *ret, *tmp;
if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
return ptr;
val = VUSE_OP (ptr, 0);
if (TREE_CODE (val) == SSA_NAME)
val = SSA_NAME_VAR (val);
@ -675,23 +690,22 @@ realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
}
/* It is growing. Allocate a new one and replace the old one. */
tmp = ptr;
ret = add_vuse_op (stmt, val, num_elem, &ptr);
ret->next = NULL;
ptr = tmp;
ret = add_vuse_op (stmt, val, num_elem, ptr);
/* Clear PTR and add its memory to the free list. */
lim = VUSE_VECT_NUM_ELEM (ptr->usev);
memset (ptr, 0,
sizeof (struct vuse_optype_d) + sizeof (vuse_element_t) * (lim - 1));
memset (ptr, 0,
sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
add_vop_to_freelist (ptr);
/* Now simply link it in, find the node which points to this one. */
if (VUSE_OPS (stmt) == ptr)
/* Now simply remove the old one. */
if (*root == ptr)
{
VUSE_OPS (stmt) = ret;
*root = ret;
return ret;
}
else
for (tmp = VUSE_OPS (stmt);
for (tmp = *root;
tmp != NULL && tmp->next != ptr;
tmp = tmp->next)
{
@ -699,22 +713,54 @@ realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
return ret;
}
/* The pointer passed in isn't in STMT's VUSE lists. */
/* The pointer passed in isn't in STMT's VDEF lists. */
gcc_unreachable ();
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
struct voptype_d *
realloc_vdef (struct voptype_d *ptr, int num_elem)
{
tree val, stmt;
struct voptype_d *ret;
val = VDEF_RESULT (ptr);
stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
VDEF_RESULT (ret) = val;
return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
struct voptype_d *
realloc_vuse (struct voptype_d *ptr, int num_elem)
{
tree stmt;
struct voptype_d *ret;
stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
return ret;
}
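
Both wrappers funnel into realloc_vop, whose policy is simple: if the vector is shrinking, just lower the element count and keep the node; if it is growing, allocate a replacement (preferably from a free bucket), splice it into the statement's operand list where the old node sat, and recycle the old node through add_vop_to_freelist. The sketch below shows only that grow-or-shrink decision with stand-in types; it is an illustration of the general pattern, not the GCC code, and it omits the immediate-use relinking and VDEF_RESULT handling done above.

#include <stdlib.h>

/* Stand-in node: a header plus NUM use slots (details elided).  */
struct vnode
{
  struct vnode *next;
  int num;
};

static struct vnode *
resize_node (struct vnode *ptr, int num, struct vnode **root)
{
  struct vnode *repl, *prev;

  if (ptr->num == num)
    return ptr;                       /* already the right size */

  if (ptr->num > num)
    {
      ptr->num = num;                 /* shrinking: reuse in place */
      return ptr;
    }

  /* Growing: make a replacement and splice it in where PTR was.  */
  repl = (struct vnode *) calloc (1, sizeof (struct vnode));
  repl->num = num;
  repl->next = ptr->next;

  if (*root == ptr)
    *root = repl;
  else
    for (prev = *root; prev != NULL; prev = prev->next)
      if (prev->next == ptr)
        {
          prev->next = repl;
          break;
        }

  free (ptr);                         /* the real code recycles it instead */
  return repl;
}

int
main (void)
{
  struct vnode *head = (struct vnode *) calloc (1, sizeof (struct vnode));
  head->num = 1;
  head = resize_node (head, 4, &head);  /* grow the only node in the list */
  free (head);
  return 0;
}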
/* Takes elements from build_defs and turns them into def operands of STMT.
TODO -- Given that def operands list is not necessarily sorted, merging
the operands this way does not make much sense.
-- Make build_defs VEC of tree *. */
TODO -- Make build_defs VEC of tree *. */
static inline void
finalize_ssa_def_ops (tree stmt)
finalize_ssa_defs (tree stmt)
{
unsigned new_i;
struct def_optype_d new_list;
def_optype_p old_ops, last;
tree *old_base;
unsigned int num = VEC_length (tree, build_defs);
/* There should only be a single real definition per assignment. */
gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
new_list.next = NULL;
last = &new_list;
@ -722,35 +768,11 @@ finalize_ssa_def_ops (tree stmt)
old_ops = DEF_OPS (stmt);
new_i = 0;
while (old_ops && new_i < VEC_length (tree, build_defs))
{
tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
old_base = DEF_OP_PTR (old_ops);
if (old_base == new_base)
{
/* if variables are the same, reuse this node. */
MOVE_HEAD_AFTER (old_ops, last);
new_i++;
}
else if (old_base < new_base)
{
/* if old is less than new, old goes to the free list. */
MOVE_HEAD_TO_FREELIST (old_ops, def);
}
else
{
/* This is a new operand. */
add_def_op (new_base, &last);
new_i++;
}
}
/* If there is anything remaining in the build_defs list, simply emit it. */
for ( ; new_i < VEC_length (tree, build_defs); new_i++)
add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
last->next = NULL;
/* Check for the common case of 1 def that hasn't changed. */
if (old_ops && old_ops->next == NULL && num == 1
&& (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
return;
/* If there is anything in the old list, free it. */
if (old_ops)
@ -759,6 +781,10 @@ finalize_ssa_def_ops (tree stmt)
gimple_ssa_operands (cfun)->free_defs = old_ops;
}
/* If there is anything remaining in the build_defs list, simply emit it. */
for ( ; new_i < num; new_i++)
last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
/* Now set the stmt's operands. */
DEF_OPS (stmt) = new_list.next;
@ -769,36 +795,36 @@ finalize_ssa_def_ops (tree stmt)
for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
x++;
gcc_assert (x == VEC_length (tree, build_defs));
gcc_assert (x == num);
}
#endif
}
/* This routine will create stmt operands for STMT from the def build list. */
static void
finalize_ssa_defs (tree stmt)
{
unsigned int num = VEC_length (tree, build_defs);
/* There should only be a single real definition per assignment. */
gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
/* If there is an old list, often the new list is identical, or close, so
find the elements at the beginning that are the same as the vector. */
finalize_ssa_def_ops (stmt);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
TODO -- Make build_uses VEC of tree *. */
static inline void
finalize_ssa_use_ops (tree stmt)
finalize_ssa_uses (tree stmt)
{
unsigned new_i;
struct use_optype_d new_list;
use_optype_p old_ops, ptr, last;
#ifdef ENABLE_CHECKING
{
unsigned x;
unsigned num = VEC_length (tree, build_uses);
/* If the pointer to the operand is the statement itself, something is
wrong. It means that we are pointing to a local variable (the
initial call to update_stmt_operands does not pass a pointer to a
statement). */
for (x = 0; x < num; x++)
gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
}
#endif
new_list.next = NULL;
last = &new_list;
@ -815,9 +841,9 @@ finalize_ssa_use_ops (tree stmt)
/* Now create nodes for all the new nodes. */
for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
last->next = NULL;
last = add_use_op (stmt,
(tree *) VEC_index (tree, build_uses, new_i),
last);
/* Now set the stmt's operands. */
USE_OPS (stmt) = new_list.next;
@ -833,38 +859,17 @@ finalize_ssa_use_ops (tree stmt)
#endif
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
{
unsigned x;
unsigned num = VEC_length (tree, build_uses);
/* If the pointer to the operand is the statement itself, something is
wrong. It means that we are pointing to a local variable (the
initial call to update_stmt_operands does not pass a pointer to a
statement). */
for (x = 0; x < num; x++)
gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
}
#endif
finalize_ssa_use_ops (stmt);
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
STMT. FIXME, for now VDEF operators should have a single operand
in their RHS. */
static inline void
finalize_ssa_vdef_ops (tree stmt)
finalize_ssa_vdefs (tree stmt)
{
unsigned new_i;
struct vdef_optype_d new_list;
vdef_optype_p old_ops, ptr, last;
struct voptype_d new_list;
voptype_p old_ops, ptr, last;
stmt_ann_t ann = stmt_ann (stmt);
/* Set the symbols referenced by STMT. */
@ -906,37 +911,43 @@ finalize_ssa_vdef_ops (tree stmt)
if (old_uid == new_uid)
{
/* If the symbols are the same, reuse the existing operand. */
MOVE_HEAD_AFTER (old_ops, last);
last->next = old_ops;
last = old_ops;
old_ops = old_ops->next;
last->next = NULL;
set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
new_i++;
}
else if (old_uid < new_uid)
{
/* If old is less than new, old goes to the free list. */
voptype_p next;
delink_imm_use (VDEF_OP_PTR (old_ops, 0));
MOVE_HEAD_TO_FREELIST (old_ops, vdef);
next = old_ops->next;
add_vop_to_freelist (old_ops);
old_ops = next;
}
else
{
/* This is a new operand. */
add_vdef_op (stmt, op, 1, &last);
last = add_vdef_op (stmt, op, 1, last);
new_i++;
}
}
/* If there is anything remaining in BUILD_VDEFS, simply emit it. */
for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, &last);
last->next = NULL;
last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
/* If there is anything in the old list, free it. */
if (old_ops)
{
for (ptr = old_ops; ptr; ptr = ptr->next)
delink_imm_use (VDEF_OP_PTR (ptr, 0));
old_ops->next = gimple_ssa_operands (cfun)->free_vdefs;
gimple_ssa_operands (cfun)->free_vdefs = old_ops;
for (ptr = old_ops; ptr; ptr = last)
{
last = ptr->next;
delink_imm_use (VDEF_OP_PTR (ptr, 0));
add_vop_to_freelist (ptr);
}
}
/* Now set STMT's operands. */
@ -954,14 +965,6 @@ finalize_ssa_vdef_ops (tree stmt)
}
static void
finalize_ssa_vdefs (tree stmt)
{
finalize_ssa_vdef_ops (stmt);
}
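
The loop in finalize_ssa_vdefs above is a classic merge of two sequences sorted by symbol UID: equal UIDs reuse the existing node, an old-only UID sends its node to a free bucket, and a new-only UID allocates a fresh VDEF; whatever remains in either list is then flushed the same way. A small standalone sketch of that three-way classification on plain integer UIDs (hypothetical arrays, no GCC types), just to make the control flow easy to follow:

#include <stdio.h>

/* Classify two UID-sorted sequences the way finalize_ssa_vdefs walks
   its old operand list against build_vdefs.  */
static void
merge_uids (const int *old, int n_old, const int *new, int n_new)
{
  int i = 0, j = 0;

  while (i < n_old && j < n_new)
    {
      if (old[i] == new[j])
        {
          printf ("reuse %d\n", old[i]);     /* keep the existing node */
          i++, j++;
        }
      else if (old[i] < new[j])
        {
          printf ("free  %d\n", old[i]);     /* old-only: to a free bucket */
          i++;
        }
      else
        {
          printf ("alloc %d\n", new[j]);     /* new-only: fresh VDEF */
          j++;
        }
    }
  for (; i < n_old; i++)
    printf ("free  %d\n", old[i]);
  for (; j < n_new; j++)
    printf ("alloc %d\n", new[j]);
}

int
main (void)
{
  int old_uids[] = { 5, 9 };       /* symbols already on the statement */
  int new_uids[] = { 5, 7, 9 };    /* symbols gathered in build_vdefs */

  merge_uids (old_uids, 2, new_uids, 3);  /* reuse 5, alloc 7, reuse 9 */
  return 0;
}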
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
STMT. */
@ -970,8 +973,7 @@ finalize_ssa_vuse_ops (tree stmt)
{
unsigned new_i;
int old_i;
struct vuse_optype_d new_list;
vuse_optype_p old_ops, last;
voptype_p old_ops, last;
VEC(tree,heap) *new_ops;
stmt_ann_t ann;
@ -1042,9 +1044,7 @@ finalize_ssa_vuse_ops (tree stmt)
{
for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
gimple_ssa_operands (cfun)->free_vuses = old_ops;
add_vop_to_freelist (old_ops);
VUSE_OPS (stmt) = NULL;
}
@ -1054,15 +1054,12 @@ finalize_ssa_vuse_ops (tree stmt)
tree op;
unsigned i;
new_list.next = NULL;
last = &new_list;
add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), &last);
last->next = NULL;
last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
SET_USE (VUSE_OP_PTR (last, (int) i), op);
VUSE_OPS (stmt) = new_list.next;
VUSE_OPS (stmt) = last;
}
#ifdef ENABLE_CHECKING
@ -2391,10 +2388,10 @@ void
copy_virtual_operands (tree dest, tree src)
{
int i, n;
vuse_optype_p src_vuses, dest_vuses;
vdef_optype_p src_vdefs, dest_vdefs;
struct vuse_optype_d vuse;
struct vdef_optype_d vdef;
voptype_p src_vuses, dest_vuses;
voptype_p src_vdefs, dest_vdefs;
struct voptype_d vuse;
struct voptype_d vdef;
stmt_ann_t dest_ann;
VDEF_OPS (dest) = NULL;
@ -2421,8 +2418,7 @@ copy_virtual_operands (tree dest, tree src)
for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
{
n = VUSE_NUM (src_vuses);
dest_vuses = add_vuse_op (dest, NULL_TREE, n, &dest_vuses);
dest_vuses->next = NULL;
dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
for (i = 0; i < n; i++)
SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
@ -2435,8 +2431,7 @@ copy_virtual_operands (tree dest, tree src)
for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
{
n = VUSE_NUM (src_vdefs);
dest_vdefs = add_vdef_op (dest, NULL_TREE, n, &dest_vdefs);
dest_vdefs->next = NULL;
dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
for (i = 0; i < n; i++)
SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

gcc/tree-ssa-operands.h

@ -98,32 +98,28 @@ typedef struct vuse_vec_d *vuse_vec_p;
#define VUSE_ELEMENT_VAR(V,X) (VUSE_VECT_ELEMENT ((V),(X)).use_var)
/* This represents the VDEFS for a stmt. */
struct vdef_optype_d
/* This represents the virtual ops of a stmt. */
struct voptype_d
{
struct vdef_optype_d *next;
struct voptype_d *next;
tree def_var;
vuse_vec_t usev;
};
typedef struct vdef_optype_d *vdef_optype_p;
typedef struct voptype_d *voptype_p;
/* This represents the VUSEs for a stmt. */
struct vuse_optype_d
{
struct vuse_optype_d *next;
vuse_vec_t usev;
};
typedef struct vuse_optype_d *vuse_optype_p;
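
With the two structures folded into a single voptype_d, the only distinction left between a VDEF and a VUSE is the def_var field: add_vdef_op stores the defined symbol there while add_vuse_op leaves it NULL_TREE (see the tree-ssa-operands.c changes above). A stand-in sketch of that layout, with hypothetical types rather than GCC's, to show how one record serves both roles and why a single set of free buckets is enough:

/* Hypothetical mirror of the unified operand node; not GCC's types.  */
struct vop_node
{
  struct vop_node *next;   /* chain on the statement's VDEF or VUSE list */
  void *def_var;           /* defined symbol for a VDEF, NULL for a VUSE */
  int num_uses;            /* number of entries in the trailing vector */
  void *uses[1];           /* over-allocated use vector (see mem[1] below) */
};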
#define SSA_OPERAND_MEMORY_SIZE (511 * sizeof (struct vuse_optype_d))
/* This structure represents a variable-sized buffer which is allocated by the
operand memory manager. Operands are suballocated out of this block. The
MEM array varies in size. */
struct ssa_operand_memory_d GTY((chain_next("%h.next")))
{
struct ssa_operand_memory_d *next;
char mem[SSA_OPERAND_MEMORY_SIZE];
char mem[1];
};
/* Number of different size free buckets for virtual operands. */
#define NUM_VOP_FREE_BUCKETS 29
/* Per-function operand caches. */
struct ssa_operands GTY(()) {
struct ssa_operand_memory_d *operand_memory;
@ -133,8 +129,7 @@ struct ssa_operands GTY(()) {
struct def_optype_d * GTY ((skip (""))) free_defs;
struct use_optype_d * GTY ((skip (""))) free_uses;
struct vuse_optype_d * GTY ((skip (""))) free_vuses;
struct vdef_optype_d * GTY ((skip (""))) free_vdefs;
struct voptype_d * GTY ((skip (""))) vop_free_buckets[NUM_VOP_FREE_BUCKETS];
VEC(tree,heap) * GTY ((skip (""))) mpt_table;
};
@ -146,8 +141,8 @@ struct stmt_operands_d
struct use_optype_d * use_ops;
/* Virtual operands (VDEF, VUSE). */
struct vdef_optype_d * vdef_ops;
struct vuse_optype_d * vuse_ops;
struct voptype_d * vdef_ops;
struct voptype_d * vuse_ops;
/* Sets of memory symbols loaded and stored. */
bitmap stores;
@ -206,8 +201,8 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define PHI_ARG_INDEX_FROM_USE(USE) phi_arg_index_from_use (USE)
extern struct vdef_optype_d *realloc_vdef (struct vdef_optype_d *, int);
extern struct vuse_optype_d *realloc_vuse (struct vuse_optype_d *, int);
extern struct voptype_d *realloc_vdef (struct voptype_d *, int);
extern struct voptype_d *realloc_vuse (struct voptype_d *, int);
extern void init_ssa_operands (void);
extern void fini_ssa_operands (void);
@ -249,9 +244,9 @@ typedef struct ssa_operand_iterator_d
{
def_optype_p defs;
use_optype_p uses;
vuse_optype_p vuses;
vdef_optype_p vdefs;
vdef_optype_p mayuses;
voptype_p vuses;
voptype_p vdefs;
voptype_p mayuses;
enum ssa_op_iter_type iter_type;
int phi_i;
int num_phi;