re PR rtl-optimization/37948 (IRA generates slower code)

2008-11-07  Vladimir Makarov  <vmakarov@redhat.com>
	    
	PR rtl-optimization/37948
	* ira-int.h (struct ira_allocno_copy): New member constraint_p.
	(ira_create_copy, ira_add_allocno_copy): New parameter.

	* ira-conflicts.c (process_regs_for_copy): New parameter.  Pass it
	to ira_add_allocno_copy.
	(process_reg_shuffles, add_insn_allocno_copies): Pass a new
	parameter to process_regs_for_copy.
	(propagate_copies): Pass a new parameter to ira_add_allocno_copy.
	Fix typo in passing second allocno to ira_add_allocno_copy.

	* ira-color.c (update_conflict_hard_regno_costs): Use head of
	coalesced allocnos list.
	(assign_hard_reg): Ditto.  Check that assigned allocnos are not in
	the graph.
	(add_ira_allocno_to_bucket): Rename to add_allocno_to_bucket.
	(add_ira_allocno_to_ordered_bucket): Rename to
	add_allocno_to_ordered_bucket.
	(push_ira_allocno_to_stack): Rename to push_allocno_to_stack.  Use
	head of coalesced allocnos list.
	(push_allocnos_to_stack): Remove calculation of ALLOCNO_TEMP.
	Check that it is already calculated.
	(push_ira_allocno_to_spill): Rename to push_allocno_to_spill.
	(setup_allocno_left_conflicts_num): Use head of coalesced allocnos
	list.
	(coalesce_allocnos): Do extended coalescing too.

	* ira-emit.c (add_range_and_copies_from_move_list): Pass a new
	parameter to ira_add_allocno_copy.

	* ira-build.c (ira_create_copy, ira_add_allocno_copy): Add a new
	parameter.
	(print_copy): Print copy origination too.

	* ira-costs.c (scan_one_insn): Use alloc_pref for load from
	equivalent memory.

From-SVN: r141753
This commit is contained in:
Vladimir Makarov 2008-11-10 23:21:45 +00:00 committed by Vladimir Makarov
parent befc25099b
commit 548a63222e
7 changed files with 185 additions and 137 deletions

View File

@ -1,3 +1,42 @@
2008-11-07 Vladimir Makarov <vmakarov@redhat.com>
PR rtl-optimization/37948
* ira-int.h (struct ira_allocno_copy): New member constraint_p.
(ira_create_copy, ira_add_allocno_copy): New parameter.
* ira-conflicts.c (process_regs_for_copy): New parameter. Pass it
to ira_add_allocno_copy.
(process_reg_shuffles, add_insn_allocno_copies): Pass a new
parameter to process_regs_for_copy.
(propagate_copies): Pass a new parameter to ira_add_allocno_copy.
Fix typo in passing second allocno to ira_add_allocno_copy.
* ira-color.c (update_conflict_hard_regno_costs): Use head of
coalesced allocnos list.
(assign_hard_reg): Ditto. Check that assigned allocnos are not in
the graph.
(add_ira_allocno_to_bucket): Rename to add_allocno_to_bucket.
(add_ira_allocno_to_ordered_bucket): Rename to
add_allocno_to_ordered_bucket.
(push_ira_allocno_to_stack): Rename to push_allocno_to_stack. Use
head of coalesced allocnos list.
(push_allocnos_to_stack): Remove calculation of ALLOCNO_TEMP.
Check that it is already calculated.
(push_ira_allocno_to_spill): Rename to push_allocno_to_spill.
(setup_allocno_left_conflicts_num): Use head of coalesced allocnos
list.
(coalesce_allocnos): Do extended coalescing too.
* ira-emit.c (add_range_and_copies_from_move_list): Pass a new
parameter to ira_add_allocno_copy.
* ira-build.c (ira_create_copy, ira_add_allocno_copy): Add a new
parameter.
(print_copy): Print copy origination too.
* ira-costs.c (scan_one_insn): Use alloc_pref for load from
equivalent memory.
2008-11-10 Kaz Kojima <kkojima@gcc.gnu.org>
PR rtl-optimization/37514

View File

@ -961,9 +961,10 @@ find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
}
/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
SECOND, FREQ, and INSN. */
SECOND, FREQ, CONSTRAINT_P, and INSN. */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq, rtx insn,
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
bool constraint_p, rtx insn,
ira_loop_tree_node_t loop_tree_node)
{
ira_copy_t cp;
@ -973,6 +974,7 @@ ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq, rtx insn,
cp->first = first;
cp->second = second;
cp->freq = freq;
cp->constraint_p = constraint_p;
cp->insn = insn;
cp->loop_tree_node = loop_tree_node;
VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
@ -1081,7 +1083,8 @@ ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
LOOP_TREE_NODE. */
ira_copy_t
ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
rtx insn, ira_loop_tree_node_t loop_tree_node)
bool constraint_p, rtx insn,
ira_loop_tree_node_t loop_tree_node)
{
ira_copy_t cp;
@ -1090,7 +1093,8 @@ ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
cp->freq += freq;
return cp;
}
cp = ira_create_copy (first, second, freq, insn, loop_tree_node);
cp = ira_create_copy (first, second, freq, constraint_p, insn,
loop_tree_node);
ira_assert (first != NULL && second != NULL);
ira_add_allocno_copy_to_list (cp);
ira_swap_allocno_copy_ends_if_necessary (cp);
@ -1101,9 +1105,11 @@ ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
static void
print_copy (FILE *f, ira_copy_t cp)
{
fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d\n", cp->num,
fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq);
ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
cp->insn != NULL
? "move" : cp->constraint_p ? "constraint" : "shuffle");
}
/* Print info about copy CP into stderr. */

View File

@ -300,7 +300,8 @@ update_conflict_hard_regno_costs (int *costs, bool decr_p)
cover_class = ALLOCNO_COVER_CLASS (allocno);
if (cover_class != ALLOCNO_COVER_CLASS (another_allocno)
|| ALLOCNO_ASSIGNED_P (another_allocno)
|| ALLOCNO_MAY_BE_SPILLED_P (another_allocno))
|| ALLOCNO_MAY_BE_SPILLED_P (ALLOCNO_FIRST_COALESCED_ALLOCNO
(another_allocno)))
continue;
class_size = ira_class_hard_regs_num[cover_class];
ira_allocate_and_copy_costs
@ -469,7 +470,8 @@ assign_hard_reg (ira_allocno_t allocno, bool retry_p)
}
continue;
}
else if (! ALLOCNO_MAY_BE_SPILLED_P (conflict_allocno))
else if (! ALLOCNO_MAY_BE_SPILLED_P (ALLOCNO_FIRST_COALESCED_ALLOCNO
(conflict_allocno)))
{
ira_allocate_and_copy_costs
(&ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (conflict_allocno),
@ -555,6 +557,7 @@ assign_hard_reg (ira_allocno_t allocno, bool retry_p)
for (j = 0, a = ALLOCNO_NEXT_COALESCED_ALLOCNO (allocno);;
a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
{
ira_assert (! ALLOCNO_IN_GRAPH_P (a));
sorted_allocnos[j++] = a;
if (a == allocno)
break;
@ -612,7 +615,7 @@ static int uncolorable_allocnos_num[N_REG_CLASSES];
/* Add ALLOCNO to bucket *BUCKET_PTR. ALLOCNO should be not in a bucket
before the call. */
static void
add_ira_allocno_to_bucket (ira_allocno_t allocno, ira_allocno_t *bucket_ptr)
add_allocno_to_bucket (ira_allocno_t allocno, ira_allocno_t *bucket_ptr)
{
ira_allocno_t first_allocno;
enum reg_class cover_class;
@ -706,8 +709,8 @@ sort_bucket (ira_allocno_t *bucket_ptr)
their priority. ALLOCNO should be not in a bucket before the
call. */
static void
add_ira_allocno_to_ordered_bucket (ira_allocno_t allocno,
ira_allocno_t *bucket_ptr)
add_allocno_to_ordered_bucket (ira_allocno_t allocno,
ira_allocno_t *bucket_ptr)
{
ira_allocno_t before, after;
enum reg_class cover_class;
@ -780,7 +783,7 @@ static splay_tree uncolorable_allocnos_splay_tree[N_REG_CLASSES];
conflicting allocnos from the uncolorable bucket to the colorable
one. */
static void
push_ira_allocno_to_stack (ira_allocno_t allocno)
push_allocno_to_stack (ira_allocno_t allocno)
{
int conflicts_num, conflict_size, size;
ira_allocno_t a, conflict_allocno;
@ -799,62 +802,66 @@ push_ira_allocno_to_stack (ira_allocno_t allocno)
a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
{
FOR_EACH_ALLOCNO_CONFLICT (a, conflict_allocno, aci)
if (bitmap_bit_p (coloring_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
{
ira_assert (cover_class == ALLOCNO_COVER_CLASS (conflict_allocno));
if (allocno_coalesced_p)
{
if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
continue;
bitmap_set_bit (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno));
}
if (ALLOCNO_IN_GRAPH_P (conflict_allocno)
&& ! ALLOCNO_ASSIGNED_P (conflict_allocno))
{
conflicts_num = ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno);
conflict_size
= (ira_reg_class_nregs
[cover_class][ALLOCNO_MODE (conflict_allocno)]);
ira_assert
(ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) >= size);
if (conflicts_num + conflict_size
<= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
{
ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) -= size;
{
conflict_allocno = ALLOCNO_FIRST_COALESCED_ALLOCNO (conflict_allocno);
if (bitmap_bit_p (coloring_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
{
ira_assert (cover_class
== ALLOCNO_COVER_CLASS (conflict_allocno));
if (allocno_coalesced_p)
{
if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
continue;
}
conflicts_num
= ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) - size;
if (uncolorable_allocnos_splay_tree[cover_class] != NULL
&& !ALLOCNO_SPLAY_REMOVED_P (conflict_allocno)
&& USE_SPLAY_P (cover_class))
{
ira_assert
bitmap_set_bit (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno));
}
if (ALLOCNO_IN_GRAPH_P (conflict_allocno)
&& ! ALLOCNO_ASSIGNED_P (conflict_allocno))
{
conflicts_num = ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno);
conflict_size
= (ira_reg_class_nregs
[cover_class][ALLOCNO_MODE (conflict_allocno)]);
ira_assert
(ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) >= size);
if (conflicts_num + conflict_size
<= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
{
ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) -= size;
continue;
}
conflicts_num
= ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) - size;
if (uncolorable_allocnos_splay_tree[cover_class] != NULL
&& !ALLOCNO_SPLAY_REMOVED_P (conflict_allocno)
&& USE_SPLAY_P (cover_class))
{
ira_assert
(splay_tree_lookup
(uncolorable_allocnos_splay_tree[cover_class],
(splay_tree_key) conflict_allocno) != NULL);
splay_tree_remove
(uncolorable_allocnos_splay_tree[cover_class],
(splay_tree_key) conflict_allocno);
ALLOCNO_SPLAY_REMOVED_P (conflict_allocno) = true;
VEC_safe_push (ira_allocno_t, heap,
removed_splay_allocno_vec,
conflict_allocno);
}
ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) = conflicts_num;
if (conflicts_num + conflict_size
<= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
{
delete_allocno_from_bucket (conflict_allocno,
&uncolorable_allocno_bucket);
add_ira_allocno_to_ordered_bucket (conflict_allocno,
&colorable_allocno_bucket);
}
}
}
splay_tree_remove
(uncolorable_allocnos_splay_tree[cover_class],
(splay_tree_key) conflict_allocno);
ALLOCNO_SPLAY_REMOVED_P (conflict_allocno) = true;
VEC_safe_push (ira_allocno_t, heap,
removed_splay_allocno_vec,
conflict_allocno);
}
ALLOCNO_LEFT_CONFLICTS_NUM (conflict_allocno) = conflicts_num;
if (conflicts_num + conflict_size
<= ALLOCNO_AVAILABLE_REGS_NUM (conflict_allocno))
{
delete_allocno_from_bucket
(conflict_allocno, &uncolorable_allocno_bucket);
add_allocno_to_ordered_bucket
(conflict_allocno, &colorable_allocno_bucket);
}
}
}
}
if (a == allocno)
break;
}
@ -889,7 +896,7 @@ remove_allocno_from_bucket_and_push (ira_allocno_t allocno, bool colorable_p)
> ALLOCNO_AVAILABLE_REGS_NUM (allocno))));
if (! colorable_p)
ALLOCNO_MAY_BE_SPILLED_P (allocno) = true;
push_ira_allocno_to_stack (allocno);
push_allocno_to_stack (allocno);
}
/* Put all allocnos from colorable bucket onto the coloring stack. */
@ -904,14 +911,14 @@ push_only_colorable (void)
/* Puts ALLOCNO chosen for potential spilling onto the coloring
stack. */
static void
push_ira_allocno_to_spill (ira_allocno_t allocno)
push_allocno_to_spill (ira_allocno_t allocno)
{
delete_allocno_from_bucket (allocno, &uncolorable_allocno_bucket);
ALLOCNO_MAY_BE_SPILLED_P (allocno) = true;
if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
fprintf (ira_dump_file, " Pushing p%d(%d) (potential spill)\n",
ALLOCNO_NUM (allocno), ALLOCNO_REGNO (allocno));
push_ira_allocno_to_stack (allocno);
push_allocno_to_stack (allocno);
}
/* Return the frequency of exit edges (if EXIT_P) or entry from/to the
@ -1124,7 +1131,7 @@ push_allocnos_to_stack (void)
cover_class = ALLOCNO_COVER_CLASS (allocno);
if (cover_class == NO_REGS)
{
push_ira_allocno_to_spill (allocno);
push_allocno_to_spill (allocno);
continue;
}
/* Potential spilling. */
@ -1172,22 +1179,7 @@ push_allocnos_to_stack (void)
if (ALLOCNO_IN_GRAPH_P (i_allocno))
{
i++;
if (ALLOCNO_TEMP (i_allocno) == INT_MAX)
{
ira_allocno_t a;
int cost = 0;
for (a = ALLOCNO_NEXT_COALESCED_ALLOCNO (i_allocno);;
a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
{
cost += calculate_allocno_spill_cost (i_allocno);
if (a == i_allocno)
break;
}
/* ??? Remove cost of copies between the coalesced
allocnos. */
ALLOCNO_TEMP (i_allocno) = cost;
}
ira_assert (ALLOCNO_TEMP (i_allocno) != INT_MAX);
i_allocno_cost = ALLOCNO_TEMP (i_allocno);
i_allocno_pri
= (i_allocno_cost
@ -1351,41 +1343,45 @@ setup_allocno_left_conflicts_num (ira_allocno_t allocno)
a = ALLOCNO_NEXT_COALESCED_ALLOCNO (a))
{
FOR_EACH_ALLOCNO_CONFLICT (a, conflict_allocno, aci)
if (bitmap_bit_p (consideration_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
{
ira_assert (cover_class
== ALLOCNO_COVER_CLASS (conflict_allocno));
if (allocno_coalesced_p)
{
if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
continue;
bitmap_set_bit (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno));
}
if (! ALLOCNO_ASSIGNED_P (conflict_allocno))
conflict_allocnos_size
+= (ira_reg_class_nregs
[cover_class][ALLOCNO_MODE (conflict_allocno)]);
else if ((hard_regno = ALLOCNO_HARD_REGNO (conflict_allocno))
>= 0)
{
int last = (hard_regno
+ hard_regno_nregs
{
conflict_allocno
= ALLOCNO_FIRST_COALESCED_ALLOCNO (conflict_allocno);
if (bitmap_bit_p (consideration_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
{
ira_assert (cover_class
== ALLOCNO_COVER_CLASS (conflict_allocno));
if (allocno_coalesced_p)
{
if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
continue;
bitmap_set_bit (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno));
}
if (! ALLOCNO_ASSIGNED_P (conflict_allocno))
conflict_allocnos_size
+= (ira_reg_class_nregs
[cover_class][ALLOCNO_MODE (conflict_allocno)]);
else if ((hard_regno = ALLOCNO_HARD_REGNO (conflict_allocno))
>= 0)
{
int last = (hard_regno
+ hard_regno_nregs
[hard_regno][ALLOCNO_MODE (conflict_allocno)]);
while (hard_regno < last)
{
if (! TEST_HARD_REG_BIT (temp_set, hard_regno))
{
conflict_allocnos_size++;
SET_HARD_REG_BIT (temp_set, hard_regno);
}
hard_regno++;
}
}
}
while (hard_regno < last)
{
if (! TEST_HARD_REG_BIT (temp_set, hard_regno))
{
conflict_allocnos_size++;
SET_HARD_REG_BIT (temp_set, hard_regno);
}
hard_regno++;
}
}
}
}
if (a == allocno)
break;
}
@ -1410,9 +1406,9 @@ put_allocno_into_bucket (ira_allocno_t allocno)
if (ALLOCNO_LEFT_CONFLICTS_NUM (allocno)
+ ira_reg_class_nregs[cover_class][ALLOCNO_MODE (allocno)]
<= ALLOCNO_AVAILABLE_REGS_NUM (allocno))
add_ira_allocno_to_bucket (allocno, &colorable_allocno_bucket);
add_allocno_to_bucket (allocno, &colorable_allocno_bucket);
else
add_ira_allocno_to_bucket (allocno, &uncolorable_allocno_bucket);
add_allocno_to_bucket (allocno, &uncolorable_allocno_bucket);
}
/* The function is used to sort allocnos according to their execution
@ -1552,7 +1548,7 @@ coalesce_allocnos (bool reload_p)
if ((reload_p
|| (ALLOCNO_COVER_CLASS (cp->second) == cover_class
&& ALLOCNO_MODE (cp->second) == mode))
&& cp->insn != NULL
&& (cp->insn != NULL || cp->constraint_p)
&& ((! reload_p && ! ALLOCNO_ASSIGNED_P (cp->second))
|| (reload_p
&& ALLOCNO_ASSIGNED_P (cp->second)

View File

@ -329,7 +329,8 @@ go_through_subreg (rtx x, int *offset)
registers. When nothing is changed, the function returns
FALSE. */
static bool
process_regs_for_copy (rtx reg1, rtx reg2, rtx insn, int freq)
process_regs_for_copy (rtx reg1, rtx reg2, bool constraint_p,
rtx insn, int freq)
{
int allocno_preferenced_hard_regno, cost, index, offset1, offset2;
bool only_regs_p;
@ -363,7 +364,8 @@ process_regs_for_copy (rtx reg1, rtx reg2, rtx insn, int freq)
{
cp = ira_add_allocno_copy (ira_curr_regno_allocno_map[REGNO (reg1)],
ira_curr_regno_allocno_map[REGNO (reg2)],
freq, insn, ira_curr_loop_tree_node);
freq, constraint_p, insn,
ira_curr_loop_tree_node);
bitmap_set_bit (ira_curr_loop_tree_node->local_copies, cp->num);
return true;
}
@ -426,7 +428,7 @@ process_reg_shuffles (rtx reg, int op_num, int freq)
|| recog_data.operand_type[i] != OP_OUT)
continue;
process_regs_for_copy (reg, another_reg, NULL_RTX, freq);
process_regs_for_copy (reg, another_reg, false, NULL_RTX, freq);
}
}
@ -451,7 +453,7 @@ add_insn_allocno_copies (rtx insn)
REG_P (SET_SRC (set))
? SET_SRC (set)
: SUBREG_REG (SET_SRC (set))) != NULL_RTX)
process_regs_for_copy (SET_DEST (set), SET_SRC (set), insn, freq);
process_regs_for_copy (SET_DEST (set), SET_SRC (set), false, insn, freq);
else
{
extract_insn (insn);
@ -470,7 +472,8 @@ add_insn_allocno_copies (rtx insn)
for (j = 0, commut_p = false; j < 2; j++, commut_p = true)
if ((dup = get_dup (i, commut_p)) != NULL_RTX
&& REG_SUBREG_P (dup)
&& process_regs_for_copy (operand, dup, NULL_RTX, freq))
&& process_regs_for_copy (operand, dup, true,
NULL_RTX, freq))
bound_p = true;
if (bound_p)
continue;
@ -524,8 +527,8 @@ propagate_copies (void)
parent_a2 = parent->regno_allocno_map[ALLOCNO_REGNO (a2)];
ira_assert (parent_a1 != NULL && parent_a2 != NULL);
if (! CONFLICT_ALLOCNO_P (parent_a1, parent_a2))
ira_add_allocno_copy (parent_a1, parent_a1, cp->freq,
cp->insn, cp->loop_tree_node);
ira_add_allocno_copy (parent_a1, parent_a2, cp->freq,
cp->constraint_p, cp->insn, cp->loop_tree_node);
}
}

View File

@ -989,11 +989,14 @@ scan_one_insn (rtx insn)
&& (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != NULL_RTX
&& MEM_P (XEXP (note, 0)))
{
COSTS_OF_ALLOCNO (allocno_costs,
ALLOCNO_NUM (ira_curr_regno_allocno_map
[REGNO (SET_DEST (set))]))->mem_cost
-= (ira_memory_move_cost[GET_MODE (SET_DEST (set))][GENERAL_REGS][1]
* frequency);
enum reg_class cl = GENERAL_REGS;
rtx reg = SET_DEST (set);
int num = ALLOCNO_NUM (ira_curr_regno_allocno_map[REGNO (reg)]);
if (allocno_pref)
cl = allocno_pref[num];
COSTS_OF_ALLOCNO (allocno_costs, num)->mem_cost
-= ira_memory_move_cost[GET_MODE (reg)][cl][1] * frequency;
record_address_regs (GET_MODE (SET_SRC (set)), XEXP (SET_SRC (set), 0),
0, MEM, SCRATCH, frequency * 2);
}

View File

@ -863,7 +863,7 @@ add_range_and_copies_from_move_list (move_t list, ira_loop_tree_node_t node,
IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (to), hard_regs_live);
update_costs (from, true, freq);
update_costs (to, false, freq);
cp = ira_add_allocno_copy (from, to, freq, move->insn, NULL);
cp = ira_add_allocno_copy (from, to, freq, false, move->insn, NULL);
if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
fprintf (ira_dump_file, " Adding cp%d:a%dr%d-a%dr%d\n",
cp->num, ALLOCNO_NUM (cp->first),

View File

@ -496,6 +496,7 @@ struct ira_allocno_copy
ira_allocno_t first, second;
/* Execution frequency of the copy. */
int freq;
bool constraint_p;
/* It is a move insn which is an origin of the copy. The member
value for the copy representing two operand insn constraints or
for the copy created to remove register shuffle is NULL. In last
@ -859,12 +860,12 @@ extern allocno_live_range_t ira_create_allocno_live_range
extern void ira_finish_allocno_live_range (allocno_live_range_t);
extern void ira_free_allocno_updated_costs (ira_allocno_t);
extern ira_copy_t ira_create_copy (ira_allocno_t, ira_allocno_t,
int, rtx, ira_loop_tree_node_t);
int, bool, rtx, ira_loop_tree_node_t);
extern void ira_add_allocno_copy_to_list (ira_copy_t);
extern void ira_swap_allocno_copy_ends_if_necessary (ira_copy_t);
extern void ira_remove_allocno_copy_from_list (ira_copy_t);
extern ira_copy_t ira_add_allocno_copy (ira_allocno_t, ira_allocno_t, int, rtx,
ira_loop_tree_node_t);
extern ira_copy_t ira_add_allocno_copy (ira_allocno_t, ira_allocno_t, int,
bool, rtx, ira_loop_tree_node_t);
extern int *ira_allocate_cost_vector (enum reg_class);
extern void ira_free_cost_vector (int *, enum reg_class);