#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-config.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "toplev.h"
#ifdef CONFIG_DARWIN_H
#define REWRITE_WEIGHT_COMPUTATION
#endif
/* Nonzero means CW-style inline asm blocks are enabled (Apple local);
   used below to relax the frame-pointer-clobbered-by-asm error.  */
extern int flag_iasm_blocks;

/* Number of allocnos (allocation candidates) in the current function.  */
static int max_allocno;

/* Indexed by pseudo regno: the allocno number, or -1 if that pseudo
   is not a candidate for global allocation.  */
static int *reg_allocno;

/* Everything we know about one allocno.  The counters aggregate over
   all pseudos that share the allocno (see reg_may_share).  */
struct allocno
{
  /* (One of) the pseudo regno(s) this allocno stands for.  */
  int reg;
  /* Size of the pseudo in hard registers (PSEUDO_REGNO_SIZE).  */
  int size;
  /* Number of calls crossed by the live range.  */
  int calls_crossed;
  /* Number of references.  */
  int n_refs;
  /* Execution-frequency weight.  */
  int freq;
  /* Longest live length among the constituent pseudos.  */
  int live_length;
  /* Hard regs this allocno must not receive.  */
  HARD_REG_SET hard_reg_conflicts;
  /* Hard regs preferred (set up by set_preference).  */
  HARD_REG_SET hard_reg_preferences;
  /* Hard regs preferred specifically via copy insns.  */
  HARD_REG_SET hard_reg_copy_preferences;
  /* Union of preferences including multiword overlap regs.  */
  HARD_REG_SET hard_reg_full_preferences;
  /* Hard regs that conflicting lower-priority allocnos prefer.  */
  HARD_REG_SET regs_someone_prefers;
#ifdef STACK_REGS
  /* Set if this allocno must not live in an x87 stack register.  */
  bool no_stack_reg;
#endif
};

/* Array of max_allocno allocno records.  */
static struct allocno *allocno;

/* Allocno numbers sorted into allocation-priority order.  */
static int *allocno_order;

/* reg_may_share[R] is a lower-numbered pseudo that R may share an
   allocno with, or 0 (built from the regs_may_share list).  */
static int *reg_may_share;

/* Geometry of the bit matrices below: INT_BITS bits per word.  */
#define INT_BITS HOST_BITS_PER_WIDE_INT
#define INT_TYPE HOST_WIDE_INT

/* max_allocno x max_allocno bit matrix of conflicts; made symmetric
   by mirror_conflicts.  */
static INT_TYPE *conflicts;

/* Apple local: same geometry, records which allocno pairs would like
   to be tied to the same hard register (vector-mode copies).  */
static INT_TYPE *pseudo_preferences;

/* Number of INT_TYPE words per row of the matrices above.  */
static int allocno_row_words;

/* Test bit (I, J) of the conflicts matrix.  */
#define CONFLICTP(I, J) \
(conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
& ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))

/* Run CODE once per set bit in the one-row bit vector ALLOCNO_SET,
   with ALLOCNO bound to the bit's index.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
do { \
int i_; \
int allocno_; \
INT_TYPE *p_ = (ALLOCNO_SET); \
\
for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
i_--, allocno_ += INT_BITS) \
{ \
unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
\
for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
{ \
if (word_ & 1) \
{CODE;} \
} \
} \
} while (0)

/* Test / set / clear bit (I, J) of the pseudo_preferences matrix.  */
#define TEST_PSEUDO_PREF(I, J) \
(pseudo_preferences[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
& ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
#define SET_PSEUDO_PREF(I, J) \
(pseudo_preferences[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
|= ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
#define CLEAR_PSEUDO_PREF(I, J) \
(pseudo_preferences[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
&= ~((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))

#if 0
#define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
OUT_ALLOCNO, (CODE))
#endif

/* Hard regs currently live during the conflict scan.  */
static HARD_REG_SET hard_regs_live;

/* Hard regs that global allocation must leave alone (e.g. regs still
   needed for register elimination).  */
static HARD_REG_SET no_global_alloc_regs;

/* Hard regs already in use; used fresh registers are avoided on the
   first allocation pass.  */
static HARD_REG_SET regs_used_so_far;

/* Per-hard-reg cost info for pseudos placed there by local-alloc,
   consulted when deciding whether to evict them.  */
static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
#ifdef REWRITE_WEIGHT_COMPUTATION
/* Darwin variant: a single aggregated freq/live-length weight.  */
static HOST_WIDE_INT local_reg_weight[FIRST_PSEUDO_REGISTER];
#else
static int local_reg_freq[FIRST_PSEUDO_REGISTER];
static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
#endif

/* Set hard reg J in the named HARD_REG_SET field of allocno I.  */
#define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)

/* One-row bit vector of allocnos currently live in the scan.  */
static INT_TYPE *allocnos_live;
#define SET_ALLOCNO_LIVE(I) \
(allocnos_live[(unsigned) (I) / INT_BITS] \
|= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
#define CLEAR_ALLOCNO_LIVE(I) \
(allocnos_live[(unsigned) (I) / INT_BITS] \
&= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

#if 0
#define NUM_NO_CONFLICT_PAIRS 4
int n_no_conflict_pairs;
static struct { int allocno1, allocno2;}
no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
#endif

/* Registers stored by the insn currently being scanned, and their
   count (filled by mark_reg_store).  */
static rtx *regs_set;
static int n_regs_set;

/* Hard regs involved in register elimination.  */
static HARD_REG_SET eliminable_regset;

/* Forward declarations.  */
static int allocno_compare (const void *, const void *);
static void global_conflicts (void);
static void mirror_conflicts (void);
static void expand_preferences (void);
static void prune_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void record_one_conflict (int);
static void record_conflicts (int *, int);
static void mark_reg_store (rtx, rtx, void *);
static void mark_reg_clobber (rtx, rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static void mark_reg_live_nc (int, enum machine_mode);
static void set_preference (rtx, rtx);
static void dump_conflicts (FILE *);
static void reg_becomes_live (rtx, rtx, void *);
static void reg_dies (int, enum machine_mode, struct insn_chain *);
static void allocate_bb_info (void);
static void free_bb_info (void);
static bool check_earlyclobber (rtx);
static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
static void calculate_reg_pav (void);
static void modify_reg_pav (void);
static void make_accurate_live_analysis (void);
/* Main entry point: assign hard registers to the allocatable pseudos,
   then run reload.  FILE, when non-null, receives a dump of the
   conflict graph.  Returns the value of reload () (nonzero means
   reload wants the whole compilation repeated).  */
int
global_alloc (FILE *file)
{
  int retval;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* A frame pointer is needed if we cannot omit it, alloca is used
     and the stack is not popped on exit, or the target requires one.  */
  int need_fp
    = (! flag_omit_frame_pointer
       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
       || FRAME_POINTER_REQUIRED);
  size_t i;
  rtx x;

  make_accurate_live_analysis ();

  max_allocno = 0;

  /* Compute which hard regs the elimination machinery withholds from
     global allocation.  */
  CLEAR_HARD_REG_SET (no_global_alloc_regs);
#ifdef ELIMINABLE_REGS
  for (i = 0; i < ARRAY_SIZE (eliminables); i++)
    {
      bool cannot_elim
        = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
           || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
      if (!regs_asm_clobbered[eliminables[i].from])
        {
          SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
          if (cannot_elim)
            SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
        }
      else if (cannot_elim)
        /* An asm clobbers a register we still need.  */
        error ("%s cannot be used in asm here",
               reg_names[eliminables[i].from]);
      else
        regs_ever_live[eliminables[i].from] = 1;
    }
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
      if (need_fp)
        SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    {
      /* Apple local: CW-style asm blocks tolerate the FP clobber.  */
      if (!flag_iasm_blocks)
        error ("%s cannot be used in asm here",
               reg_names[HARD_FRAME_POINTER_REGNUM]);
    }
  else
    regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
#endif
#else
  if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
      if (need_fp)
        SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[FRAME_POINTER_REGNUM] = 1;
#endif

  /* Record the hard registers that are "cheap" to start using: those
     already live somewhere, plus call-used (or, for leaf functions,
     leaf) registers.  */
  CLEAR_HARD_REG_SET (regs_used_so_far);
#ifdef LEAF_REGISTERS
  {
    const char *cheap_regs;
    const char *const leaf_regs = LEAF_REGISTERS;
    if (only_leaf_regs_used () && leaf_function_p ())
      cheap_regs = leaf_regs;
    else
      cheap_regs = call_used_regs;
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (regs_ever_live[i] || cheap_regs[i])
        SET_HARD_REG_BIT (regs_used_so_far, i);
  }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i] || call_used_regs[i])
      SET_HARD_REG_BIT (regs_used_so_far, i);
#endif
  /* Hard regs already handed out by local-alloc count too.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);

  /* Build the pseudo-regno -> allocno map.  */
  reg_allocno = xmalloc (max_regno * sizeof (int));
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    reg_allocno[i] = -1;

  /* reg_may_share[R] = a lower-numbered reg R may share an allocno
     with, from the regs_may_share list.  */
  reg_may_share = xcalloc (max_regno, sizeof (int));
  for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
    {
      int r1 = REGNO (XEXP (x, 0));
      int r2 = REGNO (XEXP (XEXP (x, 1), 0));
      if (r1 > r2)
        reg_may_share[r1] = r2;
      else
        reg_may_share[r2] = r1;
    }

  /* A pseudo gets an allocno if it is referenced, has a valid live
     length, and (with nonlocal labels) crosses no calls.  Sharing
     pseudos reuse their partner's allocno.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
        && (! current_function_has_nonlocal_label
            || REG_N_CALLS_CROSSED (i) == 0))
      {
        if (reg_renumber[i] < 0
            && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
          reg_allocno[i] = reg_allocno[reg_may_share[i]];
        else
          reg_allocno[i] = max_allocno++;
        gcc_assert (REG_LIVE_LENGTH (i));
      }
    else
      reg_allocno[i] = -1;

  /* Aggregate per-pseudo statistics into the (possibly shared)
     allocno records.  */
  allocno = xcalloc (max_allocno, sizeof (struct allocno));
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_allocno[i] >= 0)
      {
        int num = reg_allocno[i];
        allocno[num].reg = i;
        allocno[num].size = PSEUDO_REGNO_SIZE (i);
        allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
        allocno[num].n_refs += REG_N_REFS (i);
        allocno[num].freq += REG_FREQ (i);
        if (allocno[num].live_length < REG_LIVE_LENGTH (i))
          allocno[num].live_length = REG_LIVE_LENGTH (i);
      }

  /* Accumulate, per hard register, the cost of the local-alloc
     pseudos living there; used by find_reg when considering
     eviction.  */
#ifndef REWRITE_WEIGHT_COMPUTATION
  memset (local_reg_live_length, 0, sizeof local_reg_live_length);
#endif
  memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
#ifdef REWRITE_WEIGHT_COMPUTATION
  memset (local_reg_weight, 0, sizeof local_reg_weight);
#else
  memset (local_reg_freq, 0, sizeof local_reg_freq);
#endif
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
        int regno = reg_renumber[i];
        int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
        int j;
        for (j = regno; j < endregno; j++)
          {
            local_reg_n_refs[j] += REG_N_REFS (i);
#ifdef REWRITE_WEIGHT_COMPUTATION
            if ( REG_LIVE_LENGTH (i) > 0 )
              local_reg_weight[j] += (REG_FREQ (i) * 100)
                                     / REG_LIVE_LENGTH (i);
#else
            local_reg_freq[j] += REG_FREQ (i);
            local_reg_live_length[j] += REG_LIVE_LENGTH (i);
#endif
          }
      }

  /* A hard reg live throughout the function is never worth stealing,
     so zero its cost info.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
#ifdef REWRITE_WEIGHT_COMPUTATION
      local_reg_n_refs[i] = 0;
#else
      local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
#endif

  /* Allocate the conflict and pseudo-preference bit matrices, one row
     of allocno_row_words words per allocno.  */
  allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
  conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
  pseudo_preferences =
    xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
  allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));

  if (max_allocno > 0)
    {
      global_conflicts ();
      mirror_conflicts ();

      /* Apple local: a pair of single-set pseudos that prefer each
         other may be tied, so drop their (symmetric) conflict.  */
      {
        int i, j;
        for (i = max_allocno - 1; i >= 0; i--)
          {
            EXECUTE_IF_SET_IN_ALLOCNO_SET (pseudo_preferences
                                           + i * allocno_row_words,
                                           j,
                                           {
              if (REG_N_SETS (allocno[i].reg) == 1
                  && REG_N_SETS (allocno[j].reg) == 1)
                {
                  conflicts[(i) * allocno_row_words
                            + (unsigned) (j) / INT_BITS]
                    &= ~((INT_TYPE) 1 << ((unsigned) (j) % INT_BITS));
                  conflicts[(j) * allocno_row_words
                            + (unsigned) (i) / INT_BITS]
                    &= ~((INT_TYPE) 1 << ((unsigned) (i) % INT_BITS));
                }
                                           });
          }
      }

      /* Eliminable regs can neither conflict nor be preferred.  */
      for (i = 0; i < (size_t) max_allocno; i++)
        {
          AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
                                  eliminable_regset);
          AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
                                  eliminable_regset);
          AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
                                  eliminable_regset);
        }

      expand_preferences ();

      /* Determine the order to allocate.  Guard size/live_length so
         that allocno_compare never divides by zero.  */
      allocno_order = xmalloc (max_allocno * sizeof (int));
      for (i = 0; i < (size_t) max_allocno; i++)
        allocno_order[i] = i;
      for (i = 0; i < (size_t) max_allocno; i++)
        {
          if (allocno[i].size == 0)
            allocno[i].size = 1;
          if (allocno[i].live_length == 0)
            allocno[i].live_length = -1;
        }
      qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);

      prune_preferences ();

      if (file)
        dump_conflicts (file);

      /* Allocate in priority order: preferred class first, then the
         alternate class.  */
      for (i = 0; i < (size_t) max_allocno; i++)
        if (reg_renumber[allocno[allocno_order[i]].reg] < 0
            && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
          {
            if (N_REG_CLASSES > 1)
              {
                find_reg (allocno_order[i], 0, 0, 0, 0);
                if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
                  continue;
              }
            if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
              find_reg (allocno_order[i], 0, 1, 0, 0);
          }

      free (allocno_order);
    }

  /* Run reload to fix up everything we could not allocate.  */
#if 0
  if (n_basic_blocks > 0)
#endif
    {
      build_insn_chain (get_insns ());
      retval = reload (get_insns (), 1);
    }

  free (reg_allocno);
  free (reg_may_share);
  free (allocno);
  free (conflicts);
  free (pseudo_preferences);
  free (allocnos_live);

  return retval;
}
/* qsort comparator ordering allocno numbers by decreasing allocation
   priority; ties are broken by allocno number so the sort is stable
   across qsort implementations.  Priority is
   log2(n_refs) * freq / live_length, scaled by size.  */
static int
allocno_compare (const void *v1p, const void *v2p)
{
  int a = *(const int *) v1p;
  int b = *(const int *) v2p;
  int pri_a;
  int pri_b;

  pri_a = (((double) (floor_log2 (allocno[a].n_refs) * allocno[a].freq)
            / allocno[a].live_length)
           * (10000 / REG_FREQ_MAX) * allocno[a].size);
  pri_b = (((double) (floor_log2 (allocno[b].n_refs) * allocno[b].freq)
            / allocno[b].live_length)
           * (10000 / REG_FREQ_MAX) * allocno[b].size);

  if (pri_a != pri_b)
    return pri_b - pri_a;

  /* Identical priorities: fall back to index order.  */
  return a - b;
}
/* Walk every basic block, simulating register liveness insn by insn,
   and record all allocno/allocno and allocno/hard-reg conflicts.  */
static void
global_conflicts (void)
{
  unsigned i;
  basic_block b;
  rtx insn;
  int *block_start_allocnos;

  /* Buffer for the registers stored by the current insn.  */
  regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);

  block_start_allocnos = xmalloc (max_allocno * sizeof (int));

  FOR_EACH_BB (b)
    {
      memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));

      /* Initialize the live sets from the block's live-at-start
         information.  */
      {
        regset old = b->global_live_at_start;
        int ax = 0;
        reg_set_iterator rsi;

        REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
        EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
          {
            int a = reg_allocno[i];
            if (a >= 0)
              {
                SET_ALLOCNO_LIVE (a);
                block_start_allocnos[ax++] = a;
              }
            else if ((a = reg_renumber[i]) >= 0)
              /* Pseudo already placed in a hard reg by local-alloc.  */
              mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
          }

        /* Allocnos live at block start conflict with the hard regs
           live there.  */
        record_conflicts (block_start_allocnos, ax);

        {
          edge e;
          edge_iterator ei;

          /* Blocks entered by an abnormal edge need extra conflicts:
             x87 stack regs and (absent nonlocal labels) the call-used
             regs are unusable across such an entry.  */
          FOR_EACH_EDGE (e, ei, b->preds)
            if (e->flags & EDGE_ABNORMAL)
              break;
          if (e != NULL)
            {
#ifdef STACK_REGS
              EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
                                             {
                                               allocno[ax].no_stack_reg = 1;
                                             });
              for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
                record_one_conflict (ax);
#endif
              if (! current_function_has_nonlocal_label)
                for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
                  if (call_used_regs [ax])
                    record_one_conflict (ax);
            }
        }
      }

      insn = BB_HEAD (b);
      while (1)
        {
          RTX_CODE code = GET_CODE (insn);
          rtx link;
          n_regs_set = 0;
          if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
            {
#if 0
              int i = 0;
              for (link = REG_NOTES (insn);
                   link && i < NUM_NO_CONFLICT_PAIRS;
                   link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
                  {
                    no_conflict_pairs[i].allocno1
                      = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
                    no_conflict_pairs[i].allocno2
                      = reg_allocno[REGNO (XEXP (link, 0))];
                    i++;
                  }
#endif
              /* Order matters: CLOBBERed regs first, then REG_DEAD
                 inputs die, then real SETs become live.  */
              note_stores (PATTERN (insn), mark_reg_clobber, NULL);

              for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD)
                  mark_reg_death (XEXP (link, 0));

              note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
              /* Auto-incremented regs are both used and set.  */
              for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_INC)
                  mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

              /* A dying input that is also mentioned in a non-reg
                 output of a multi-set PARALLEL must conflict with
                 everything set here.  */
              if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
                for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                  if (REG_NOTE_KIND (link) == REG_DEAD)
                    {
                      int used_in_output = 0;
                      int i;
                      rtx reg = XEXP (link, 0);
                      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                        {
                          rtx set = XVECEXP (PATTERN (insn), 0, i);
                          if (GET_CODE (set) == SET
                              && !REG_P (SET_DEST (set))
                              && !rtx_equal_p (reg, SET_DEST (set))
                              && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                            used_in_output = 1;
                        }
                      if (used_in_output)
                        mark_reg_conflicts (reg);
                    }

              /* Regs set here but marked REG_UNUSED die immediately.  */
              while (n_regs_set-- > 0)
                {
                  rtx note = find_regno_note (insn, REG_UNUSED,
                                              REGNO (regs_set[n_regs_set]));
                  if (note)
                    mark_reg_death (XEXP (note, 0));
                }
            }

          if (insn == BB_END (b))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  free (block_start_allocnos);
  free (regs_set);
}
/* Merge the hard-reg preferences of allocno pairs linked by a
   copy-like insn (dest set, source dying, no conflict between them),
   so a preference expressed for one propagates to the other.  Also
   (Apple local) ties vector-mode pairs via the pseudo_preferences
   matrix when the dying reg's note is the insn's only note and
   scheduling is off.  */
static void
expand_preferences (void)
{
  rtx insn;
  rtx link;
  rtx set;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (set = single_set (insn)) != 0
        && REG_P (SET_DEST (set))
        && reg_allocno[REGNO (SET_DEST (set))] >= 0)
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == REG_DEAD
            && REG_P (XEXP (link, 0))
            && reg_allocno[REGNO (XEXP (link, 0))] >= 0
            && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
                            reg_allocno[REGNO (XEXP (link, 0))]))
          {
            int j;
            int a1 = reg_allocno[REGNO (SET_DEST (set))];
            int a2 = reg_allocno[REGNO (XEXP (link, 0))];

            /* A plain reg-reg copy also shares copy preferences.  */
            if (XEXP (link, 0) == SET_SRC (set))
              {
                IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
                                  allocno[a2].hard_reg_copy_preferences);
                IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
                                  allocno[a1].hard_reg_copy_preferences);
              }

            if (!flag_schedule_insns
                && REG_NOTES (insn) == link && XEXP (link, 1) == 0
                && VECTOR_MODE_P (GET_MODE (SET_DEST (set)))
                && VECTOR_MODE_P (GET_MODE (XEXP (link, 0))))
              {
                SET_PSEUDO_PREF (a1, a2);
                SET_PSEUDO_PREF (a2, a1);
              }

            /* Merge the two pseudo-preference rows when either
               condition above made the pair tie-worthy.  */
            if (XEXP (link, 0) == SET_SRC (set)
                || (!flag_schedule_insns
                    && REG_NOTES (insn) == link && XEXP (link, 1) == 0
                    && VECTOR_MODE_P (GET_MODE (SET_DEST (set)))
                    && VECTOR_MODE_P (GET_MODE (XEXP (link, 0)))))
              {
                for (j = allocno_row_words - 1; j >= 0; j--)
                  {
                    pseudo_preferences[a1 * allocno_row_words + j]
                      |= pseudo_preferences[a2 * allocno_row_words + j];
                    pseudo_preferences[a2 * allocno_row_words + j]
                      |= pseudo_preferences[a1 * allocno_row_words + j];
                  }
              }

            IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
                              allocno[a2].hard_reg_preferences);
            IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
                              allocno[a1].hard_reg_preferences);
            IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
                              allocno[a2].hard_reg_full_preferences);
            IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
                              allocno[a1].hard_reg_full_preferences);
          }
}
/* Remove unattainable hard regs from every allocno's preference sets,
   and compute regs_someone_prefers: the hard regs that conflicting,
   lower-priority allocnos would like, which this allocno should avoid
   unless it prefers them itself.  */
static void
prune_preferences (void)
{
  int i;
  int num;
  int *allocno_to_order = xmalloc (max_allocno * sizeof (int));

  /* First pass: drop, from each allocno's preferences, regs it can
     never receive (conflicts, fixed/call-used regs as appropriate,
     and regs outside its preferred class).  */
  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      if (allocno[num].calls_crossed == 0)
        IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
        IOR_HARD_REG_SET (temp, call_used_reg_set);

      IOR_COMPL_HARD_REG_SET
        (temp,
         reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  /* Second pass: gather the full preferences of conflicting allocnos
     of lower priority.  Regs wanted by a *larger* conflicting allocno
     (temp2) are avoided unconditionally; regs wanted by same-or-
     smaller ones (temp) only if we do not prefer them ourselves.  */
  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp, temp2;
      int allocno2;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
                                     allocno2,
                                     {
        if (allocno_to_order[allocno2] > i)
          {
            if (allocno[allocno2].size <= allocno[num].size)
              IOR_HARD_REG_SET (temp,
                                allocno[allocno2].hard_reg_full_preferences);
            else
              IOR_HARD_REG_SET (temp2,
                                allocno[allocno2].hard_reg_full_preferences);
          }
                                     });

      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
/* Try to find a hard register for allocno NUM and record it in
   reg_renumber on success.  LOSERS, if nonzero, is a set of regs to
   avoid.  ALT_REGS_P selects the alternate (vs. preferred) register
   class; ACCEPT_CALL_CLOBBERED permits call-clobbered regs (caller
   saves); RETRYING is set when called from retry_global_alloc and
   disables the eviction fallback.  */
static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
                          ? reg_alternate_class (allocno[num].reg)
                          : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  /* used1: regs forbidden outright for this allocno; used2: the same
     minus this allocno's own conflicts (kept for the eviction
     fallback).  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  /* NOTE(review): `if (losers)' treats a HARD_REG_SET as a scalar;
     this assumes the set fits in one integer on this target —
     confirm.  */
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Pass 0 additionally avoids never-yet-used regs and regs someone
     else prefers; pass 1 relaxes back to used1 alone.  */
  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  /* The inner scan leaves I == FIRST_PSEUDO_REGISTER exactly when it
     found nothing, which is what lets the outer loop run pass 1.  */
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
        COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        {
#ifdef REG_ALLOC_ORDER
          int regno = reg_alloc_order[i];
#else
          int regno = i;
#endif
          if (! TEST_HARD_REG_BIT (used, regno)
              && HARD_REGNO_MODE_OK (regno, mode)
              && (allocno[num].calls_crossed == 0
                  || accept_call_clobbered
                  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
            {
              int j;
              int lim = regno + hard_regno_nregs[regno][mode];
              /* Every consecutive reg needed by MODE must be free.  */
              for (j = regno + 1;
                   (j < lim
                    && ! TEST_HARD_REG_BIT (used, j));
                   j++);
              if (j == lim)
                {
                  best_reg = regno;
                  break;
                }
#ifndef REG_ALLOC_ORDER
              i = j;
#endif
            }
        }
    }

  /* Prefer a copy-preferred register of a compatible class over the
     one found above, when one is free.  */
  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
                         reg_class_contents[(int) NO_REGS], no_copy_prefs);
  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
            && HARD_REGNO_MODE_OK (i, mode)
            && (allocno[num].calls_crossed == 0
                || accept_call_clobbered
                || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
            && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
                || reg_class_subset_p (REGNO_REG_CLASS (i),
                                       REGNO_REG_CLASS (best_reg))
                || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
                                       REGNO_REG_CLASS (i))))
          {
            int j;
            int lim = i + hard_regno_nregs[i][mode];
            for (j = i + 1;
                 (j < lim
                  && ! TEST_HARD_REG_BIT (used, j)
                  && (REGNO_REG_CLASS (j)
                      == REGNO_REG_CLASS (best_reg + (j - i))
                      || reg_class_subset_p (REGNO_REG_CLASS (j),
                                             REGNO_REG_CLASS (best_reg + (j - i)))
                      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
                                             REGNO_REG_CLASS (j))));
                 j++);
            if (j == lim)
              {
                best_reg = i;
                /* Copy preference wins; skip the plain-preference
                   scan.  */
                goto no_prefs;
              }
          }
    }
 no_copy_prefs:

  /* Likewise for plain (non-copy) preferences.  */
  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
                         reg_class_contents[(int) NO_REGS], no_prefs);
  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
            && HARD_REGNO_MODE_OK (i, mode)
            && (allocno[num].calls_crossed == 0
                || accept_call_clobbered
                || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
            && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
                || reg_class_subset_p (REGNO_REG_CLASS (i),
                                       REGNO_REG_CLASS (best_reg))
                || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
                                       REGNO_REG_CLASS (i))))
          {
            int j;
            int lim = i + hard_regno_nregs[i][mode];
            for (j = i + 1;
                 (j < lim
                  && ! TEST_HARD_REG_BIT (used, j)
                  && (REGNO_REG_CLASS (j)
                      == REGNO_REG_CLASS (best_reg + (j - i))
                      || reg_class_subset_p (REGNO_REG_CLASS (j),
                                             REGNO_REG_CLASS (best_reg + (j - i)))
                      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
                                             REGNO_REG_CLASS (j))));
                 j++);
            if (j == lim)
              {
                best_reg = i;
                break;
              }
          }
    }
 no_prefs:

  /* Nothing found: consider caller-save (retry accepting
     call-clobbered regs) when profitable.  */
  if (flag_caller_saves && best_reg < 0)
    {
      if (! accept_call_clobbered
          && allocno[num].calls_crossed != 0
          && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
                                     allocno[num].calls_crossed))
        {
          HARD_REG_SET new_losers;

          if (! losers)
            CLEAR_HARD_REG_SET (new_losers);
          else
            COPY_HARD_REG_SET (new_losers, losers);

          IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
          find_reg (num, new_losers, alt_regs_p, 1, retrying);
          if (reg_renumber[allocno[num].reg] >= 0)
            {
              caller_save_needed = 1;
              return;
            }
        }
    }

  /* Last resort for single-reg allocnos: evict local-alloc pseudos
     from a register whose aggregate weight is lower than ours.  */
  if (best_reg < 0 && !retrying
      && allocno[num].size == 1)
    {
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
        {
#ifdef REG_ALLOC_ORDER
          int regno = reg_alloc_order[i];
#else
          int regno = i;
#endif
          if (local_reg_n_refs[regno] != 0
              /* Don't use a reg no good for this pseudo.  */
              && ! TEST_HARD_REG_BIT (used2, regno)
              && HARD_REGNO_MODE_OK (regno, mode)
              && hard_regno_nregs[regno][mode] == 1
              && (allocno[num].calls_crossed == 0
                  || accept_call_clobbered
                  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
              && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
                                          mode)
#endif
#ifdef STACK_REGS
              && (!allocno[num].no_stack_reg
                  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
              )
            {
              /* Compare this allocno's freq/live-length density with
                 the local pseudos currently occupying REGNO.  */
#ifdef REWRITE_WEIGHT_COMPUTATION
              HOST_WIDE_INT tmp = (allocno[num].freq * 100)
                                  / allocno[num].live_length;
#else
              double tmp1 = ((double) local_reg_freq[regno]
                             / local_reg_live_length[regno]);
              double tmp2 = ((double) allocno[num].freq
                             / allocno[num].live_length);
#endif
#ifdef REWRITE_WEIGHT_COMPUTATION
              if (local_reg_weight[regno] < tmp)
#else
              if (tmp1 < tmp2)
#endif
                {
                  /* Evict: un-renumber every pseudo whose hard regs
                     include REGNO.  */
                  int k;
                  for (k = 0; k < max_regno; k++)
                    if (reg_renumber[k] >= 0)
                      {
                        int r = reg_renumber[k];
                        int endregno
                          = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];
                        if (regno >= r && regno < endregno)
                          reg_renumber[k] = -1;
                      }
                  best_reg = regno;
                  break;
                }
            }
        }
    }

  /* Did we find a register?  */
  if (best_reg >= 0)
    {
      /* Yes; record it for this pseudo and any sharing pseudos.  */
      int lim, j;
      HARD_REG_SET this_reg;

      reg_renumber[allocno[num].reg] = best_reg;
      if (reg_may_share[allocno[num].reg])
        for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
          if (reg_allocno[j] == num)
            reg_renumber[j] = best_reg;

      /* Mark each hard reg we occupy as used, and no longer purely
         local.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
        {
          SET_HARD_REG_BIT (this_reg, j);
          SET_HARD_REG_BIT (regs_used_so_far, j);
          local_reg_n_refs[j] = 0;
#ifndef REWRITE_WEIGHT_COMPUTATION
          local_reg_freq[j] = 0;
#endif
        }

      /* Every allocno conflicting with NUM now conflicts with the
         hard regs occupied.  (LIM is reused as the allocno index.)  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
                                     {
                                       IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
                                     });

      /* Propagate the choice through the pseudo-preference graph:
         tied, unallocated pseudos now copy-prefer BEST_REG, and their
         conflicts record that someone prefers it (both directions of
         the preference relation).  */
      {
        int k, l;
        EXECUTE_IF_SET_IN_ALLOCNO_SET (
          pseudo_preferences + num * allocno_row_words, k,
          {
            if (!CONFLICTP (num, k) && reg_renumber[allocno[k].reg] < 0)
              SET_REGBIT (hard_reg_copy_preferences, k, best_reg);
            if (num != k && !CONFLICTP (num, k))
              EXECUTE_IF_SET_IN_ALLOCNO_SET (
                conflicts + k * allocno_row_words, l,
                {
                  if (k != l && reg_renumber[allocno[l].reg] < 0)
                    SET_REGBIT (regs_someone_prefers, l, best_reg);
                });
          });
        EXECUTE_IF_SET_IN_ALLOCNO_SET (
          conflicts + num * allocno_row_words, k,
          {
            if (num != k)
              EXECUTE_IF_SET_IN_ALLOCNO_SET (
                pseudo_preferences + k * allocno_row_words, l,
                {
                  if (k != l && !CONFLICTP (k, l)
                      && reg_renumber[allocno[l].reg] < 0)
                    SET_REGBIT (regs_someone_prefers, l, best_reg);
                });
          });
      }
    }
}
/* Called from reload: make one more attempt to find a hard register
   for pseudo REGNO, avoiding FORBIDDEN_REGS.  On success the pseudo's
   rtx is updated and its home marked live.  */
void
retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
{
  int num = reg_allocno[regno];

  if (num < 0)
    return;

  /* Preferred class first, then the alternate class.  */
  if (N_REG_CLASSES > 1)
    find_reg (num, forbidden_regs, 0, 0, 1);
  if (reg_renumber[regno] < 0
      && reg_alternate_class (regno) != NO_REGS)
    find_reg (num, forbidden_regs, 1, 0, 1);

  if (reg_renumber[regno] >= 0)
    {
      REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
      mark_home_live (regno);
    }
}
/* Record that register REGNO conflicts with everything currently
   live.  REGNO may be a hard reg or a pseudo that has an allocno.  */
static void
record_one_conflict (int regno)
{
  int j;
  if (regno < FIRST_PSEUDO_REGISTER)
    /* A hard reg: it conflicts with every live allocno.  */
    EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
                                   {
                                     SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
                                   });
  else
    {
      /* A pseudo: conflicts with the live hard regs and live allocnos.
         Only this allocno's row is written here; mirror_conflicts
         symmetrizes the matrix afterwards.  */
      int ialloc = reg_allocno[regno];
      int ialloc_prod = ialloc * allocno_row_words;
      IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
      for (j = allocno_row_words - 1; j >= 0; j--)
        conflicts[ialloc_prod + j] |= allocnos_live[j];
    }
}
/* Make each of the LEN allocnos in ALLOCNO_VEC conflict with every
   hard register currently in hard_regs_live.  */
static void
record_conflicts (int *allocno_vec, int len)
{
  int i;

  for (i = len - 1; i >= 0; i--)
    IOR_HARD_REG_SET (allocno[allocno_vec[i]].hard_reg_conflicts,
                      hard_regs_live);
}
/* The conflicts matrix (and the Apple-local pseudo_preferences
   matrix) is first filled one-sided; OR every set bit (i, j) into
   position (j, i) so both matrices become symmetric.  P/PP scan the
   source rows word by word; Q*/QQ* track the mirrored destination
   word, with MASK selecting the destination bit.  */
static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;
  int rwb = rw * INT_BITS;
  INT_TYPE *p = conflicts, *pp = pseudo_preferences;
  INT_TYPE *q0 = conflicts, *q1, *q2;
  INT_TYPE *qq0 = pseudo_preferences, *qq1, *qq2;
  unsigned INT_TYPE mask;

  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      /* MASK wrapped past the word: advance to the next destination
         column word.  */
      if (! mask)
        {
          mask = 1;
          q0++;
          qq0++;
        }
      for (j = allocno_row_words - 1, q1 = q0, qq1 = qq0;
           j >= 0;
           j--, q1 += rwb, qq1 += rwb)
        {
          unsigned INT_TYPE word;

          for (word = (unsigned INT_TYPE) *p++, q2 = q1;
               word;
               word >>= 1, q2 += rw)
            {
              if (word & 1)
                *q2 |= mask;
            }
          for (word = (unsigned INT_TYPE) *pp++, qq2 = qq1;
               word;
               word >>= 1, qq2 += rw)
            {
              if (word & 1)
                *qq2 |= mask;
            }
        }
    }
}
/* note_stores callback: REG is stored by the current insn (SETTER is
   the SET/CLOBBER rtx, or null for a REG_INC).  Record the store in
   regs_set, note any hard-reg preference, mark REG live, and record
   its conflicts with everything currently live.  */
static void
mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regs_set[n_regs_set++] = reg;

  /* Only a real SET (not a CLOBBER) expresses a preference.  */
  if (setter && GET_CODE (setter) != CLOBBER)
    set_preference (reg, SET_SRC (setter));

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
        {
          SET_ALLOCNO_LIVE (reg_allocno[regno]);
          record_one_conflict (regno);
        }
    }

  /* A pseudo renumbered by local-alloc occupies hard regs.  */
  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
        {
          record_one_conflict (regno);
          SET_HARD_REG_BIT (hard_regs_live, regno);
          regno++;
        }
    }
}
/* note_stores callback used for the pre-pass over an insn: only
   CLOBBERs are processed here; real SETs are handled later by a
   separate note_stores walk with mark_reg_store.  */
static void
mark_reg_clobber (rtx reg, rtx setter, void *data)
{
  if (GET_CODE (setter) != CLOBBER)
    return;
  mark_reg_store (reg, setter, data);
}
/* Record conflicts for REG against everything currently live, without
   changing the live sets (used for a dying input that is also used in
   an output of a multi-set PARALLEL).  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
        record_one_conflict (regno);
    }

  /* A pseudo renumbered by local-alloc occupies hard regs.  */
  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
        {
          record_one_conflict (regno);
          regno++;
        }
    }
}
/* REG has just died: remove it from the allocno and hard-reg live
   sets maintained during the conflict scan.  */
static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
        CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
    }

  /* A pseudo renumbered by local-alloc frees its hard regs.  */
  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
        {
          CLEAR_HARD_REG_BIT (hard_regs_live, regno);
          regno++;
        }
    }
}
/* Mark hard register REGNO, plus the following registers needed to
   hold MODE, as live — without recording any conflicts.  */
static void
mark_reg_live_nc (int regno, enum machine_mode mode)
{
  int j;
  int limit = regno + hard_regno_nregs[regno][mode];

  for (j = regno; j < limit; j++)
    SET_HARD_REG_BIT (hard_regs_live, j);
}
/* The insn being scanned sets DEST from SRC.  If one side is a hard
   register and the other an allocatable pseudo, record a hard-reg
   preference (copy preference when SRC is used unmodified).  Apple
   local: a vector-mode pseudo-to-pseudo copy records a tie in the
   pseudo_preferences matrix instead.  OFFSET adjusts for SUBREG
   displacement between the two sides.  */
static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Remember the originals for the vector-mode check at the end.  */
  rtx src_reg = src, dest_reg = dest;
  int offset = 0;
  unsigned int i;
  int copy = 1;

  /* SRC inside a unary/computation rtx is not a plain copy; strip one
     level (but keep vector SUBREGs intact).  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e'
      && ! (GET_CODE (src) == SUBREG && VECTOR_MODE_P (GET_MODE (dest))))
    src = XEXP (src, 0), copy = 0;

  /* Resolve SRC to a regno, accumulating SUBREG offset.  */
  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_reg = SUBREG_REG (src);
      src_regno = REGNO (SUBREG_REG (src));

      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
        offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
                                       GET_MODE (SUBREG_REG (src)),
                                       SUBREG_BYTE (src),
                                       GET_MODE (src));
      else
        offset += (SUBREG_BYTE (src)
                   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  /* Likewise for DEST (offset subtracts).  */
  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_reg = SUBREG_REG (dest);
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
        offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
                                       GET_MODE (SUBREG_REG (dest)),
                                       SUBREG_BYTE (dest),
                                       GET_MODE (dest));
      else
        offset -= (SUBREG_BYTE (dest)
                   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Pseudos already placed by local-alloc count as hard regs.  */
  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];
  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Hard dest <- pseudo src: the pseudo prefers the dest regs.  */
  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      if (dest_regno < FIRST_PSEUDO_REGISTER)
        {
          if (copy)
            SET_REGBIT (hard_reg_copy_preferences,
                        reg_allocno[src_regno], dest_regno);

          SET_REGBIT (hard_reg_preferences,
                      reg_allocno[src_regno], dest_regno);
          for (i = dest_regno;
               i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
               i++)
            SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
        }
    }

  /* Pseudo dest <- hard src: the pseudo prefers the src regs.  */
  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
        {
          if (copy)
            SET_REGBIT (hard_reg_copy_preferences,
                        reg_allocno[dest_regno], src_regno);

          SET_REGBIT (hard_reg_preferences,
                      reg_allocno[dest_regno], src_regno);
          for (i = src_regno;
               i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
               i++)
            SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
        }
    }

  /* Apple local: vector pseudo copied to vector pseudo — record a tie
     in the pseudo-preference matrix.  */
  if (src_regno >= FIRST_PSEUDO_REGISTER && reg_allocno[src_regno] >= 0
      && VECTOR_MODE_P (GET_MODE (src_reg))
      && dest_regno >= FIRST_PSEUDO_REGISTER && reg_allocno[dest_regno] >= 0
      && VECTOR_MODE_P (GET_MODE (dest_reg))
      && copy)
    {
      src_regno += offset;
      SET_PSEUDO_PREF (reg_allocno[dest_regno], reg_allocno[src_regno]);
    }
}
rtx find_tied_stack_pseudo (int);

/* Apple local: for pseudo R, look for a preference-tied,
   non-conflicting pseudo of the same size that was spilled to a
   directly-addressable stack slot; return a copy of that slot's MEM,
   or 0.  (The `return' executes from inside the iteration macro's
   do-while, which simply exits the function.)  */
rtx
find_tied_stack_pseudo (int r)
{
  int i = reg_allocno[r];
  int j;

  if (i == -1)
    return 0;
  EXECUTE_IF_SET_IN_ALLOCNO_SET(pseudo_preferences + i * allocno_row_words, j,
    {
      if (!CONFLICTP(i, j)
          && reg_renumber[allocno[j].reg] < 0
          && reg_equiv_memory_loc[allocno[j].reg] != 0
          && PSEUDO_REGNO_BYTES (allocno[j].reg)
             == PSEUDO_REGNO_BYTES (allocno[i].reg)
          && strict_memory_address_p (VOIDmode, reg_equiv_memory_loc[allocno[j].reg]))
        return copy_rtx (reg_equiv_memory_loc[allocno[j].reg]);
    });
  return 0;
}
void remove_invalidated_death_notes (rtx);

/* Apple local: after allocation, a REG_DEAD note naming a hard reg is
   stale when that hard reg was also assigned to a preference-tied,
   non-conflicting pseudo that is still unallocated-as-written here;
   walk all insns from FIRST and unlink such notes.  */
void remove_invalidated_death_notes (rtx first)
{
  rtx insn;
  int regno, orig_regno, i, j;
  rtx r, *pnote, *next_pnote;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* Fix: `&REG_NOTES' had been mojibake'd to `®_NOTES' (the
           byte pair `&R' re-encoded as `®'), which does not compile.  */
        for (pnote = &REG_NOTES (insn); *pnote; pnote = next_pnote)
          {
            next_pnote = &XEXP (*pnote, 1);
            if (REG_NOTE_KIND (*pnote) == REG_DEAD)
              {
                r = XEXP (*pnote, 0);
                if (!REG_P (r))
                  continue;
                regno = REGNO (r);
                orig_regno = ORIGINAL_REGNO (r);
                /* Only hard regs that started life as pseudos are of
                   interest.  */
                if (orig_regno < FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
                  continue;
                if (regno == orig_regno)
                  continue;
                i = reg_allocno[orig_regno];
                /* Fix: a pseudo without an allocno yields -1, which
                   would index before the pseudo_preferences matrix.  */
                if (i < 0)
                  continue;
                EXECUTE_IF_SET_IN_ALLOCNO_SET(pseudo_preferences + i * allocno_row_words, j,
                  {
                    if (i != j
                        && !CONFLICTP(i, j)
                        && reg_renumber[allocno[j].reg] == regno)
                      {
                        /* Unlink the note; re-examine this slot on the
                           next iteration.  */
                        *pnote = *next_pnote;
                        next_pnote = pnote;
                        goto next_note;
                      }
                  });
              }
          next_note:;
          }
      }
}
/* Record that hard register FROM has been eliminated in favor of TO:
   every basic block whose entry live set contains FROM is rewritten to
   contain TO instead.  */
void
mark_elimination (int from, int to)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      regset live_at_start = bb->global_live_at_start;

      if (REGNO_REG_SET_P (live_at_start, from))
	{
	  CLEAR_REGNO_REG_SET (live_at_start, from);
	  SET_REGNO_REG_SET (live_at_start, to);
	}
    }
}
/* Registers currently live while building the reload insn chain.  */
static regset live_relevant_regs;

/* note_stores callback: REG (or the register under a SUBREG) has been
   set.  Mark it live in live_relevant_regs, and record it in the
   regset passed through REGS_SET.  Hard registers are expanded to all
   the consecutive registers the mode occupies; fixed hard registers
   are kept out of REGS_SET.  Pseudos are only tracked once they have
   been assigned a hard register.  */
static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_renumber[regno] >= 0)
	{
	  SET_REGNO_REG_SET (live_relevant_regs, regno);
	  SET_REGNO_REG_SET ((regset) regs_set, regno);
	}
    }
  else
    {
      int limit = regno + hard_regno_nregs[regno][GET_MODE (reg)];

      for (; regno < limit; regno++)
	{
	  SET_REGNO_REG_SET (live_relevant_regs, regno);
	  if (!fixed_regs[regno])
	    SET_REGNO_REG_SET ((regset) regs_set, regno);
	}
    }
}
/* Record the death of register REGNO (in mode MODE): clear it from
   live_relevant_regs and note it in CHAIN->dead_or_set.  Hard
   registers cover all consecutive registers of MODE, and fixed hard
   registers are not added to dead_or_set; a pseudo is added only if it
   was assigned a hard register.  */
static void
reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
      if (reg_renumber[regno] >= 0)
	SET_REGNO_REG_SET (&chain->dead_or_set, regno);
    }
  else
    {
      int limit = regno + hard_regno_nregs[regno][mode];

      for (; regno < limit; regno++)
	{
	  CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
	  if (!fixed_regs[regno])
	    SET_REGNO_REG_SET (&chain->dead_or_set, regno);
	}
    }
}
/* Walk the insns from FIRST and build the reload_insn_chain: one
   struct insn_chain per real (non-note, non-barrier) insn, recording
   the registers live through each insn and those dead or set by it.
   Liveness is seeded from each block's global_live_at_start and
   updated via REG_DEAD/REG_UNUSED notes and note_stores.

   Fix: "&reg_obstack" had been mangled into the mis-encoded token
   "(R)_obstack" (U+00AE); restored the address-of expression.  */
void
build_insn_chain (rtx first)
{
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;

  live_relevant_regs = ALLOC_REG_SET (&reg_obstack);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      /* At the head of each block, reload the live set from the
	 block's entry liveness, filtered to relevant registers.  */
      if (first == BB_HEAD (b))
	{
	  unsigned i;
	  bitmap_iterator bi;

	  CLEAR_REG_SET (live_relevant_regs);
	  EXECUTE_IF_SET_IN_BITMAP (b->global_live_at_start, 0, i, bi)
	    {
	      if (i < FIRST_PSEUDO_REGISTER
		  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		  : reg_renumber[i] >= 0)
		SET_REGNO_REG_SET (live_relevant_regs, i);
	    }
	}

      if (!NOTE_P (first) && !BARRIER_P (first))
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Registers with a REG_DEAD note die before this insn,
		 hence before live_throughout is snapshotted.  */
	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Registers set by the insn become live after it.  */
	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* REG_UNUSED registers die immediately after the insn.  */
	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after the last real block; anything past it should only
	 be USEs and jump-table payloads.  */
      if (b == EXIT_BLOCK_PTR)
	{
#ifdef ENABLE_CHECKING
	  for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
	    gcc_assert (!INSN_P (first)
			|| GET_CODE (PATTERN (first)) == USE
			|| ((GET_CODE (PATTERN (first)) == ADDR_VEC
			     || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
			    && prev_real_insn (first) != 0
			    && JUMP_P (prev_real_insn (first))));
#endif
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
/* Dump the register-allocation problem to FILE: the list of allocnos
   still needing a hard register, each allocno's conflicts, its pseudo
   ties, its hard-register (copy) preferences, and the allocation
   order.  */
static void
dump_conflicts (FILE *file)
{
  int i;
  int has_preferences, has_copy_preferences, has_pseudo_ties;
  int nregs;

  /* Count allocnos whose register is still unassigned.  */
  nregs = 0;
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (i = 0; i < max_allocno; i++)
    {
      int j;

      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      fprintf (file, " %d", allocno[allocno_order[i]].reg);
      /* Also list any other register numbers sharing this allocno.  */
      for (j = 0; j < max_regno; j++)
	if (reg_allocno[j] == allocno_order[i]
	    && j != allocno[allocno_order[i]].reg)
	  fprintf (file, "+%d", j);
      if (allocno[allocno_order[i]].size != 1)
	fprintf (file, " (%d)", allocno[allocno_order[i]].size);
    }
  fprintf (file, "\n");

  for (i = 0; i < max_allocno; i++)
    {
      int j;

      /* Conflicts with other allocnos, then with hard registers.  */
      fprintf (file, ";; %d conflicts:", allocno[i].reg);
      for (j = 0; j < max_allocno; j++)
	if (CONFLICTP (j, i))
	  fprintf (file, " %d", allocno[j].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");

      /* Pseudo ties recorded in the pseudo_preferences matrix; the
	 header line is printed lazily on the first tie found.  */
      has_pseudo_ties = 0;
      for (j = 0; j < max_allocno; j++)
	if (TEST_PSEUDO_PREF (j, i))
	  {
	    if (!has_pseudo_ties)
	      {
		has_pseudo_ties = 1;
		fprintf (file, ";; %d pseudo ties:", allocno[i].reg);
	      }
	    fprintf (file, " %d", allocno[j].reg);
	  }
      if (has_pseudo_ties)
	fprintf (file, "\n");

      /* Hard-register preferences, printed only if any exist.  */
      has_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  has_preferences = 1;
      if (! has_preferences)
	continue;
      fprintf (file, ";; %d preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");

      /* Copy preferences, printed only if any exist.  */
      has_copy_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_copy_preferences, j))
	  has_copy_preferences = 1;
      if (! has_copy_preferences)
	continue;
      fprintf (file, ";; %d copy preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_copy_preferences, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
  fprintf (file, "Allocation order:");
  for (i = 0; i < max_allocno; i++)
    fprintf(file, " %d", allocno[allocno_order[i]].reg);
  fprintf (file, "\n");
}
/* Dump to FILE the final register dispositions (which hard register
   each renumbered pseudo landed in, six per line) followed by the set
   of hard registers ever used.  */
void
dump_global_regs (FILE *file)
{
  int i, j;

  fprintf (file, ";; Register dispositions:\n");
  j = 0;
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0)
	continue;
      fprintf (file, "%d in %d ", i, reg_renumber[i]);
      j++;
      if (j % 6 == 0)
	fprintf (file, "\n");
    }

  fprintf (file, "\n\n;; Hard regs used: ");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, " %d", i);
  fprintf (file, "\n\n");
}
/* Per-basic-block data for the partial-availability liveness analysis
   below (see calculate_reg_pav).  Attached to bb->aux.  */
struct bb_info
{
  /* Position of the block in a reverse topological sort order.  */
  int rts_number;
  /* Registers used as earlyclobber operands in this block.  */
  bitmap earlyclobber;
  /* Registers killed in the block / locally available at its end.  */
  bitmap killed, avloc;
  /* Partial-availability live sets at block entry and exit.  */
  bitmap live_pavin, live_pavout;
};

/* Accessors for the bb_info hung off a block's aux field.  */
#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
/* Allocate a struct bb_info for every basic block and initialize its
   bitmaps.  live_pavin/live_pavout start out containing all hard
   registers, the optimistic seed for the availability iteration in
   calculate_reg_pav.

   Consistency: use the BB_INFO accessor instead of a raw bb->aux
   assignment, matching free_bb_info and mark_reg_change.  */
static void
allocate_bb_info (void)
{
  int i;
  basic_block bb;
  struct bb_info *bb_info;
  bitmap init;

  alloc_aux_for_blocks (sizeof (struct bb_info));
  init = BITMAP_ALLOC (NULL);
  /* Seed set: every hard register.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (init, i);
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);
      bb_info->earlyclobber = BITMAP_ALLOC (NULL);
      bb_info->avloc = BITMAP_ALLOC (NULL);
      bb_info->killed = BITMAP_ALLOC (NULL);
      bb_info->live_pavin = BITMAP_ALLOC (NULL);
      bb_info->live_pavout = BITMAP_ALLOC (NULL);
      bitmap_copy (bb_info->live_pavin, init);
      bitmap_copy (bb_info->live_pavout, init);
    }
  BITMAP_FREE (init);
}
/* Release all per-block bitmaps allocated by allocate_bb_info and the
   aux structures themselves.  */
static void
free_bb_info (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      struct bb_info *info = BB_INFO (bb);

      /* Free in reverse order of allocation.  */
      BITMAP_FREE (info->live_pavout);
      BITMAP_FREE (info->live_pavin);
      BITMAP_FREE (info->killed);
      BITMAP_FREE (info->avloc);
      BITMAP_FREE (info->earlyclobber);
    }
  free_aux_for_blocks ();
}
/* note_stores callback: register REG is modified by SETTER inside the
   basic block passed through DATA.  Mark it killed in the block; a
   real store also makes it locally available (avloc), while a CLOBBER
   removes availability.  */
static void
mark_reg_change (rtx reg, rtx setter, void *data)
{
  int regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  bitmap_set_bit (bb_info->killed, regno);
  if (GET_CODE (setter) == CLOBBER)
    bitmap_clear_bit (bb_info->avloc, regno);
  else
    bitmap_set_bit (bb_info->avloc, regno);
}
/* Register classes (as ints) appearing in earlyclobber operands of the
   insn currently being examined; refilled per insn.  */
static varray_type earlyclobber_regclass;

/* Parse INSN's operand constraint strings, collecting into
   earlyclobber_regclass the register class of every operand
   alternative marked earlyclobber ('&').  Return true if any
   earlyclobber operand with a register class was found.  */
static bool
check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VARRAY_POP_ALL (earlyclobber_regclass);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    /* Modifier and non-register constraint characters carry no
	       class information; skip them.  */
	    case '=': case '+': case '?':
	    case '#': case '!':
	    case '*': case '%':
	    case 'm': case '<': case '>': case 'V': case 'o':
	    case 'E': case 'F': case 'G': case 'H':
	    case 's': case 'i': case 'n':
	    case 'I': case 'J': case 'K': case 'L':
	    case 'M': case 'N': case 'O': case 'P':
	    case 'X':
	    case '0': case '1': case '2': case '3': case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      break;

	    case '&':
	      /* This alternative is earlyclobber.  */
	      amp_p = true;
	      break;

	    case '\0':
	    case ',':
	      /* End of an alternative: if it was earlyclobber and
		 named a register class, record the class once.  */
	      if (amp_p && class != NO_REGS)
		{
		  found = true;
		  for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
		       i >= 0; i--)
		    if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
		      break;
		  if (i < 0)
		    VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
		}

	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      /* Machine-specific constraint letter.  */
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  /* Constraint letters may be multi-character.  */
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
/* for_each_rtx callback: if *X is a pseudo used in the current insn
   that is neither killed nor set in the block (DATA, a basic_block),
   and whose preferred or alternate class intersects a class recorded
   in earlyclobber_regclass, mark it in the block's earlyclobber set.
   Always returns 0 so the traversal continues.

   Fixes: DATA was marked ATTRIBUTE_UNUSED although it is dereferenced
   on the next line; use REG_P for consistency with the rest of the
   file.  */
static int
mark_reg_use_for_earlyclobber (rtx *x, void *data)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      regno = REGNO (*x);
      /* A reg already killed or set locally is not an upward-exposed
	 use; skip it.  */
      if (bitmap_bit_p (bb_info->killed, regno)
	  || bitmap_bit_p (bb_info->avloc, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
	if (reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass, i),
				     pref_class)
	    || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
		&& reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass,
							i),
					    alt_class)))
	  {
	    bitmap_set_bit (bb_info->earlyclobber, regno);
	    break;
	  }
    }
  return 0;
}
/* note_uses-compatible wrapper: apply mark_reg_use_for_earlyclobber to
   every sub-rtx of *X, forwarding DATA (the basic block).  */
static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
/* Compute the per-block local sets (killed, avloc, earlyclobber) by
   scanning every insn of every basic block.  */
static void
calculate_local_reg_bb_info (void)
{
  basic_block bb;
  rtx insn, bound;

  VARRAY_INT_INIT (earlyclobber_regclass, 20,
		   "classes of registers early clobbered in an insn");
  FOR_EACH_BB (bb)
    {
      bound = NEXT_INSN (BB_END (bb));
      for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  note_stores (PATTERN (insn), mark_reg_change, bb);
	  if (check_earlyclobber (insn))
	    note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
	}
    }
}
/* Assign each basic block its position in a reverse topological sort,
   stored in bb_info->rts_number and later used by rpost_cmp.  */
static void
set_up_bb_rts_numbers (void)
{
  int i;
  int *order = xmalloc (sizeof (int) * n_basic_blocks);

  flow_reverse_top_sort_order_compute (order);
  for (i = 0; i < n_basic_blocks; i++)
    BB_INFO_BY_INDEX (order[i])->rts_number = i;
  free (order);
}
static int
rpost_cmp (const void *bb1, const void *bb2)
{
basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
}
/* Scratch bitmap shared by the iteration below.  */
static bitmap temp_bitmap;

/* Iteratively compute partial register availability (live_pavin /
   live_pavout) for every block, using a worklist processed in the
   order given by rpost_cmp until a fixed point is reached:

     pavin(bb)  |= union of pavout over real predecessors,
		   masked by global_live_at_start
     pavout(bb)  = (avloc | (pavin & ~killed)) & global_live_at_end

   A block whose pavout changes pushes its successors onto the next
   worklist (wset guards against duplicates).  */
static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  int i, nel;
  varray_type bbs, new_bbs, temp;
  basic_block *bb_array;
  sbitmap wset;

  VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
  VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
  temp_bitmap = BITMAP_ALLOC (NULL);
  /* Start with every block on the worklist.  */
  FOR_EACH_BB (bb)
    {
      VARRAY_PUSH_BB (bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VARRAY_ACTIVE_SIZE (bbs))
    {
      bb_array = &VARRAY_BB (bbs, 0);
      nel = VARRAY_ACTIVE_SIZE (bbs);
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  edge_iterator ei;
	  struct bb_info *bb_info;
	  bitmap bb_live_pavin, bb_live_pavout;

	  bb = bb_array [i];
	  bb_info = BB_INFO (bb);
	  bb_live_pavin = bb_info->live_pavin;
	  bb_live_pavout = bb_info->live_pavout;
	  /* Merge in availability flowing from predecessors (the entry
	     block contributes nothing).  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      basic_block pred = e->src;

	      if (pred->index != ENTRY_BLOCK)
		bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
	    }
	  bitmap_and_into (bb_live_pavin, bb->global_live_at_start);
	  /* Transfer function: available locally, or available on
	     entry and not killed.  */
	  bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
				bb_live_pavin, bb_info->killed);
	  bitmap_and_into (temp_bitmap, bb->global_live_at_end);
	  if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
	    {
	      bitmap_copy (bb_live_pavout, temp_bitmap);
	      /* Output changed: reprocess the successors next round.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  succ = e->dest;
		  if (succ->index != EXIT_BLOCK
		      && !TEST_BIT (wset, succ->index))
		    {
		      SET_BIT (wset, succ->index);
		      VARRAY_PUSH_BB (new_bbs, succ);
		    }
		}
	    }
	}
      /* Swap worklists for the next iteration.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VARRAY_POP_ALL (new_bbs);
    }
  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
}
/* Weaken the partial-availability results so downstream liveness stays
   conservative: force earlyclobbered registers (and, on STACK_REGS
   targets, every pseudo that could land in a stack register) back into
   each block's live_pavin.  */
static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  /* Collect pseudos whose preferred or alternate class overlaps the
     stack registers.  GO_IF_HARD_REG_EQUAL jumps to `skip' when the
     intersection is empty.  */
  stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
}
/* Sharpen the global liveness information: run the local analysis,
   the partial-availability iteration, and its post-adjustment, then
   intersect each block's live-at-start/live-at-end sets with the
   computed pavin/pavout sets.  */
static void
make_accurate_live_analysis (void)
{
  basic_block bb;

  max_regno = max_reg_num ();
  compact_blocks ();
  allocate_bb_info ();
  calculate_local_reg_bb_info ();
  set_up_bb_rts_numbers ();
  calculate_reg_pav ();
  modify_reg_pav ();
  FOR_EACH_BB (bb)
    {
      struct bb_info *bb_info = BB_INFO (bb);

      bitmap_and_into (bb->global_live_at_start, bb_info->live_pavin);
      bitmap_and_into (bb->global_live_at_end, bb_info->live_pavout);
    }
  free_bb_info ();
}