#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
/* A cached record of the hard registers live at one particular branch
   target, chained into target_hash_table by INSN_UID.  */
struct target_info
{
int uid;                    /* INSN_UID of the target insn.  */
struct target_info *next;   /* Next entry on the same hash chain.  */
HARD_REG_SET live_regs;     /* Hard registers live at the target.  */
int block;                  /* Basic block number, or -1 if invalidated
                               (see clear_hashed_info_for_insn).  */
int bb_tick;                /* Value of bb_ticks[block] when this entry
                               was computed; a mismatch means stale.  */
};
/* Number of chains in target_hash_table (entries are keyed by
   INSN_UID % TARGET_HASH_PRIME).  */
#define TARGET_HASH_PRIME 257

/* Resources implicitly required at the start of the epilogue
   (snapshot of end_of_function_needs taken in init_resource_info
   before the epilogue insns are scanned).  */
static struct resources start_of_epilogue_needs;

/* Resources required to be valid at the end of the function: return
   value registers, stack/frame pointers, global registers, plus
   whatever the epilogue insns set.  */
static struct resources end_of_function_needs;

/* Hash table of cached liveness information for branch targets;
   NULL until init_resource_info allocates it.  */
static struct target_info **target_hash_table = NULL;

/* Per-basic-block tick counters, bumped by incr_ticks_for_insn to
   invalidate cached target_info entries for that block.  */
static int *bb_ticks;

/* Scratch register sets used while scanning a block in
   mark_target_live_regs / update_live_status.  */
static HARD_REG_SET current_live_regs;

/* Registers with REG_DEAD notes seen but whose death is deferred
   until the next CODE_LABEL.  */
static HARD_REG_SET pending_dead_regs;

static void update_live_status PARAMS ((rtx, rtx, void *));
static int find_basic_block PARAMS ((rtx, int));
static rtx next_insn_no_annul PARAMS ((rtx));
static rtx find_dead_or_set_registers PARAMS ((rtx, struct resources*,
rtx*, int, struct resources,
struct resources));
/* Callback for note_stores: update CURRENT_LIVE_REGS and
   PENDING_DEAD_REGS for a store to DEST.  X is the SET or CLOBBER rtx
   performing the store; DATA is unused.  Only hard registers (and
   SUBREGs of hard registers) are tracked; other destinations are
   ignored.  */

static void
update_live_status (dest, x, data)
     rtx dest;
     rtx x;
     void *data ATTRIBUTE_UNUSED;
{
  int regno, limit;

  /* Ignore anything that isn't a hard reg or a SUBREG of one.  */
  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  regno = (GET_CODE (dest) == SUBREG
	   ? subreg_regno (dest)
	   : REGNO (dest));
  limit = regno + HARD_REGNO_NREGS (regno, GET_MODE (dest));

  /* A CLOBBER kills the registers; any other store makes them live
     and cancels any pending death recorded for them.  */
  if (GET_CODE (x) == CLOBBER)
    while (regno < limit)
      CLEAR_HARD_REG_BIT (current_live_regs, regno++);
  else
    while (regno < limit)
      {
	SET_HARD_REG_BIT (current_live_regs, regno);
	CLEAR_HARD_REG_BIT (pending_dead_regs, regno);
	regno++;
      }
}
/* Find the number of the basic block containing INSN by scanning
   backward to the previous BARRIER, then forward over the CODE_LABELs
   that follow it, looking for a block head.  Give up (return -1) if
   more than SEARCH_LIMIT non-note insns are scanned backward without
   finding a BARRIER, or if no label matches a block head.  The start
   of the function counts as basic block zero.  */

static int
find_basic_block (insn, search_limit)
     rtx insn;
     int search_limit;
{
  int bnum;

  /* Scan backward to the previous BARRIER, bounded by SEARCH_LIMIT.  */
  insn = prev_nonnote_insn (insn);
  while (insn != 0 && GET_CODE (insn) != BARRIER && search_limit != 0)
    {
      insn = prev_nonnote_insn (insn);
      search_limit--;
    }

  /* Ran out of budget before finding a BARRIER: unknown block.  */
  if (search_limit == 0)
    return -1;

  /* Hit the start of the function: that is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the labels after the BARRIER head a basic block.  */
  for (insn = next_nonnote_insn (insn);
       insn != 0 && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (bnum = 0; bnum < n_basic_blocks; bnum++)
	if (insn == BLOCK_HEAD (bnum))
	  return bnum;
    }

  return -1;
}
/* Return the next insn after INSN, skipping delay-slot insns of an
   annulled branch that execute only from the branch target.  If the
   next insn is the head of a SEQUENCE, return the sequence's first
   element instead.  Returns zero when INSN is zero.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn == 0)
    return 0;

  /* If INSN is an annulled branch at the head of its sequence (its
     predecessor's NEXT does not point back at it), step over the
     delay-slot insns marked as executing from the target.  */
  if (INSN_ANNULLED_BRANCH_P (insn)
      && NEXT_INSN (PREV_INSN (insn)) != insn)
    {
      while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
	insn = NEXT_INSN (insn);
    }

  insn = NEXT_INSN (insn);

  /* Descend into a SEQUENCE rather than returning its wrapper.  */
  if (insn != 0
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  return insn;
}
/* Given X, a piece of rtl, and RES, a pointer to a `struct resources',
   mark which resources X references.  If INCLUDE_DELAYED_EFFECTS is
   nonzero, also mark resources referenced by a CALL_INSN's
   CALL_INSN_FUNCTION_USAGE (and honor INSN_REFERENCES_ARE_DELAYED).

   Fix vs. previous revision: in the CALL_INSN case, the pattern of the
   enclosing insn was passed to XVECLEN before being verified to be a
   SEQUENCE, so the consistency abort () could never fire before the
   vector was accessed; the check now precedes the access.  */

void
mark_referenced_resources (x, res, include_delayed_effects)
     rtx x;
     struct resources *res;
     int include_delayed_effects;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also handle
     the special cases that need bespoke traversal.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and labels reference no resources.  */
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  /* Only hard registers are expected here.  */
	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      {
	unsigned int regno = REGNO (x);
	unsigned int last_regno
	  = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	if (last_regno > FIRST_PSEUDO_REGISTER)
	  abort ();
	for (r = regno; r < last_regno; r++)
	  SET_HARD_REG_BIT (res->regs, r);
      }
      return;

    case MEM:
      /* An unchanging MEM doesn't count as a memory reference, but its
	 address registers are still referenced.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* Traverse the input operand vector explicitly; falling through
	 to the generic walk would also visit the embedded ASM_INPUT,
	 which does not denote a traditional asm here.  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand is a (MEM (xxx)) but doesn't really reference
	 memory; only its address is referenced.  The second operand may
	 be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually the destination of a SET is written, not referenced.
	 But registers used to address memory are referenced, and the
	 destination itself is referenced when it is a SIGN_EXTRACT,
	 ZERO_EXTRACT or STRICT_LOW_PART.  */
      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT
	  || GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      /* A CLOBBER only writes; it references nothing.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the stack pointer, the frame
	     pointer if one exists, any global registers, and any
	     registers given in USE entries of its function usage.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay-slot sequence, locate it.  Verify
	     the pattern really is a SEQUENCE before touching its operand
	     vector with XVECLEN.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	      seq_size = XVECLEN (sequence, 0);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* A call that can set the setjmp state must be assumed to
	     reference any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  /* Skip USEs whose register is set by one of the delay
		     slots of the sequence.  */
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, 0);
		}
	  }
	}

      /* ... fall through to process the insn's pattern ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing; recurse on the pattern.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Generic walk: process each sub-expression.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
/* Scan forward from TARGET looking for registers that are set, or that
   die, before being referenced; such registers are removed from
   RES->regs since they need not be treated as live at TARGET.

   SET and NEEDED accumulate (by value, so each recursion level gets its
   own copy) the resources set and needed by the insns scanned so far.
   JUMP_COUNT bounds how many jumps this scan will follow.  If
   JUMP_TARGET is nonzero, it receives the label of the first
   unconditional jump followed.  Returns that jump insn, or zero if no
   unconditional jump was (cleanly) followed.  */

static rtx
find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
     rtx target;
     struct resources *res;
     rtx *jump_target;
     int jump_count;
     struct resources set, needed;
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, we don't know where
	 control goes next, so stop scanning here.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* At a label, registers with a pending death that are not also
	     needed become dead for real: drop them from RES->regs.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);
	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* A USE wrapping an entire insn: record what that inner
		 insn sets.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* Look inside the sequence for its jump insn, if any.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }

	default:
	  break;
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  /* Bound the total number of jumps followed at 10.  */
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  /* Unconditional jump (or return): continue the scan at
		     its target, and report the first such jump followed
		     to the caller.  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* Conditional jump: scan both the branch target and the
		     fall-through path, then keep a register in RES->regs
		     only if both paths agree it can be dropped.  Charge
		     extra jump budget to keep the recursion bounded.  */
		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, the delay-slot insns take
		     effect on only one of the two paths.  Temporarily flip
		     the INSN_FROM_TARGET_P flags (restoring them after) to
		     compute the SET seen on the target path separately
		     from the SET seen on the fall-through path.  */
		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Flip the annul flags back to their original
			 state.  */
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Compute the live set along the branch-target path.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* And along the fall-through path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);

		  /* A register stays in RES->regs only if it survives on
		     both paths.  */
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Too many jumps followed; don't claim a followed target.  */
	      jump_insn = 0;
	      break;
	    }
	}

      /* An ordinary insn: record what it needs and sets.  Registers set
	 before being needed are no longer live at TARGET.  */
      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
/* Given X, a piece of rtl, and RES, a pointer to a `struct resources',
   mark which resources X modifies.  IN_DEST is nonzero while we are
   inside a destination context (a SET_DEST, CLOBBER operand, etc.).
   MARK_TYPE selects how much to mark: MARK_DEST for destinations only,
   MARK_SRC_DEST for sources and destinations, and MARK_SRC_DEST_CALL
   to additionally include the full clobbering effects of CALL_INSNs.  */

void
mark_set_resources (x, res, in_dest, mark_type)
     rtx x;
     struct resources *res;
     int in_dest;
     enum mark_resource_type mark_type;
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Re-entry point used instead of a recursive call when X is replaced
     by an insn's pattern below.  */
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Under MARK_SRC_DEST_CALL, a call is assumed to clobber the
	 condition code, memory, all call-used and global registers,
	 anything explicitly CLOBBERed in its function usage, and -- if
	 it has a REG_SETJMP note -- every register.  */
      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r] || global_regs[r])
	      SET_HARD_REG_BIT (res->regs, r);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* A setjmp-like call may clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... fall through: also mark what the call's own pattern sets.  */

    case JUMP_INSN:
    case INSN:

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      /* Process the insn's pattern; bare USE/CLOBBER patterns are
	 skipped (the generic cases below handle real patterns).  */
      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of the SET is a CALL, the destination is written
	 by the called routine; only treat it as a destination here when
	 call effects are included (MARK_SRC_DEST_CALL).  */
      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      if (mark_type != MARK_DEST)
	mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      /* Mark each slot, skipping delay-slot insns that execute only
	 from the target of an annulled branch (element 0 is the branch
	 itself).  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* An auto-modify address writes the register it uses.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* The modified register is a destination; both operands of the
	 embedded arithmetic are sources.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* The extracted operand inherits the destination context; the
	 position and width operands are always sources.  */
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
	  mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
	}
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= RTX_UNCHANGING_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address itself is only a source, never set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	      /* Only hard registers are expected at this point.  */
	      if (last_regno > FIRST_PSEUDO_REGISTER)
		abort ();
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  unsigned int regno = REGNO (x);
	  unsigned int last_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  if (last_regno > FIRST_PSEUDO_REGISTER)
	    abort ();
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case STRICT_LOW_PART:
      if (! (mark_type == MARK_DEST && in_dest))
	{
	  mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
	  return;
	}
      /* ... otherwise fall through ...  */

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* Traverse the input operand vector explicitly; do not fall
	 through to the generic walk, which would misread the embedded
	 ASM_INPUT (see mark_referenced_resources).  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Generic walk: process each sub-expression.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
/* Compute the resources live at TARGET and store them in RES.

   If TARGET is zero, it refers to the end of the function and RES gets
   the precomputed end_of_function_needs.  Otherwise the live set is
   derived from the basic-block liveness data, updated by scanning the
   insns between the block head and TARGET, and then refined by
   find_dead_or_set_registers.  Results are cached in target_hash_table
   (keyed by INSN_UID) and validated with bb_ticks.  INSNS is the head
   of the insn chain, used when TARGET is in basic block zero.  */

void
mark_target_live_regs (insns, target, res)
     rtx insns;
     rtx target;
     struct resources *res;
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We always assume memory is needed; the CC never is.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have cached a result for this target already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Reuse the cached basic block number unless the entry was
	 invalidated (block == -1) or the block head was deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the cached registers are still current (same block, same
	     tick), use them directly.  Otherwise we recompute below and
	     overwrite the entry.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* No entry yet: allocate one and chain it into the table.  */
	  tinfo = (struct target_info *) xmalloc (sizeof (struct target_info));
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, start with its live-at-start set and
     simulate the insns from the block head up to TARGET.  Otherwise we
     must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
      unsigned int j;
      unsigned int regno;
      rtx start_insn, stop_insn;

      /* Hard registers live at the block start: those marked live
	 directly, plus the hard registers backing live pseudos that
	 have been renumbered to hard registers.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      EXECUTE_IF_SET_IN_REG_SET
	(regs_live, FIRST_PSEUDO_REGISTER, i,
	 {
	   if (reg_renumber[i] >= 0)
	     {
	       regno = reg_renumber[i];
	       for (j = regno;
		    j < regno + HARD_REGNO_NREGS (regno,
						  PSEUDO_REGNO_MODE (i));
		    j++)
		 SET_HARD_REG_BIT (current_live_regs, j);
	     }
	 });

      /* Determine the scan range, unwrapping SEQUENCEs at either end.  */
      start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;

	  /* Delay-slot insns that execute only from the branch target do
	     not execute on the path to TARGET; skip them.  */
	  if (INSN_FROM_TARGET_P (insn))
	    continue;

	  /* A USE wrapping a whole insn: simulate the inner insn.  */
	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* A call kills the call-clobbered registers, but global
		 registers remain live across it.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs,
				      regs_invalidated_by_call);

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* For real insns (not bare USE/CLOBBER patterns), jumps and
	     calls: queue REG_DEAD registers to die at the next label,
	     apply the stores, then clear registers with REG_UNUSED
	     notes.  */
	  if ((GET_CODE (real_insn) == INSN
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || GET_CODE (real_insn) == JUMP_INSN
	      || GET_CODE (real_insn) == CALL_INSN)
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      SET_HARD_REG_BIT (pending_dead_regs, i);
		  }

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int first_regno = REGNO (XEXP (link, 0));
		    unsigned int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      CLEAR_HARD_REG_BIT (current_live_regs, i);
		  }
	    }

	  else if (GET_CODE (real_insn) == CODE_LABEL)
	    {
	      /* At a label, the deferred deaths take effect.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);
	    }

	  /* At the start of the epilogue, the epilogue's implicit
	     requirements become live.  */
	  else if (GET_CODE (real_insn) == NOTE
		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* No basic block found: assume everything is live.  */
    SET_HARD_REG_SET (res->regs);

  /* Refine the set: registers set or dead before use after TARGET need
     not be considered live.  */
  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If the scan followed an unconditional jump, we can also use what is
     live at the jump's target, plus anything used but not set between
     TARGET and the jump; intersecting implicitly by OR-ing that set in
     keeps the result conservative.  */
  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (insns, next_active_insn (jump_target),
			     &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Collect registers used before being set on the way to the
	 jump.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, 1);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  /* Save the final answer in the cache.  */
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}
/* Initialize the resource tracking data used by mark_target_live_regs.
   Must be called before the first call to mark_target_live_regs.
   EPILOGUE_INSN marks where the epilogue begins; every insn after it is
   scanned to add what the epilogue sets to end_of_function_needs.  */

void
init_resource_info (epilogue_insn)
     rtx epilogue_insn;
{
  int i;

  /* Resources required at the end of the function: memory always is;
     the condition code never is.  */
  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      /* The stack pointer is needed unless the target says the exit
	 code ignores it (and it is known unchanging).  */
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* The registers carrying the return value are needed at the end.  */
  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  /* So are global registers and anything the epilogue uses.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* Snapshot the needs BEFORE adding what the epilogue insns set:
     these are the resources implicitly live at the start of the
     epilogue (used when NOTE_INSN_EPILOGUE_BEG is encountered in
     mark_target_live_regs).  */
  start_of_epilogue_needs = end_of_function_needs;

  /* Anything the epilogue insns themselves set is also required at the
     end of the rtl chain.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			MARK_SRC_DEST_CALL);

  /* Allocate the cache tables used by mark_target_live_regs.  */
  target_hash_table = (struct target_info **)
    xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
}
/* Release all memory allocated by init_resource_info: every chained
   target_info entry, the hash table itself, and the bb_ticks array.
   Safe to call more than once (pointers are reset to NULL).  */

void
free_resource_info ()
{
  int i;
  struct target_info *entry, *follow;

  if (target_hash_table != NULL)
    {
      /* Walk every hash chain and free each entry.  */
      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	for (entry = target_hash_table[i]; entry != 0; entry = follow)
	  {
	    follow = entry->next;
	    free (entry);
	  }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }
}
/* Invalidate any cached liveness information for INSN by marking its
   target_info entry (if one exists) as belonging to no block.  */

void
clear_hashed_info_for_insn (insn)
     rtx insn;
{
  struct target_info *tinfo;

  if (target_hash_table == NULL)
    return;

  /* Search INSN's hash chain for its entry.  */
  tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
  while (tinfo != 0 && tinfo->uid != INSN_UID (insn))
    tinfo = tinfo->next;

  /* block == -1 marks the entry as stale.  */
  if (tinfo != 0)
    tinfo->block = -1;
}
/* Bump the tick counter of the basic block containing INSN, which
   invalidates cached liveness entries computed for that block.  */

void
incr_ticks_for_insn (insn)
     rtx insn;
{
  int bnum = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (bnum >= 0)
    bb_ticks[bnum]++;
}
/* Add the resources referenced by TRIAL to end_of_function_needs so
   they are treated as needed at the end of the function.
   INCLUDE_DELAYED_EFFECTS is passed straight through to
   mark_referenced_resources.  */

void
mark_end_of_function_resources (trial, include_delayed_effects)
     rtx trial;
     int include_delayed_effects;
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}