#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Round VALUE up to the next multiple of ALIGN (assumed to be a power
   of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
/* One remembered pairing of a hard register with the pseudo that holds
   a copy of its value on entry to the function.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;	/* The hard register whose entry value is wanted.  */
  rtx pseudo;	/* Pseudo register that captures that entry value.  */
} initial_value_pair;
/* Growable table of initial-value pairs, hung off a struct function as
   hard_reg_initial_vals.  */
typedef struct initial_value_struct GTY(()) {
  int num_entries;	/* Number of entries currently in use.  */
  int max_entries;	/* Allocated capacity of ENTRIES.  */
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
/* Return the CODE_LABEL rtx recorded in MAP for label number I,
   creating and caching a fresh label on first use.  */
rtx
get_label_from_map (struct inline_remap *map, int i)
{
  if (map->label_map[i] == NULL_RTX)
    map->label_map[i] = gen_label_rtx ();

  return map->label_map[i];
}
/* Return true if FNDECL may be inlined as far as its attributes are
   concerned.  If any of the decl's attributes is known to the target,
   defer the decision to the target hook; otherwise inlining is fine.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  tree attr;

  if (targetm.attribute_table == NULL)
    return true;

  for (attr = DECL_ATTRIBUTES (fndecl); attr != NULL_TREE;
       attr = TREE_CHAIN (attr))
    {
      int ix;

      /* Is this attribute one the target knows about?  */
      for (ix = 0; targetm.attribute_table[ix].name != NULL; ix++)
	if (is_attribute_p (targetm.attribute_table[ix].name,
			    TREE_PURPOSE (attr)))
	  return targetm.function_attribute_inlinable_p (fndecl);
    }

  return true;
}
/* Copy DECL, a declaration belonging to FROM_FN, for use in TO_FN as
   part of inlining.  Parameters and the result slot are demoted to
   plain VAR_DECLs; all other decls are duplicated wholesale.  The copy
   is concrete (not abstract) and remembers DECL as its origin.  */
tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* In the inlined copy, a parameter or return slot is just an
	 ordinary local variable of the same type.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The copy is not abstract even if the original was.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* Non-static, non-external decls need fresh RTL in the new context.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);
  TREE_USED (copy) = 1;

  /* Only non-static locals whose context is FROM_FN migrate into
     TO_FN; everything else keeps its original context.  */
  if (DECL_CONTEXT (decl)
      && DECL_CONTEXT (decl) == from_fn
      && !TREE_STATIC (decl))
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
/* Table (indexed by register number) of constant equivalences that
   mark_stores invalidates when a register is overwritten.  */
varray_type global_const_equiv_varray;
/* Make a copy of ORIG for use while inlining, substituting registers,
   labels and constant-pool references according to MAP.  FOR_LHS is
   nonzero when ORIG appears as the destination of a SET.  Returns the
   (possibly shared) copy.  */
rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* Hard and virtual registers go through reg_map first; pseudos
	 are handled after this block.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* The virtual frame pointer: allocate space in the current
	     function's frame big enough for the inlined function's
	     frame and map the register to that block's address.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* Round the frame size up so the substitute frame is
		 aligned like a real frame pointer would be.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* With a downward-growing frame the virtual register
		 points one byte past the top, so offset by SIZE.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      /* The address computation is emitted at the start of the
		 inlined body, before any copied insns.  */
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }

	  /* The virtual incoming-args pointer: same treatment, using
	     the inlined function's argument block size.  */
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef ARGS_GROW_DOWNWARD
	      /* With downward-growing args, offset to the block top.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));
#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif
	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else
	    /* Other hard registers are returned unchanged.  */
	    return orig;

	  /* Not reached: every branch above returns.  */
	  abort ();
	}

      /* Pseudo register: allocate a fresh pseudo on first use,
	 copying the user-variable and pointer attributes.  */
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but (use (subreg foo)) is
	 simplified to (use foo) if the original had no subreg.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    case NOTE:
      /* Deleted-label notes are remapped like labels; all other notes
	 fall through to the generic copy code below.  */
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* A previously nonlocal label may now be local: check whether
	 the label number falls within this function's label range.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      /* These are shared and need no remapping.  */
      return orig;

    case SYMBOL_REF:
      /* A symbol addressing a constant-pool entry that holds a
	 LABEL_REF must be redirected to a pool entry for the remapped
	 label; other symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);

	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* Rebuild the CONST_DOUBLE so it goes through the normal
	 uniquifying path rather than being shared with the original
	 function.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* A multi-output asm contains several ASM_OPERANDS sharing one
	 input vector and one constraint vector.  If this one shares
	 the vector recorded in MAP, build the copy by hand so the
	 copies keep sharing too.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* The first operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse; build the MEM directly here
	 so that does not happen when function-address cse is off.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
    case RETURN:
      abort ();
#endif

    case SET:
      /* Storing into the virtual stack-vars or incoming-args pointer
	 (e.g. for a nonlocal goto) must compensate for the offset
	 between the remapped stack block and the original virtual
	 register, recorded in the const-equiv table.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  /* Make sure a translation exists, then look it up.  */
	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  /* Generic path: duplicate the node and recurse on each operand
     according to the rtx format string.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Insn references point to the copied insns via insn_map.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  /* Record the first ASM_OPERANDS copied, so later ASM_OPERANDS of
     the same insn (recognized above) share its copied vectors.  */
  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Substitute known constant equivalents (from MAP) into INSN and then
   record any constant-valued stores the insn makes, so that following
   insns can use them.  */
void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First substitute only inside memory addresses, then everywhere.
     Splitting it this way lets an address update succeed even when
     the machine cannot accept a constant as the insn's source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Keep the function-usage list of a call consistent too.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Forget the recorded value of anything this insn stores or
     clobbers.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Enter the constant equivalences established by this insn:
     register destinations go into the const-equiv table, pc/cc0
     destinations into the dedicated MAP fields.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute constants recorded in MAP into the rtx at *LOC, which is
   part of INSN, and try to simplify the result.  If MEMONLY is
   nonzero, substitute only inside memory addresses.  All replacements
   go through validate_change so an invalid group can be cancelled;
   constant-valued SETs are queued in MAP->equiv_sets for
   try_constants to record afterwards.  */
static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      /* Already constant: nothing to substitute.  */
      return;

#ifdef HAVE_cc0
    case CC0:
      /* A use of cc0 is replaced by the value most recently stored
	 into it, as recorded by try_constants.  */
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* Only the address of a MEM inside a USE/CLOBBER can take
	 substitutions.  */
      if (MEM_P (XEXP (x, 0)))
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute the recorded equivalent, if it is still current
	 (age check) — but never replace a hard register used as a
	 user variable.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* A SUBREG of a REG needs special care: we cannot substitute at
	 &SUBREG_REG (x) directly because a constant or SUBREG would
	 not be valid there.  Substitute into a detached copy of the
	 inner rtx and try to re-form the subreg.  A SUBREG of
	 anything else is treated as ordinary below.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  subst_constants (&inner, NULL_RTX, map, 0);

	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    /* The simplified subreg was not representable; undo any
	       changes made while processing the inner part.  */
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If substitution spoiled the address's validity, back it out.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute inside the source and inside the arguments of a
	   complex destination (e.g. ZERO_EXTRACT), but never replace
	   the destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If the source is a COMPARE whose operands determine the
	   comparison mode, remember that mode in case both operands
	   later become VOIDmode constants and we want to fold it.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	/* Dig through wrappers around the real destination, doing
	   substitutions in ZERO_EXTRACT position/length operands.  */
	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* A memory destination can take substitutions in its address.  */
	if (MEM_P (*dest_loc))
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* A narrow-enough SUBREG destination is really a store of the
	   source converted to the inner register's mode: record the
	   equivalence against the underlying register if the lowpart
	   conversion is possible.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* Queue the store for try_constants if the source is a value
	   we know how to track: a constant, a virtual register (plus
	   optional constant), a COMPARE, or a jump target.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Copy SRC so that a COMPARE is saved with any constants
	       already substituted, which is what we want later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		/* Both compare operands folded to VOIDmode constants;
		   remember the copy and the mode so an IF_THEN_ELSE
		   on it can still be folded (see below).  */
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* Save the mode of the first operand, which simplification of
     unary/ternary codes below needs.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Recurse into every expression and vector operand.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
	  break;

	default:
	  abort ();
	}
    }

  /* For a commutative operation, canonicalize by moving a constant to
     the second operand, unless that operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Try to fold the expression now that constants may be in place.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    /* An IF_THEN_ELSE whose condition compares the recorded
	       VOIDmode COMPARE can be folded using the comparison
	       mode remembered in the SET case above.  */
	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }
	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* note_stores callback: DEST is a location stored into by the current
   insn.  Invalidate the recorded constant equivalence of every
   register word the store can overwrite in global_const_equiv_varray.
   X and DATA are unused.  */
static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is either a plain REG or a SUBREG of one; anything else
     (e.g. a MEM) leaves regno at -1 and is ignored below.  */
  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      /* For a hard register, step to the actual first register the
	 subreg touches.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      /* A hard register may span several consecutive registers;
	 pseudos occupy exactly one entry.  */
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* The virtual stack-vars and incoming-args registers are
	 managed separately and keep their equivalences.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
/* Make BLOCK and everything it contains its own abstract origin, but
   only if no origin has been set yet (an existing origin means the
   block was inlined from elsewhere and must keep pointing there).  */
static void
set_block_origin_self (tree stmt)
{
  tree t;

  if (BLOCK_ABSTRACT_ORIGIN (stmt) != NULL_TREE)
    return;

  BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

  for (t = BLOCK_VARS (stmt); t != NULL_TREE; t = TREE_CHAIN (t))
    set_decl_origin_self (t);

  for (t = BLOCK_SUBBLOCKS (stmt); t != NULL_TREE; t = BLOCK_CHAIN (t))
    set_block_origin_self (t);
}
/* Make DECL its own abstract origin — and, for a FUNCTION_DECL, do the
   same for its arguments and outermost block — unless an origin is
   already recorded (meaning the decl came from somewhere else).  */
void
set_decl_origin_self (tree decl)
{
  tree parm;

  if (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE)
    return;

  DECL_ABSTRACT_ORIGIN (decl) = decl;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  for (parm = DECL_ARGUMENTS (decl); parm; parm = TREE_CHAIN (parm))
    DECL_ABSTRACT_ORIGIN (parm) = parm;

  if (DECL_INITIAL (decl) != NULL_TREE
      && DECL_INITIAL (decl) != error_mark_node)
    set_block_origin_self (DECL_INITIAL (decl));
}
/* Set the BLOCK_ABSTRACT flag of STMT, its variables, and all of its
   subblocks (recursively) to SETTING.  */
static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree t;

  BLOCK_ABSTRACT (stmt) = setting;

  for (t = BLOCK_VARS (stmt); t != NULL_TREE; t = TREE_CHAIN (t))
    set_decl_abstract_flags (t, setting);

  for (t = BLOCK_SUBBLOCKS (stmt); t != NULL_TREE; t = BLOCK_CHAIN (t))
    set_block_abstract_flags (t, setting);
}
/* Set DECL_ABSTRACT on DECL to SETTING; for a FUNCTION_DECL, also mark
   its arguments and (via set_block_abstract_flags) its body blocks.  */
void
set_decl_abstract_flags (tree decl, int setting)
{
  tree parm;

  DECL_ABSTRACT (decl) = setting;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  for (parm = DECL_ARGUMENTS (decl); parm; parm = TREE_CHAIN (parm))
    DECL_ABSTRACT (parm) = setting;

  if (DECL_INITIAL (decl) != NULL_TREE
      && DECL_INITIAL (decl) != error_mark_node)
    set_block_abstract_flags (DECL_INITIAL (decl), setting);
}
/* Reverse lookup in FUN's initial-value table: given the pseudo REG,
   return the hard register whose entry value it holds, or NULL_RTX if
   REG is not such a pseudo.  */
rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int idx;

  if (ivs != 0)
    for (idx = 0; idx < ivs->num_entries; idx++)
      if (rtx_equal_p (ivs->entries[idx].pseudo, reg))
	return ivs->entries[idx].hard_reg;

  return NULL_RTX;
}
/* If FUN already has a pseudo recorded for the entry value of hard
   register REG, return that pseudo; otherwise return NULL_RTX.  */
rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int idx;

  if (ivs != 0)
    for (idx = 0; idx < ivs->num_entries; idx++)
      if (rtx_equal_p (ivs->entries[idx].hard_reg, reg))
	return ivs->entries[idx].pseudo;

  return NULL_RTX;
}
/* Return the pseudo holding the entry value of hard register REG in
   FUN, creating both the table entry and the pseudo on first request.
   The table grows in chunks of five entries.  */
rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs;
  initial_value_pair *slot;
  rtx existing = has_func_hard_reg_initial_val (fun, reg);

  if (existing != NULL_RTX)
    return existing;

  ivs = fun->hard_reg_initial_vals;
  if (ivs == 0)
    {
      /* First request for this function: build a fresh table.  */
      ivs = ggc_alloc (sizeof (initial_value_struct));
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
      fun->hard_reg_initial_vals = ivs;
    }
  else if (ivs->num_entries >= ivs->max_entries)
    {
      /* Table is full; extend it.  */
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  slot = &ivs->entries[ivs->num_entries++];
  slot->hard_reg = reg;
  slot->pseudo = gen_reg_rtx (GET_MODE (reg));
  return slot->pseudo;
}
/* Convenience wrapper: get (creating if needed) the pseudo for the
   entry value of hard register REGNO in mode MODE, in cfun.  */
rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  rtx hard_reg = gen_rtx_REG (mode, regno);

  return get_func_hard_reg_initial_val (cfun, hard_reg);
}
/* Convenience wrapper: return cfun's recorded pseudo for the entry
   value of hard register REGNO in mode MODE, or NULL_RTX if none.  */
rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  rtx hard_reg = gen_rtx_REG (mode, regno);

  return has_func_hard_reg_initial_val (cfun, hard_reg);
}
/* Emit, at the start of the current function, one move per recorded
   initial-value pair copying the hard register into its pseudo.  */
void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  rtx insns;
  int idx;

  if (ivs == 0)
    return;

  start_sequence ();
  for (idx = 0; idx < ivs->num_entries; idx++)
    emit_move_insn (ivs->entries[idx].pseudo, ivs->entries[idx].hard_reg);
  insns = get_insns ();
  end_sequence ();

  emit_insn_after (insns, entry_of_function ());
}
/* If the target defines ALLOCATE_INITIAL_VALUE, let it pre-allocate
   each initial-value pseudo: to a stack slot (recorded in
   REG_EQUIV_MEMORY_LOC), directly to a hard register (recorded in
   reg_renumber), or not at all (NULL).  Pseudos set more than once
   are left for the normal allocator.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      /* No target preference, or the pseudo is set again later:
	 leave it to the register allocator.  */
      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	;
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Rewrite the pseudo's register number in place so that
	     even fixed registers are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else abort ();
    }
#endif
}
#include "gt-integrate.h"