/* tree-ssa-operands.c -- SSA operand management for trees.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "cgraph.h"
#include "langhooks.h"
#define opf_none 0
#define opf_is_def (1 << 0)
#define opf_kill_def (1 << 1)
#define opf_no_vops (1 << 2)
static GTY (()) varray_type build_defs;
static GTY (()) varray_type build_uses;
static GTY (()) varray_type build_v_may_defs;
static GTY (()) varray_type build_vuses;
static GTY (()) varray_type build_v_must_defs;
#ifdef ENABLE_CHECKING
tree check_build_stmt;
#endif
def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree, tree);
static void add_stmt_operand (tree *, tree, int);
/* Allocate a DEF operand vector with room for NUM entries.  The struct
   already contains space for one entry, hence the NUM - 1 in the size
   computation for the trailing array.  */
static inline def_optype
allocate_def_optype (unsigned num)
{
  unsigned bytes = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_optype ops = ggc_alloc (bytes);

  ops->num_defs = num;
  return ops;
}
/* Allocate a USE operand vector with room for NUM entries; the first
   slot is part of the struct itself, hence NUM - 1 extra slots.  */
static inline use_optype
allocate_use_optype (unsigned num)
{
  unsigned bytes = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_optype ops = ggc_alloc (bytes);

  ops->num_uses = num;
  return ops;
}
/* Allocate a V_MAY_DEF operand vector holding NUM def/use pairs; one
   pair lives inside the struct, so only NUM - 1 extra are needed.  */
static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  unsigned bytes = sizeof (struct v_may_def_optype_d)
		   + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_may_def_optype ops = ggc_alloc (bytes);

  ops->num_v_may_defs = num;
  return ops;
}
/* Allocate a VUSE operand vector with room for NUM trees; the struct
   carries the first slot, hence NUM - 1 in the size computation.  */
static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  unsigned bytes = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_optype ops = ggc_alloc (bytes);

  ops->num_vuses = num;
  return ops;
}
/* Allocate a V_MUST_DEF operand vector holding NUM def/kill pairs; the
   struct includes one pair, so NUM - 1 extra are allocated.  */
static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  unsigned bytes = sizeof (struct v_must_def_optype_d)
		   + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_must_def_optype ops = ggc_alloc (bytes);

  ops->num_v_must_defs = num;
  return ops;
}
/* Release the USE operand vector pointed to by *USES and clear the
   pointer.  Safe to call when *USES is already NULL.  */
static inline void
free_uses (use_optype *uses)
{
  if (*uses == NULL)
    return;
  ggc_free (*uses);
  *uses = NULL;
}
/* Release the DEF operand vector pointed to by *DEFS and clear the
   pointer.  Safe to call when *DEFS is already NULL.  */
static inline void
free_defs (def_optype *defs)
{
  if (*defs == NULL)
    return;
  ggc_free (*defs);
  *defs = NULL;
}
/* Release the VUSE operand vector pointed to by *VUSES and clear the
   pointer.  Safe to call when *VUSES is already NULL.  */
static inline void
free_vuses (vuse_optype *vuses)
{
  if (*vuses == NULL)
    return;
  ggc_free (*vuses);
  *vuses = NULL;
}
/* Release the V_MAY_DEF operand vector pointed to by *V_MAY_DEFS and
   clear the pointer.  Safe to call when already NULL.  */
static inline void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs == NULL)
    return;
  ggc_free (*v_may_defs);
  *v_may_defs = NULL;
}
/* Release the V_MUST_DEF operand vector pointed to by *V_MUST_DEFS and
   clear the pointer.  Safe to call when already NULL.  */
static inline void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs == NULL)
    return;
  ggc_free (*v_must_defs);
  *v_must_defs = NULL;
}
/* Initialize the scratch varrays used while building operand vectors.
   Must be called before any statement operands are scanned.  */
void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}
/* Dispose of the scratch varrays used for operand construction and
   clear the stale pointers so GC roots do not reference freed memory.  */
void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  build_defs = NULL;

  ggc_free (build_uses);
  build_uses = NULL;

  ggc_free (build_v_may_defs);
  build_v_may_defs = NULL;

  ggc_free (build_vuses);
  build_vuses = NULL;

  ggc_free (build_v_must_defs);
  build_v_must_defs = NULL;
}
/* Finalize the real definition operands gathered in BUILD_DEFS for STMT.
   If the new set matches *OLD_OPS_P, the old vector is recycled (and
   *OLD_OPS_P is cleared so the caller does not free it); otherwise a
   fresh vector is allocated.  Empties BUILD_DEFS before returning.  */
static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Compare the old vector element-by-element with the build array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      /* Identical: reuse the old vector; clearing *OLD_OPS_P keeps the
	 caller from freeing it.  */
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num ; x++)
	def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Finalize the real use operands gathered in BUILD_USES for STMT.
   Recycles *OLD_OPS_P when the set is unchanged (clearing the pointer),
   otherwise allocates a new vector.  Empties BUILD_USES on return.  */
static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
  }
#endif

  old_ops = *old_ops_p;

  /* Compare the old vector element-by-element with the build array.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      /* Identical: reuse the old vector and keep the caller from
	 freeing it.  */
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num ; x++)
	use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Finalize the V_MAY_DEF operands gathered in BUILD_V_MAY_DEFS.  The
   build array holds bare variables; old operands may hold SSA names, so
   comparison and copying go through SSA_NAME_VAR.  When an old entry
   matches, its def/use pair (with any SSA names) is preserved in the new
   vector.  Note: BUILD_V_MAY_DEFS is NOT emptied here -- it is still
   needed by finalize_ssa_vuses, which pops it.  */
static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same, comparing
     by underlying variable.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  var = old_ops->v_may_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_may_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      /* Identical set: reuse the old vector and prevent the caller from
	 freeing it.  */
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector so any existing SSA
	     names for it are carried over.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
		  break;
		}
	    }
	  /* Not found: start a fresh def/use pair on the bare variable.  */
	  if (i == old_num)
	    {
	      v_may_def_ops->v_may_defs[x].def = var;
	      v_may_def_ops->v_may_defs[x].use = var;
	    }
	}
    }

  return v_may_def_ops;
}
/* Finalize the VUSE operands gathered in BUILD_VUSES.  A V_MAY_DEF for a
   variable already implies a virtual use of it, so any VUSE that also
   appears in BUILD_V_MAY_DEFS is first removed as redundant.  Old SSA
   names are preserved for matching variables.  Empties both BUILD_VUSES
   and BUILD_V_MAY_DEFS before returning (this function must therefore be
   called after finalize_ssa_v_may_defs).  */
static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', a VUSE for 'a' is not needed
     because the V_MAY_DEF implies a use.  */
  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i, j;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  for (j = 0; j < num_v_may_defs; j++)
	    {
	      if (vuse == VARRAY_TREE (build_v_may_defs, j))
		break;
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the array (unless it was the last operand, in which case
	     it is simply popped).  */
	  if (j != num_v_may_defs)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, so decrement
		 I to counteract the loop increment.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether VUSEs are the same as the old vector, comparing by
     underlying variable.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num ; x++)
	{
	  tree v;
	  v = old_ops->vuses[x];
	  if (TREE_CODE (v) == SSA_NAME)
	    v = SSA_NAME_VAR (v);
	  if (v != VARRAY_TREE (build_vuses, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      /* Identical set: reuse the old vector and prevent the caller from
	 freeing it.  */
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Carry over any existing SSA name for VAR from the old
	     operands.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  vuse_ops->vuses[x] = old_ops->vuses[i];
		  break;
		}
	    }
	  if (i == old_num)
	    vuse_ops->vuses[x] = var;
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free both the vuse and v_may_def build vectors now.  */
  VARRAY_POP_ALL (build_vuses);
  VARRAY_POP_ALL (build_v_may_defs);

  return vuse_ops;
}
/* Finalize the V_MUST_DEF operands gathered in BUILD_V_MUST_DEFS for
   STMT.  Comparison with *OLD_OPS_P is by underlying variable so any
   existing SSA names are preserved; when the set is unchanged, the old
   vector is recycled.  Empties BUILD_V_MUST_DEFS on return.  */
static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single V_MUST_DEF per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same, comparing by
     underlying variable.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree var = old_ops->v_must_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_must_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      /* Identical set: reuse the old vector and prevent the caller from
	 freeing it.  */
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num ; x++)
	{
	  tree result, var = VARRAY_TREE (build_v_must_defs, x);
	  /* Carry over any existing def/kill pair for VAR from the old
	     operands.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_must_def_ops->v_must_defs[x].def = old_ops->v_must_defs[i].def;
		  v_must_def_ops->v_must_defs[x].use = old_ops->v_must_defs[i].use;
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_must_def_ops->v_must_defs[x].def = var;
	      v_must_def_ops->v_must_defs[x].use = var;
	    }
	}
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all operand vectors for STMT, moving the contents of the
   build arrays into NEW_OPS, recycling entries from OLD_OPS where the
   sets are unchanged.  Ordering matters: finalize_ssa_vuses must run
   last of the virtual operands because it reads (and then empties)
   BUILD_V_MAY_DEFS to prune redundant VUSEs.  */
static inline void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}
/* Start the process of building up operands vectors in INFO.  The build
   arrays must be empty here: a previous finalize pass should have
   drained them all.  */
static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands being built.  */
static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}
/* Add USE_P to the list of pointers to operands being built.  */
static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}
/* Add VAR to the set of V_MAY_DEF operands under construction,
   skipping it if it is already present.  */
static inline void
append_v_may_def (tree var)
{
  unsigned ix;
  unsigned count = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  /* Don't allow duplicate entries.  */
  for (ix = 0; ix < count; ix++)
    if (VARRAY_TREE (build_v_may_defs, ix) == var)
      return;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}
/* Add VAR to the set of VUSE operands under construction, skipping it
   if it is already present.  */
static inline void
append_vuse (tree var)
{
  size_t ix;
  size_t count = VARRAY_ACTIVE_SIZE (build_vuses);

  /* Don't allow duplicate entries.  */
  for (ix = 0; ix < count; ix++)
    if (VARRAY_TREE (build_vuses, ix) == var)
      return;

  VARRAY_PUSH_TREE (build_vuses, var);
}
/* Add VAR to the set of V_MUST_DEF operands under construction,
   skipping it if it is already present.  */
static inline void
append_v_must_def (tree var)
{
  unsigned ix;
  unsigned count = VARRAY_ACTIVE_SIZE (build_v_must_defs);

  /* Don't allow duplicate entries.  */
  for (ix = 0; ix < count; ix++)
    if (VARRAY_TREE (build_v_must_defs, ix) == var)
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   structure if one exists.  Fills NEW_OPS with the freshly built operand
   vectors.  ANN (may be NULL) is installed temporarily as the statement's
   annotation while scanning so that volatile/alias flags land on it; the
   original annotation is restored before returning.  */
void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* Scan the RHS first; then decide how to classify the store to the
	 LHS.  A store to part of an aggregate, or one whose RHS may
	 throw, must not be a killing definition.  */
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will abort in append_use.  This default will handle statements
	 like empty statements, CALL_EXPRs or VA_ARG_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);

  /* Restore the original annotation.  */
  stmt->common.ann = saved_ann;
}
/* Free any operand vectors in OPS and clear their pointers.  Each
   free_* helper already tests its argument for NULL and resets it, so
   no guards are needed here (the previous version wrapped each call in
   a redundant `if (ops->...)' check).  */
static void
free_ssa_operands (stmt_operands_p ops)
{
  free_defs (&(ops->def_ops));
  free_uses (&(ops->use_ops));
  free_vuses (&(ops->vuse_ops));
  free_v_may_defs (&(ops->v_may_def_ops));
  free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Rebuilds the operand caches only
   when the statement has been marked modified; otherwise this is a
   no-op.  */
void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still
     valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  /* Save the existing operands aside, then rebuild into ann->operands;
     finalize_* will recycle matching vectors out of OLD_OPERANDS and the
     rest is freed below.  */
  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified again.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
/* Return true when the function called by EXPR is a built-in known not
   to read or clobber memory (so no virtual operands are needed for the
   call).  Returns false when the callee is unknown or not built-in.  */
static bool
function_ignores_memory_p (tree expr)
{
  tree fndecl = get_callee_fndecl (expr);

  if (!fndecl || !DECL_BUILT_IN (fndecl))
    return false;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_PREFETCH:
    case BUILT_IN_MAYBE_INFINITE_LOOP:
      return true;

    default:
      return false;
    }
}
/* Recursively scan the expression pointed to by EXPR_P in STMT.  FLAGS
   is a combination of OPF_* constants modifying how the operands found
   are to be interpreted (definition vs. use, killing vs. preserving
   definition, virtual operands suppressed).

   Fix: the WITH_SIZE_EXPR stripping in the MODIFY_EXPR case previously
   re-read TREE_OPERAND (expr, 0) instead of TREE_OPERAND (op, 0), so the
   strip was a no-op and a WITH_SIZE_EXPR-wrapped LHS was always treated
   as a killing definition.  */
static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address
	 will be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects
	 are not really accessed; any operands below are real operands
	 (e.g. ARRAY_REF indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* A bare variable reference: add it to defs or uses according to
	 FLAGS.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* Fall through: the pointed-to reference is handled like the
	 other indirect references.  */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array (the VAR_DECL of the array), and the
	 indices/element size as plain uses.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* As with arrays, references into compound variables (structures
	 and complex values) are globbed to the whole variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	/* Strip a WITH_SIZE_EXPR wrapper to classify the real
	   destination.  (Was `TREE_OPERAND (expr, 0)', which made the
	   strip a no-op.)  */
	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);

	/* A store to part of an aggregate does not kill the whole
	   object, so it must not be a killing definition.  */
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use by vectors: scan each element value.  */
	tree t;
	for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* Canonicalize operand order for commutative operations and
	   comparisons (the latter also need their code swapped).  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

      /* Expressions that make no memory references.  */
    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
/* Scan operands in the ASM_EXPR STMT: outputs become definitions,
   inputs become uses, and a "memory" clobber makes the asm a definition
   of every call-clobbered and addressable variable (or GLOBAL_VAR when
   it exists).  Variables constrained to memory get their address-taken
   status recorded.  */
static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  /* Gather all output operands.  */
  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, stmt, opf_is_def);
	  }

	break;
      }
}
/* Scan operands for INDIRECT_REF EXPR occurring in STMT: add the proper
   memory tag for the dereferenced pointer (or fall back appropriately),
   then scan the pointer expression itself as a plain use.  */
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (REF_ORIGINAL (expr))
    {
      enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));

      /* If we originally accessed part of a structure, we do it still.
	 (NOTE(review): this clear looks redundant given the
	 unconditional clear just above -- confirm intent.)  */
      if (ocode == ARRAY_REF
	  || ocode == COMPONENT_REF
	  || ocode == REALPART_EXPR
	  || ocode == IMAGPART_EXPR)
	flags &= ~opf_kill_def;
    }

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use its name
	 memory tag.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  add_stmt_operand (&pi->name_mem_tag, stmt, flags);
	}
      else
	{
	  /* Otherwise fall back to the type memory tag of the pointer's
	     underlying variable.  */
	  var_ann_t ann;

	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		  "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  ann = var_ann (ptr);
	  if (ann->type_mem_tag)
	    add_stmt_operand (&ann->type_mem_tag, stmt, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (ann)
	ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to break things.  Handle
     &ADDR + OFFSET forms here.  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* Scan operands in the CALL_EXPR EXPR found in STMT: callee, argument
   list and static chain, followed by the side-effect operands implied by
   the call (clobbers or reads of global memory), unless aliasing info is
   not yet available or the callee is known to ignore memory.  */
static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);
  tree callee = get_callee_fndecl (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);

  /* An empty call-clobbered bitmap indicates aliases have not yet been
     computed, in which case virtual operands are not added here.  */
  if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
    {
      /* A 'pure' or a 'const' functions never call clobber anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (function_ignores_memory_p (expr))
	;
      else if (TREE_SIDE_EFFECTS (expr)
	       && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, callee);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, callee);
    }
}
/* Add *VAR_P to the appropriate operand array for STMT.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added
   to the DEFS/USES arrays; otherwise it is added to the virtual operand
   arrays (V_MAY_DEF, V_MUST_DEF or VUSE), expanding through any
   may-alias set the variable carries.  */
static void
add_stmt_operand (tree *var_p, tree stmt, int flags)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann = stmt_ann (stmt);
  var_ann_t v_ann;

  var = *var_p;
  if (!var)
    return;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a GIMPLE register, the statement
     makes a memory reference; get the virtual variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* A real operand: a plain register def or use.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases)
	 to virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Alias tagged vars get regular V_MAY_DEF.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
		  append_v_must_def (var);
		}
	      else
		{
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself when it
		 is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Record that VAR had its address taken in the statement annotated with
   S_ANN, setting the variable's UID in the addresses_taken bitmap
   (allocated on first use).  Does nothing without an annotation or when
   VAR has no SSA base.  */
static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  tree base;

  if (s_ann == NULL)
    return;

  base = get_base_address (var);
  if (base == NULL || !SSA_VAR_P (base))
    return;

  if (s_ann->addresses_taken == NULL)
    s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (s_ann->addresses_taken, var_ann (base)->uid);
}
/* Add clobbering definitions for every call-clobbered variable to the
   operands of STMT (or just GLOBAL_VAR when variables were grouped).
   CALLEE could be used to consult static not-read/not-written bitmaps,
   but that path is currently disabled (the `if (0 && ...)' below).  */
static void
add_call_clobber_ops (tree stmt, tree callee)
{
  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (stmt_ann (stmt))
    stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def);
  else
    {
      size_t i;
      bitmap not_read_b = NULL, not_written_b = NULL;
      bitmap_iterator bi;

      /* NOTE(review): deliberately disabled -- the static read/write
	 information for CALLEE is never consulted, so NOT_READ and
	 NOT_WRITTEN below are always false.  Confirm before enabling.  */
      if (0 && callee)
	{
	  not_read_b = get_global_statics_not_read (callee);
	  not_written_b = get_global_statics_not_written (callee);
	}

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  tree var = referenced_var (i);

	  bool not_read
	    = not_read_b ? bitmap_bit_p (not_read_b, i) : false;
	  bool not_written
	    = not_written_b ? bitmap_bit_p (not_written_b, i) : false;

	  if (not_read)
	    {
	      /* The var is not read during the call.  */
	      if (!not_written)
		add_stmt_operand (&var, stmt, opf_is_def);
	    }
	  else
	    {
	      /* The var is read during the call.  */
	      if (not_written)
		add_stmt_operand (&var, stmt, opf_none);

	      /* Read-only statics and globals cannot be written to by
		 the call, so only a use is needed.  */
	      else if (TREE_READONLY (var)
		       && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
		add_stmt_operand (&var, stmt, opf_none);
	      else
		add_stmt_operand (&var, stmt, opf_is_def);
	    }
	}
    }
}
/* Add VUSE operands for every call-clobbered variable that CALLEE may
   read (or just GLOBAL_VAR when variables were grouped) to the operands
   of STMT.  Used for calls that read but do not clobber memory (e.g.
   pure functions).  */
static void
add_call_read_ops (tree stmt, tree callee)
{
  bitmap_iterator bi;

  /* Otherwise, if the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none);
  else
    {
      size_t i;
      /* When the callee is known, skip global statics it never reads.  */
      bitmap not_read_b = callee
	? get_global_statics_not_read (callee) : NULL;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  bool not_read = not_read_b
	    ? bitmap_bit_p(not_read_b, i) : false;
	  if (!not_read)
	    add_stmt_operand (&var, stmt, opf_none);
	}
    }
}
/* Copy all the virtual operand vectors (VUSE, V_MAY_DEF, V_MUST_DEF)
   from SRC to DST.  Assumes DST has no virtual operand vectors of its
   own yet -- the destination pointers are overwritten, not freed.  */
void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	{
	  SET_V_MUST_DEF_RESULT (*v_must_defs_new, i, V_MUST_DEF_RESULT (v_must_defs, i));
	  SET_V_MUST_DEF_KILL (*v_must_defs_new, i, V_MUST_DEF_KILL (v_must_defs, i));
	}
    }
}
/* Set up NEW_STMT as an "artificial" load whose virtual uses mirror the
   virtual definitions recorded in OLD_OPS: for each V_MAY_DEF or
   V_MUST_DEF result in OLD_OPS, a VUSE is created on NEW_STMT.  Real
   operands of NEW_STMT are built normally.  (Name keeps the historical
   "artficial" spelling -- it is part of the public interface.)  */
void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  /* Build the real operands; discard the virtual ones so they can be
     replaced by VUSEs derived from OLD_OPS.  */
  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}
#include "gt-tree-ssa-operands.h"