#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "cfgloop.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
/* Auxiliary information attached to an edge (via e->aux) recording
   equivalences which hold when that edge is traversed.  */
struct edge_info
{
  /* If LHS is non-NULL, then traversing this edge establishes the
     equivalence LHS = RHS.  */
  tree lhs;
  tree rhs;
  /* Flat array of <condition, value> pairs known to hold on this edge.
     MAX_COND_EQUIVALENCES is the number of array slots used, i.e.
     twice the number of pairs (see record_conditions).  */
  tree *cond_equivalences;
  unsigned int max_cond_equivalences;
  /* Requested jump-threading redirection target for this edge, if any
     (consumed via e->aux in free_all_edge_infos).  */
  edge redirection_target;
};
/* Hash table of expressions made available during the SSA walk so that
   dominated blocks can look them up and eliminate redundancies.  */
static htab_t avail_exprs;
/* Unwinding stack for AVAIL_EXPRS: expressions recorded in the current
   block are popped (and removed from the hash table) when the block is
   finalized.  A NULL_TREE entry marks a block boundary.  */
static VEC(tree_on_heap) *avail_exprs_stack;
/* Unwinding stack of current-definition changes, used by
   restore_currdefs_to_original_value.  NULL_TREE marks a block
   boundary.  */
static VEC(tree_on_heap) *block_defs_stack;
/* Statements whose operands changed and which must be passed to
   mark_new_vars_to_rename when their block is finalized.  */
static VEC(tree_on_heap) *stmts_to_rescan;
/* Entry in the AVAIL_EXPRS hash table.  */
struct expr_hash_elt
{
  /* The value (LHS) associated with this expression.  */
  tree lhs;
  /* The expression (RHS) we are recording.  */
  tree rhs;
  /* The statement annotation when this element corresponds to a full
     statement (NULL for bare comparisons; see
     initialize_hash_element).  */
  stmt_ann_t ann;
  /* Cached hash value computed by avail_expr_hash.  */
  hashval_t hash;
};
/* Unwinding stack of <previous value, name> pairs used by
   restore_vars_to_original_value.  NULL marks a block boundary.  */
static VEC(tree_on_heap) *const_and_copies_stack;
/* Bitmap of SSA_NAME versions known to be nonzero even when the exact
   value is unknown.  */
static bitmap nonzero_vars;
/* Unwinding stack of SSA_NAMEs whose NONZERO_VARS bit must be cleared
   when the current block is finalized.  NULL marks a block boundary.  */
static VEC(tree_on_heap) *nonzero_vars_stack;
/* Set when this pass changes the control flow graph; drives the
   reiteration loop in tree_ssa_dominator_optimize.  */
static bool cfg_altered;
/* Bitmap of basic blocks which may need their dead EH edges purged.  */
static bitmap need_eh_cleanup;
/* Statistics accumulated by this pass and reported by
   dump_dominator_optimization_stats.  */
struct opt_stats_d
{
  /* Total statements examined.  */
  long num_stmts;
  /* Expressions considered for redundancy elimination.  */
  long num_exprs_considered;
  /* Redundant expressions actually eliminated.  */
  long num_re;
};
static struct opt_stats_d opt_stats;
/* Value range propagation record for a conditional.  */
struct vrp_element
{
  /* Lowest and highest values implied for the variable by COND
     (computed lazily; see extract_range_from_cond).  */
  tree low;
  tree high;
  /* The conditional expression that was recorded.  */
  tree cond;
  /* The basic block where this record was created; used during block
     finalization to pop only this block's records.  */
  basic_block bb;
};
/* Hash table mapping a variable to its stack of vrp_element records
   (see struct vrp_hash_elt below).  */
static htab_t vrp_data;
/* Entry in the VRP_DATA hash table: a variable and the varray of
   vrp_element records accumulated for it.  */
struct vrp_hash_elt
{
  tree var;
  varray_type records;
};
/* Unwinding stack naming variables whose VRP records must be popped
   when the current block is finalized.  NULL marks a block boundary.  */
static VEC(tree_on_heap) *vrp_variables_stack;
/* A simple SRC/DST equivalence pair.  NOTE(review): no use of this
   struct is visible in this portion of the file — confirm against the
   full source before assuming it is dead.  */
struct eq_expr_value
{
  tree src;
  tree dst;
};
static void optimize_stmt (struct dom_walk_data *,
basic_block bb,
block_stmt_iterator);
static tree lookup_avail_expr (tree, bool);
static hashval_t vrp_hash (const void *);
static int vrp_eq (const void *, const void *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
tree, int);
static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, int);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (struct dom_walk_data *,
tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static void restore_currdefs_to_original_value (void);
static void register_definitions_for_stmt (tree);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void restore_nonzero_vars_to_original_value (void);
static inline bool unsafe_associative_fp_binop (tree);
/* Fold tree T and strip any useless type conversions that folding may
   have introduced, returning the simplified tree.  */
static tree
local_fold (tree t)
{
  tree folded = fold (t);

  STRIP_USELESS_TYPE_CONVERSION (folded);
  return folded;
}
/* Allocate a zero-initialized edge_info structure, attach it to edge
   E's AUX field, and return it.  */
static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *info = xcalloc (1, sizeof (struct edge_info));

  e->aux = info;
  return info;
}
/* Free the edge_info structure attached to every incoming edge of
   every basic block, restoring each edge's AUX field to the recorded
   jump-threading redirection target (or NULL).  */
static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  struct edge_info *edge_info = e->aux;

	  if (edge_info)
	    {
	      /* Preserve the requested redirection target for the jump
		 threader before releasing the structure.  */
	      e->aux = edge_info->redirection_target;
	      /* free (NULL) is a no-op, so the cond_equivalences array
		 needs no NULL guard.  */
	      free (edge_info->cond_equivalences);
	      free (edge_info);
	    }
	}
    }
}
/* Main entry point for the tree SSA dominator optimization pass.
   Walks the dominator tree propagating constants and copies,
   eliminating redundant expressions, and threading jumps; at
   optimization levels above 1 it iterates until the CFG stops
   changing.  */
static void
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  struct loops *loops;
  unsigned int i;

  loops = loop_optimizer_init (NULL);

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Clear the current definition of every referenced variable.  */
  for (i = 0; i < num_referenced_vars; i++)
    var_ann (referenced_var (i))->current_def = NULL;

  /* Create the hash tables and unwinding stacks used by the walk.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
  avail_exprs_stack = VEC_alloc (tree_on_heap, 20);
  block_defs_stack = VEC_alloc (tree_on_heap, 20);
  const_and_copies_stack = VEC_alloc (tree_on_heap, 20);
  nonzero_vars_stack = VEC_alloc (tree_on_heap, 20);
  vrp_variables_stack = VEC_alloc (tree_on_heap, 20);
  stmts_to_rescan = VEC_alloc (tree_on_heap, 20);
  nonzero_vars = BITMAP_ALLOC (NULL);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Set up the dominator walker callbacks: initialize each block,
     optimize its statements, record equivalences on outgoing edges,
     and unwind state when leaving the block.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* GLOBAL_DATA caches a scratch COND_EXPR shared by the
     simplification routines.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  do
    {
      cfg_altered = false;

      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

      /* Rewrite any symbols whose SSA web was invalidated by the walk.  */
      if (!bitmap_empty_p (vars_to_rename))
	{
	  rewrite_into_ssa (false);
	  bitmap_clear (vars_to_rename);
	}

      free_all_edge_infos ();

      /* Perform the jump threading requested during the walk.  */
      cfg_altered = thread_through_all_blocks ();

      if (!bitmap_empty_p (need_eh_cleanup))
	{
	  cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
	  bitmap_zero (need_eh_cleanup);
	}

      /* Dominance info is stale after threading; rebuild it after
	 cleaning up the CFG.  */
      free_dominance_info (CDI_DOMINATORS);
      cfg_altered = cleanup_tree_cfg ();
      calculate_dominance_info (CDI_DOMINATORS);

      rewrite_ssa_into_ssa ();

      /* Reset per-iteration state before walking again.  */
      bitmap_clear (nonzero_vars);
      htab_empty (avail_exprs);
      htab_empty (vrp_data);

      for (i = 0; i < num_referenced_vars; i++)
	var_ann (referenced_var (i))->current_def = NULL;

      /* Drop any non-invariant cached SSA_NAME values; only constants
	 remain valid across iterations.  */
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  tree value;

	  if (!name)
	    continue;

	  value = SSA_NAME_VALUE (name);
	  if (value && !is_gimple_min_invariant (value))
	    SSA_NAME_VALUE (name) = NULL;
	}
    }
  while (optimize > 1 && cfg_altered);

  loop_optimizer_finalize (loops, NULL);

  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* Release all allocated resources.  */
  htab_delete (avail_exprs);
  htab_delete (vrp_data);

  fini_walk_dominator_tree (&walk_data);

  BITMAP_FREE (nonzero_vars);
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (tree_on_heap, block_defs_stack);
  VEC_free (tree_on_heap, avail_exprs_stack);
  VEC_free (tree_on_heap, const_and_copies_stack);
  VEC_free (tree_on_heap, nonzero_vars_stack);
  VEC_free (tree_on_heap, vrp_variables_stack);
  VEC_free (tree_on_heap, stmts_to_rescan);
}
/* Pass gate: run the dominator optimizer only when -ftree-dominator-opts
   is enabled.  */
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
/* Pass descriptor for the dominator optimizer.  Field labels below
   follow the tree_opt_pass layout — confirm against tree-pass.h.  */
struct tree_opt_pass pass_dominator =
{
  "dom",				/* name */
  gate_dominator,			/* gate */
  tree_ssa_dominator_optimize,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
/* Attempt to thread jumps across edge E.  We look at the statements in
   E->dest: if they can all be shown to be no-ops or to compute values
   already available, and the block ends in a conditional whose outcome
   we can determine, we record the taken edge in E's edge_info so the
   jump threader can redirect E around E->dest.  */
static void
thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  tree phi;

  /* Temporarily record equivalences created by the PHI nodes at the
     destination (undone by the caller's stack unwinding).  Give up if
     a PHI argument is itself defined by a PHI in E->dest, since the
     parallel-assignment semantics of PHIs would be violated.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      if (src != dst
	  && TREE_CODE (src) == SSA_NAME
	  && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
	  && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
	return;

      record_const_or_copy (dst, src);
      register_new_def (dst, &block_defs_stack);
    }

  /* Walk the real statements of E->dest, proving each one redundant
     if possible.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree lhs, cached_lhs;

      stmt = bsi_stmt (bsi);

      /* Empty statements and labels are irrelevant.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* Anything other than a simple SSA_NAME assignment stops the
	 scan (the trailing COND/SWITCH is handled below).  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
	break;

      /* A copy from an SSA_NAME is trivially "available".  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
	cached_lhs = TREE_OPERAND (stmt, 1);
      else
	cached_lhs = lookup_avail_expr (stmt, false);

      lhs = TREE_OPERAND (stmt, 0);

      /* The statement defines the value it computes — nothing gained.  */
      if (lhs == cached_lhs)
	break;

      if (!cached_lhs)
	{
	  /* Retry the lookup after temporarily replacing the (v)uses
	     with their known values; the originals are saved and
	     restored afterwards.  */
	  stmt_ann_t ann = stmt_ann (stmt);
	  use_optype uses = USE_OPS (ann);
	  vuse_optype vuses = VUSE_OPS (ann);
	  tree *uses_copy = xmalloc (NUM_USES (uses) * sizeof (tree));
	  tree *vuses_copy = xmalloc (NUM_VUSES (vuses) * sizeof (tree));
	  unsigned int i;

	  for (i = 0; i < NUM_USES (uses); i++)
	    {
	      tree tmp = NULL;

	      uses_copy[i] = USE_OP (uses, i);
	      if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
		tmp = SSA_NAME_VALUE (USE_OP (uses, i));
	      /* VALUE_HANDLEs are placeholders from other passes, not
		 usable values.  */
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		SET_USE_OP (uses, i, tmp);
	    }

	  for (i = 0; i < NUM_VUSES (vuses); i++)
	    {
	      tree tmp = NULL;

	      vuses_copy[i] = VUSE_OP (vuses, i);
	      if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
		tmp = SSA_NAME_VALUE (VUSE_OP (vuses, i));
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		SET_VUSE_OP (vuses, i, tmp);
	    }

	  cached_lhs = lookup_avail_expr (stmt, false);

	  /* Restore the original operands.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    SET_USE_OP (uses, i, uses_copy[i]);
	  for (i = 0; i < NUM_VUSES (vuses); i++)
	    SET_VUSE_OP (vuses, i, vuses_copy[i]);

	  free (uses_copy);
	  free (vuses_copy);

	  if (! cached_lhs)
	    break;
	}

      /* The cached value must be an SSA_NAME for the same underlying
	 variable, and must still be that variable's current
	 definition, for the equivalence to be safe.  */
      if (TREE_CODE (cached_lhs) != SSA_NAME)
	break;
      if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs))
	break;
      if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs)
	break;

      record_const_or_copy (lhs, cached_lhs);
      register_new_def (lhs, &block_defs_stack);
    }

  /* If the scan reached the block's control statement, try to
     determine which edge it takes.  */
  if (stmt
      && (TREE_CODE (stmt) == COND_EXPR
	  || TREE_CODE (stmt) == SWITCH_EXPR))
    {
      tree cond, cached_lhs;

      if (TREE_CODE (stmt) == COND_EXPR)
	cond = COND_EXPR_COND (stmt);
      else
	cond = SWITCH_COND (stmt);

      if (COMPARISON_CLASS_P (cond))
	{
	  /* Build (or reuse) a scratch COND_EXPR with the operands
	     replaced by their known values, then try to fold it or
	     find it in the available-expression table.  */
	  tree dummy_cond, op0, op1;
	  enum tree_code cond_code;

	  op0 = TREE_OPERAND (cond, 0);
	  op1 = TREE_OPERAND (cond, 1);
	  cond_code = TREE_CODE (cond);

	  if (TREE_CODE (op0) == SSA_NAME)
	    {
	      tree tmp = SSA_NAME_VALUE (op0);
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		op0 = tmp;
	    }

	  if (TREE_CODE (op1) == SSA_NAME)
	    {
	      tree tmp = SSA_NAME_VALUE (op1);
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		op1 = tmp;
	    }

	  /* The scratch COND_EXPR is cached in WALK_DATA->global_data
	     and mutated in place on reuse.  */
	  dummy_cond = walk_data->global_data;
	  if (! dummy_cond)
	    {
	      dummy_cond = build (cond_code, boolean_type_node, op0, op1);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
	    }

	  cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
	  if (! is_gimple_min_invariant (cached_lhs))
	    {
	      cached_lhs = lookup_avail_expr (dummy_cond, false);
	      if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
		cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
								  NULL,
								  false);
	    }
	}
      else if (TREE_CODE (cond) == SSA_NAME)
	{
	  /* A bare SSA_NAME condition: usable only if its known value
	     is an invariant.  */
	  cached_lhs = cond;
	  cached_lhs = SSA_NAME_VALUE (cached_lhs);
	  if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	    cached_lhs = 0;
	}
      else
	cached_lhs = lookup_avail_expr (stmt, false);

      if (cached_lhs)
	{
	  edge taken_edge = find_taken_edge (e->dest, cached_lhs);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);

	  /* Threading back to the same block would create a loop.  */
	  if (dest == e->dest)
	    return;

	  if (dest)
	    {
	      struct edge_info *edge_info;

	      update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
					       e->count, taken_edge);
	      /* Record the request; thread_through_all_blocks performs
		 the actual redirection later.  */
	      if (e->aux)
		edge_info = e->aux;
	      else
		edge_info = allocate_edge_info (e);
	      edge_info->redirection_target = taken_edge;
	      bb_ann (e->dest)->incoming_edge_threaded = true;
	    }
	}
    }
}
/* Dominator walker callback run before processing the statements of
   basic block BB.  Pushes a NULL_TREE boundary marker onto each
   unwinding stack (so dom_opt_finalize_block knows where BB's entries
   begin) and records equivalences implied by the incoming edge and by
   BB's PHI nodes.  */
static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
			  basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Mark the start of this block's entries on every unwind stack.  */
  VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
  VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
  VEC_safe_push (tree_on_heap, nonzero_vars_stack, NULL_TREE);
  VEC_safe_push (tree_on_heap, vrp_variables_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);
  record_equivalences_from_phis (bb);
}
/* Initialize hash table ELEMENT for expression EXPR with value LHS.
   The RHS recorded depends on EXPR's form: bare comparisons and
   TRUTH_NOT_EXPRs are recorded directly (with no statement
   annotation); COND_EXPR/SWITCH_EXPR record their condition;
   RETURN_EXPR with an operand records that operand's RHS; everything
   else is assumed to be an assignment-like node whose operand 1 is
   the RHS.  */
static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
{
  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
    {
      /* Not a full statement — no annotation to record.  */
      element->ann = NULL;
      element->rhs = expr;
    }
  else if (TREE_CODE (expr) == COND_EXPR)
    {
      element->ann = stmt_ann (expr);
      element->rhs = COND_EXPR_COND (expr);
    }
  else if (TREE_CODE (expr) == SWITCH_EXPR)
    {
      element->ann = stmt_ann (expr);
      element->rhs = SWITCH_COND (expr);
    }
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
    {
      /* RETURN_EXPR wraps an assignment; hash that assignment's RHS.  */
      element->ann = stmt_ann (expr);
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
    }
  else
    {
      element->ann = stmt_ann (expr);
      element->rhs = TREE_OPERAND (expr, 1);
    }

  element->lhs = lhs;
  element->hash = avail_expr_hash (element);
}
/* Remove every expression recorded in the current basic block from the
   AVAIL_EXPRS hash table, popping AVAIL_EXPRS_STACK until this block's
   NULL_TREE boundary marker is reached.  */
static void
remove_local_expressions_from_table (void)
{
  for (;;)
    {
      struct expr_hash_elt element;
      tree expr;

      if (VEC_length (tree_on_heap, avail_exprs_stack) == 0)
	break;

      expr = VEC_pop (tree_on_heap, avail_exprs_stack);
      if (expr == NULL_TREE)
	break;

      initialize_hash_element (expr, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}
/* Clear the NONZERO_VARS bit of every SSA_NAME recorded in the current
   basic block, popping NONZERO_VARS_STACK until this block's NULL
   boundary marker is reached.  */
static void
restore_nonzero_vars_to_original_value (void)
{
  tree name;

  while (VEC_length (tree_on_heap, nonzero_vars_stack) > 0
	 && (name = VEC_pop (tree_on_heap, nonzero_vars_stack)) != NULL)
    bitmap_clear_bit (nonzero_vars, SSA_NAME_VERSION (name));
}
/* Undo the SSA_NAME_VALUE assignments made in the current basic block.
   CONST_AND_COPIES_STACK holds <previous value, name> pairs; pop pairs
   until this block's NULL boundary marker is reached.  */
static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree_on_heap, const_and_copies_stack) > 0)
    {
      tree dest = VEC_pop (tree_on_heap, const_and_copies_stack);

      if (dest == NULL)
	break;

      /* The entry below DEST is its saved previous value.  */
      SSA_NAME_VALUE (dest) = VEC_pop (tree_on_heap, const_and_copies_stack);
    }
}
/* Restore the current-definition pointers changed in the current basic
   block, popping BLOCK_DEFS_STACK until this block's NULL_TREE
   boundary marker is reached.  An SSA_NAME entry reinstates itself as
   the current definition of its underlying variable; a bare variable
   entry reinstates a NULL current definition.  */
static void
restore_currdefs_to_original_value (void)
{
  while (VEC_length (tree_on_heap, block_defs_stack) > 0)
    {
      tree entry = VEC_pop (tree_on_heap, block_defs_stack);

      if (entry == NULL_TREE)
	break;

      if (TREE_CODE (entry) == SSA_NAME)
	var_ann (SSA_NAME_VAR (entry))->current_def = entry;
      else
	var_ann (entry)->current_def = NULL;
    }
}
/* Dominator walker callback run after processing BB and all of its
   dominated children.  Attempts jump threading through BB's
   successors, then unwinds all the per-block state recorded while
   optimizing BB.  */
static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  tree last;

  /* A single normal successor which we do not dominate (or which has
     PHIs) is a candidate for jump threading.  */
  if (EDGE_COUNT (bb->succs) == 1
      && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
      && (get_immediate_dominator (CDI_DOMINATORS, EDGE_SUCC (bb, 0)->dest) != bb
	  || phi_nodes (EDGE_SUCC (bb, 0)->dest)))
    {
      thread_across_edge (walk_data, EDGE_SUCC (bb, 0));
    }
  else if ((last = last_stmt (bb))
	   && TREE_CODE (last) == COND_EXPR
	   && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
	       || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
	   && EDGE_COUNT (bb->succs) == 2
	   && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
	   && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Try threading through the true arm, temporarily recording the
	 equivalences that hold on that edge.  */
      if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
	  || phi_nodes (true_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  /* Boundary markers so the temporary equivalences can be
	     unwound below.  */
	  VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
	  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
	  VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);

	  edge_info = true_edge->aux;

	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      /* Record a simple NAME = VALUE equivalence, but only for
		 SSA_NAMEs of the same underlying variable.  */
	      if (lhs
		  && TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME
		  && TREE_CODE (edge_info->rhs) == SSA_NAME
		  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs))
		record_const_or_copy (lhs, rhs);

	      /* Record the <condition, value> pairs known on this
		 edge.  */
	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  thread_across_edge (walk_data, true_edge);

	  /* Unwind the temporary equivalences before handling the
	     false arm.  */
	  remove_local_expressions_from_table ();
	  restore_vars_to_original_value ();
	  restore_currdefs_to_original_value ();
	}

      /* Similarly for the false arm; its temporaries are unwound by
	 the block-level unwinding below.  */
      if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
	  || phi_nodes (false_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  edge_info = false_edge->aux;

	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      if (lhs
		  && TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
		record_const_or_copy (lhs, rhs);

	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  thread_across_edge (walk_data, false_edge);
	}
    }

  /* Unwind all state recorded while optimizing this block.  */
  remove_local_expressions_from_table ();
  restore_nonzero_vars_to_original_value ();
  restore_vars_to_original_value ();
  restore_currdefs_to_original_value ();

  /* Pop the VRP records created in this block.  */
  while (VEC_length (tree_on_heap, vrp_variables_stack) > 0)
    {
      tree var = VEC_pop (tree_on_heap, vrp_variables_stack);
      struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
      void **slot;
      varray_type var_vrp_records;

      if (var == NULL)
	break;

      vrp_hash_elt.var = var;
      vrp_hash_elt.records = NULL;

      slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);

      vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
      var_vrp_records = vrp_hash_elt_p->records;

      /* Records for dominating blocks (different BB) stay in place.  */
      while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
	{
	  struct vrp_element *element
	    = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records);

	  if (element->bb != bb)
	    break;

	  VARRAY_POP (var_vrp_records);
	}
    }

  /* Re-scan statements of this block whose operands changed.  */
  while (VEC_length (tree_on_heap, stmts_to_rescan) > 0)
    {
      tree stmt = VEC_last (tree_on_heap, stmts_to_rescan);
      basic_block stmt_bb = bb_for_stmt (stmt);

      if (stmt_bb != bb)
	break;

      VEC_pop (tree_on_heap, stmts_to_rescan);
      mark_new_vars_to_rename (stmt, vars_to_rename);
    }
}
/* Record equivalences created by the PHI nodes at the start of BB.
   If all arguments of a PHI are equal (ignoring self-references), the
   PHI result is equivalent to that argument; if all arguments are
   known nonzero, so is the result.  Also registers each PHI result as
   a new definition for the block.  */
static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  tree t = PHI_ARG_DEF (phi, i);

	  /* Self-referencing arguments do not constrain the result.  */
	  if (lhs == t)
	    continue;

	  if (rhs == NULL)
	    rhs = t;
	  else if (! operand_equal_for_phi_arg_p (rhs, t))
	    /* Two differing arguments — no single equivalence; the
	       early exit leaves I short of PHI_NUM_ARGS.  */
	    break;
	}

      /* All arguments were self-references; the PHI is equivalent to
	 its own result.  */
      if (!rhs)
	rhs = lhs;

      /* I reaches PHI_NUM_ARGS only when the loop above saw no
	 conflicting arguments.  */
      if (i == PHI_NUM_ARGS (phi)
	  && may_propagate_copy (lhs, rhs))
	SSA_NAME_VALUE (lhs) = rhs;

      /* The result is nonzero when every argument is nonzero.  */
      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  if (!PHI_ARG_NONZERO (phi, i))
	    break;
	}

      if (i == PHI_NUM_ARGS (phi))
	bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));

      register_new_def (lhs, &block_defs_stack);
    }
}
/* Return BB's single incoming edge, ignoring loop back edges, or NULL
   if BB has zero or more than one non-loop predecessor edge.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge found = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A back edge has its source dominated by its destination.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* A second non-loop edge means there is no unique predecessor.  */
      if (found != NULL)
	return NULL;

      found = e;
    }

  return found;
}
/* Record any equivalences attached to the edge entering BB, provided
   BB has a unique non-loop predecessor which is also its immediate
   dominator (so the edge equivalences are guaranteed to hold in BB).  */
static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  parent = get_immediate_dominator (CDI_DOMINATORS, bb);
  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* The equivalences only hold if control must come through E.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = e->aux;

      if (edge_info)
	{
	  tree lhs = edge_info->lhs;
	  tree rhs = edge_info->rhs;
	  tree *cond_equivalences = edge_info->cond_equivalences;

	  if (lhs)
	    record_equality (lhs, rhs);

	  if (cond_equivalences)
	    {
	      bool recorded_range = false;

	      for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		{
		  tree expr = cond_equivalences[i];
		  tree value = cond_equivalences[i + 1];

		  record_cond (expr, value);

		  /* Additionally feed the first suitable true
		     comparison against a constant into the value
		     range machinery.  */
		  if (! recorded_range
		      && COMPARISON_CLASS_P (expr)
		      && value == boolean_true_node
		      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
		    {
		      record_range (expr, bb);
		      recorded_range = true;
		    }
		}
	    }
	}
    }
}
/* Dump SSA dominator optimization statistics to FILE.  */
void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs = opt_stats.num_exprs_considered;

  fprintf (file, "Total number of statements: %6ld\n\n",
	   opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
	   opt_stats.num_exprs_considered);

  /* Guard the percentage computation against a zero divisor.  */
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
	   opt_stats.num_re, PERCENT (opt_stats.num_re, n_exprs));

  fprintf (file, "\nHash table statistics:\n");
  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}
/* Dump SSA dominator optimization statistics to stderr; convenient to
   call from a debugger.  */
void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}
/* Write the size, element count, and collision/search ratio of hash
   table HTAB to FILE.  */
static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) htab_size (htab), (long) htab_elements (htab),
	   htab_collisions (htab));
}
/* Record that VAR has a nonzero value, remembering the change on
   NONZERO_VARS_STACK so it can be undone when the current block is
   finalized.  Does nothing if VAR is already marked nonzero.  */
static void
record_var_is_nonzero (tree var)
{
  int indx = SSA_NAME_VERSION (var);

  if (!bitmap_bit_p (nonzero_vars, indx))
    {
      bitmap_set_bit (nonzero_vars, indx);

      /* Push so the bit can be cleared on block exit.  */
      VEC_safe_push (tree_on_heap, nonzero_vars_stack, var);
    }
}
/* Enter condition COND with value VALUE into the available-expression
   hash table, pushing COND onto AVAIL_EXPRS_STACK so the entry is
   removed when the current block is finalized.  If an equivalent
   entry already exists, the new element is discarded.  */
static void
record_cond (tree cond, tree value)
{
  void **slot;
  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));

  initialize_hash_element (cond, value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
				   element->hash, INSERT);
  if (*slot != NULL)
    {
      /* Duplicate — the table already owns an equivalent element.  */
      free (element);
      return;
    }

  *slot = (void *) element;
  VEC_safe_push (tree_on_heap, avail_exprs_stack, cond);
}
/* Build the condition OP0 NEW_CODE OP1 and store it with its (true)
   value into the pair of slots starting at P.  */
static void
build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
{
  p[0] = build2 (new_code, boolean_type_node, op0, op1);
  p[1] = boolean_true_node;
}
/* Populate EDGE_INFO's cond_equivalences array with COND, its INVERTED
   form, and every weaker condition implied by COND (e.g. a < b implies
   a <= b, a != b, ORDERED (a, b), ...).  Slots 0-3 always hold
   <COND, true> and <INVERTED, false>; the per-tree-code cases below
   fill the remaining slots and size the array accordingly.  */
static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      /* a < b implies a <= b, ORDERED, a != b, and a <> b (LTGT).  */
      edge_info->max_cond_equivalences = 12;
      edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (LTGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[10]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      /* a <= b implies ORDERED (a, b).  */
      edge_info->max_cond_equivalences = 6;
      edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      break;

    case EQ_EXPR:
      /* a == b implies ORDERED, a <= b, and a >= b.  */
      edge_info->max_cond_equivalences = 10;
      edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (LE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      break;

    case UNORDERED_EXPR:
      /* UNORDERED implies a != b and all the UN* comparisons.  */
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
				 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      /* UNLT/UNGT imply the corresponding UNLE/UNGE and a != b.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      /* UNEQ implies UNLE and UNGE.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      /* a <> b implies a != b and ORDERED.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    default:
      /* No implied conditions; only COND and INVERTED below.  */
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
      break;
    }

  /* The first two pairs are always COND itself and its inversion.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}
/* Record the equivalence X = Y.  PREV_X is X's previous value; it is
   pushed together with X onto CONST_AND_COPIES_STACK (previous value
   first) so restore_vars_to_original_value can undo the change.  */
static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  VEC_safe_push (tree_on_heap, const_and_copies_stack, prev_x);
  VEC_safe_push (tree_on_heap, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block defining X, or zero when X
   is not an SSA_NAME or its defining statement has no block.  */
static int
loop_depth_of_name (tree x)
{
  tree def_stmt;
  basic_block def_bb;

  /* Only SSA_NAMEs have a defining statement we can inspect.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (x);
  def_bb = bb_for_stmt (def_stmt);
  return def_bb ? def_bb->loop_depth : 0;
}
/* Record the equivalence X = Y, where Y may be a constant or another
   SSA_NAME.  If Y is an SSA_NAME with a known value, record that
   value instead so the most simplified equivalence is kept.  */
static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree y_value = SSA_NAME_VALUE (y);

      if (y_value)
	y = y_value;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Record that X and Y are equal, choosing which side becomes the
   recorded value.  Invariants are preferred as the value; otherwise
   the name defined at the shallower loop depth is preferred, since
   deeper definitions may not dominate later uses.  */
static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* Pick the direction of the equivalence.  The comma expressions
     swap X and Y (and their previous values) in place.  */
  if (TREE_INVARIANT (y))
    ;
  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
    y = prev_y;

  /* After the swaps, only an SSA_NAME on the left can be recorded.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* Do not record an equivalence with zero (or a non-constant) when
     signed zeros must be honored: -0.0 == 0.0 but they are not
     interchangeable.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Return TRUE when reassociating EXP is acceptable.  Only a floating
   point MULT/PLUS/MINUS without -funsafe-math-optimizations makes
   reassociation unacceptable.  */
static inline bool
unsafe_associative_fp_binop (tree exp)
{
  enum tree_code code = TREE_CODE (exp);

  if (!FLOAT_TYPE_P (TREE_TYPE (exp)))
    return true;

  if (code != MULT_EXPR && code != PLUS_EXPR && code != MINUS_EXPR)
    return true;

  return flag_unsafe_math_optimizations;
}
/* Try to algebraically simplify the RHS of STMT and look the result up
   in the available-expression table.  Handles double negation/NOT,
   reassociation of chained binary operations with constant operands,
   division/modulo by a power of two when the operand is known
   non-negative, ABS of a value with known sign, and reads from
   constant strings.  Returns the cached value, or NULL.  INSERT is
   passed through to the lookup routines.  */
static tree
simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
				    tree stmt, int insert)
{
  tree rhs = TREE_OPERAND (stmt, 1);
  enum tree_code rhs_code = TREE_CODE (rhs);
  tree result = NULL;

  /* ~~X -> X and --X -> X when the inner operation has the same code
     and lives in the same loop.  */
  if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
    {
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code
	  && loop_containing_stmt (rhs_def_stmt) == loop_containing_stmt (stmt))
	{
	  tree rhs_def_operand;

	  rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);

	  /* Do not propagate names occurring in abnormal PHIs.  */
	  if (TREE_CODE (rhs_def_operand) == SSA_NAME
	      && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
	    result = update_rhs_and_lookup_avail_expr (stmt,
						       rhs_def_operand,
						       insert);
	}
    }

  /* (A op C1) op C2 -> A op (C1 op C2): fold the two constants
     together when the defining statement has the same (or a
     plus/minus-compatible) code in the same loop.  */
  if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
    {
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code
	  && loop_containing_stmt (rhs_def_stmt) == loop_containing_stmt (stmt))
	{
	  tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
	  enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);

	  if ((rhs_code == rhs_def_code && unsafe_associative_fp_binop (rhs))
	      || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
	      || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
	    {
	      tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
	      tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);

	      if (TREE_CODE (def_stmt_op0) == SSA_NAME
		  && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
		  && is_gimple_min_invariant (def_stmt_op1))
		{
		  tree outer_const = TREE_OPERAND (rhs, 1);
		  tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
		  tree t;

		  /* For FP plus/minus without -funsafe-math-opts, only
		     reassociate when no negation is involved anywhere
		     (codes and constant signs all non-negative).  */
		  if (FLOAT_TYPE_P (type)
		      && !flag_unsafe_math_optimizations
		      && (rhs_def_code == PLUS_EXPR
			  || rhs_def_code == MINUS_EXPR))
		    {
		      bool neg = false;

		      neg ^= (rhs_code == MINUS_EXPR);
		      neg ^= (rhs_def_code == MINUS_EXPR);
		      neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
		      neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));

		      if (neg)
			goto dont_fold_assoc;
		    }

		  /* Combine the two constants, normalizing mixed
		     plus/minus chains to a PLUS_EXPR.  */
		  if (rhs_def_code != rhs_code)
		    {
		      if (rhs_def_code == MINUS_EXPR)
			t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
		      else
			t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
		      rhs_code = PLUS_EXPR;
		    }
		  else if (rhs_def_code == MINUS_EXPR)
		    t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
		  else
		    t = build (rhs_def_code, type, def_stmt_op1, outer_const);
		  t = local_fold (t);
		  t = build (rhs_code, type, def_stmt_op0, t);
		  t = local_fold (t);

		  /* Only use the result when it is a valid gimple RHS
		     form.  */
		  if (TREE_CODE (t) == SSA_NAME
		      || (UNARY_CLASS_P (t)
			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
		      || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
			  && is_gimple_val (TREE_OPERAND (t, 1))))
		    result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
		}
	    }
	}
 dont_fold_assoc:;
    }

  /* X / 2**N or X % 2**N -> shift/mask when X is known non-negative
     (unsigned type, or a recorded X > 0 condition).  */
  if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && integer_pow2p (TREE_OPERAND (rhs, 1)))
    {
      tree val;
      tree op = TREE_OPERAND (rhs, 0);

      if (TYPE_UNSIGNED (TREE_TYPE (op)))
	{
	  val = integer_one_node;
	}
      else
	{
	  /* Ask the available-expression machinery whether OP > 0 is
	     known, reusing the scratch COND_EXPR cached in
	     WALK_DATA->global_data.  */
	  tree dummy_cond = walk_data->global_data;

	  if (! dummy_cond)
	    {
	      dummy_cond = build (GT_EXPR, boolean_type_node,
				  op, integer_zero_node);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GT_EXPR);
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
		= integer_zero_node;
	    }
	  val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
	}

      if (val && integer_onep (val))
	{
	  tree t;
	  tree op0 = TREE_OPERAND (rhs, 0);
	  tree op1 = TREE_OPERAND (rhs, 1);

	  if (rhs_code == TRUNC_DIV_EXPR)
	    t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
		       build_int_cst (NULL_TREE, tree_log2 (op1)));
	  else
	    t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
		       local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
					  op1, integer_one_node)));

	  result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
	}
    }

  /* ABS (X) -> X or -X when X's sign is known.  */
  if (rhs_code == ABS_EXPR
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
    {
      tree val;
      tree op = TREE_OPERAND (rhs, 0);
      tree type = TREE_TYPE (op);

      if (TYPE_UNSIGNED (type))
	{
	  val = integer_zero_node;
	}
      else
	{
	  /* Test OP <= 0, falling back to OP >= 0 with the answer
	     inverted.  VAL == 1 means OP is non-positive.  */
	  tree dummy_cond = walk_data->global_data;

	  if (! dummy_cond)
	    {
	      dummy_cond = build (LE_EXPR, boolean_type_node,
				  op, integer_zero_node);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), LE_EXPR);
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
		= build_int_cst (type, 0);
	    }
	  val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);

	  if (!val)
	    {
	      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GE_EXPR);
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
		= build_int_cst (type, 0);

	      val = simplify_cond_and_lookup_avail_expr (dummy_cond,
							 NULL, false);

	      if (val)
		{
		  if (integer_zerop (val))
		    val = integer_one_node;
		  else if (integer_onep (val))
		    val = integer_zero_node;
		}
	    }
	}

      if (val
	  && (integer_onep (val) || integer_zerop (val)))
	{
	  tree t;

	  if (integer_onep (val))
	    t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
	  else
	    t = op;

	  result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
	}
    }

  /* Fold reads from constant strings.  */
  if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
    {
      tree t = fold_read_from_constant_string (rhs);

      if (t)
	result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
    }

  return result;
}
/* COND is a condition of the form

     x == const  or  x != const

   Look back at x's defining statement.  If x is defined as

     x = (type) y;

   then a test of Y against a suitably converted constant may be
   equivalent and may expose more redundancies to the dominator
   optimizer.  Return the equivalent comparison, or NULL when no safe
   equivalent form exists.  */

static tree
find_equivalent_equality_comparison (tree cond)
{
  tree op0 = TREE_OPERAND (cond, 0);
  tree op1 = TREE_OPERAND (cond, 1);
  tree def_stmt = SSA_NAME_DEF_STMT (op0);

  /* OP0 must be defined by a simple assignment for this to apply.  */
  if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
    {
      tree def_rhs = TREE_OPERAND (def_stmt, 1);

      /* Do not attempt this transformation on function pointers;
	 converting the constant between function pointer types is not
	 a safe equivalence.  */
      if ((POINTER_TYPE_P (TREE_TYPE (op0))
	   && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
	  || (POINTER_TYPE_P (TREE_TYPE (op1))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
	return NULL;

      /* Now make sure the RHS of the MODIFY_EXPR is a typecast of an
	 SSA_NAME.  */
      if ((TREE_CODE (def_rhs) == NOP_EXPR
	   || TREE_CODE (def_rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
	{
	  tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
	  tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
	  tree new;

	  /* If the inner operand is wider than the cast's result type,
	     the cast discards bits and the rewrite is unsafe.  */
	  if (TYPE_PRECISION (def_rhs_inner_type)
	      > TYPE_PRECISION (TREE_TYPE (def_rhs)))
	    return NULL;

	  /* Reject inner function pointer types as well.  */
	  if (POINTER_TYPE_P (def_rhs_inner_type)
	      && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
	    return NULL;

	  /* Convert the constant to the inner type.  The comparison is
	     only equivalent when the conversion round-trips, i.e. the
	     folded constant still compares equal to OP1.  */
	  new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
	  new = local_fold (new);
	  if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
	    return build (TREE_CODE (cond), TREE_TYPE (cond),
			  def_rhs_inner, new);
	}
    }
  return NULL;
}
/* STMT is a COND_EXPR whose result could not be determined trivially.
   Try to find an equivalent form of the condition that is easier to
   optimize, and use simple value range propagation (the VRP_DATA
   records accumulated during the dominator walk) to decide the
   condition where possible.

   ANN may be NULL when STMT is not a "real" statement; in that case
   we avoid re-scanning operands.  INSERT is passed through to the
   available-expression lookups.  Returns boolean_true_node /
   boolean_false_node when the condition is decided, a cached LHS when
   an equivalent expression was found, or NULL/0 otherwise.  */

static tree
simplify_cond_and_lookup_avail_expr (tree stmt,
				     stmt_ann_t ann,
				     int insert)
{
  tree cond = COND_EXPR_COND (stmt);

  if (COMPARISON_CLASS_P (cond))
    {
      tree op0 = TREE_OPERAND (cond, 0);
      tree op1 = TREE_OPERAND (cond, 1);

      if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
	{
	  int limit;
	  tree low, high, cond_low, cond_high;
	  int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
	  varray_type vrp_records;
	  struct vrp_element *element;
	  struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
	  void **slot;

	  /* First see if we have a test of an SSA_NAME against a
	     constant where the SSA_NAME is defined by an earlier
	     typecast that is irrelevant when testing against this
	     constant.  */
	  if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	    {
	      tree new_cond = find_equivalent_equality_comparison (cond);

	      if (new_cond)
		{
		  /* Update the statement to use the equivalent form.  */
		  COND_EXPR_COND (stmt) = new_cond;

		  /* If this is not a real stmt, ANN will be NULL and we
		     avoid processing the operands.  */
		  if (ann)
		    modify_stmt (stmt);

		  /* Lookup the simplified condition and return its known
		     value if it exists.  */
		  new_cond = lookup_avail_expr (stmt, insert);
		  if (new_cond)
		    return new_cond;

		  /* The condition changed, so refresh OP0 and OP1.  */
		  op0 = TREE_OPERAND (cond, 0);
		  op1 = TREE_OPERAND (cond, 1);
		}
	    }

	  /* Consult the value range records for this variable (if they
	     exist) to see if we can eliminate or simplify this
	     conditional.  Two checks are needed to establish that no
	     records exist: the hash table slot may be missing, or the
	     record varray may be NULL/empty.  */
	  vrp_hash_elt.var = op0;
	  vrp_hash_elt.records = NULL;
	  slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);

	  if (slot == NULL)
	    return NULL;

	  vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
	  vrp_records = vrp_hash_elt_p->records;

	  if (vrp_records == NULL)
	    return NULL;

	  limit = VARRAY_ACTIVE_SIZE (vrp_records);

	  /* If we have no value range records, or cannot extract a
	     range for this condition, there is nothing to do.  */
	  if (limit == 0
	      || ! extract_range_from_cond (cond, &cond_high,
					    &cond_low, &cond_inverted))
	    return NULL;

	  /* Ranges are computed lazily and cached in the topmost
	     record, so that unused records cost nothing.  */
	  element
	    = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);

	  if (element->high && element->low)
	    {
	      /* The range for the topmost record has already been
		 computed and cached.  */
	      low = element->low;
	      high = element->high;
	    }
	  else
	    {
	      tree tmp_high, tmp_low;
	      int dummy;

	      /* The topmost record's range must be derivable from its
		 condition; an inverted (NE_EXPR) range is never stored
		 (see record_range), hence the assertions.  */
	      if (! extract_range_from_cond (element->cond, &tmp_high,
					     &tmp_low, &dummy))
		gcc_unreachable ();
	      else
		gcc_assert (dummy == 0);

	      if (limit == 1)
		{
		  low = tmp_low;
		  high = tmp_high;
		}
	      else
		{
		  /* Intersect the new range with the range from the
		     dominating (next-to-last) record: keep the larger
		     low bound and the smaller high bound.  */
		  struct vrp_element *prev
		    = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
								limit - 2);
		  low = prev->low;
		  high = prev->high;
		  low = (tree_int_cst_compare (low, tmp_low) == 1
			 ? low : tmp_low);
		  high = (tree_int_cst_compare (high, tmp_high) == -1
			  ? high : tmp_high);
		}

	      /* Cache the computed range in the topmost record.  */
	      element->low = low;
	      element->high = high;
	    }

	  /* With the variable constrained to [LOW, HIGH], try to decide
	     the condition's range [COND_LOW, COND_HIGH].  If the ranges
	     are identical, the condition is decided (modulo
	     inversion).  */
	  lowequal = tree_int_cst_equal (low, cond_low);
	  highequal = tree_int_cst_equal (high, cond_high);

	  if (lowequal && highequal)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* Swap the two ranges if necessary so the one that starts
	     lower (or is wider at equal start) comes first; this
	     simplifies the overlap/subset tests below.  */
	  swapped = 0;
	  if (tree_int_cst_compare (low, cond_low) == 1
	      || (lowequal
		  && tree_int_cst_compare (cond_high, high) == 1))
	    {
	      tree temp;

	      swapped = 1;
	      temp = low;
	      low = cond_low;
	      cond_low = temp;
	      temp = high;
	      high = cond_high;
	      cond_high = temp;
	    }

	  /* Determine if the ranges are disjoint or the second is a
	     subset of the first.  */
	  no_overlap = tree_int_cst_lt (high, cond_low);
	  subset = tree_int_cst_compare (cond_high, high) != 1;

	  /* Disjoint ranges: the condition is always false (or always
	     true when inverted).  */
	  if (no_overlap)
	    return (cond_inverted ? boolean_true_node : boolean_false_node);

	  /* The variable's range is a subset of the condition's range:
	     the condition is always true (or false when inverted).
	     SWAPPED guarantees the subset relation runs the right way.  */
	  if (subset && swapped)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* We could not decide the condition, but we may still be able
	     to simplify it.  Intersect the two ranges.  */
	  low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
	  high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;

	  /* If the intersection collapsed to a single value, rewrite
	     the (relational) condition as an equality test.  */
	  if (TREE_CODE (cond) != EQ_EXPR
	      && TREE_CODE (cond) != NE_EXPR
	      && tree_int_cst_equal (low, high))
	    {
	      TREE_SET_CODE (cond, EQ_EXPR);
	      TREE_OPERAND (cond, 1) = high;
	    }
	}
    }
  return 0;
}
/* STMT is a SWITCH_EXPR.  If its index was produced by a widening
   NOP_EXPR from a narrower value whose type can represent every value
   the index type can hold (no sign or precision loss), then switch on
   the narrower value instead and look the rewritten SWITCH_EXPR up in
   the available expression table.  Returns the recorded equivalent
   expression if one exists, NULL_TREE otherwise.  */

static tree
simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
{
  tree index = SWITCH_COND (stmt);
  tree def_stmt, rhs, inner, outer_type, inner_type;
  int required_precision;
  bool lossy;

  if (TREE_CODE (index) != SSA_NAME)
    return NULL_TREE;

  def_stmt = SSA_NAME_DEF_STMT (index);
  if (TREE_CODE (def_stmt) != MODIFY_EXPR)
    return NULL_TREE;

  rhs = TREE_OPERAND (def_stmt, 1);
  if (TREE_CODE (rhs) != NOP_EXPR)
    return NULL_TREE;

  inner = TREE_OPERAND (rhs, 0);
#ifdef ENABLE_CHECKING
  gcc_assert (is_gimple_val (inner));
#endif

  outer_type = TREE_TYPE (index);
  inner_type = TREE_TYPE (inner);

  /* The conversion is only reversible when the outer type can hold
     every value of the inner type.  Converting signed to unsigned is
     never safe here; converting unsigned to signed needs one extra
     bit of precision for the sign.  */
  required_precision = TYPE_PRECISION (inner_type);
  lossy = false;
  if (TYPE_UNSIGNED (outer_type) && !TYPE_UNSIGNED (inner_type))
    lossy = true;
  else if (!TYPE_UNSIGNED (outer_type) && TYPE_UNSIGNED (inner_type))
    required_precision += 1;
  if (TYPE_PRECISION (outer_type) < required_precision)
    lossy = true;

  if (lossy)
    return NULL_TREE;

  /* Safe: switch directly on the narrower value.  */
  SWITCH_COND (stmt) = inner;
  modify_stmt (stmt);
  return lookup_avail_expr (stmt, insert);
}
/* Propagate known constants and copies into the PHI nodes of BB's
   successor blocks (skipping abnormal edges), and mark PHI arguments
   whose SSA name is known to be nonzero.  */

static void
cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      tree phi;
      int phi_arg_idx;

      /* Propagating across abnormal edges is not safe.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      phi = phi_nodes (e->dest);
      if (phi == NULL)
	continue;

      /* The PHI argument corresponding to edge E has the same index
	 in every PHI node of E->dest.  */
      phi_arg_idx = e->dest_idx;

      while (phi)
	{
	  use_operand_p arg_p = PHI_ARG_DEF_PTR (phi, phi_arg_idx);
	  tree arg = USE_FROM_PTR (arg_p);

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      tree val;

	      /* Note nonzero-ness of the argument if known.  */
	      if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (arg)))
		PHI_ARG_NONZERO (phi, phi_arg_idx) = true;

	      /* Replace the argument with its known constant or copy,
		 when propagation is legal.  */
	      val = SSA_NAME_VALUE (arg);
	      if (val
		  && (TREE_CODE (val) == SSA_NAME
		      || is_gimple_min_invariant (val))
		  && may_propagate_copy (arg, val))
		propagate_value (arg_p, val);
	    }

	  phi = PHI_CHAIN (phi);
	}
    }
}
/* Examine the control statement (if any) ending BB and attach any
   equivalences it implies to BB's outgoing edges, as edge_info
   structures, for use by the rest of the pass when those edges are
   traversed.  */

static void
record_edge_info (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  struct edge_info *edge_info;

  if (! bsi_end_p (bsi))
    {
      tree stmt = bsi_stmt (bsi);

      /* A SWITCH_EXPR on an SSA_NAME yields "index == case-constant"
	 on each edge whose destination is reached by exactly one
	 single-value case label.  */
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree cond = SWITCH_COND (stmt);

	  if (TREE_CODE (cond) == SSA_NAME)
	    {
	      tree labels = SWITCH_LABELS (stmt);
	      int i, n_labels = TREE_VEC_LENGTH (labels);
	      tree *info = xcalloc (n_basic_blocks, sizeof (tree));
	      edge e;
	      edge_iterator ei;

	      /* Map each destination block to its unique label, or to
		 error_mark_node when the block is reached by a range
		 case (CASE_HIGH set), the default case (no CASE_LOW),
		 or more than one label.  */
	      for (i = 0; i < n_labels; i++)
		{
		  tree label = TREE_VEC_ELT (labels, i);
		  basic_block target_bb = label_to_block (CASE_LABEL (label));

		  if (CASE_HIGH (label)
		      || !CASE_LOW (label)
		      || info[target_bb->index])
		    info[target_bb->index] = error_mark_node;
		  else
		    info[target_bb->index] = label;
		}

	      /* Record the equivalence on each qualifying edge.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  basic_block target_bb = e->dest;
		  tree node = info[target_bb->index];

		  if (node != NULL && node != error_mark_node)
		    {
		      tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
		      edge_info = allocate_edge_info (e);
		      edge_info->lhs = cond;
		      edge_info->rhs = x;
		    }
		}
	      free (info);
	    }
	}

      /* A COND_EXPR may create equivalences too.  */
      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  tree cond = COND_EXPR_COND (stmt);
	  edge true_edge;
	  edge false_edge;

	  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	  /* If the condition is a plain variable X, then X is true on
	     the true edge and false on the false edge.  */
	  if (SSA_VAR_P (cond))
	    {
	      struct edge_info *edge_info;

	      edge_info = allocate_edge_info (true_edge);
	      edge_info->lhs = cond;
	      edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));

	      edge_info = allocate_edge_info (false_edge);
	      edge_info->lhs = cond;
	      edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
	    }
	  else if (COMPARISON_CLASS_P (cond))
	    {
	      tree op0 = TREE_OPERAND (cond, 0);
	      tree op1 = TREE_OPERAND (cond, 1);

	      /* Special case: a boolean SSA_NAME compared for
		 (in)equality against a constant — record the boolean
		 value of OP0 directly on each edge.  */
	      if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
		  && TREE_CODE (op0) == SSA_NAME
		  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
		  && is_gimple_min_invariant (op1))
		{
		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      edge_info = allocate_edge_info (true_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					? boolean_false_node
					: boolean_true_node);

		      edge_info = allocate_edge_info (false_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					? boolean_true_node
					: boolean_false_node);
		    }
		  else
		    {
		      edge_info = allocate_edge_info (true_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					? boolean_true_node
					: boolean_false_node);

		      edge_info = allocate_edge_info (false_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					? boolean_false_node
					: boolean_true_node);
		    }
		}
	      /* Constant compared against an SSA_NAME or another
		 constant: record the condition (and its inverse) on the
		 edges, plus a copy equivalence for an equality test
		 against OP1 on the appropriate edge.  */
	      else if (is_gimple_min_invariant (op0)
		       && (TREE_CODE (op1) == SSA_NAME
			   || is_gimple_min_invariant (op1)))
		{
		  tree inverted = invert_truthvalue (cond);
		  struct edge_info *edge_info;

		  edge_info = allocate_edge_info (true_edge);
		  record_conditions (edge_info, cond, inverted);

		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      edge_info->lhs = op1;
		      edge_info->rhs = op0;
		    }

		  edge_info = allocate_edge_info (false_edge);
		  record_conditions (edge_info, inverted, cond);

		  if (TREE_CODE (cond) == NE_EXPR)
		    {
		      edge_info->lhs = op1;
		      edge_info->rhs = op0;
		    }
		}
	      /* Mirror image: SSA_NAME compared against a constant or
		 another SSA_NAME.  */
	      else if (TREE_CODE (op0) == SSA_NAME
		       && (is_gimple_min_invariant (op1)
			   || TREE_CODE (op1) == SSA_NAME))
		{
		  tree inverted = invert_truthvalue (cond);
		  struct edge_info *edge_info;

		  edge_info = allocate_edge_info (true_edge);
		  record_conditions (edge_info, cond, inverted);

		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      edge_info->lhs = op0;
		      edge_info->rhs = op1;
		    }

		  edge_info = allocate_edge_info (false_edge);
		  record_conditions (edge_info, inverted, cond);

		  if (TREE_CODE (cond) == NE_EXPR)
		    {
		      edge_info->lhs = op0;
		      edge_info->rhs = op1;
		    }
		}
	    }
	}
    }
}
/* Dominator walker callback, invoked after processing the statements
   in block BB.  Record equivalences implied by BB's controlling
   statement on its outgoing edges, then propagate known constants,
   copies and nonzero-ness into the PHI nodes of BB's successors.  */

static void
propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
			     basic_block bb)
{
  record_edge_info (bb);
  cprop_into_successor_phis (bb, nonzero_vars);
}
/* Search for redundant computations in STMT.  If any are found,
   replace them with the variable holding the result of the earlier
   computation.

   If it is safe to do so, also record STMT's expression in the
   available-expression hash table.

   Returns true when a pointer-valued constant (or ADDR_EXPR) was
   propagated into STMT, meaning new symbols may have been exposed and
   the statement should be rescanned.  */

static bool
eliminate_redundant_computations (struct dom_walk_data *walk_data,
				  tree stmt, stmt_ann_t ann)
{
  v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
  tree *expr_p, def = NULL_TREE;
  bool insert = true;
  tree cached_lhs;
  bool retval = false;

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    def = TREE_OPERAND (stmt, 0);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables: aliased stores,
     non-SSA definitions, names occurring in abnormal PHIs, and
     statements with V_MAY_DEFs.  */
  if (ann->makes_aliased_stores
      || ! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || NUM_V_MAY_DEFS (v_may_defs) != 0)
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  /* If this is an assignment and the RHS was not in the hash table,
     then try to simplify the RHS and lookup the new RHS in the hash
     table.  */
  if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
    cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
  /* Similarly for COND_EXPRs.  */
  else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
    cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
  /* And SWITCH_EXPRs.  */
  else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
    cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get a pointer to the expression we are trying to optimize.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    expr_p = &COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == SWITCH_EXPR)
    expr_p = &SWITCH_COND (stmt);
  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
    expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
  else
    expr_p = &TREE_OPERAND (stmt, 1);

  /* It is safe to ignore types here since type checking was already
     done by the hashing and equality routines; we only verify that
     propagation of CACHED_LHS into *EXPR_P is legal when CACHED_LHS
     is an SSA_NAME.  */
  if (cached_lhs
      && (TREE_CODE (cached_lhs) != SSA_NAME
	  || may_propagate_copy (*expr_p, cached_lhs)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " Replaced redundant expr '");
	  print_generic_expr (dump_file, *expr_p, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, cached_lhs, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      opt_stats.num_re++;

#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
		  || is_gimple_min_invariant (cached_lhs));
#endif

      /* Propagating an address (or pointer constant) may expose new
	 symbols needing renaming.  */
      if (TREE_CODE (cached_lhs) == ADDR_EXPR
	  || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
	      && is_gimple_min_invariant (cached_lhs)))
	retval = true;

      propagate_tree_value (expr_p, cached_lhs);
      modify_stmt (stmt);
    }
  return retval;
}
/* STMT, a MODIFY_EXPR, may create certain equivalences in either the
   available-expression table, the nonzero-vars bitmap, or the
   const-and-copies table (via SSA_NAME_VALUE).  Detect and record
   them.  MAY_OPTIMIZE_P gates recording of const/copy equivalences;
   ANN is STMT's statement annotation.  */

static void
record_equivalences_from_stmt (tree stmt,
			       int may_optimize_p,
			       stmt_ann_t ann)
{
  tree lhs = TREE_OPERAND (stmt, 0);
  enum tree_code lhs_code = TREE_CODE (lhs);
  int i;

  if (lhs_code == SSA_NAME)
    {
      tree rhs = TREE_OPERAND (stmt, 1);

      /* Strip away any useless type conversions.  */
      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If the RHS is a constant or another variable that may safely
	 be propagated, register it in the const-and-copies table.  */
      if (may_optimize_p
	  && (TREE_CODE (rhs) == SSA_NAME
	      || is_gimple_min_invariant (rhs)))
	SSA_NAME_VALUE (lhs) = rhs;

      /* alloca never returns zero and the address of a non-weak symbol
	 is never zero.  NOP_EXPRs and CONVERT_EXPRs do not affect this
	 equivalence, so strip them first.  */
      while (TREE_CODE (rhs) == NOP_EXPR
	     || TREE_CODE (rhs) == CONVERT_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (alloca_call_p (rhs)
	  || (TREE_CODE (rhs) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND (rhs, 0))
	      && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
	record_var_is_nonzero (lhs);

      /* IOR of any value with a nonzero value gives a nonzero value,
	 even when the exact result is unknown.  */
      if (TREE_CODE (rhs) == BIT_IOR_EXPR
	  && integer_nonzerop (TREE_OPERAND (rhs, 1)))
	record_var_is_nonzero (lhs);
    }

  /* Look on both sides for pointer dereferences.  A dereferenced
     pointer must be nonnull, so record that fact.  */
  if (flag_delete_null_pointer_checks)
    for (i = 0; i < 2; i++)
      {
	tree t = TREE_OPERAND (stmt, i);

	/* Strip away any COMPONENT_REFs.  */
	while (TREE_CODE (t) == COMPONENT_REF)
	  t = TREE_OPERAND (t, 0);

	/* Now see if this is a pointer dereference.  */
	if (INDIRECT_REF_P (t))
	  {
	    tree op = TREE_OPERAND (t, 0);

	    /* If the pointer is an SSA variable, record the nonzero
	       equivalence and walk up the use-def chain through
	       NOP_EXPR conversions, noting each SSA_NAME known to be
	       nonzero along the way.  */
	    while (TREE_CODE (op) == SSA_NAME)
	      {
		tree def = SSA_NAME_DEF_STMT (op);

		record_var_is_nonzero (op);

		if (def
		    && TREE_CODE (def) == MODIFY_EXPR
		    && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
		  op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
		else
		  break;
	      }
	  }
      }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may expose more redundant loads.  */
  if (!ann->has_volatile_ops
      && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
	  || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      tree new;

      /* If the LHS is a bitfield and the RHS is a constant, widen the
	 constant to fit the LHS's type.  A non-constant RHS cannot be
	 recorded, since that would require representing the widening
	 or narrowing of the RHS.  */
      if (lhs_code == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
	{
	  if (TREE_CONSTANT (rhs))
	    rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
	  else
	    rhs = NULL;

	  /* If widening failed to yield an invariant, we can not use
	     this equivalence.  */
	  if (rhs && ! is_gimple_min_invariant (rhs))
	    rhs = NULL;
	}

      if (rhs)
	{
	  /* Build a new statement with the RHS and LHS exchanged.  */
	  new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);

	  create_ssa_artficial_load_stmt (&(ann->operands), new);

	  /* Finally enter the statement into the available expression
	     table.  */
	  lookup_avail_expr (new, true);
	}
    }
}
/* STMT is a statement and OP_P points to a use operand within STMT.

   Replace *OP_P, if possible, with the value recorded for it in the
   const-and-copies table (SSA_NAME_VALUE).  Return true when the
   replacement may have exposed new symbols (propagation of an address
   or pointer constant), so callers know to rescan the statement's
   operands.  */

static bool
cprop_operand (tree stmt, use_operand_p op_p)
{
  bool may_have_exposed_new_symbols = false;
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or is known to be a copy
     of some other variable, use the recorded value.  VALUE_HANDLEs are
     placeholders from other propagators and are not propagatable.  */
  val = SSA_NAME_VALUE (op);
  if (val && TREE_CODE (val) != VALUE_HANDLE)
    {
      tree op_type, val_type;

      /* Do not change the base variable in the virtual operand tables;
	 that would make it impossible to reconstruct the renamed
	 virtual operand if this statement is later modified.  Only an
	 SSA_NAME with the same virtual variable may replace a virtual
	 operand.  */
      if (!is_gimple_reg (op)
	  && (get_virtual_var (val) != get_virtual_var (op)
	      || TREE_CODE (val) != SSA_NAME))
	return false;

      /* Some operands of ASM_EXPRs (e.g. hard registers) must not be
	 copy propagated.  */
      if (TREE_CODE (stmt) == ASM_EXPR
	  && !may_propagate_copy_into_asm (op))
	return false;

      /* Get the toplevel type of each operand.  */
      op_type = TREE_TYPE (op);
      val_type = TREE_TYPE (val);

      /* While both types are pointers, get the type of the object
	 pointed to.  */
      while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
	{
	  op_type = TREE_TYPE (op_type);
	  val_type = TREE_TYPE (val_type);
	}

      /* Make sure the underlying types match before propagating a
	 constant, by converting the constant to the proper type.  The
	 conversion may yield a non-gimple value, in which case this
	 propagation opportunity is abandoned.  */
      if (TREE_CODE (val) != SSA_NAME)
	{
	  if (!lang_hooks.types_compatible_p (op_type, val_type))
	    {
	      val = fold_convert (TREE_TYPE (op), val);
	      if (!is_gimple_min_invariant (val))
		return false;
	    }
	}
      /* Certain operands may not be copy propagated due to their
	 interaction with exception handling and some GCC extensions.  */
      else if (!may_propagate_copy (op, val))
	return false;

      /* Do not propagate a value defined at a deeper loop depth than
	 the use; doing so could move loop-variant values out of their
	 loops and hurt coalescing.  */
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
	return false;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " Replaced '");
	  print_generic_expr (dump_file, op, dump_flags);
	  fprintf (dump_file, "' with %s '",
		   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
	  print_generic_expr (dump_file, val, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* If VAL is an ADDR_EXPR or a pointer-typed constant, new
	 symbols may have been exposed for SSA renaming.  */
      if (TREE_CODE (val) == ADDR_EXPR
	  || (POINTER_TYPE_P (TREE_TYPE (op))
	      && is_gimple_min_invariant (val)))
	may_have_exposed_new_symbols = true;

      propagate_value (op_p, val);

      /* Note that STMT was modified; the statement will be rescanned
	 and its operands rewritten later.  */
      modify_stmt (stmt);
    }
  return may_have_exposed_new_symbols;
}
/* Const/copy propagate into every SSA use operand of STMT.  Returns
   true when the propagation may have exposed new symbols needing
   renaming.  */

static bool
cprop_into_stmt (tree stmt)
{
  bool exposed_new_symbols = false;
  use_operand_p use_p;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
    if (TREE_CODE (USE_FROM_PTR (use_p)) == SSA_NAME)
      exposed_new_symbols |= cprop_operand (stmt, use_p);

  /* Propagation may have changed an ADDR_EXPR on the RHS; refresh its
     invariant flag.  */
  if (exposed_new_symbols)
    {
      tree rhs = get_rhs (stmt);

      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
	recompute_tree_invarant_for_addr_expr (rhs);
    }

  return exposed_new_symbols;
}
/* Optimize the statement pointed to by iterator SI in block BB.

   We perform:

   1- Global redundancy elimination: expressions computed in this block
      and its dominators are tracked; a repeated computation is
      replaced by the target of the first one.

   2- Simplistic constant and copy propagation via the
      const-and-copies (SSA_NAME_VALUE) mapping.  */

static void
optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
	       block_stmt_iterator si)
{
  stmt_ann_t ann;
  tree stmt;
  bool may_optimize_p;
  bool may_have_exposed_new_symbols = false;

  stmt = bsi_stmt (si);

  get_stmt_operands (stmt);
  ann = stmt_ann (stmt);
  opt_stats.num_stmts++;
  may_have_exposed_new_symbols = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
    }

  /* Const/copy propagate into the statement's use operands.  */
  may_have_exposed_new_symbols = cprop_into_stmt (stmt);

  /* If the statement was modified by propagation, fold it before
     checking for redundant computations.  */
  if (ann->modified)
    {
      /* Try to fold the statement, keeping STMT and ANN up to date
	 since folding may replace the statement.  */
      if (fold_stmt (bsi_stmt_ptr (si)))
	{
	  stmt = bsi_stmt (si);
	  ann = stmt_ann (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " Folded to: ");
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	    }
	}

      /* Propagation and folding may change the set of virtual operands
	 of this statement; indicate it will need to be rescanned and
	 rewritten.  */
      may_have_exposed_new_symbols = true;
    }

  /* Check for redundant computations, but only for side-effect-free
     assignments/returns without volatile operands, and for
     conditionals/switches.  */
  may_optimize_p = (!ann->has_volatile_ops
		    && ((TREE_CODE (stmt) == RETURN_EXPR
			 && TREE_OPERAND (stmt, 0)
			 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
			 && ! (TREE_SIDE_EFFECTS
			       (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
			|| (TREE_CODE (stmt) == MODIFY_EXPR
			    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
			|| TREE_CODE (stmt) == COND_EXPR
			|| TREE_CODE (stmt) == SWITCH_EXPR));

  if (may_optimize_p)
    may_have_exposed_new_symbols
      |= eliminate_redundant_computations (walk_data, stmt, ann);

  /* Record any additional equivalences created by this statement.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    record_equivalences_from_stmt (stmt,
				   may_optimize_p,
				   ann);

  register_definitions_for_stmt (stmt);

  /* If STMT was modified into a control statement with a constant
     condition that determines its taken edge, the CFG can be
     simplified — note that fact.  Also flag blocks whose statements
     no longer throw so EH edges can be cleaned up.  */
  if (ann->modified)
    {
      tree val = NULL;

      if (TREE_CODE (stmt) == COND_EXPR)
	val = COND_EXPR_COND (stmt);
      else if (TREE_CODE (stmt) == SWITCH_EXPR)
	val = SWITCH_COND (stmt);

      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
	cfg_altered = true;

      /* If the statement was simplified so that it can no longer
	 throw, schedule this block for EH edge cleanup.  */
      if (maybe_clean_eh_stmt (stmt))
	{
	  bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " Flagged to clear EH edges.\n");
	}
    }

  /* Queue the statement to be rescanned after the walk, rather than
     rescanning immediately.  */
  if (may_have_exposed_new_symbols)
    VEC_safe_push (tree_on_heap, stmts_to_rescan, bsi_stmt (si));
}
/* Replace the RHS of STMT with NEW_RHS.  If NEW_RHS can be found in
   the available-expression hash table, return the recorded LHS.

   If INSERT is true, the hash table is also updated to account for
   the change made to STMT.  */

static tree
update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
{
  tree cached_lhs = NULL;

  /* Remove the old entry for STMT from the hash table.  */
  if (insert)
    {
      struct expr_hash_elt element;

      initialize_hash_element (stmt, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }

  /* Now update the RHS of the stmt.  */
  TREE_OPERAND (stmt, 1) = new_rhs;

  /* Now lookup the updated statement in the hash table.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  /* When INSERT is true, lookup_avail_expr has now been called twice
     for this statement (once by our caller with the old RHS, once
     here with the new RHS), and the most recent successful insertion
     pushed an entry onto AVAIL_EXPRS_STACK.  Pop that entry so the
     unwind stack records at most one insertion for this statement and
     stays balanced for finalization.  */
  if (insert)
    VEC_pop (tree_on_heap, avail_exprs_stack);

  /* And record that STMT was modified.  */
  modify_stmt (stmt);

  return cached_lhs;
}
/* Search for an existing instance of STMT's expression in the
   AVAIL_EXPRS table.  If found, return its recorded LHS (possibly
   replaced by its const-and-copies value).  Otherwise, when INSERT is
   true, enter STMT into the table (transferring ownership of the
   freshly allocated hash element to the table), push it onto the
   unwind stack, and return NULL_TREE.  */

static tree
lookup_avail_expr (tree stmt, bool insert)
{
  void **slot;
  tree lhs;
  tree temp;
  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));

  lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;

  initialize_hash_element (stmt, lhs, element);

  /* Don't bother remembering constant assignments and copy operations;
     those are handled by the constant/copy propagator.  */
  if (TREE_CODE (element->rhs) == SSA_NAME
      || is_gimple_min_invariant (element->rhs))
    {
      free (element);
      return NULL_TREE;
    }

  /* For an equality test of an SSA_NAME against zero, consult the
     nonzero-vars bitmap: if the name is known nonzero the test is
     decided immediately.  */
  if ((TREE_CODE (element->rhs) == EQ_EXPR
       || TREE_CODE (element->rhs) == NE_EXPR)
      && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
      && integer_zerop (TREE_OPERAND (element->rhs, 1)))
    {
      int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));

      if (bitmap_bit_p (nonzero_vars, indx))
	{
	  tree t = element->rhs;

	  free (element);

	  if (TREE_CODE (t) == EQ_EXPR)
	    return boolean_false_node;
	  else
	    return boolean_true_node;
	}
    }

  /* Finally try to find the expression in the main hash table; a NULL
     slot (NO_INSERT and not present) means not found.  */
  slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
				   (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      free (element);
      return NULL_TREE;
    }

  if (*slot == NULL)
    {
      /* Not previously available: the table takes ownership of
	 ELEMENT (do not free it), and the insertion is noted on the
	 unwind stack.  */
      *slot = (void *) element;
      VEC_safe_push (tree_on_heap, avail_exprs_stack,
		     stmt ? stmt : element->rhs);
      return NULL_TREE;
    }

  /* Extract the LHS of the recorded assignment so it can serve as the
     current definition of another variable.  */
  lhs = ((struct expr_hash_elt *)*slot)->lhs;

  /* If the LHS has a recorded const-and-copies value (and it is not a
     VALUE_HANDLE placeholder), prefer that value.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      temp = SSA_NAME_VALUE (lhs);
      if (temp && TREE_CODE (temp) != VALUE_HANDLE)
	lhs = temp;
    }

  free (element);
  return lhs;
}
/* Given a comparison COND of the form "x <op> bound", compute the
   inclusive range [*LO_P, *HI_P] that COND implies for x.  *INVERTED_P
   is set when the range describes the values x must NOT take
   (NE_EXPR).  Returns nonzero on success; returns zero when the
   operand type is unsuitable (not an integer type with constant
   min/max) or the implied range would be empty.  */

static bool
extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
{
  tree bound = TREE_OPERAND (cond, 1);
  tree type = TREE_TYPE (bound);
  tree lo, hi;
  int inverted = 0;

  /* Only integer types with known constant extreme values are
     handled.  */
  if (TREE_CODE (type) != INTEGER_TYPE
      || TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
      || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
    return 0;

  switch (TREE_CODE (cond))
    {
    case NE_EXPR:
      /* x != bound is the inversion of the single-point range for
	 x == bound.  */
      inverted = 1;
      /* Fall through.  */
    case EQ_EXPR:
      lo = hi = bound;
      break;

    case GE_EXPR:
      lo = bound;
      hi = TYPE_MAX_VALUE (type);
      break;

    case GT_EXPR:
      hi = TYPE_MAX_VALUE (type);
      /* x > TYPE_MAX is an empty range.  */
      if (!tree_int_cst_lt (bound, hi))
	return 0;
      lo = int_const_binop (PLUS_EXPR, bound, integer_one_node, 1);
      break;

    case LE_EXPR:
      hi = bound;
      lo = TYPE_MIN_VALUE (type);
      break;

    case LT_EXPR:
      lo = TYPE_MIN_VALUE (type);
      /* x < TYPE_MIN is an empty range.  */
      if (!tree_int_cst_lt (lo, bound))
	return 0;
      hi = int_const_binop (MINUS_EXPR, bound, integer_one_node, 1);
      break;

    default:
      return 0;
    }

  *hi_p = hi;
  *lo_p = lo;
  *inverted_p = inverted;
  return 1;
}
/* Record a value range implied by condition COND for basic block BB in
   the VRP_DATA table, keyed by the SSA_NAME being constrained.  */

static void
record_range (tree cond, basic_block bb)
{
  enum tree_code code = TREE_CODE (cond);

  /* NE_EXPRs and unordered comparisons are deliberately excluded:
     only <, <=, >, >= and == over integer types are recorded.  */
  if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
       || code == GE_EXPR || code == EQ_EXPR)
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
    {
      struct vrp_hash_elt *vrp_hash_elt;
      struct vrp_element *element;
      varray_type *vrp_records_p;
      void **slot;

      /* Find or create the table entry for the constrained variable.
	 If an entry already exists, the speculative allocation is
	 freed and the existing entry is reused.  */
      vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
      vrp_hash_elt->var = TREE_OPERAND (cond, 0);
      vrp_hash_elt->records = NULL;
      slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);

      if (*slot == NULL)
	*slot = (void *) vrp_hash_elt;
      else
	free (vrp_hash_elt);

      vrp_hash_elt = (struct vrp_hash_elt *) *slot;
      vrp_records_p = &vrp_hash_elt->records;

      /* Record the condition lazily: LOW/HIGH start NULL and are
	 computed on demand by simplify_cond_and_lookup_avail_expr.  */
      element = ggc_alloc (sizeof (struct vrp_element));
      element->low = NULL;
      element->high = NULL;
      element->cond = cond;
      element->bb = bb;

      if (*vrp_records_p == NULL)
	VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");

      VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);

      /* Note the variable on the unwind stack so the record can be
	 removed when we leave this block's dominator subtree.  */
      VEC_safe_push (tree_on_heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
    }
}
static hashval_t
vrp_hash (const void *p)
{
tree var = ((struct vrp_hash_elt *)p)->var;
return SSA_NAME_VERSION (var);
}
static int
vrp_eq (const void *p1, const void *p2)
{
tree var1 = ((struct vrp_hash_elt *)p1)->var;
tree var2 = ((struct vrp_hash_elt *)p2)->var;
return var1 == var2;
}
static hashval_t
avail_expr_hash (const void *p)
{
stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann;
tree rhs = ((struct expr_hash_elt *)p)->rhs;
hashval_t val = 0;
size_t i;
vuse_optype vuses;
val = iterative_hash_expr (rhs, val);
if (!ann)
return val;
vuses = VUSE_OPS (ann);
for (i = 0; i < NUM_VUSES (vuses); i++)
val = iterative_hash_expr (VUSE_OP (vuses, i), val);
return val;
}
/* Table hash callback: return the hash value precomputed and stored
   in the element itself.  */

static hashval_t
real_avail_expr_hash (const void *p)
{
  const struct expr_hash_elt *elt = (const struct expr_hash_elt *) p;

  return elt->hash;
}
static int
avail_expr_eq (const void *p1, const void *p2)
{
stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann;
tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann;
tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
if (rhs1 == rhs2 && ann1 == ann2)
return true;
if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
return false;
if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
|| lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
&& operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
{
vuse_optype ops1 = NULL;
vuse_optype ops2 = NULL;
size_t num_ops1 = 0;
size_t num_ops2 = 0;
size_t i;
if (ann1)
{
ops1 = VUSE_OPS (ann1);
num_ops1 = NUM_VUSES (ops1);
}
if (ann2)
{
ops2 = VUSE_OPS (ann2);
num_ops2 = NUM_VUSES (ops2);
}
if (num_ops1 != num_ops2)
return false;
for (i = 0; i < num_ops1; i++)
if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i))
return false;
gcc_assert (((struct expr_hash_elt *)p1)->hash
== ((struct expr_hash_elt *)p2)->hash);
return true;
}
return false;
}
/* Register every real and virtual definition made by STMT on the
   block-local definitions stack.  */

static void
register_definitions_for_stmt (tree stmt)
{
  tree def;
  ssa_op_iter iter;

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    register_new_def (def, &block_defs_stack);
}