@@ -0,0 +1,36 @@
+/* PR tree-optimization/72835. */
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+struct struct_1 {
+ unsigned int m1 : 6 ;
+ unsigned int m2 : 24 ;
+ unsigned int m3 : 6 ;
+};
+
+unsigned short var_32 = 0x2d10;
+
+struct struct_1 s1;
+
+void init ()
+{
+ s1.m1 = 4;
+ s1.m2 = 0x7ca4b8;
+ s1.m3 = 24;
+}
+
+void foo ()
+{
+ unsigned int c
+ = ((unsigned int) s1.m2) * (-((unsigned int) s1.m3))
+ + (var_32) * (-((unsigned int) (s1.m1)));
+ if (c != 4098873984)
+ __builtin_abort ();
+}
+
+int main ()
+{
+ init ();
+ foo ();
+ return 0;
+}
@@ -1039,7 +1039,7 @@ eliminate_using_constants (enum tree_code opcode,
static void linearize_expr_tree (vec<operand_entry *> *, gimple *,
- bool, bool);
+ bool, bool, bool *);
/* Structure for tracking and counting operands. */
struct oecount {
@@ -1183,7 +1183,7 @@ propagate_op_to_single_use (tree op, gimple *stmt, tree *def)
is updated if there is only one operand but no operation left. */
static void
-zero_one_operation (tree *def, enum tree_code opcode, tree op)
+zero_one_operation (tree *def, enum tree_code opcode, tree op, bool ops_changed)
{
gimple *stmt = SSA_NAME_DEF_STMT (*def);
@@ -1193,6 +1193,27 @@ zero_one_operation (tree *def, enum tree_code opcode, tree op)
if (opcode == MULT_EXPR)
{
+ /* In this case, the result in *def will be different from
+ what it was before. Therefore, to avoid keeping an SSA name
+ whose range_info and debug info reflect the old operation,
+ create a new SSA name and use it instead (PR72835). */
+ if (ops_changed)
+ {
+ imm_use_iterator iter;
+ use_operand_p use_p;
+ gimple *use_stmt;
+ tree lhs = gimple_assign_lhs (stmt);
+ tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, new_lhs);
+ update_stmt (use_stmt);
+ }
+ if (*def == lhs)
+ *def = new_lhs;
+ gimple_set_lhs (stmt, new_lhs);
+ }
if (stmt_is_power_of_op (stmt, op))
{
if (decrement_power (stmt) == 1)
@@ -1241,6 +1262,26 @@ zero_one_operation (tree *def, enum tree_code opcode, tree op)
&& has_single_use (gimple_assign_rhs2 (stmt)))
{
gimple *stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+ /* In this case the result in the op will be
+ different from what it was before. Therefore, to
+ avoid keeping an SSA name whose range_info and debug
+ info reflect the old operation, create a new SSA
+ name and use it instead (PR72835). */
+ if (ops_changed)
+ {
+ imm_use_iterator iter;
+ use_operand_p use_p;
+ gimple *use_stmt;
+ tree lhs = gimple_assign_lhs (stmt2);
+ tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, new_lhs);
+ update_stmt (use_stmt);
+ }
+ gimple_set_lhs (stmt2, new_lhs);
+ }
if (stmt_is_power_of_op (stmt2, op))
{
if (decrement_power (stmt2) == 1)
@@ -1453,7 +1494,8 @@ build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
static bool
undistribute_ops_list (enum tree_code opcode,
- vec<operand_entry *> *ops, struct loop *loop)
+ vec<operand_entry *> *ops, struct loop *loop,
+ bool *ops_changed)
{
unsigned int length = ops->length ();
operand_entry *oe1;
@@ -1521,7 +1563,7 @@ undistribute_ops_list (enum tree_code opcode,
oedef = SSA_NAME_DEF_STMT ((*ops)[i]->op);
oecode = gimple_assign_rhs_code (oedef);
linearize_expr_tree (&subops[i], oedef,
- associative_tree_code (oecode), false);
+ associative_tree_code (oecode), false, ops_changed);
FOR_EACH_VEC_ELT (subops[i], j, oe1)
{
@@ -1617,7 +1659,7 @@ undistribute_ops_list (enum tree_code opcode,
fprintf (dump_file, "Building (");
print_generic_expr (dump_file, oe1->op, 0);
}
- zero_one_operation (&oe1->op, c->oecode, c->op);
+ zero_one_operation (&oe1->op, c->oecode, c->op, *ops_changed);
EXECUTE_IF_SET_IN_BITMAP (candidates2, first+1, i, sbi0)
{
gimple *sum;
@@ -1627,7 +1669,7 @@ undistribute_ops_list (enum tree_code opcode,
fprintf (dump_file, " + ");
print_generic_expr (dump_file, oe2->op, 0);
}
- zero_one_operation (&oe2->op, c->oecode, c->op);
+ zero_one_operation (&oe2->op, c->oecode, c->op, *ops_changed);
sum = build_and_add_sum (TREE_TYPE (oe1->op),
oe1->op, oe2->op, opcode);
oe2->op = build_zero_cst (TREE_TYPE (oe2->op));
@@ -4456,12 +4498,16 @@ acceptable_pow_call (gcall *stmt, tree *base, HOST_WIDE_INT *exponent)
}
/* Try to derive and add operand entry for OP to *OPS. Return false if
- unsuccessful. */
+ unsuccessful. If we changed the operands such that the (intermediate)
+ results can be different (as in the case of NEGATE_EXPR converted to
+ multiplication by -1), set ops_changed to true so that we will not
+ reuse the SSA (PR72835). */
static bool
try_special_add_to_ops (vec<operand_entry *> *ops,
enum tree_code code,
- tree op, gimple* def_stmt)
+ tree op, gimple* def_stmt,
+ bool *ops_changed)
{
tree base = NULL_TREE;
HOST_WIDE_INT exponent = 0;
@@ -4492,6 +4538,8 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
add_to_ops_vec (ops, rhs1);
add_to_ops_vec (ops, cst);
gimple_set_visited (def_stmt, true);
+ if (ops_changed)
+ *ops_changed = true;
return true;
}
@@ -4499,11 +4547,12 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
}
/* Recursively linearize a binary expression that is the RHS of STMT.
- Place the operands of the expression tree in the vector named OPS. */
+ Place the operands of the expression tree in the vector named OPS.
+ Set *OPS_CHANGED to TRUE if try_special_add_to_ops changed the operands. */
static void
linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
- bool is_associative, bool set_visited)
+ bool is_associative, bool set_visited, bool *ops_changed)
{
tree binlhs = gimple_assign_rhs1 (stmt);
tree binrhs = gimple_assign_rhs2 (stmt);
@@ -4547,10 +4596,12 @@ linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
if (!binrhsisreassoc)
{
- if (!try_special_add_to_ops (ops, rhscode, binrhs, binrhsdef))
+ if (!try_special_add_to_ops (ops, rhscode, binrhs,
+ binrhsdef, ops_changed))
add_to_ops_vec (ops, binrhs);
- if (!try_special_add_to_ops (ops, rhscode, binlhs, binlhsdef))
+ if (!try_special_add_to_ops (ops, rhscode, binlhs,
+ binlhsdef, ops_changed))
add_to_ops_vec (ops, binlhs);
return;
@@ -4588,9 +4639,9 @@ linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
|| !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
rhscode, loop));
linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
- is_associative, set_visited);
+ is_associative, set_visited, ops_changed);
- if (!try_special_add_to_ops (ops, rhscode, binrhs, binrhsdef))
+ if (!try_special_add_to_ops (ops, rhscode, binrhs, binrhsdef, ops_changed))
add_to_ops_vec (ops, binrhs);
}
@@ -5322,12 +5373,20 @@ reassociate_bb (basic_block bb)
if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
continue;
+ bool ops_changed = false;
gimple_set_visited (stmt, true);
- linearize_expr_tree (&ops, stmt, true, true);
+ linearize_expr_tree (&ops, stmt, true, true, NULL);
ops.qsort (sort_by_operand_rank);
optimize_ops_list (rhs_code, &ops);
+ /* When undistribute_ops_list factors out a NEGATE_EXPR, the
+ operands of the reassociated stmts will be different from
+ what they were before. In that case, to avoid keeping SSA
+ names whose range_info and debug info reflect the old
+ operation, rewrite_expr_tree has to be called with
+ changed = true (PR72835). */
if (undistribute_ops_list (rhs_code, &ops,
- loop_containing_stmt (stmt)))
+ loop_containing_stmt (stmt),
+ &ops_changed))
{
ops.qsort (sort_by_operand_rank);
optimize_ops_list (rhs_code, &ops);
@@ -5415,7 +5474,8 @@ reassociate_bb (basic_block bb)
new_lhs = rewrite_expr_tree (stmt, 0, ops,
powi_result != NULL
- || negate_result);
+ || negate_result
+ || ops_changed);
}
/* If we combined some repeated factors into a