===================================================================
@@ -11821,10 +11821,9 @@ simplify_comparison (enum rtx_code code,

if (paradoxical_subreg_p (inner_op0)
&& GET_CODE (inner_op1) == SUBREG
+ && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
&& (GET_MODE (SUBREG_REG (inner_op0))
== GET_MODE (SUBREG_REG (inner_op1)))
- && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (inner_op0)))
- <= HOST_BITS_PER_WIDE_INT)
&& (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
GET_MODE (SUBREG_REG (inner_op0)))))
&& (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
@@ -13158,7 +13157,7 @@ record_promoted_value (rtx_insn *insn, r
unsigned int regno = REGNO (SUBREG_REG (subreg));
machine_mode mode = GET_MODE (subreg);

- if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
+ if (!HWI_COMPUTABLE_MODE_P (mode))
return;

for (links = LOG_LINKS (insn); links;)
===================================================================
@@ -8455,7 +8455,7 @@ #define REDUCE_BIT_FIELD(expr) (reduce_b
if (modifier == EXPAND_STACK_PARM)
target = 0;
if (TREE_CODE (treeop0) == INTEGER_CST
- && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& TREE_CONSTANT (treeop1))
{
rtx constant_part;
@@ -8478,7 +8478,7 @@ #define REDUCE_BIT_FIELD(expr) (reduce_b
}

else if (TREE_CODE (treeop1) == INTEGER_CST
- && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
+ && HWI_COMPUTABLE_MODE_P (mode)
&& TREE_CONSTANT (treeop0))
{
rtx constant_part;
===================================================================
@@ -268,7 +268,7 @@ update_reg_equal_equiv_notes (rtx_insn *
/* Update equivalency constants. Recall that RTL constants are
sign-extended. */
if (GET_CODE (orig_src) == CONST_INT
- && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (new_mode))
+ && HWI_COMPUTABLE_MODE_P (new_mode))
{
if (INTVAL (orig_src) >= 0 || code == SIGN_EXTEND)
/* Nothing needed. */;
@@ -336,7 +336,7 @@ combine_set_extension (ext_cand *cand, r
/* Merge constants by directly moving the constant into the register under
some conditions. Recall that RTL constants are sign-extended. */
if (GET_CODE (orig_src) == CONST_INT
- && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (cand->mode))
+ && HWI_COMPUTABLE_MODE_P (cand->mode))
{
if (INTVAL (orig_src) >= 0 || cand->code == SIGN_EXTEND)
new_set = gen_rtx_SET (new_reg, orig_src);
===================================================================
@@ -5782,7 +5782,7 @@ low_bitmask_len (machine_mode mode, unsi
{
if (mode != VOIDmode)
{
- if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
+ if (!HWI_COMPUTABLE_MODE_P (mode))
return -1;
m &= GET_MODE_MASK (mode);
}
===================================================================
@@ -62,7 +62,7 @@ neg_const_int (machine_mode mode, const_
{
unsigned HOST_WIDE_INT val = -UINTVAL (i);

- if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
+ if (!HWI_COMPUTABLE_MODE_P (mode)
&& val == UINTVAL (i))
return simplify_const_unary_operation (NEG, mode, CONST_CAST_RTX (i),
mode);
@@ -3351,7 +3351,8 @@ simplify_binary_operation_1 (enum rtx_co
if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
return op0;
/* Rotating ~0 always results in ~0. */
- if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
+ if (CONST_INT_P (trueop0)
+ && HWI_COMPUTABLE_MODE_P (mode)
&& UINTVAL (trueop0) == GET_MODE_MASK (mode)
&& ! side_effects_p (op1))
return op0;
@@ -3433,7 +3434,7 @@ simplify_binary_operation_1 (enum rtx_co
goto canonicalize_shift;

case SMIN:
- if (width <= HOST_BITS_PER_WIDE_INT
+ if (HWI_COMPUTABLE_MODE_P (mode)
&& mode_signbit_p (mode, trueop1)
&& ! side_effects_p (op0))
return op1;
@@ -3445,7 +3446,7 @@ simplify_binary_operation_1 (enum rtx_co
break;

case SMAX:
- if (width <= HOST_BITS_PER_WIDE_INT
+ if (HWI_COMPUTABLE_MODE_P (mode)
&& CONST_INT_P (trueop1)
&& (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
&& ! side_effects_p (op0))