Diffstat (limited to 'gcc/recog.c')
-rw-r--r--  gcc/recog.c  54
1 file changed, 27 insertions(+), 27 deletions(-)
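The change is mechanical: each open-coded GET_CODE (x) == MEM test becomes the
MEM_P predicate macro, with no change in behavior. For reference, the macro's
definition in gcc/rtl.h is a thin wrapper over the same comparison (reproduced
from memory, so treat the exact spelling as an approximation):

    /* gcc/rtl.h (approximate): a named predicate over the rtx code.  */
    #define MEM_P(X) (GET_CODE (X) == MEM)

The predicate form is shorter, harder to mistype, and easier to grep for than
the spelled-out comparison.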
diff --git a/gcc/recog.c b/gcc/recog.c
index ad2dd9a49cd..2d9e472d6cf 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -233,7 +233,7 @@ validate_change (rtx object, rtx *loc, rtx new, int in_group)
changes[num_changes].loc = loc;
changes[num_changes].old = old;
- if (object && GET_CODE (object) != MEM)
+ if (object && !MEM_P (object))
{
/* Set INSN_CODE to force rerecognition of insn. Save old code in
case invalid. */
@@ -338,7 +338,7 @@ apply_change_group (void)
if (object == 0 || object == last_validated)
continue;
- if (GET_CODE (object) == MEM)
+ if (MEM_P (object))
{
if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
break;
@@ -433,7 +433,7 @@ cancel_changes (int num)
for (i = num_changes - 1; i >= num; i--)
{
*changes[i].loc = changes[i].old;
- if (changes[i].object && GET_CODE (changes[i].object) != MEM)
+ if (changes[i].object && !MEM_P (changes[i].object))
INSN_CODE (changes[i].object) = changes[i].old_code;
}
num_changes = num;
@@ -586,7 +586,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
likely to be an insertion operation; if it was, nothing bad will
happen, we might just fail in some cases). */
- if (GET_CODE (XEXP (x, 0)) == MEM
+ if (MEM_P (XEXP (x, 0))
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& GET_CODE (XEXP (x, 2)) == CONST_INT
&& !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
@@ -942,7 +942,7 @@ general_operand (rtx op, enum machine_mode mode)
#ifdef INSN_SCHEDULING
/* On machines that have insn scheduling, we want all memory
references to be explicit, so outlaw paradoxical SUBREGs. */
- if (GET_CODE (sub) == MEM
+ if (MEM_P (sub)
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
return 0;
#endif
@@ -953,7 +953,7 @@ general_operand (rtx op, enum machine_mode mode)
??? This is a kludge. */
if (!reload_completed && SUBREG_BYTE (op) != 0
- && GET_CODE (sub) == MEM)
+ && MEM_P (sub))
return 0;
/* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
@@ -1039,7 +1039,7 @@ register_operand (rtx op, enum machine_mode mode)
(Ideally, (SUBREG (MEM)...) should not exist after reload,
but currently it does result from (SUBREG (REG)...) where the
reg went on the stack.) */
- if (! reload_completed && GET_CODE (sub) == MEM)
+ if (! reload_completed && MEM_P (sub))
return general_operand (op, mode);
#ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1202,7 +1202,7 @@ nonmemory_operand (rtx op, enum machine_mode mode)
(Ideally, (SUBREG (MEM)...) should not exist after reload,
but currently it does result from (SUBREG (REG)...) where the
reg went on the stack.) */
- if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
+ if (! reload_completed && MEM_P (SUBREG_REG (op)))
return general_operand (op, mode);
op = SUBREG_REG (op);
}
@@ -1229,7 +1229,7 @@ push_operand (rtx op, enum machine_mode mode)
rounded_size = PUSH_ROUNDING (rounded_size);
#endif
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return 0;
if (mode != VOIDmode && GET_MODE (op) != mode)
@@ -1269,7 +1269,7 @@ push_operand (rtx op, enum machine_mode mode)
int
pop_operand (rtx op, enum machine_mode mode)
{
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return 0;
if (mode != VOIDmode && GET_MODE (op) != mode)
@@ -1312,7 +1312,7 @@ memory_operand (rtx op, enum machine_mode mode)
if (! reload_completed)
/* Note that no SUBREG is a memory operand before end of reload pass,
because (SUBREG (MEM...)) forces reloading into a register. */
- return GET_CODE (op) == MEM && general_operand (op, mode);
+ return MEM_P (op) && general_operand (op, mode);
if (mode != VOIDmode && GET_MODE (op) != mode)
return 0;
@@ -1321,7 +1321,7 @@ memory_operand (rtx op, enum machine_mode mode)
if (GET_CODE (inner) == SUBREG)
inner = SUBREG_REG (inner);
- return (GET_CODE (inner) == MEM && general_operand (op, mode));
+ return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
@@ -1332,7 +1332,7 @@ indirect_operand (rtx op, enum machine_mode mode)
{
/* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
if (! reload_completed
- && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
+ && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
{
int offset = SUBREG_BYTE (op);
rtx inner = SUBREG_REG (op);
@@ -1352,7 +1352,7 @@ indirect_operand (rtx op, enum machine_mode mode)
&& general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
}
- return (GET_CODE (op) == MEM
+ return (MEM_P (op)
&& memory_operand (op, mode)
&& general_operand (XEXP (op, 0), Pmode));
}
@@ -1653,7 +1653,7 @@ asm_operand_ok (rtx op, const char *constraint)
Match any memory and hope things are resolved after reload. */
- if (GET_CODE (op) == MEM
+ if (MEM_P (op)
&& (1
|| GET_CODE (XEXP (op, 0)) == PRE_DEC
|| GET_CODE (XEXP (op, 0)) == POST_DEC))
@@ -1661,7 +1661,7 @@ asm_operand_ok (rtx op, const char *constraint)
break;
case '>':
- if (GET_CODE (op) == MEM
+ if (MEM_P (op)
&& (1
|| GET_CODE (XEXP (op, 0)) == PRE_INC
|| GET_CODE (XEXP (op, 0)) == POST_INC))
@@ -1856,7 +1856,7 @@ find_constant_term_loc (rtx *p)
int
offsettable_memref_p (rtx op)
{
- return ((GET_CODE (op) == MEM)
+ return ((MEM_P (op))
&& offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}
@@ -1866,7 +1866,7 @@ offsettable_memref_p (rtx op)
int
offsettable_nonstrict_memref_p (rtx op)
{
- return ((GET_CODE (op) == MEM)
+ return ((MEM_P (op))
&& offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
@@ -2432,7 +2432,7 @@ constrain_operands (int strict)
case 'm':
/* Memory operands must be valid, to the extent
required by STRICT. */
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
{
if (strict > 0
&& !strict_memory_address_p (GET_MODE (op),
@@ -2453,14 +2453,14 @@ constrain_operands (int strict)
break;
case '<':
- if (GET_CODE (op) == MEM
+ if (MEM_P (op)
&& (GET_CODE (XEXP (op, 0)) == PRE_DEC
|| GET_CODE (XEXP (op, 0)) == POST_DEC))
win = 1;
break;
case '>':
- if (GET_CODE (op) == MEM
+ if (MEM_P (op)
&& (GET_CODE (XEXP (op, 0)) == PRE_INC
|| GET_CODE (XEXP (op, 0)) == POST_INC))
win = 1;
@@ -2512,10 +2512,10 @@ constrain_operands (int strict)
break;
case 'V':
- if (GET_CODE (op) == MEM
+ if (MEM_P (op)
&& ((strict > 0 && ! offsettable_memref_p (op))
|| (strict < 0
- && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
+ && !(CONSTANT_P (op) || MEM_P (op)))
|| (reload_in_progress
&& !(REG_P (op)
&& REGNO (op) >= FIRST_PSEUDO_REGISTER))))
@@ -2527,7 +2527,7 @@ constrain_operands (int strict)
|| (strict == 0 && offsettable_nonstrict_memref_p (op))
/* Before reload, accept what reload can handle. */
|| (strict < 0
- && (CONSTANT_P (op) || GET_CODE (op) == MEM))
+ && (CONSTANT_P (op) || MEM_P (op)))
/* During reload, accept a pseudo */
|| (reload_in_progress && REG_P (op)
&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
@@ -2557,7 +2557,7 @@ constrain_operands (int strict)
else if (EXTRA_MEMORY_CONSTRAINT (c, p)
/* Every memory operand can be reloaded to fit. */
- && ((strict < 0 && GET_CODE (op) == MEM)
+ && ((strict < 0 && MEM_P (op))
/* Before reload, accept what reload can turn
into mem. */
|| (strict < 0 && CONSTANT_P (op))
@@ -2598,7 +2598,7 @@ constrain_operands (int strict)
if (earlyclobber[eopno]
&& REG_P (recog_data.operand[eopno]))
for (opno = 0; opno < recog_data.n_operands; opno++)
- if ((GET_CODE (recog_data.operand[opno]) == MEM
+ if ((MEM_P (recog_data.operand[opno])
|| recog_data.operand_type[opno] != OP_OUT)
&& opno != eopno
/* Ignore things like match_operator operands. */
@@ -3309,7 +3309,7 @@ store_data_bypass_p (rtx out_insn, rtx in_insn)
if (! in_set)
abort ();
- if (GET_CODE (SET_DEST (in_set)) != MEM)
+ if (!MEM_P (SET_DEST (in_set)))
return false;
out_set = single_set (out_insn);
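As a minimal standalone sketch of the pattern this commit applies (everything
below is a stand-in for illustration, not GCC's real rtl machinery; only the
shape of the MEM_P macro mirrors rtl.h):

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-in rtx types, just enough to exercise the predicate pattern.  */
    enum rtx_code { REG, MEM, CONST_INT };
    struct rtx_def { enum rtx_code code; };
    typedef struct rtx_def *rtx;

    #define GET_CODE(X) ((X)->code)
    #define MEM_P(X) (GET_CODE (X) == MEM)

    /* Before the commit: if (GET_CODE (op) == MEM) ...
       After the commit:  if (MEM_P (op)) ...  -- same test, clearer intent.  */
    static bool
    is_memory_ref (rtx op)
    {
      return MEM_P (op);
    }

    int
    main (void)
    {
      struct rtx_def mem = { MEM }, reg = { REG };
      printf ("%d %d\n", is_memory_ref (&mem), is_memory_ref (&reg)); /* prints 1 0 */
      return 0;
    }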