Diffstat (limited to 'llvm/lib/Target')
-rw-r--r--  llvm/lib/Target/ARM/ARMISelDAGToDAG.cpp        2
-rw-r--r--  llvm/lib/Target/ARM/ARMISelLowering.cpp        8
-rw-r--r--  llvm/lib/Target/CellSPU/SPUISelLowering.cpp    4
-rw-r--r--  llvm/lib/Target/IA64/IA64ISelDAGToDAG.cpp      4
-rw-r--r--  llvm/lib/Target/PowerPC/PPCISelDAGToDAG.cpp    2
-rw-r--r--  llvm/lib/Target/PowerPC/PPCISelLowering.cpp    6
-rw-r--r--  llvm/lib/Target/TargetSelectionDAG.td         58
7 files changed, 42 insertions, 42 deletions
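Illustrative sketch (not part of the commit): after this rename, loads and stores both expose the type actually read from or written to memory through a single getMemoryVT() accessor, so target code that handles the two node kinds together no longer needs separate getLoadedVT()/getStoredVT() paths. The helper name getMemVT below is hypothetical; the types and accessors are the ones used in the diff.

    // Hypothetical helper, assuming the post-rename SelectionDAG API shown in this diff.
    #include "llvm/CodeGen/SelectionDAGNodes.h"
    using namespace llvm;

    static bool getMemVT(SDNode *N, MVT::ValueType &VT) {
      if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N)) {
        VT = LD->getMemoryVT();   // formerly LD->getLoadedVT()
        return true;
      }
      if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
        VT = ST->getMemoryVT();   // formerly ST->getStoredVT()
        return true;
      }
      return false;               // not a memory node
    }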
diff --git a/llvm/lib/Target/ARM/ARMISelDAGToDAG.cpp b/llvm/lib/Target/ARM/ARMISelDAGToDAG.cpp
index 9b6825d1b2d..ff299006a61 100644
--- a/llvm/lib/Target/ARM/ARMISelDAGToDAG.cpp
+++ b/llvm/lib/Target/ARM/ARMISelDAGToDAG.cpp
@@ -659,7 +659,7 @@ SDNode *ARMDAGToDAGISel::Select(SDOperand Op) {
case ISD::LOAD: {
LoadSDNode *LD = cast<LoadSDNode>(Op);
ISD::MemIndexedMode AM = LD->getAddressingMode();
- MVT::ValueType LoadedVT = LD->getLoadedVT();
+ MVT::ValueType LoadedVT = LD->getMemoryVT();
if (AM != ISD::UNINDEXED) {
SDOperand Offset, AMOpc;
bool isPre = (AM == ISD::PRE_INC) || (AM == ISD::PRE_DEC);
diff --git a/llvm/lib/Target/ARM/ARMISelLowering.cpp b/llvm/lib/Target/ARM/ARMISelLowering.cpp
index d62e4f473b4..a0278a4b62f 100644
--- a/llvm/lib/Target/ARM/ARMISelLowering.cpp
+++ b/llvm/lib/Target/ARM/ARMISelLowering.cpp
@@ -1729,11 +1729,11 @@ ARMTargetLowering::getPreIndexedAddressParts(SDNode *N, SDOperand &Base,
bool isSEXTLoad = false;
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N)) {
Ptr = LD->getBasePtr();
- VT = LD->getLoadedVT();
+ VT = LD->getMemoryVT();
isSEXTLoad = LD->getExtensionType() == ISD::SEXTLOAD;
} else if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
Ptr = ST->getBasePtr();
- VT = ST->getStoredVT();
+ VT = ST->getMemoryVT();
} else
return false;
@@ -1762,10 +1762,10 @@ bool ARMTargetLowering::getPostIndexedAddressParts(SDNode *N, SDNode *Op,
SDOperand Ptr;
bool isSEXTLoad = false;
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N)) {
- VT = LD->getLoadedVT();
+ VT = LD->getMemoryVT();
isSEXTLoad = LD->getExtensionType() == ISD::SEXTLOAD;
} else if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
- VT = ST->getStoredVT();
+ VT = ST->getMemoryVT();
} else
return false;
diff --git a/llvm/lib/Target/CellSPU/SPUISelLowering.cpp b/llvm/lib/Target/CellSPU/SPUISelLowering.cpp
index 33261a607b1..c7d7f978216 100644
--- a/llvm/lib/Target/CellSPU/SPUISelLowering.cpp
+++ b/llvm/lib/Target/CellSPU/SPUISelLowering.cpp
@@ -542,7 +542,7 @@ static SDOperand
LowerLOAD(SDOperand Op, SelectionDAG &DAG, const SPUSubtarget *ST) {
LoadSDNode *LN = cast<LoadSDNode>(Op);
SDOperand the_chain = LN->getChain();
- MVT::ValueType VT = LN->getLoadedVT();
+ MVT::ValueType VT = LN->getMemoryVT();
MVT::ValueType OpVT = Op.Val->getValueType(0);
ISD::LoadExtType ExtType = LN->getExtensionType();
unsigned alignment = LN->getAlignment();
@@ -652,7 +652,7 @@ LowerSTORE(SDOperand Op, SelectionDAG &DAG, const SPUSubtarget *ST) {
StoreSDNode *SN = cast<StoreSDNode>(Op);
SDOperand Value = SN->getValue();
MVT::ValueType VT = Value.getValueType();
- MVT::ValueType StVT = (!SN->isTruncatingStore() ? VT : SN->getStoredVT());
+ MVT::ValueType StVT = (!SN->isTruncatingStore() ? VT : SN->getMemoryVT());
MVT::ValueType PtrVT = DAG.getTargetLoweringInfo().getPointerTy();
unsigned alignment = SN->getAlignment();
diff --git a/llvm/lib/Target/IA64/IA64ISelDAGToDAG.cpp b/llvm/lib/Target/IA64/IA64ISelDAGToDAG.cpp
index af31aaf555e..338733a8c44 100644
--- a/llvm/lib/Target/IA64/IA64ISelDAGToDAG.cpp
+++ b/llvm/lib/Target/IA64/IA64ISelDAGToDAG.cpp
@@ -466,7 +466,7 @@ SDNode *IA64DAGToDAGISel::Select(SDOperand Op) {
AddToISelQueue(Chain);
AddToISelQueue(Address);
- MVT::ValueType TypeBeingLoaded = LD->getLoadedVT();
+ MVT::ValueType TypeBeingLoaded = LD->getMemoryVT();
unsigned Opc;
switch (TypeBeingLoaded) {
default:
@@ -528,7 +528,7 @@ SDNode *IA64DAGToDAGISel::Select(SDOperand Op) {
case MVT::f64: Opc = IA64::STF8; break;
}
} else { // Truncating store
- switch(ST->getStoredVT()) {
+ switch(ST->getMemoryVT()) {
default: assert(0 && "unknown type in truncstore");
case MVT::i8: Opc = IA64::ST1; break;
case MVT::i16: Opc = IA64::ST2; break;
diff --git a/llvm/lib/Target/PowerPC/PPCISelDAGToDAG.cpp b/llvm/lib/Target/PowerPC/PPCISelDAGToDAG.cpp
index df1d9b5d25b..09fef251964 100644
--- a/llvm/lib/Target/PowerPC/PPCISelDAGToDAG.cpp
+++ b/llvm/lib/Target/PowerPC/PPCISelDAGToDAG.cpp
@@ -921,7 +921,7 @@ SDNode *PPCDAGToDAGISel::Select(SDOperand Op) {
case ISD::LOAD: {
// Handle preincrement loads.
LoadSDNode *LD = cast<LoadSDNode>(Op);
- MVT::ValueType LoadedVT = LD->getLoadedVT();
+ MVT::ValueType LoadedVT = LD->getMemoryVT();
// Normal loads are handled by code generated from the .td file.
if (LD->getAddressingMode() != ISD::PRE_INC)
diff --git a/llvm/lib/Target/PowerPC/PPCISelLowering.cpp b/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
index 1c8c0a6c95f..905236ab6e1 100644
--- a/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
+++ b/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
@@ -963,12 +963,12 @@ bool PPCTargetLowering::getPreIndexedAddressParts(SDNode *N, SDOperand &Base,
MVT::ValueType VT;
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N)) {
Ptr = LD->getBasePtr();
- VT = LD->getLoadedVT();
+ VT = LD->getMemoryVT();
} else if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ST = ST;
Ptr = ST->getBasePtr();
- VT = ST->getStoredVT();
+ VT = ST->getMemoryVT();
} else
return false;
@@ -992,7 +992,7 @@ bool PPCTargetLowering::getPreIndexedAddressParts(SDNode *N, SDOperand &Base,
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N)) {
// PPC64 doesn't have lwau, but it does have lwaux. Reject preinc load of
// sext i32 to i64 when addr mode is r+i.
- if (LD->getValueType(0) == MVT::i64 && LD->getLoadedVT() == MVT::i32 &&
+ if (LD->getValueType(0) == MVT::i64 && LD->getMemoryVT() == MVT::i32 &&
LD->getExtensionType() == ISD::SEXTLOAD &&
isa<ConstantSDNode>(Offset))
return false;
diff --git a/llvm/lib/Target/TargetSelectionDAG.td b/llvm/lib/Target/TargetSelectionDAG.td
index 2560d86f52d..a31ef2d3681 100644
--- a/llvm/lib/Target/TargetSelectionDAG.td
+++ b/llvm/lib/Target/TargetSelectionDAG.td
@@ -444,42 +444,42 @@ def extloadi1 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def extloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def extloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def extloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
def extloadf32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::f32;
+ LD->getMemoryVT() == MVT::f32;
return false;
}]>;
def extloadf64 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::f64;
+ LD->getMemoryVT() == MVT::f64;
return false;
}]>;
@@ -487,28 +487,28 @@ def sextloadi1 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def sextloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def sextloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def sextloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
@@ -516,28 +516,28 @@ def zextloadi1 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def zextloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def zextloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def zextloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
@@ -554,35 +554,35 @@ def store : PatFrag<(ops node:$val, node:$ptr),
def truncstorei8 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstorei16 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstorei32 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstoref32 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstoref64 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::f64 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f64 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
@@ -603,7 +603,7 @@ def pre_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i1;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i1;
}
return false;
}]>;
@@ -612,7 +612,7 @@ def pre_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8;
}
return false;
}]>;
@@ -621,7 +621,7 @@ def pre_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16;
}
return false;
}]>;
@@ -630,7 +630,7 @@ def pre_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32;
}
return false;
}]>;
@@ -639,7 +639,7 @@ def pre_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32;
}
return false;
}]>;
@@ -659,7 +659,7 @@ def post_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i1;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i1;
}
return false;
}]>;
@@ -668,7 +668,7 @@ def post_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8;
}
return false;
}]>;
@@ -677,7 +677,7 @@ def post_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16;
}
return false;
}]>;
@@ -686,7 +686,7 @@ def post_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32;
}
return false;
}]>;
@@ -695,7 +695,7 @@ def post_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset),
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32;
}
return false;
}]>;