Home | Sort by: relevance | last modified time | path
    Searched defs:Store (Results 1 - 25 of 49) sorted by relevance

1 2

  /src/external/apache2/llvm/dist/llvm/lib/Target/Sparc/
SparcInstrInfo.h 33 Store = (1<<2),
60 /// store to a stack slot, return the virtual or physical register number of
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/Scalar/
LoopLoadElimination.cpp 13 // transformation. The source value of each store then propagated to the user
88 /// Represent a store-to-forwarding candidate.
91 StoreInst *Store;
93 StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store)
94 : Load(Load), Store(Store) {}
96 /// Return true if the dependence from the store to the load has a
101 Value *StorePtr = Store->getPointerOperand();
136 OS << *Cand.Store << " -->\n";
145 /// Check if the store dominates all latches, so as long as there is n
    [all...]
GVNHoist.cpp 33 // 2. geps when corresponding load/store cannot be hoisted.
197 // Records all store instructions candidate for code hoisting.
202 // Insert the Store and a hash number of the store address and the stored
204 void insert(StoreInst *Store, GVN::ValueTable &VN) {
205 if (!Store->isSimple())
207 // Hash the store address and the stored value.
208 Value *Ptr = Store->getPointerOperand();
209 Value *Val = Store->getValueOperand();
210 VNtoStores[{VN.lookupOrAdd(Ptr), VN.lookupOrAdd(Val)}].push_back(Store);
    [all...]
  /src/external/apache2/llvm/dist/clang/lib/StaticAnalyzer/Core/
Store.cpp 1 //===- Store.cpp - Interface for maps from Locations to Values ------------===//
9 // This file defined the types Store and StoreManager.
13 #include "clang/StaticAnalyzer/Core/PathSensitive/Store.h"
46 StoreRef StoreManager::enterStackFrame(Store OldStore,
49 StoreRef Store = StoreRef(OldStore, *this);
55 Store = Bind(Store.getStore(), I.first.castAs<Loc>(), I.second);
57 return Store;
510 Store store,
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/X86/
X86CallFrameOptimization.cpp 403 // We only handle a simple case - a sequence of store instructions that
418 // We know the instruction has a supported store opcode.
504 // Now, iterate through the vector in reverse order, and replace the store to
508 MachineBasicBlock::iterator Store = *Context.ArgStoreVector[Idx];
509 const MachineOperand &PushOp = Store->getOperand(X86::AddrNumOperands);
512 switch (Store->getOpcode()) {
534 Push->cloneMemRefs(MF, *Store);
542 if (Is64Bit && Store->getOpcode() == X86::MOV32mr) {
566 Push->cloneMergedMemRefs(MF, {DefMov, &*Store});
573 Push->cloneMemRefs(MF, *Store);
    [all...]
X86LowerAMXType.cpp 9 /// \file Pass to transform <256 x i32> load/store
14 /// load/store <256 x i32> instruction to AMX load/store. If the bitcast can
15 /// not be combined with load/store, we transform the bitcast to amx load/store
16 /// and <256 x i32> store/load.
190 // store <256 x i32> %13, <256 x i32>* %addr, align 64
213 // store <256 x i32> %13, <256 x i32>* %addr, align 64
225 // transform bitcast to <store, load> instructions.
242 // store <256 x i32> %src, <256 x i32>* %addr, align 6
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/ObjCARC/
ObjCARCContract.cpp 201 StoreInst *Store = nullptr;
208 // Walk down to find the store and the release, which may be in either order.
212 // If we found the store we were looking for and saw the release,
214 if (Store && SawRelease)
217 // Now we know that we have not seen either the store or the release. If I
225 // Otherwise, we check if Inst is a "good" store. Grab the instruction class
236 // If we have seen the store, but not the release...
237 if (Store) {
239 // current position to the store. This implies proving that any
240 // instruction in between Store and the Release conservatively can not us
    [all...]
  /src/external/gpl3/gcc/dist/libsanitizer/sanitizer_common/
sanitizer_stack_store.cpp 40 StackStore::Id StackStore::Store(const StackTrace &trace, uptr *pack) {
131 uptr *StackStore::BlockInfo::Create(StackStore *store) {
135 ptr = reinterpret_cast<uptr *>(store->Map(kBlockSizeBytes, "StackStore"));
141 uptr *StackStore::BlockInfo::GetOrCreate(StackStore *store) {
145 return Create(store);
253 uptr *StackStore::BlockInfo::GetOrUnpack(StackStore *store) {
274 reinterpret_cast<uptr *>(store->Map(kBlockSizeBytes, "StackStoreUnpack"));
295 store->Unmap(ptr, packed_size_aligned);
301 uptr StackStore::BlockInfo::Pack(Compression type, StackStore *store) {
319 reinterpret_cast<u8 *>(store->Map(kBlockSizeBytes, "StackStorePack"))
    [all...]
  /src/external/apache2/llvm/dist/clang/utils/analyzer/
exploded-graph-rewriter.py 209 class Store:
267 'store': None,
277 self.store = Store(json_ps['store']) \
278 if json_ps['store'] is not None else None
661 st = s.store
662 prev_st = prev_s.store if prev_s is not None else None
666 self._dump('<hr /><tr><td align="left"><b>Store: </b>')
675 if s.store.is_different(prev_st)
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/CodeGen/SelectionDAG/
LegalizeTypes.cpp 899 // Emit a store to the stack slot.
900 SDValue Store = DAG.getStore(DAG.getEntryNode(), dl, Op, StackPtr,
903 return DAG.getLoad(DestVT, dl, Store, StackPtr, MachinePointerInfo(), Align);
LegalizeTypesGeneric.cpp 154 // Lower the bit-convert to a store/load from the stack.
170 // Emit a store to the stack slot.
171 SDValue Store = DAG.getStore(DAG.getEntryNode(), dl, InOp, StackPtr, PtrInfo);
174 Lo = DAG.getLoad(NOutVT, dl, Store, StackPtr, PtrInfo, NOutAlign);
182 Hi = DAG.getLoad(NOutVT, dl, Store, StackPtr,
367 // Otherwise, store to a temporary and load out again as the new type.
  /src/external/apache2/llvm/dist/llvm/lib/IR/
Metadata.cpp 117 auto &Store = Context.pImpl->MetadataAsValues;
118 return Store.lookup(MD);
124 auto &Store = Context.pImpl->MetadataAsValues;
127 Store.erase(this->MD);
132 auto *&Entry = Store[MD];
389 auto &Store = V->getType()->getContext().pImpl->ValuesAsMetadata;
390 auto I = Store.find(V);
391 if (I == Store.end())
398 Store.erase(I);
412 auto &Store = Context.pImpl->ValuesAsMetadata
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/Hexagon/MCTargetDesc/
HexagonShuffler.h 87 // Flag whether the insn is a load or a store.
88 bool Load, Store;
94 void setStore(bool f = true) { Store = f; }
104 bool mayStore() const { return Store; }
  /src/external/apache2/llvm/dist/llvm/lib/Target/XCore/
XCoreFrameLowering.cpp 440 auto Store = MI;
441 --Store;
442 XFI->getSpillLabels().push_back(std::make_pair(Store, *it));
  /src/external/apache2/llvm/dist/clang/lib/CodeGen/
CGBuiltin.cpp 375 // Store the result as an outparameter.
423 // Build a plain volatile store.
432 llvm::StoreInst *Store =
434 Store->setVolatile(true);
435 return Store;
597 // read as an i128. The "store" will put the higher-order double in the
959 // Emit a combined atomicrmw load/store operation for the interlocked
974 // Don't store anything.
3595 // Store the frame pointer to the setjmp buffer.
3601 // Store the stack pointer to the setjmp buffer
    [all...]
CGAtomic.cpp 322 /// Does a store of the given IR type modify the full expected width?
337 // For scalars and complexes, check whether the store size of the
387 // This basic block is used to hold the store instruction if the operation
582 llvm::StoreInst *Store = CGF.Builder.CreateStore(LoadVal1, Ptr);
583 Store->setAtomic(Order, Scope);
584 Store->setVolatile(E->isVolatile());
1635 // Okay, store the rvalue in.
1688 // Do the atomic store.
1808 // Store new value in the corresponding memory area.
1901 // Store new value in the corresponding memory area
2044 llvm::StoreInst *store = Builder.CreateStore(intValue, addr); local
    [all...]
  /src/external/apache2/llvm/dist/llvm/include/llvm/ADT/
SmallBitVector.h 48 // A few more bits are used to store the size of the bit set in small mode.
55 // The remaining bits are used to store the actual set in small mode.
671 ArrayRef<uintptr_t> getData(uintptr_t &Store) const {
674 Store = getSmallBits();
675 return makeArrayRef(Store);
723 uintptr_t Store;
725 std::make_pair(V.size(), V.getData(Store)));
  /src/external/apache2/llvm/dist/llvm/lib/Target/AMDGPU/Utils/
AMDGPUBaseInfo.h 282 bool Store;
  /src/external/apache2/llvm/dist/llvm/lib/Target/ARC/
ARCISelLowering.cpp 113 setOperationAction(ISD::STORE, MVT::i32, Legal);
296 SDValue Store =
298 MemOpChains.push_back(Store);
303 // Transform all store nodes into one single node because
304 // all store nodes are independent of each other.
544 SDValue Store =
546 MemOps.push_back(Store);
646 // Create a SelectionDAG node corresponding to a store
654 // Transform all store nodes into one single node because
697 /// target, for a load/store of the specified type
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/ARM/
MVEGatherScatterLowering.cpp 287 // This can be a 32bit load/store scaled by 4, a 16bit load/store scaled by 2,
288 // or a 8bit, 16bit or 32bit load/store scaled by 1
537 Value *Store = tryCreateMaskedScatterOffset(I, Ptr, Builder);
538 if (!Store)
539 Store = tryCreateMaskedScatterBase(I, Ptr, Builder);
540 if (!Store)
544 << *Store << "\n");
546 return Store;
629 Value *Store
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/RISCV/
RISCVISelDAGToDAG.cpp 318 MachineSDNode *Store =
322 CurDAG->setNodeMemRefs(Store, {MemOp->getMemOperand()});
324 ReplaceNode(Node, Store);
355 MachineSDNode *Store =
359 CurDAG->setNodeMemRefs(Store, {MemOp->getMemOperand()});
361 ReplaceNode(Node, Store);
969 Operands.push_back(Node->getOperand(CurOp++)); // Store value.
985 MachineSDNode *Store =
989 CurDAG->setNodeMemRefs(Store, {MemOp->getMemOperand()});
991 ReplaceNode(Node, Store);
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/WebAssembly/
WebAssemblyFastISel.cpp 1242 const auto *Store = cast<StoreInst>(I);
1243 if (Store->isAtomic())
1245 if (!WebAssembly::isDefaultAddressSpace(Store->getPointerAddressSpace()))
1248 Store->getValueOperand()->getType()->isVectorTy())
1252 if (!computeAddress(Store->getPointerOperand(), Addr))
1258 switch (getSimpleType(Store->getValueOperand()->getType())) {
1286 unsigned ValueReg = getRegForValue(Store->getValueOperand());
1290 ValueReg = maskI1Value(ValueReg, Store->getValueOperand());
1294 addLoadStoreOperands(Addr, MIB, createMachineMemOperandFor(Store));
1414 case Instruction::Store
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/Instrumentation/
InstrProfiling.cpp 186 : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
201 Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
226 Instruction *Store;
712 auto *Store = Builder.CreateStore(Count, Addr);
714 PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/Vectorize/
LoadStoreVectorizer.cpp 1 //===- LoadStoreVectorizer.cpp - GPU Load & Store Vectorizer --------------===//
91 #define DEBUG_TYPE "load-store-vectorizer"
169 /// Collects load and store instructions to vectorize.
184 /// Vectorizes the store instructions in Chain.
189 /// Check if this load/store access is misaligned accesses.
205 return "GPU Load and Store Vectorizer";
223 "Vectorize load and Store instructions", false, false)
231 "Vectorize load and store instructions", false, false)
705 // vectorized. Find and store the first such "conflicting" instruction.
716 // We can ignore the alias if the we have a load store pair and the loa
    [all...]
  /src/external/apache2/llvm/dist/clang/include/clang/Analysis/Analyses/
ThreadSafetyTIL.h 1056 /// Store a value to memory.
1057 /// The destination is a pointer to a field, the source is the value to store.
1058 class Store : public SExpr {
1060 Store(SExpr *P, SExpr *V) : SExpr(COP_Store), Dest(P), Source(V) {}
1061 Store(const Store &S, SExpr *P, SExpr *V) : SExpr(S), Dest(P), Source(V) {}
1065 SExpr *destination() { return Dest; } // Address to store to
1068 SExpr *source() { return Source; } // Value to store
1079 typename C::CType compare(const Store* E, C& Cmp) const {
1821 SExpr *condition() { return Condition; } // Address to store t
    [all...]

Completed in 51 milliseconds

1 2