#include <iostream>
using namespace std;

int main()
{
    Dictionary x;
    x.Add("omar", "*****@*****.**");
    x.Add("hassan", "*****@*****.**");
    cout << "Before Deleting Hassan" << endl;
    x.printentries();
    x.Remove("hassan");
    cout << "After Deleting Hassan" << endl;
    x.printentries();
    return 0;
}
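// A minimal sketch of the Dictionary interface the demo above assumes.
// Only Add/Remove/printentries appear at the call sites; the std::map
// storage and member layout here are hypothetical, not the original class.
#include <iostream>
#include <map>
#include <string>

class Dictionary
{
    std::map<std::string, std::string> entries;
public:
    void Add(const std::string& key, const std::string& value)
    {
        entries[key] = value;
    }
    void Remove(const std::string& key)
    {
        entries.erase(key);
    }
    void printentries() const
    {
        for (const auto& e : entries)
            std::cout << e.first << " : " << e.second << std::endl;
    }
};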
void CollectSubType(Ptr<ParsingTreeObject> type, Dictionary<ParsingTreeNode*, TypeSymbol*>& nodeTypeMap)
{
    Ptr<ParsingTreeToken> name = type->GetMember(L"name").Cast<ParsingTreeToken>();
    if (name && !subTypes.Keys().Contains(name->GetValue()))
    {
        Ptr<TypeSymbol> symbol = new TypeSymbol;
        symbol->typeName = name->GetValue();
        symbol->parent = this;
        subTypes.Add(symbol->typeName, symbol);
        symbol->CollectSubTypes(type->GetMember(L"subTypes").Cast<ParsingTreeArray>(), nodeTypeMap);
        nodeTypeMap.Add(type.Obj(), symbol.Obj());
    }
}
ShaderCompilerImpl()
{
    if (compilerInstances == 0)
    {
        BasicExpressionType::Init();
    }
    compilerInstances++;

    backends.Add("glsl", CreateGLSLCodeGen());
    backends.Add("hlsl", CreateHLSLCodeGen());
    backends.Add("spirv", CreateSpirVCodeGen());
    backends.Add("glsl_vk", CreateGLSL_VulkanCodeGen());
    backends.Add("glsl_vk_onedesc", CreateGLSL_VulkanOneDescCodeGen());
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void RecursionElimination::ReplaceParameters()
{
    // As it is now, the function still uses the parameters
    // in SSA form directly, but it should now use
    // the 'phi' instructions from the old entry block.
    // We use a dictionary to map from each parameter to its 'phi' result.
    Dictionary<Parameter*, Operand*> paramToPhi;
    DebugValidator::AreEqual(funct_->ParameterCount(), parameterPhis_.Count());

    for(int i = 0; i < funct_->ParameterCount(); i++) {
        paramToPhi.Add(funct_->GetParameter(i), parameterPhis_[i]->ResultOp());
    }

    // Scan all instructions and do the replacements.
    auto oldEntryBlock = oldEntryBlock_;

    funct_->ForEachInstruction([&paramToPhi, oldEntryBlock]
                               (Instruction* instr) -> bool {
        if(instr->IsPhi() && (instr->ParentBlock() == oldEntryBlock)) {
            // The 'phi's in the old entry block
            // shouldn't have their operands changed.
            return true;
        }

        for(int i = 0; i < instr->SourceOpCount(); i++) {
            if(auto parameter = instr->GetSourceOp(i)->As<Parameter>()) {
                DebugValidator::IsTrue(paramToPhi.ContainsKey(parameter));
                instr->ReplaceSourceOp(i, paramToPhi[parameter]);
            }
        }

        return true;
    });
}
Tweakable& AddTweakableValue( const char* _pFilename, size_t _Counter )
{
    // First, see if this file is in the files list.
    U32 Key = DictionaryString<int>::Hash( _pFilename );
    TweakableSourceFile* pFileEntry = g_TweakableFiles.Get( Key );

    if ( pFileEntry == NULL )
    {
        // If it's not found, add it to the list of tweakable files and
        // assume it has been unmodified since the program was built.
        TweakableSourceFile& Value = g_TweakableFiles.Add( Key );
        // strcpy( Value.pFilename, _pFilename );
        Value.pFilename = _pFilename;
        Value.LastModificationTime = GetFileModTime( _pFilename );
    }

    // Add to the tweakables.
    Key = HashKey( _pFilename, _Counter );
    return g_TweakableValues.Add( Key );
}
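// A hypothetical call-site macro for the system above; the macro name and
// wiring are assumptions, not taken from the original source. __FILE__ and
// __COUNTER__ give each use a stable per-file identity, matching the
// (_pFilename, _Counter) key that AddTweakableValue hashes.
#define TWEAKABLE() ( AddTweakableValue( __FILE__, __COUNTER__ ) )

// Usage sketch: the returned Tweakable& stays bound to this file/counter
// pair, so edits to the source file can be reloaded into it at runtime.
// Tweakable& Speed = TWEAKABLE();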
int main (int argc, char *argv[])
{
  std::cout << std::endl;
  std::cout << "Hasher" << std::endl;

  bool timing = false;
  DictFiles files;

  CommandLine cmd;
  cmd.Usage ("Find hash collisions in the dictionary.");
  cmd.AddValue ("dict", "Dictionary file to hash",
                MakeCallback (&DictFiles::Add, &files));
  cmd.AddValue ("time", "Run timing test", timing);
  cmd.Parse (argc, argv);

  Dictionary dict;
  dict.Add (Collider ("FNV1a",
                      Hasher (Create<Hash::Function::Fnv1a> ()),
                      Collider::Bits32));
  dict.Add (Collider ("FNV1a",
                      Hasher (Create<Hash::Function::Fnv1a> ()),
                      Collider::Bits64));
  dict.Add (Collider ("Murmur3",
                      Hasher (Create<Hash::Function::Murmur3> ()),
                      Collider::Bits32));
  dict.Add (Collider ("Murmur3",
                      Hasher (Create<Hash::Function::Murmur3> ()),
                      Collider::Bits64));

  files.ReadInto (dict);
  dict.Report ();

  if (timing)
    {
      dict.Time ();
    }  // if (timing)

}  // main
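// Typical invocation (the binary name is a placeholder; --dict and --time
// are the two options registered with CommandLine above):
//
//   ./hasher-example --dict=/usr/share/dict/web2 --time=1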
Deserializer HTKFeatureDeserializer(const std::vector<HTKFeatureConfiguration>& streams)
{
    Deserializer htk;
    Dictionary input;

    for (const auto& s : streams)
    {
        const auto& key = s.m_streamName;
        Dictionary stream;
        std::vector<DictionaryValue> ctxWindow = { DictionaryValue(s.m_left), DictionaryValue(s.m_right) };
        stream.Add(L"scpFile", s.m_scp,
                   L"dim", s.m_dim,
                   L"contextWindow", ctxWindow,
                   L"expandToUtterance", s.m_broadcast);
        stream[L"definesMBSize"] = s.m_definesMbSize;
        input[key] = stream;
    }

    htk.Add(L"type", L"HTKFeatureDeserializer", L"input", input);
    return htk;
}
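// A minimal usage sketch. It assumes HTKFeatureConfiguration can be
// default-constructed and exposes the members read above as assignable
// public fields; the file name and dimensions are placeholders.
HTKFeatureConfiguration feature;
feature.m_streamName = L"features";
feature.m_scp = L"glob_0000.scp";
feature.m_dim = 33;
feature.m_left = 5;   // frames of left context
feature.m_right = 5;  // frames of right context

Deserializer htkFeatures = HTKFeatureDeserializer({ feature });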
Deserializer BuildImageDeserializer(const std::wstring& deserializer,
                                    const std::wstring& fileName,
                                    const std::wstring& labelStreamName, size_t numLabels,
                                    const std::wstring& imageStreamName,
                                    const std::vector<ImageTransform>& transforms)
{
    Deserializer img;
    std::vector<DictionaryValue> actualTransforms;
    std::transform(transforms.begin(), transforms.end(), std::back_inserter(actualTransforms),
        [](ImageTransform t) { return static_cast<DictionaryValue>(t); });

    Dictionary labeldim;
    labeldim[L"labelDim"] = numLabels;

    Dictionary xforms;
    xforms[L"transforms"] = actualTransforms;

    Dictionary input;
    input.Add(imageStreamName.c_str(), xforms, labelStreamName.c_str(), labeldim);
    img.Add(L"type", deserializer, L"file", fileName, L"input", input);
    return img;
}
/**
 * Add phrases from the files into the dict
 *
 * \param [in,out] dict the Dictionary to add words to
 */
void ReadInto (Dictionary & dict)
{
  if (m_files.size () == 0)
    {
      Add ("/usr/share/dict/web2");
    }

  std::cout << "Hashing the dictionar"
            << (m_files.size () == 1 ? "y" : "ies")
            << std::endl;

  for (std::vector <std::string>::const_iterator it = m_files.begin ();
       it != m_files.end ();
       ++it)
    {
      std::string dictFile = *it;
      std::cout << "Dictionary file: " << dictFile << std::endl;

      // Find collisions

      // Open the file
      std::ifstream dictStream;
      dictStream.open (dictFile.c_str ());
      if (! dictStream.is_open ())
        {
          std::cerr << "Failed to open dictionary file "
                    << "'" << dictFile << "'"
                    << std::endl;
          continue;
        }

      while (dictStream.good ())
        {
          std::string phrase;
          getline (dictStream, phrase);
          dict.Add (phrase);
        }  // while dictStream

      dictStream.close ();
    }  // for m_files

}  // ReadInto
ParserDecl(Ptr<ParsingTreeObject> parserDecl)
{
    nodeTypeMap.Add(parserDecl.Obj(), this);
    CollectTypes(parserDecl->GetMember(L"types").Cast<ParsingTreeArray>(), nodeTypeMap);
    {
        Ptr<ParsingTreeArray> items = parserDecl->GetMember(L"tokens").Cast<ParsingTreeArray>();
        if (items)
        {
            for (int i = 0; i < items->Count(); i++)
            {
                Ptr<ParsingTreeObject> type = items->GetItem(i).Cast<ParsingTreeObject>();
                if (type)
                {
                    Ptr<ParsingTreeToken> name = type->GetMember(L"name").Cast<ParsingTreeToken>();
                    if (name)
                    {
                        tokens.Add(name->GetValue());
                    }
                }
            }
        }
    }
    {
        Ptr<ParsingTreeArray> items = parserDecl->GetMember(L"rules").Cast<ParsingTreeArray>();
        if (items)
        {
            for (int i = 0; i < items->Count(); i++)
            {
                Ptr<ParsingTreeObject> type = items->GetItem(i).Cast<ParsingTreeObject>();
                if (type)
                {
                    Ptr<ParsingTreeToken> name = type->GetMember(L"name").Cast<ParsingTreeToken>();
                    if (name)
                    {
                        rules.Add(name->GetValue());
                    }
                }
            }
        }
    }
}
MinibatchSourcePtr CreateMinibatchSource(size_t featureDim, size_t numOutputClasses,
                                         const Dictionary& readModeConfig,
                                         size_t epochSize, bool randomize = true)
{
    auto featuresFilePath = L"glob_0000.scp";
    auto labelsFilePath = L"glob_0000.mlf";
    auto labelMappingFile = L"state.list";

    Dictionary featuresStreamConfig;
    featuresStreamConfig[L"dim"] = featureDim;
    featuresStreamConfig[L"scpFile"] = featuresFilePath;

    CNTK::Dictionary featInputStreamsConfig;
    featInputStreamsConfig[L"features"] = featuresStreamConfig;

    CNTK::Dictionary featDeserializerConfiguration;
    featDeserializerConfiguration[L"type"] = L"HTKFeatureDeserializer";
    featDeserializerConfiguration[L"input"] = featInputStreamsConfig;

    Dictionary labelsStreamConfig;
    labelsStreamConfig[L"dim"] = numOutputClasses;
    labelsStreamConfig[L"mlfFile"] = labelsFilePath;
    labelsStreamConfig[L"labelMappingFile"] = labelMappingFile;
    labelsStreamConfig[L"scpFile"] = featuresFilePath;

    CNTK::Dictionary labelsInputStreamsConfig;
    labelsInputStreamsConfig[L"labels"] = labelsStreamConfig;

    CNTK::Dictionary labelsDeserializerConfiguration;
    labelsDeserializerConfiguration[L"type"] = L"HTKMLFDeserializer";
    labelsDeserializerConfiguration[L"input"] = labelsInputStreamsConfig;

    Dictionary minibatchSourceConfiguration;
    if (randomize)
        minibatchSourceConfiguration[L"randomize"] = true;

    minibatchSourceConfiguration[L"epochSize"] = epochSize;
    minibatchSourceConfiguration[L"deserializers"] =
        std::vector<DictionaryValue>({ featDeserializerConfiguration, labelsDeserializerConfiguration });
    minibatchSourceConfiguration.Add(readModeConfig);

    return CreateCompositeMinibatchSource(minibatchSourceConfiguration);
}
Deserializer HTKMLFDeserializer(const std::wstring& streamName,
                                const std::wstring& labelMappingFile,
                                size_t dimension,
                                const std::vector<std::wstring>& mlfFiles,
                                bool phoneBoundaries)
{
    Deserializer htk;
    Dictionary stream;
    Dictionary labels;
    labels.Add(L"labelMappingFile", labelMappingFile, L"dim", dimension);

    std::vector<DictionaryValue> actualFiles;
    std::transform(mlfFiles.begin(), mlfFiles.end(), std::back_inserter(actualFiles),
        [](const std::wstring& s) { return static_cast<DictionaryValue>(s); });

    if (actualFiles.size() > 1)
        labels[L"mlfFileList"] = actualFiles;
    else if (actualFiles.size() == 1)
        labels[L"mlfFile"] = actualFiles[0];
    else
        LogicError("HTKMLFDeserializer: No mlf files were specified");

    if (phoneBoundaries)
        labels[L"phoneBoundaries"] = L"true";
    else
        labels[L"phoneBoundaries"] = L"false";

    stream[streamName] = labels;
    htk.Add(L"type", L"HTKMLFDeserializer", L"input", stream);
    return htk;
}
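// Example invocation of the builder above. The file names mirror those
// hard-coded in CreateMinibatchSource earlier in this section; the
// dimension of 132 is a placeholder.
Deserializer mlf = HTKMLFDeserializer(
    L"labels",            // streamName
    L"state.list",        // labelMappingFile
    132,                  // dimension (number of label classes)
    { L"glob_0000.mlf" }, // one file, so the 'mlfFile' branch is taken
    false);               // phoneBoundaries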
ParserDecl(Ptr<ParsingTreeObject> parserDecl)
{
    nodeTypeMap.Add(parserDecl.Obj(), this);
    Ptr<ParsingTreeArray> defs = parserDecl->GetMember(L"definitions").Cast<ParsingTreeArray>();
    if (defs)
    {
        vint count = defs->Count();
        for (vint i = 0; i < count; i++)
        {
            Ptr<ParsingTreeObject> defObject = defs->GetItem(i).Cast<ParsingTreeObject>();
            if (defObject)
            {
                if (defObject->GetType() == L"TokenDef")
                {
                    Ptr<ParsingTreeToken> name = defObject->GetMember(L"name").Cast<ParsingTreeToken>();
                    if (name)
                    {
                        tokens.Add(name->GetValue());
                    }
                }
                else if (defObject->GetType() == L"RuleDef")
                {
                    Ptr<ParsingTreeToken> name = defObject->GetMember(L"name").Cast<ParsingTreeToken>();
                    if (name)
                    {
                        rules.Add(name->GetValue());
                    }
                }
                else
                {
                    CollectSubType(defObject, nodeTypeMap);
                }
            }
        }
    }
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void VariableAnalysis::ComputeLiveSets()
{
    // Compute the local information for each block.
    // We use a worklist-based algorithm to propagate the changes.
    Dictionary<Block*, BitVector> killSets;
    Dictionary<Block*, BitVector> exposedSets;
    List<Block*> worklist;

    // If we don't have any local variable or parameter
    // nothing needs to be done (we always load from global variables).
    int varCount = funct_->VariableCount() + funct_->ParameterCount();

    if(varCount == 0) {
        return;
    }

    int bitCount = funct_->PeekNextVariableId();

    for(auto block = funct_->FirstBlock(); block; block = block->NextBlock()) {
        killSets.Add(block, BitVector(bitCount));
        exposedSets.Add(block, BitVector(bitCount));
        ComputeBlockExposedAndKillSets(block, exposedSets[block], killSets[block]);
        worklist.Add(block);
    }

    // Propagate the live sets to all related blocks.
    // The data-flow equations used (liveness is a backward problem,
    // so information flows from successors):
    // LiveOut(block) = U LiveIn(successor)
    // LiveIn(block) = (LiveOut(block) - Killed(block)) U Exposed(block)
    int lastCount = 0;

    while(worklist.Count() != lastCount) {
        int currentCount = worklist.Count();

        for(int i = worklist.Count() - 1; i >= lastCount; i--) {
            BitVector liveOutSet(bitCount);
            auto block = worklist[i];
            auto successorEnum = block->GetSuccessorEnum();

            while(successorEnum.IsValid()) {
                auto successorBlock = successorEnum.Next();
                liveOutSet.Or(exposedSets[successorBlock]);
            }

            // Compute the new set of exposed (live-in) variables.
            liveOutSet.Difference(killSets[block]);
            liveOutSet.Or(exposedSets[block]);

            if(liveOutSet != exposedSets[block]) {
                // The information has changed and must be updated.
                // All the predecessors must be reprocessed.
                exposedSets[block] = liveOutSet;
                auto predecessorEnum = block->GetPredecessorEnum();

                while(predecessorEnum.IsValid()) {
                    worklist.Add(predecessorEnum.Next());
                }
            }
        }

        lastCount = currentCount;
    }

    ComputeLiveBlocks(exposedSets);
}
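// A self-contained illustration of the same data-flow equations on a
// hard-coded two-block CFG (B1 -> B2), using std::bitset in place of the
// in-house BitVector. Variable 0 is defined in B1 and read in B2, so it
// is live out of B1 but not live into it.
#include <bitset>
#include <iostream>

int main()
{
    const int N = 1; // one tracked variable
    std::bitset<N> kill1, exposed1, exposed2;
    kill1.set(0);    // B1 defines variable 0 before any use
    exposed2.set(0); // B2 reads variable 0 before any definition

    // LiveOut(B1) = LiveIn(B2); B2 has no successors, so LiveIn(B2) = Exposed(B2).
    std::bitset<N> liveOut1 = exposed2;

    // LiveIn(B1) = (LiveOut(B1) - Killed(B1)) U Exposed(B1)
    std::bitset<N> liveIn1 = (liveOut1 & ~kill1) | exposed1;

    std::cout << "LiveOut(B1) = " << liveOut1 << '\n'; // 1: live out of B1
    std::cout << "LiveIn(B1)  = " << liveIn1 << '\n';  // 0: killed by B1's definition
}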
// Sets the number of volatile operations for the specified function.
void SetVolatileCount(Function* function, int value)
{
    if(volatileCount_.ContainsKey(function)) {
        volatileCount_[function] = value;
    }
    else volatileCount_.Add(function, value);
}
const Object *ObjReader::ReadObj(void)
{
    unsigned int nOffset, nOffTmp;
    int c, n;
    Object *pObj;
    Dictionary *pDict;
    const Object *pcObj;
    char str[32 * 1024], *ptr;
    int nParentheses;

    nOffset = m_pSource->Position();
    c = m_pSource->Read();
    switch (c)
    {
    case '/':  // name
        pObj = new Name();
        m_pSource->ReadStr(str, sizeof(str));
        ((Name *)pObj)->SetValue(str);
        break;
    case '(':  // string
        pObj = new String();
        ptr = str;
        nParentheses = 0;
        while (true)
        {
            c = m_pSource->Read();
            if (c == '\\')
            {
                *ptr++ = c;
                c = m_pSource->Read();
            }
            else if (c == '(')
                ++nParentheses;
            else if (c == ')')
            {
                if (--nParentheses <= 0)
                    break;
            }
            *ptr++ = c;
        }
        *ptr = '\0';
        ((String *)pObj)->SetValue(str, ptr - str, String::LITERAL);
        break;
    case '<':
        c = m_pSource->Read();
        if (c == '<')  // dictionary
        {
            pDict = new Dictionary();
            while (true)
            {
                m_pSource->Skip();
                c = m_pSource->Read();
                if (c == '>')
                {
                    m_pSource->Read();  // >
                    m_pSource->Skip();
                    m_pSource->Read(str, 6);
                    if (strncmp(str, "stream", 6) == 0)  // stream
                    {
                        pcObj = pDict->GetValue("Length");
                        if (pcObj->GetType() == Object::OBJ_REFERENCE)
                        {
                            nOffTmp = m_pSource->Position();
                            pcObj = ReadIndirectObj(((const Reference *)pcObj)->GetObjNum(),
                                                    ((const Reference *)pcObj)->GetGeneration());
                            m_pSource->Seek(nOffTmp, SEEK_SET);
                            n = ((const Numeric *)pcObj)->GetValue();
                            delete pcObj;
                        }
                        else
                            n = ((const Numeric *)pcObj)->GetValue();
                        pObj = new Stream(pDict);
                        m_pSource->Skip();
                        ptr = new char[n];
                        m_pSource->Read(ptr, n);
                        ((Stream *)pObj)->SetValue((unsigned char *)ptr, n);
                        delete[] ptr;
                    }
                    else
                    {
                        pObj = pDict;
                        m_pSource->Seek(-6, SEEK_CUR);
                    }
                    break;
                }
                else
                {
                    m_pSource->Seek(-1, SEEK_CUR);
                    pcObj = ReadObj();
                    pDict->Add(pcObj, ReadObj());
                }
            }
        }
        else  // hexadecimal string
        {
            m_pSource->Seek(-1, SEEK_CUR);
            pObj = new String();
            n = m_pSource->ReadStr(str, sizeof(str));
            ((String *)pObj)->SetValue(str, n, String::HEXADECIMAL);
            m_pSource->Read();  // >
        }
        break;
    case '[':  // array
        pObj = new Array();
        while (true)
        {
            m_pSource->Skip();
            c = m_pSource->Read();
            if (c == ']')
                break;
            m_pSource->Seek(-1, SEEK_CUR);
            ((Array *)pObj)->Add(ReadObj());
        }
        break;
    case EOF:
        pObj = NULL;
        break;
    default:
        *str = c;
        m_pSource->ReadStr(str + 1, sizeof(str) - 1);
        if ((c >= '0' && c <= '9') || c == '-' || c == '+' || c == '.')
        {
            pObj = new Numeric();
            ((Numeric *)pObj)->SetValue(atof(str));
        }
        else
        {
            if (strcmp(str, "R") == 0)
                pObj = new Reference();
            else if (strcmp(str, "true") == 0)
            {
                pObj = new Boolean();
                ((Boolean *)pObj)->SetValue(true);
            }
            else if (strcmp(str, "false") == 0)
            {
                pObj = new Boolean();
                ((Boolean *)pObj)->SetValue(false);
            }
            else if (strcmp(str, "null") == 0)
                pObj = new Null();
            else if (*str == ' ')  // is this normal?
                pObj = new Object();  // invalid object
            else
            {
                pObj = new Operator();
                ((Operator *)pObj)->SetValue(str);
            }
        }
        break;
    }
    m_pSource->Skip();
    if (pObj != NULL)
        pObj->SetOffset(nOffset);
    return pObj;
}
void SimpleDeadCodeElimination::Execute(Function* function)
{
    StaticList<Instruction*, 512> worklist;
    Dictionary<Instruction*, bool> inWorklist; // Whether an instruction is in the worklist.

    // Try to remove 'store' instructions that have no effect.
    // The algorithm is really simple, but should catch cases like
    // arrays initialized with constants that were propagated already.
    RemoveDeadStores(function);

    // Try to remove copy/set operations that are unused,
    // because the aggregates they target are never referenced.
    RemoveDeadCopyOperations(function);

    // We process the blocks from last to first, and the instructions in the block
    // from last to first too; this allows removing more instructions on each
    // iteration than the usual first-to-last order.
    for(auto block = function->LastBlock(); block; block = block->PreviousBlock()) {
        // If the block is unreachable we remove all instructions from it,
        // but don't remove the block; this will be handled by the CFG Simplifier,
        // which knows how to repair the Dominator Tree.
        if(block->IsUnreachable() && (block->IsEmpty() == false)) {
            CleanUnreachableBlock(block);
            continue;
        }

        for(auto instr = block->LastInstruction(); instr; instr = instr->PreviousInstruction()) {
            if(GetSafetyInfo()->IsDefinitelyDead(instr)) {
                worklist.Add(instr);
                inWorklist.Add(instr, true);
            }
        }
    }

    // Process while we have instructions in the worklist.
    while(worklist.IsNotEmpty()) {
        auto instr = worklist.RemoveLast();
        inWorklist.Remove(instr);

        // Remove the instruction if it's dead.
        if(GetSafetyInfo()->IsDefinitelyDead(instr)) {
            // All the instructions that were used by this one
            // may be dead now; add them to the worklist.
            for(int i = 0; i < instr->SourceOpCount(); i++) {
                auto sourceOp = instr->GetSourceOp(i);

                // Make sure we don't add an instruction to the worklist twice.
                if(auto definingInstr = sourceOp->DefiningInstruction()) {
                    if(inWorklist.ContainsKey(definingInstr) == false) {
                        worklist.Add(definingInstr);
                        inWorklist.Add(definingInstr, true);
                    }
                }
            }

            InstructionRemoved(instr);
            instr->RemoveFromBlock(true /* free */);
        }
    }

    // If stores were removed we may now have variables
    // that are not used by any instruction.
    RemoveDeadVariables(function);
}
void MarkProcessedInstruction(Instruction* instr)
{
    processedInstrs_.Add(instr, true);
}