void FinishDeferredSF (void)
{
    if (deferSetFormula->lLength) {
        SortLists (deferSetFormula, &deferIsConstant);

        _SimpleList tcache;
        long        iv,
                    i = variableNames.Traverser (tcache,iv,variableNames.GetRoot());

        for (; i >= 0; i = variableNames.Traverser (tcache,iv)) {
            _Variable* theV = FetchVar(i);
            if (theV->IsContainer()) {
                ((_VariableContainer*)theV)->SetMDependance (*deferSetFormula);
            }
        }

        for (long j = 0; j<likeFuncList.lLength; j++)
            if (((_String*)likeFuncNamesList(j))->sLength) {
                _LikelihoodFunction * lf = (_LikelihoodFunction*)likeFuncList(j);
                for (long k = 0; k < deferSetFormula->lLength; k++) {
                    lf->UpdateIndependent(deferSetFormula->lData[k],deferIsConstant.lData[k]);
                }
            }
    }

    DeleteObject (deferSetFormula);
    deferSetFormula = nil;
    deferIsConstant.Clear();
}
//__________________________________________________________________________________
void RenameVariable (_String* oldName, _String* newName)
{
    _String     oldNamePrefix (*oldName&'.'),
                newNamePrefix (*newName&'.');

    _List       toRename;
    _SimpleList xtras,
                traverser;

    long f = variableNames.Find (oldName, traverser);
    if (f>=0) {
        toRename << oldName;
        xtras    << variableNames.GetXtra (f);
        f = variableNames.Next (f, traverser);

        for (; f>=0 && ((_String*)variableNames.Retrieve (f))->startswith (oldNamePrefix); f = variableNames.Next (f, traverser)) {
            toRename << variableNames.Retrieve (f);
            xtras    << variableNames.GetXtra (f);
        }
    }

    for (long k = 0; k < toRename.lLength; k++) {
        _Variable * thisVar = FetchVar (xtras.lData[k]);
        thisVar->GetName()->nInstances --;
        if (k) {
            thisVar->theName = new _String(thisVar->GetName()->Replace(oldNamePrefix,newNamePrefix,true));
        } else {
            thisVar->theName = new _String(*newName);
        }

        variableNames.Delete (toRename (k), true);
        variableNames.Insert (thisVar->GetName(),xtras.lData[k]);
        thisVar->GetName()->nInstances++;
    }
}
//__________________________________________________________________________________
_Variable* CheckReceptacleCommandID (_String* name, const long id, bool checkValid, bool isGlobal)
{
    if (checkValid && (!name->IsValidIdentifier())) {
        WarnError (_String ("'") & *name & "' is not a valid variable identifier in call to " & _HY_ValidHBLExpressions.RetrieveKeyByPayload(id) & '.');
        return nil;
    }

    long f = LocateVarByName (*name);
    if (f<0) {
        _Variable dummy (*name, isGlobal);
        f = LocateVarByName (*name);
    }

    return FetchVar(f);
}
//__________________________________________________________________________________
_Variable* CheckReceptacle (_String* name, _String fID, bool checkValid, bool isGlobal)
{
    if (checkValid && (!name->IsValidIdentifier())) {
        _String errMsg = *name & " is not a valid variable identifier in call to " & fID;
        WarnError (errMsg);
        return nil;
    }

    long f = LocateVarByName (*name);
    if (f<0) {
        _Variable dummy (*name, isGlobal);
        f = LocateVarByName (*name);
    }

    return FetchVar(f);
}
//__________________________________________________________________________________
_PMathObj FetchObjectFromVariableByTypeIndex (long idx, const unsigned long objectClass, long command_id, _String *errMsg)
{
    _Variable * v = FetchVar (idx);

    if (v && (objectClass == HY_ANY_OBJECT || v->ObjectClass () == objectClass)) {
        return v->GetValue();
    }

    if (command_id >= 0 || errMsg) {
        if (command_id >= 0) {
            WarnError (_String ("'") & *v->GetName() & ("' must refer to a ") & FetchObjectNameFromType (objectClass) & " in call to "
                       &_HY_ValidHBLExpressions.RetrieveKeyByPayload(command_id) & '.');
        } else {
            WarnError (errMsg->Replace ("_VAR_NAME_ID_", *v->GetName(), true));
        }
    }

    return nil;
}
void _Variable::CompileListOfDependents (_SimpleList& rec)
{
    _SimpleList tcache;
    long        iv,
                i = variableNames.Traverser (tcache,iv,variableNames.GetRoot());

    for (; i >= 0; i = variableNames.Traverser (tcache,iv)) {
        _Variable* thisVar = FetchVar (i);
        if (!thisVar->IsIndependent()) {
            if (thisVar->CheckFForDependence (theIndex)) {
                long f = thisVar->GetAVariable();
                if (rec.Find(f)<0) {
                    rec<<f;
                }
            }
        }
    }
}
//__________________________________________________________________________________
_Variable* CheckReceptacleCommandID (_String* name, const long id, bool checkValid, bool isGlobal, _ExecutionList* context)
{
    if (checkValid && (!name->IsValidIdentifier())) {
        _String errMsg = _String ("'") & *name & "' is not a valid variable identifier in call to " & _HY_ValidHBLExpressions.RetrieveKeyByPayload(id) & '.';
        if (context) {
            context->ReportAnExecutionError(errMsg);
        } else {
            WarnError (errMsg);
        }
        return nil;
    }

    long f = LocateVarByName (*name);
    if (f<0) {
        _Variable dummy (*name, isGlobal);
        f = LocateVarByName (*name);
    }

    return FetchVar(f);
}
//__________________________________________________________________________________
_String* FetchMathObjectNameOfTypeByIndex (const unsigned long objectClass, const long objectIndex)
{
    if (objectIndex >=0 && objectIndex < variableNames.countitems()) {
        long        tc = 0;
        _SimpleList nts;
        long        rt,
                    vi = variableNames.Traverser (nts, rt, variableNames.GetRoot());

        for (; vi >= 0; vi = variableNames.Traverser (nts, rt))
            if (FetchVar(variableNames.GetXtra (vi))->ObjectClass () == objectClass) {
                if (tc==objectIndex) {
                    return (_String*)variableNames.Retrieve(vi);
                } else {
                    tc++;
                }
            }
    }

    return nil;
}
void TrainModelNN (_String* model, _String* matrix)
{
    _String errMsg;

    long    modelIdx = modelNames.Find(model);

    _Parameter verbI;
    checkParameter (VerbosityLevelString, verbI, 0.0);

    char    buffer [128];

    if (modelIdx < 0) {
        errMsg = *model & " did not refer to an existing model";
    } else {
        _Variable* boundsMatrix = FetchVar (LocateVarByName (*matrix));

        if (boundsMatrix && (boundsMatrix->ObjectClass() == MATRIX)) {
            _Matrix * bmatrix = (_Matrix*) boundsMatrix->GetValue ();

            if (bmatrix->IsAStringMatrix() && (bmatrix->GetVDim () == 3)) {
                _Variable*  modelMatrix = LocateVar (modelMatrixIndices.lData[modelIdx]);
                _SimpleList modelVariableList;
                {
                    _AVLList mvla (&modelVariableList);
                    modelMatrix->ScanForVariables (mvla, true);
                    mvla.ReorderList();
                }

                if (bmatrix->GetHDim () == modelVariableList.lLength) {
                    // now map model variables to bounds matrix
                    _SimpleList variableMap;
                    _String     *myName;

                    for (long k = 0; k < modelVariableList.lLength; k++) {
                        myName   = ((_FString*)bmatrix->GetFormula(k,0)->Compute())->theString;
                        long vID = LocateVarByName (*myName);

                        if (vID < 0) {
                            break;
                        }

                        vID = variableNames.GetXtra (vID);
                        vID = modelVariableList.Find(vID);

                        if (vID < 0) {
                            break;
                        }

                        variableMap << vID;
                    }

                    if (variableMap.lLength == modelVariableList.lLength) {
                        _Matrix vBounds (variableMap.lLength,2, false, true);

                        long    k2 = 0;

                        for (; k2 < variableMap.lLength; k2++) {
                            _Parameter lb = ((_FString*)bmatrix->GetFormula(k2,1)->Compute())->theString->toNum(),
                                       ub = ((_FString*)bmatrix->GetFormula(k2,2)->Compute())->theString->toNum();

                            if ( ub>lb || k2) {
                                vBounds.Store (k2,0,lb);
                                vBounds.Store (k2,1,ub);
                                if (ub<=lb && vBounds (k2-1,0) <= vBounds (k2-1,1) && (!CheckEqual(vBounds (k2-1,0),0.0) || !CheckEqual(vBounds (k2-1,1),1.0))) {
                                    break;
                                }
                            }
                        }

                        if (k2 == modelVariableList.lLength) {
                            // set up the sampling now
                            _String fName  = ProcessLiteralArgument (&ModelNNFile,nil);
                            FILE*   nnFile = doFileOpen (fName.getStr(), "w");

                            if (nnFile) {
                                _Matrix*   modelMatrix = (_Matrix*) LocateVar(modelMatrixIndices.lData[modelIdx])->GetValue();
                                _Parameter mainSteps,
                                           checkSteps,
                                           errorTerm,
                                           loopMax,
                                           hiddenNodes,
                                           absError,
                                           nn1,
                                           nn2;

                                long       fullDimension = modelMatrix->GetHDim() * modelMatrix->GetVDim();

                                checkParameter (ModelNNTrainingSteps,      mainSteps,   10000.0);
                                checkParameter (ModelNNVerificationSample, checkSteps,  500.0);
                                checkParameter (ModelNNPrecision,          errorTerm,   0.01);
                                checkParameter (ModelNNTrainingSteps,      loopMax,     10);
                                checkParameter (ModelNNHiddenNodes,        hiddenNodes, 5);
                                checkParameter (ModelNNLearningRate,       nn1,         .3);
                                checkParameter (ModelNNPersistenceRate,    nn2,         .1);

                                // one network is trained per cell of the model matrix
                                Net**   matrixNet = new Net* [fullDimension];
                                for (long i = 0; i < fullDimension; i++) {
                                    checkPointer (matrixNet [i] = new Net (variableMap.lLength,(long)hiddenNodes,1,errorTerm,nn1,nn2,100,200,true));
                                    //matrixNet[i]->verbose = true;
                                }
                                checkPointer (matrixNet);

                                _List   tIn,
                                        tOut;

                                FILE*   varSamples = doFileOpen ("variableSamples.out", "w");

                                fprintf (varSamples, "%s", LocateVar(modelVariableList.lData[0])->GetName()->getStr());
                                for (long vc = 1; vc < modelVariableList.lLength; vc++) {
                                    fprintf (varSamples, ",%s", LocateVar(modelVariableList.lData[variableMap.lData[vc]])->GetName()->getStr());
                                }
                                fprintf (varSamples, "\n");

                                for (long itCount = 0; itCount < loopMax; itCount ++) {
                                    if (verbI > 5) {
                                        snprintf (buffer, sizeof(buffer), "\nNeural Network Pass %ld. Building a training set...\n", itCount);
                                        BufferToConsole (buffer);
                                    }

                                    // draw training samples
                                    while (tIn.countitems() < mainSteps) {
                                        NNMatrixSampler (0, vBounds, modelVariableList, variableMap, modelMatrix, tIn, tOut);
                                    }

                                    _Matrix     inData (mainSteps, variableMap.lLength, false, true);
                                    _Parameter  *md = inData.theData;

                                    for (long matrixC = 0; matrixC < mainSteps; matrixC++) {
                                        _Parameter * ed = ((_Matrix*)tIn (matrixC))->theData;
                                        fprintf (varSamples, "\n%g",*ed);
                                        *md = *ed;
                                        ed++;
                                        md++;
                                        for (long entryC = 1; entryC < variableMap.lLength; entryC++, ed++, md++) {
                                            *md = *ed;
                                            fprintf (varSamples, ",%g", *md);
                                        }
                                    }

                                    tIn.Clear();

                                    if (verbI > 5) {
                                        BufferToConsole ( "Done Building Training Set. Training...\n");
                                    }

                                    // train each per-cell network on the sampled data
                                    long lastDone = 0;

                                    for (long cellCount = 0; cellCount < fullDimension; cellCount++) {
                                        Net*    thisCell = matrixNet[cellCount];
                                        _Matrix outVector (mainSteps, 1, false, true);

                                        for (long oc = 0; oc < mainSteps; oc++) {
                                            outVector.theData[oc] = ((_Matrix*)tOut(oc))->theData[cellCount];
                                        }

                                        thisCell->studyAll (inData.theData, outVector.theData, mainSteps);

                                        long nowDone = (cellCount+1)*100./fullDimension;
                                        if (nowDone > lastDone) {
                                            snprintf (buffer, sizeof(buffer),"%ld%% done\n", lastDone = nowDone);
                                            BufferToConsole (buffer);
                                        }
                                    }
                                    tOut.Clear();

                                    if (verbI > 5) {
                                        BufferToConsole ( "Done Training. Resampling...\n");
                                    }

                                    // draw a fresh verification sample and track the worst absolute error
                                    _PMathObj  tObj  = _Constant(0).Time();
                                    _Parameter time1 = tObj->Value(),
                                               time2;

                                    while (tIn.countitems() < checkSteps) {
                                        NNMatrixSampler (0, vBounds, modelVariableList, variableMap, modelMatrix, tIn, tOut);
                                    }

                                    absError = 0.0;

                                    DeleteObject (tObj);
                                    tObj  = _Constant(0).Time();
                                    time2 = tObj->Value();

                                    if (verbI > 5) {
                                        snprintf (buffer, sizeof(buffer),"Done Resampling in %g seconds. Computing Error...\n", time2-time1);
                                        BufferToConsole (buffer);
                                    }

                                    _Parameter maxValT,
                                               maxValE;

                                    for (long verCount = 0; verCount < checkSteps; verCount++) {
                                        _Parameter* inData  = ((_Matrix*)tIn(verCount))->theData,
                                                  * outData = ((_Matrix*)tOut(verCount))->theData;

                                        for (long cellCount = 0; cellCount < fullDimension; cellCount++) {
                                            Net *thisCell = matrixNet[cellCount];

                                            _Parameter estVal  = thisCell->eval(inData)[0],
                                                       trueVal = outData[cellCount],
                                                       localError;

                                            localError = estVal-trueVal;
                                            if (localError < 0) {
                                                localError = -localError;
                                            }
                                            if (absError < localError) {
                                                maxValT  = trueVal;
                                                maxValE  = estVal;
                                                absError = localError;
                                            }
                                        }
                                    }

                                    DeleteObject (tObj);
                                    tObj  = _Constant(0).Time();
                                    time1 = tObj->Value();
                                    DeleteObject (tObj);

                                    if (verbI > 5) {
                                        snprintf (buffer, sizeof(buffer), "Done Error Checking in %g seconds. Got max abs error %g on the pair %g %g\n", time1-time2, absError, maxValT, maxValE);
                                        BufferToConsole (buffer);
                                    }

                                    if (absError <= errorTerm) {
                                        break;
                                    }
                                }

                                if (absError > errorTerm) {
                                    ReportWarning (_String("Couldn't achieve desired precision in TrainModelNN. Achieved error of ") & absError);
                                }

                                fclose (varSamples);

                                // write out the variable names, the remapped bounds and the trained networks
                                fprintf (nnFile,"{{\n\"%s\"", LocateVar(modelVariableList.lData[0])->GetName()->getStr());

                                _Matrix newBounds (modelVariableList.lLength, 2, false, true);

                                if (vBounds(0,0)>vBounds(0,1)) {
                                    newBounds.Store (variableMap.lData[0],0,0.);
                                    newBounds.Store (variableMap.lData[0],1,1.);
                                } else {
                                    newBounds.Store (variableMap.lData[0],0,vBounds(0,0));
                                    newBounds.Store (variableMap.lData[0],1,vBounds(0,1));
                                }

                                for (long varCounter = 1; varCounter < modelVariableList.lLength; varCounter ++) {
                                    fprintf (nnFile,",\n\"%s\"", LocateVar(modelVariableList.lData[varCounter])->GetName()->getStr());

                                    if (vBounds(varCounter,0)>vBounds(varCounter,1)) {
                                        newBounds.Store (variableMap.lData[varCounter],0,0.);
                                        newBounds.Store (variableMap.lData[varCounter],1,1.);
                                    } else {
                                        newBounds.Store (variableMap.lData[varCounter],0,vBounds(varCounter,0));
                                        newBounds.Store (variableMap.lData[varCounter],1,vBounds(varCounter,1));
                                    }
                                }

                                fprintf (nnFile,"\n}}\n");
                                newBounds.toFileStr (nnFile);

                                for (long i2 = 0; i2 < fullDimension; i2++) {
                                    matrixNet[i2]->save(nnFile);
                                    delete matrixNet [i2];
                                }

                                fclose (nnFile);
                                delete [] matrixNet;
                            } else {
                                errMsg = _String ("Failed to open ") & fName & " for writing";
                            }
                        } else {
                            errMsg = _String ("Invalid variable bounds in row ") & (k2+1) & " of the bounds matrix";
                        }
                    } else {
                        errMsg = *myName & " was not one of the model parameters";
                    }
                } else {
                    errMsg = *matrix & " must have the same number of rows as the number of model parameters";
                }
            } else {
                errMsg = *matrix & " must be a string matrix with 3 columns";
            }
        } else {
            errMsg = *matrix & " was not the identifier of a valid matrix variable";
        }
    }

    if (errMsg.sLength) {
        errMsg = errMsg & _String(" in call to TrainModelNN.");
        WarnError (errMsg);
    }
}
//__________________________________________________________________________________
void mpiNormalLoop (int rank, int size, _String & baseDir)
{
    long         senderID = 0;

    ReportWarning ("[MPI] Entered mpiNormalLoop");

    _String* theMessage = MPIRecvString (-1,senderID),       // listen for messages from any node
           * resStr     = nil;

    while (theMessage->sLength) {
        setParameter  (mpiNodeID,    (_Parameter)rank);
        setParameter  (mpiNodeCount, (_Parameter)size);
        //ReportWarning (*theMessage);
        DeleteObject  (resStr);
        resStr        = nil;

        if (theMessage->startswith (mpiLoopSwitchToOptimize) ) {
            hyphyMPIOptimizerMode = theMessage->Cut(mpiLoopSwitchToOptimize.sLength,-1).toNum();

            ReportWarning    (_String("[MPI] Switched to mpiOptimizer loop with mode ") & hyphyMPIOptimizerMode);
            MPISendString    (mpiLoopSwitchToOptimize,senderID);
            mpiOptimizerLoop (rank,size);
            ReportWarning    ("[MPI] Returned from mpiOptimizer loop");
            hyphyMPIOptimizerMode = _hyphyLFMPIModeNone;
            pathNames && & baseDir;
        } else if ( theMessage->Equal (&mpiLoopSwitchToBGM) ) {
            ReportWarning ("[MPI] Received signal to switch to mpiBgmLoop");
            MPISendString (mpiLoopSwitchToBGM, senderID);    // feedback to source to confirm receipt of message
            mpiBgmLoop    (rank, size);
            ReportWarning ("[MPI] Returned from mpiBgmLoop");
        } else {
            if (theMessage->beginswith ("#NEXUS")) {
                _String msgCopy (*theMessage);
                ReportWarning   ("[MPI] Received a function to optimize");
                ReadDataSetFile (nil,true,theMessage);
                ReportWarning   ("[MPI] Done with the optimization");

                _Variable* lfName = FetchVar(LocateVarByName(MPI_NEXUS_FILE_RETURN));

                if (lfName) {
                    resStr = (_String*)(lfName->Compute()->toStr());
                } else {
                    _FString *lfID = (_FString*)FetchObjectFromVariableByType (&lf2SendBack, STRING);

                    if (!lfID) {
                        FlagError (_String("[MPI] Malformed MPI likelihood function optimization request - did not specify the LF name to return in variable ") & lf2SendBack & ".\n\n\n" );
                        break;
                    }

                    long f = likeFuncNamesList.Find (lfID->theString);

                    if (f<0) {
                        FlagError ("[MPI] Malformed MPI likelihood function optimization request - LF name to return did not refer to a well-defined likelihood function.\n\n\n");
                        break;
                    }

                    _Parameter pv;
                    checkParameter (shortMPIReturn, pv ,0);
                    resStr = (_String*)checkPointer(new _String (1024L,true));
                    ((_LikelihoodFunction*)likeFuncList (f))->SerializeLF(*resStr,pv>0.5?_hyphyLFSerializeModeShortMPI:_hyphyLFSerializeModeLongMPI);
                    resStr->Finalize();
                }
            } else {
                _ExecutionList exL (*theMessage);
                _PMathObj res = exL.Execute();
                resStr = res?(_String*)res->toStr():new _String ("0");
            }

            checkPointer (resStr);
            MPISendString(*resStr,senderID);

            _Parameter     keepState = 0.0;
            checkParameter (preserveSlaveNodeState, keepState, 0.0);

            if (keepState < 0.5) {
                PurgeAll (true);
                pathNames && & baseDir;
            }
        }

        DeleteObject (theMessage);
        theMessage = MPIRecvString (-1,senderID);
    }

    /*MPISendString(empty,senderID);*/
    DeleteObject (resStr);
    DeleteObject (theMessage);
}
void _Variable::SetFormula (_Formula& theF) // set the value of the var to a formula
{
    bool changeMe    = false,
         isAConstant = theF.IsAConstant();

    _Formula* myF = &theF;

    if (isAConstant) {
        _PMathObj theP = theF.Compute();
        if (theP) {
            myF = new _Formula ((_PMathObj)theP->makeDynamic(),false);
            checkPointer (myF);
        } else {
            return;
        }
    }

    _SimpleList vars;
    {
        _AVLList vA (&vars);
        theF.ScanFForVariables (vA,true);
        vA.ReorderList();
    }

    if (vars.BinaryFind(theIndex)>=0) {
        _String * sf = (_String*)theF.toStr();
        WarnError ((_String("Can't set variable ")&*GetName()&" to "&*sf&" because it would create a circular dependence."));
        DeleteObject(sf);
        if (&theF!=myF) {
            delete myF;
        }
        return;
    }

    varFlags &= HY_VARIABLE_SET;
    if (varFlags & HY_VARIABLE_CHANGED) {
        varFlags -= HY_VARIABLE_CHANGED;
    }

    if (varFormula) {
        delete (varFormula);
        varFormula = nil;
    } else {
        changeMe = true;
    }

    if (varValue) {
        DeleteObject (varValue);
        varValue=nil;
    }

    //_Formula::Duplicate ((BaseRef)myF);
    varFormula = new _Formula;
    varFormula->Duplicate ((BaseRef)myF);

    // mod 20060125 added a call to simplify constants
    varFormula->SimplifyConstants ();

    // also update the fact that this variable is no longer independent in all declared
    // variable containers which contain references to this variable
    if (changeMe)
        if (deferSetFormula) {
            *deferSetFormula << theIndex;
            deferIsConstant  << isAConstant;
        } else {
            _SimpleList tcache;
            long        iv,
                        i = variableNames.Traverser (tcache,iv,variableNames.GetRoot());

            for (; i >= 0; i = variableNames.Traverser (tcache,iv)) {
                _Variable* theV = FetchVar(i);
                if (theV->IsContainer()) {
                    _VariableContainer* theVC = (_VariableContainer*)theV;
                    if (theVC->SetDependance(theIndex) == -2) {
                        ReportWarning ((_String("Can't make variable ")&*GetName()&" dependent in the context of "&*theVC->GetName()&" because its template variable is bound by another relation in the global context."));
                        continue;
                    }
                }
            }
            {
                for (long i = 0; i<likeFuncList.lLength; i++)
                    if (((_String*)likeFuncNamesList(i))->sLength) {
                        ((_LikelihoodFunction*)likeFuncList(i))->UpdateIndependent(theIndex,isAConstant);
                    }
            }
        }

    if (&theF!=myF) {
        delete myF;
    }
}
//__________________________________________________________________________________
void mpiNormalLoop (int rank, int size, _String & baseDir)
{
    long         senderID = 0;

    ReportWarning ("Entered mpiNormalLoop");

    _String* theMessage = MPIRecvString (-1,senderID),       // listen for messages from any node
           * resStr     = nil,
             _bgmSwitch ("_BGM_SWITCH_"),
             css        ("_CONTEXT_SWITCH_MPIPARTITIONS_");

    while (theMessage->sLength) {
        setParameter (mpiNodeID,    (_Parameter)rank);
        setParameter (mpiNodeCount, (_Parameter)size);
        //ReportWarning (*theMessage);
        DeleteObject (resStr);
        resStr = nil;

        if (theMessage->Equal (&css) ) {
            mpiPartitionOptimizer = true;
            ReportWarning    ("Switched to mpiOptimizer loop");
            MPISendString    (css,senderID);
            mpiOptimizerLoop (rank,size);
            ReportWarning    ("Returned from mpiOptimizer loop");
            mpiPartitionOptimizer = false;
            pathNames && & baseDir;
        } else if ( theMessage->Equal (&_bgmSwitch) ) {
            ReportWarning ("Received signal to switch to mpiBgmLoop");
            MPISendString (_bgmSwitch, senderID);    // feedback to source to confirm receipt of message
            mpiBgmLoop    (rank, size);
            ReportWarning ("Returned from mpiBgmLoop");
        } else {
            if (theMessage->beginswith ("#NEXUS")) {
                _String msgCopy (*theMessage);
                ReportWarning   ("Received a function to optimize");
                ReadDataSetFile (nil,true,theMessage);
                ReportWarning   ("Done with the optimization");

                _Variable* lfName = FetchVar(LocateVarByName(MPI_NEXUS_FILE_RETURN));

                if (lfName) {
                    resStr = (_String*)(lfName->Compute()->toStr());
                } else {
                    long f = LocateVarByName (lf2SendBack);

                    if (f>=0) {
                        lfName = FetchVar(f);
                    }

                    if (!(lfName&&(lfName->ObjectClass()==STRING))) {
                        _String errMsg ("Malformed MPI likelihood function optimization request - missing LF name to return.\n\n\n");
                        errMsg = errMsg & msgCopy;
                        FlagError (errMsg);
                        break;
                    }

                    f = likeFuncNamesList.Find (((_FString*)lfName->Compute())->theString);

                    if (f<0) {
                        _String errMsg ("Malformed MPI likelihood function optimization request - invalid LF name to return.\n\n\n");
                        errMsg = errMsg & msgCopy;
                        FlagError (errMsg);
                        break;
                    }

                    _Parameter pv;
                    checkParameter (shortMPIReturn, pv ,0);
                    resStr = new _String (1024L,true);
                    checkPointer (resStr);
                    ((_LikelihoodFunction*)likeFuncList (f))->SerializeLF(*resStr,pv>0.5?5:2);
                    resStr->Finalize();
                }
            } else {
                _ExecutionList exL (*theMessage);
                /*printf ("Received:\n %s\n", ((_String*)exL.toStr())->sData);*/
                _PMathObj res = exL.Execute();
                resStr = res?(_String*)res->toStr():new _String ("0");
            }

            checkPointer  (resStr);
            DeleteObject  (theMessage);
            MPISendString (*resStr,senderID);

            _Parameter     keepState = 0.0;
            checkParameter (preserveSlaveNodeState, keepState, 0.0);

            if (keepState < 0.5) {
                PurgeAll (true);
                pathNames && & baseDir;
            }
        }

        theMessage = MPIRecvString (-1,senderID);
    }

    /*MPISendString(empty,senderID);*/
    DeleteObject (resStr);
    DeleteObject (theMessage);
}
//__________________________________________________________________________________
void DeleteTreeVariable (long dv, _SimpleList & parms, bool doDeps)
{
    if (dv>=0) {
        _String *name  = (_String*)variableNames.Retrieve (dv);
        _String myName = *name&".";

        long    vidx = variableNames.GetXtra (dv);

        UpdateChangingFlas (vidx);

        _SimpleList recCache;
        variableNames.Find (name,recCache);

        _String nextVarID;
        long    nvid;
        if ((nvid = variableNames.Next (dv,recCache))>=0) {
            nextVarID = *(_String*)variableNames.Retrieve(nvid);
        }

        {
            _SimpleList tcache;
            long        iv,
                        k = variableNames.Traverser (tcache, iv, variableNames.GetRoot());

            for (; k>=0; k = variableNames.Traverser (tcache, iv)) {
                _Variable * thisVar = FetchVar(k);

                if (thisVar->CheckFForDependence (vidx,false)) {
                    _PMathObj curValue = thisVar->Compute();
                    curValue->nInstances++;
                    thisVar->SetValue (curValue);
                    DeleteObject (curValue);
                }
            }
        }

        _Variable* delvar = (FetchVar(dv));
        if (delvar->ObjectClass() != TREE) {
            variableNames.Delete (variableNames.Retrieve(dv),true);
            (*((_SimpleList*)&variablePtrs))[vidx]=0;
            freeSlots<<vidx;
            DeleteObject (delvar);
        } else {
            ((_VariableContainer*)delvar)->Clear();
        }

        if (doDeps) {
            _List toDelete;

            recCache.Clear();
            long nextVar = variableNames.Find (&nextVarID,recCache);

            for (; nextVar>=0; nextVar = variableNames.Next (nextVar, recCache)) {
                _String dependent = *(_String*)variableNames.Retrieve (nextVar);
                if (dependent.startswith(myName)) {
                    if (dependent.Find ('.', myName.sLength+1, -1)>=0) {
                        _Variable * checkDep = FetchVar (nextVar);
                        if (!checkDep->IsIndependent()) {
                            _PMathObj curValue = checkDep->Compute();
                            curValue->nInstances++;
                            checkDep->SetValue (curValue);
                            DeleteObject (curValue);
                        }
                        parms << variableNames.GetXtra (nextVar);
                    } else {
                        toDelete && & dependent;
                    }
                } else {
                    break;
                }
            }

            for (long k=0; k<toDelete.lLength; k++) {
                //StringToConsole (*(_String*)toDelete(k));
                //BufferToConsole ("\n");
                DeleteTreeVariable (*(_String*)toDelete(k),parms,false);
            }
        }
    }
}
//__________________________________________________________________________________
void DeleteVariable (long dv, bool deleteself)
{
    if (dv>=0) {
        _String *name  = (_String*)variableNames.Retrieve (dv);
        _String myName = *name&'.';

        long    vidx = variableNames.GetXtra (dv);

        UpdateChangingFlas (vidx);

        _SimpleList recCache;
        variableNames.Find (name,recCache);

        _String nextVarID;  // = *(_String*)variableNames.Retrieve(variableNames.Next (dv,recCache));
        long    nvid;
        if ((nvid = variableNames.Next (dv,recCache))>=0) {
            nextVarID = *(_String*)variableNames.Retrieve(nvid);
        }

        if (deleteself) {
            _SimpleList tcache;
            long        iv,
                        k = variableNames.Traverser (tcache, iv, variableNames.GetRoot());

            for (; k>=0; k = variableNames.Traverser (tcache, iv)) {
                _Variable * thisVar = FetchVar(k);

                if (thisVar->CheckFForDependence (vidx,false)) {
                    _PMathObj curValue = thisVar->Compute();
                    curValue->nInstances++; // this could be a leak 01/05/2004.
                    thisVar->SetValue (curValue);
                    DeleteObject (curValue);
                }
            }

            _Variable* delvar = (FetchVar(dv));
            DeleteObject (delvar);

            variableNames.Delete (variableNames.Retrieve(dv),true);
            (*((_SimpleList*)&variablePtrs))[vidx]=0;
            freeSlots<<vidx;
        } else {
            _Variable* delvar = (FetchVar(dv));
            if (delvar->IsContainer()) {
                _VariableContainer* dc = (_VariableContainer*)delvar;
                dc->Clear();
            }
        }

        _List toDelete;

        recCache.Clear();
        long nextVar = variableNames.Find (&nextVarID,recCache);

        for (; nextVar>=0; nextVar = variableNames.Next (nextVar, recCache)) {
            _String dependent = *(_String*)variableNames.Retrieve (nextVar);
            if (dependent.startswith(myName)) {
                toDelete && & dependent;
            } else {
                break;
            }
        }

        for (long k=0; k< toDelete.lLength; k++) {
            DeleteVariable (*(_String*)toDelete(k));
        }
    }
}