// Undoes a previous connect() with the same node: the base class breaks the
// link, then the cities that were appended from the other field are removed
// from the tail of this field's city list.
void FieldNode::disconnect(Node * n, Game * g)
{
    Node::disconnect(n, g);

    if (isSame(n))
        return;

    FieldNode * other = static_cast<FieldNode *>(n);
    // connect() appended the other field's cities at the end, so erasing
    // that many entries from the tail restores the original list.
    const auto appended = other->getFieldData()->cities.size();
    getFieldData()->cities.erase(getFieldData()->cities.end() - appended,
                                 getFieldData()->cities.end());
}
// Connects another node to this field.  When the other node is a different
// field, its cities are appended to this field's city list before the base
// class establishes the link.
void FieldNode::connect(Node * n, Game * g)
{
    if (isSame(n)) {
        Node::connect(n, g);
        return;
    }

    FieldNode * other = static_cast<FieldNode *>(n);
    getFieldData()->cities.insert(getFieldData()->cities.end(),
                                  other->getFieldData()->cities.begin(),
                                  other->getFieldData()->cities.end());
    Node::connect(n, g);
}
/*
 * Looks up a header field by name and returns its decoded value.
 * Returns (void*) -1 as the failure sentinel when the field name is
 * unknown (callers compare against -1, not NULL).
 */
void* getValueOfField(char* fieldname)
{
    struct HEADERFIELD* field = getFieldData(fieldname);

    if (field == NULL) {
        return (void*) -1;
    }

    return getFieldValue(field->location, field->type);
}
void ManagedSerializableObject::serialize() { if (mManagedInstance == nullptr) return; mCachedData.clear(); SPtr<ManagedSerializableObjectInfo> curType = mObjInfo; while (curType != nullptr) { for (auto& field : curType->mFields) { if (field.second->isSerializable()) { ManagedSerializableFieldKey key(field.second->mParentTypeId, field.second->mFieldId); mCachedData[key] = getFieldData(field.second); } } curType = curType->mBaseClass; } // Serialize children for (auto& fieldEntry : mCachedData) fieldEntry.second->serialize(); mManagedInstance = nullptr; }
////////////////////////////////////////////////////////////////////////
//
// Copies the inputs and outputs of another SoUnknownEngine into this
// one.  Invoked by the copy machinery after this instance has been
// created.
//
////////////////////////////////////////////////////////////////////////

void SoUnknownEngine::copyContents(const SoFieldContainer *fromFC, SbBool copyConnections)
{
    // Make sure the copy has the correct class name
    const SoUnknownEngine *fromUnk = (const SoUnknownEngine *) fromFC;
    setClassName(fromUnk->className);

    // For each input in the original engine, create a new input and add
    // it to the new engine
    // NOTE: We can't use SoEngine::copyContents() to copy the field
    // data, since that uses SoFieldData::overlay(), which assumes the
    // fields have the same offsets in both engines. Instead, we just
    // copy the field values ourselves.
    const SoFieldData *fromData = fromUnk->getFieldData();
    SoFieldData *toData = (SoFieldData *) getFieldData();
    int i;

    for (i = 0; i < fromData->getNumFields(); i++) {
        // Instantiate a field of the same concrete type as the source field.
        SoField *fromField = fromData->getField(fromUnk, i);
        const SbName fieldName = fromData->getFieldName(i);
        SoType fieldType = fromField->getTypeId();
        SoField *toField = (SoField *) (fieldType.createInstance());

        // Attach the new field while notification is suppressed so that
        // setting it up does not trigger engine evaluation.
        toField->enableNotify(FALSE);
        toField->setContainer(this);
        toField->setDefault(TRUE);
        toField->enableNotify(TRUE);

        toData->addField(this, fieldName.getString(), toField);
        toField->setContainer(this);

        // Copy the value and the per-field flags from the source.
        toField->copyFrom(*fromField);
        toField->setIgnored(fromField->isIgnored());
        toField->setDefault(fromField->isDefault());
        toField->fixCopy(copyConnections);

        if (fromField->isConnected() && copyConnections)
            toField->copyConnection(fromField);
    }

    // Copy the outputs
    SoEngineOutputData *toOutData = (SoEngineOutputData *) getOutputData();
    SoEngineOutputList outList;
    fromUnk->getOutputs(outList);

    for(i = 0; i < outList.getLength(); i++) {
        SoEngineOutput *newOut = new SoEngineOutput;
        const SoType outType = outList[i]->getConnectionType();
        SbName outName;
        getOutputName( outList[i], outName );

        toOutData->addOutput(this, outName.getString(), newOut, outType);
        newOut->setContainer(this);
    }
}
////////////////////////////////////////////////////////////////////////
//
// Writes this SoGate instance to the given output.  The connection
// type is emitted as a special "type" entry before the regular fields.
//
////////////////////////////////////////////////////////////////////////

void SoGate::writeInstance(SoOutput *out)
{
    // writeHeader() returns TRUE when nothing further should be written
    // for this instance (e.g. it was already written by reference).
    if (writeHeader(out, FALSE, TRUE))
        return;

    // Write type info
    typeField.write(out, "type");

    const SoFieldData *data = getFieldData();
    if (data != NULL)
        data->write(out, this);

    writeFooter(out);
}
// Records the result of a shot at the given field position.
// Returns false when the field was already tried; on a hit it colors the
// field, checks whether the whole ship is now sunk, and if so decrements
// the ship counter, emits numberOfShipsChanged() and reveals the ship.
//
// NOTE(review): this block appears truncated in this chunk of the file —
// the closing braces of the outer if/function and the final return
// statement(s) are missing from the visible source; confirm against the
// full file before relying on this definition.
bool BattleField::setFieldHit(int position, bool hit)
{
    FieldData* fieldData = getFieldData(position);
    if(fieldData)
    {
        // Check if field is already tried
        if(fieldData->isTried())
            return false;

        // Set information on the field
        fieldData->setIsHit(hit);
        fieldData->setIsTried(true);

        if(hit)
        {
            // Visualize a hit
            fieldData->setColor("#00ff00");

            // Check if ship is sunken: it is sunk only when every one of
            // its fields has been hit.
            QList<FieldData*> shipFields = _fieldsById[fieldData->shipId()];
            bool shipSunken = true;
            foreach(FieldData* field, shipFields)
            {
                shipSunken = field->isHit();
                if(!shipSunken)
                    break;
            }

            // If ship is sunken emit a signal with the new number of ships
            if(shipSunken)
            {
                _numberOfShips--;
                emit numberOfShipsChanged(_numberOfShips);
                foreach(FieldData* field, shipFields)
                {
                    field->setHideImage(false);
                    updateField(field->modelPosition());
                }
            }
        }
// Reads this node's field values from the input.  An optional leading
// "fields" keyword introduces explicit field-type specifications; any
// other token is pushed back and the field values are read directly.
QvBool QvNode::readInstance(QvInput *in)
{
    QvFieldData *fields = getFieldData();
    QvName token;

    if (in->read(token, TRUE)) {
        if (token == "fields") {
            if (! fields->readFieldTypes(in, this)) {
                QvReadError::post(in, "Bad field specifications for node");
                return FALSE;
            }
        }
        else
            in->putBack(token.getString());
    }

    if (! fields->read(in, this))
        return FALSE;

    return TRUE;
}
void visNumSixWith(int propID, struct filtervalue* filters, struct filtervalue* requiredvisits) { int i, j, filt, nf, nmax, nmin, sum, flag; int nFields, *start, *len, **count; int maxnum[NFILTERS]; int hist[NFILTERS][NHIST], chist[NHIST], desired[NFILTERS], obsfilt[NFILTERS]; double xmin, xmax, ymin, ymax; double *ravec, *decvec, **value, valmin[NFILTERS], valmax[NFILTERS], frac, least; FILE *out; char labstr[1024]; start = malloc((numFields+1)*sizeof(int)); len = malloc(numFields*sizeof(int)); ravec = malloc(numFields*sizeof(double)); decvec =malloc(numFields*sizeof(double)); count = malloc(NFILTERS*sizeof(int *)); for(i=0; i<NFILTERS; i++) count[i] = malloc(numFields*sizeof(int)); value = malloc(NFILTERS*sizeof(double *)); for(i=0; i<NFILTERS; i++) value[i] = malloc(numFields*sizeof(double)); // order visits by field and get pointers getFieldData(&nFields, start, len); // count the number of visits per field, per fiter per proposal for(nf=0; nf<nFields; nf++) { for(filt=0; filt<NFILTERS; filt++) count[filt][nf]=0; for(filt=0; filt<NFILTERS; filt++) for(i=start[nf]; i<start[nf]+len[nf]; i++) if(obs[i].filter==filt && obs[i].propid==propID) count[filt][nf]++; } for(filt=0; filt<NFILTERS; filt++) { valmin[filt] = 0.0; valmax[filt] = 0.0; obsfilt[filt] = 0; for(nf=0; nf<nFields; nf++) obsfilt[filt] += count[filt][nf]; } // REQUIRED VISITS FOR EACH PROPOSAL if ( filters != NULL && requiredvisits != NULL) { for(filt=0; filt<filters[0].size; filt++) { if ( strcmp(filters[filt].value, "u") == 0 ) { valmax[0] = atof(requiredvisits[filt].value); } else if ( strcmp(filters[filt].value, "g") == 0 ) { valmax[1] = atof(requiredvisits[filt].value); } else if ( strcmp(filters[filt].value, "r") == 0 ) { valmax[2] = atof(requiredvisits[filt].value); } else if ( strcmp(filters[filt].value, "i") == 0 ) { valmax[3] = atof(requiredvisits[filt].value); } else if ( strcmp(filters[filt].value, "z") == 0 ) { valmax[4] = atof(requiredvisits[filt].value); } else if ( 
strcmp(filters[filt].value, "y") == 0 ) { valmax[5] = atof(requiredvisits[filt].value); } } if ( filters[0].size == 1 ) { valmax[0] = atof(requiredvisits[0].value); valmax[1] = atof(requiredvisits[0].value); valmax[2] = atof(requiredvisits[0].value); valmax[3] = atof(requiredvisits[0].value); valmax[4] = atof(requiredvisits[0].value); valmax[5] = atof(requiredvisits[0].value); } } else { for(filt=0; filt<NFILTERS; filt++) { double max = 0.0; for(nf=0; nf<nFields; nf++) { for(i=start[nf]; i<start[nf]+len[nf]; i++) { if ( count[filt][nf] > max ) { max = (double)count[filt][nf]; } } } valmax[filt] = max; } } // END OF REQUIRED VISITS FOR EACH PROPOSAL // adjust the desired values for survey duration //for(filt=0; filt<NFILTERS; filt++) valmax[filt] *= (endMJD-startMJD)/365.25/10.0; //for(filt=0; filt<NFILTERS; filt++) desired[filt] = valmax[filt]; // now fill the value array for plotting for(nf=0; nf<nFields; nf++) { ravec[nf] = obs[start[nf]].ra; decvec[nf] = obs[start[nf]].dec; for(filt=0; filt<NFILTERS; filt++) { value[filt][nf] = (double) count[filt][nf]; } } char* fileName = (char*)malloc(100); sprintf(fileName, "SixVisits-Num-%d", propID); // make the plot for the raw numbers char* xlabel = (char*)malloc(100); if ( filters != NULL && requiredvisits != NULL) { sprintf(xlabel, "%s", "requested number of visits"); } else { sprintf(xlabel, "%s", "acquired number of visits"); } plotSix(nFields, value, ravec, decvec, valmin, valmax, 1, xlabel, plotTitle, fileName, 0); /*for(nf=0; nf<nFields; nf++) { ravec[nf] = obs[start[nf]].ra; decvec[nf] = obs[start[nf]].dec; for(filt=0; filt<NFILTERS; filt++) { value[filt][nf] *= (100.0/desired[filt]); } } for(filt=0; filt<NFILTERS; filt++) { valmax[filt] = 110.0; valmin[filt] = 70.0; } // make the plot sprintf(fileName, "SixVisits-%d", propID); plotSix(nFields, value, ravec, decvec, valmin, valmax, 1, "% of requested visits", plotTitle, fileName, 0); for(i=0; i<NFILTERS; i++) free(value[i]); free(value); free(decvec); 
free(ravec); for(i=0; i<NFILTERS; i++) free(count[i]); free(count); free(len); free(start);*/ }
/* ALL VISITS */ void visNumSixAll() { int i, j, filt, nf, nmax, nmin, sum, flag; int nFields, *start, *len, **count; int maxnum[NFILTERS]; int hist[NFILTERS][NHIST], histmax[NFILTERS], histmin[NFILTERS], chist[NHIST], desired[NFILTERS], obsfilt[NFILTERS]; double xmin, xmax, ymin, ymax; double *ravec, *decvec, **value, valmin[NFILTERS], valmax[NFILTERS], frac, least; FILE *out; char labstr[1024]; start = malloc((numFields+1)*sizeof(int)); len = malloc(numFields*sizeof(int)); ravec = malloc(numFields*sizeof(double)); decvec =malloc(numFields*sizeof(double)); count = malloc(NFILTERS*sizeof(int *)); for(i=0; i<NFILTERS; i++) count[i] = malloc(numFields*sizeof(int)); value = malloc(NFILTERS*sizeof(double *)); for(i=0; i<NFILTERS; i++) value[i] = malloc(numFields*sizeof(double)); // order visits by field and get pointers getFieldData(&nFields, start, len); // count the number of visits per field, per fiter for(nf=0; nf<nFields; nf++) { for(filt=0; filt<NFILTERS; filt++) count[filt][nf]=0; for(filt=0; filt<NFILTERS; filt++) for(i=start[nf]; i<start[nf]+len[nf]; i++) if(obs[i].filter==filt) { count[filt][nf]++; } } for(filt=0; filt<NFILTERS; filt++) { obsfilt[filt] = 0; for(nf=0; nf<nFields; nf++) obsfilt[filt] += count[filt][nf]; } // now fill the value array for plotting for(nf=0; nf<nFields; nf++) { ravec[nf] = obs[start[nf]].ra; decvec[nf] = obs[start[nf]].dec; for(filt=0; filt<NFILTERS; filt++) { value[filt][nf] = (double) count[filt][nf]; } } // Reference maximum from SRD for 10-year survey if ( useDesignStretch == 0 ) { valmax[0] = 56; valmax[1] = 80; valmax[2] = 184; valmax[3] = 184; valmax[4] = 160; valmax[5] = 160; } else { valmax[0] = 70; valmax[1] = 100; valmax[2] = 230; valmax[3] = 230; valmax[4] = 200; valmax[5] = 200; } // adjust the desired values for survey duration for(filt=0; filt<NFILTERS; filt++) valmax[filt] *= (endMJD-startMJD)/365.25/10.0; for(filt=0; filt<NFILTERS; filt++) desired[filt] = valmax[filt]; // 120% SRD for 10-year survey for(filt=0; 
filt<NFILTERS; filt++) { valmax[filt] = 1.2 * desired[filt]; // 120% of SRD numbers valmin[filt] = 0.0; } // make the plot for the raw numbers plotSix(nFields, value, ravec, decvec, valmin, valmax, 1, "acquired number of visits", plotTitle, "SixVisitsAll-Num", 0); for(nf=0; nf<nFields; nf++) { ravec[nf] = obs[start[nf]].ra; decvec[nf] = obs[start[nf]].dec; for(filt=0; filt<NFILTERS; filt++) { value[filt][nf] *= (100.0/desired[filt]); } } // This is for plotting now, we are standardizing for 50 - 120 for(filt=0; filt<NFILTERS; filt++) { valmin[filt] = 50.0; valmax[filt] = 120.0; } plotSix(nFields, value, ravec, decvec, valmin, valmax, 1, "% of WFD visits", plotTitle, "SixVisits-All", 0); // now make histograms of completion for(filt=0; filt<NFILTERS; filt++) for(j=0; j<NHIST; j++) hist[filt][j] = 0; for(filt=0; filt<NFILTERS; filt++) { histmax[filt] = 0; histmin[filt] = 0; for(nf=0; nf<nFields; nf++) { if ((double)count[filt][nf]/(double)desired[filt] > 1.0 ) { histmax[filt]++; } else if ( count[filt][nf] == 0 ) { histmin[filt]++; } else { i = (int)((double)count[filt][nf]*10.0/(double)desired[filt]); hist[filt][i]++; } } } // intersection histogram for(j=0; j<NHIST; j++) { frac = (double)(j+1)/(double)NHIST; chist[j]=0; for(nf=0; nf<nFields; nf++) { flag = 1; for(filt=0; filt<NFILTERS; filt++) { if(count[filt][nf]<frac*desired[filt]) { flag = 0; break; } } if(flag==1) chist[j]++; } } /** * Making tex file for Six Visit */ FILE* tfp; char fName[80]; char s[100]; sprintf(fName, "../output/%s_%d_SixVisits-All.tex", hostname, sessionID); tfp = fopen(fName, "w"); fprintf(tfp, "\\begin{table}[H]{\\textbf{Frequency Distribution of Fields with Completeness}} \\\\ [1.0ex]\n"); fprintf(tfp, "\\begin{tabular*}{\\textwidth}{\\tblspace rrrrrrr}\n"); fprintf(tfp, "\\hline\n"); fprintf(tfp, "\\colhead{Percent Complete} &\n"); fprintf(tfp, "\\colhead{u} &\n"); fprintf(tfp, "\\colhead{g} &\n"); fprintf(tfp, "\\colhead{r} &\n"); fprintf(tfp, "\\colhead{i} &\n"); fprintf(tfp, 
"\\colhead{z} &\n"); fprintf(tfp, "\\colhead{y} \\\\ \n"); fprintf(tfp, "\\hline\n"); fprintf(tfp, "\\hline\n"); fprintf(tfp, "$N \\ge 100$ & "); for (filt=0; filt<NFILTERS; filt++) { if ( filt == NFILTERS - 1) { fprintf(tfp, "%4d \\\\", histmax[filt]); } else { fprintf(tfp, "%4d &", histmax[filt]); } } fprintf(tfp, "\n"); fprintf(quickfp, "All Fields &"); for(j=NHIST-1; j>=0; j--) { if ( j==0 ) { sprintf(s, "~~$%d \\le N < %d$ & ", j * NHIST, (j+1) * NHIST); } else { sprintf(s, "$%d \\le N < %d$ & ", j * NHIST, (j+1) * NHIST); } fprintf(tfp, "%s", s); for (filt=0; filt<NFILTERS; filt++) { if ( j == 9 ) { if ( filt == NFILTERS - 1) { fprintf(quickfp, "%4d \\\\", hist[filt][j] + histmax[filt]); } else { fprintf(quickfp, "%4d &", hist[filt][j] + histmax[filt]); } } if ( filt == NFILTERS - 1) { fprintf(tfp, "%4d \\\\", hist[filt][j]); } else { fprintf(tfp, "%4d &", hist[filt][j]); } } fprintf(tfp, "\n"); } /*fprintf(tfp, "~~~~~~~~~$N = 0$ & "); for (filt=0; filt<NFILTERS; filt++) { if ( filt == NFILTERS - 1) { fprintf(tfp, "%4d \\\\", histmin[filt]); } else { fprintf(tfp, "%4d &", histmin[filt]); } } fprintf(tfp, "\n");*/ fprintf(tfp, "\\hline\n"); fprintf(tfp, "\\hline\n"); fprintf(tfp, "\\end{tabular*}\n"); fprintf(tfp, "\\caption{The distribution of the number of fields with a given completeness for each filter. A fields completeness is given by the ratio of the number of visits to that field compared to the Scaled Design SRD number of visits. 
The N equals zero bin does not accurately represent the number of fields with no observations.}\n"); fprintf(tfp, "\\label{tab:FreqNumTable}\n"); fprintf(tfp, "\\end{table}\n"); fflush(tfp); fclose(tfp); /** * End of Making tex file for Six Visit */ printf("\nField Completeness:\n"); printf(" %% "); for(j=NHIST-1; j>=0; j--) printf("%4d ",(int)(100*(double)(j+1)/(double)NHIST)); printf("\n"); for(filt=0; filt<NFILTERS; filt++) { printf("%s ",filtername[filt]); for(j=NHIST-1; j>=0; j--) printf("%4d ",hist[filt][j]); printf("\n"); } printf("\nField Completeness (cumulative):\n"); printf(" %% "); for(j=NHIST-1; j>=0; j--) printf("%4d ",(int)(100*(double)(j+1)/(double)NHIST)); printf("\n"); for(filt=0; filt<NFILTERS; filt++) { printf("%s ",filtername[filt]); sum = 0; for(j=NHIST-1; j>=0; j--) { sum += hist[filt][j]; printf("%4d ",sum); hist[filt][j] = sum; } printf("\n"); } printf("all "); for(j=NHIST-1; j>=0; j--) printf("%4d ",chist[j]); printf("\n"); // plot of field completeness for(nf=0; nf<nFields; nf++) { least = 1.0; for(filt=0; filt<NFILTERS; filt++) { least = MIN(least,(double)count[filt][nf]/(double)desired[filt]); } value[0][nf] = least*100.0; } plotOne(nFields, value[0], ravec, decvec, 0.0, 100.0, "completed % in least-observed band", plotTitle, "completeness-all"); for(i=0; i<NFILTERS; i++) free(value[i]); free(value); free(decvec); free(ravec); for(i=0; i<NFILTERS; i++) free(count[i]); free(count); free(len); free(start); }
////////////////////////////////////////////////////////////////////////
//
// Implements SoWriteAction behavior for SoSwitch.  Unlike the default
// group traversal, this always writes ALL children — even when writing
// a path — so that whichChild keeps referencing a valid child index.
//
////////////////////////////////////////////////////////////////////////

void SoSwitch::write(SoWriteAction *action)
{
    SoOutput *out = action->getOutput();

    // When writing out a switch that is in a path, we always want to
    // write out ALL children of the switch. If we did the default
    // thing of writing out just those children that affect the nodes
    // in the path, we could screw up. Consider a switch that has two
    // child separators and whichChild set to 1. If a path goes
    // through the switch to the second child, the first child, being
    // a separator, does not affect the path. But if we don't write
    // out the separator, the whichChild will reference a
    // nonexistent child. So we always write out all children.

    // NOTE: SoChildList::traverse() checks the current path code and
    // skips children off the path that do not affect the
    // state. Because of this we have to avoid calling it. Instead, we
    // do its work here.

    // This code is stolen and modified from SoGroup::write() and
    // SoChildList::traverse()

    int lastChild = getNumChildren() - 1;
    SoAction::PathCode pc = action->getCurPathCode();

    // In write-reference counting phase
    if (out->getStage() == SoOutput::COUNT_REFS) {

        // Increment our write reference count
        addWriteReference(out);

        // If this is the first reference (i.e., we don't now have
        // multiple references), also count all appropriate children
        if (! hasMultipleWriteRefs()) {
            for (int i = 0; i <= lastChild; i++) {
                // push/pop keeps the current path in sync while each
                // child is counted
                action->pushCurPath(i);
                action->traverse(getChild(i));
                action->popCurPath(pc);
            }
        }
    }

    // In writing phase, we have to do some more work
    else if (! writeHeader(out, TRUE, FALSE)) {

        // Write fields
        const SoFieldData *fieldData = getFieldData();
        fieldData->write(out, this);

        // We KNOW that all children should be written, so don't
        // bother calling shouldWrite()

        // If writing binary format, write out number of children
        // that are going to be written
        if (out->isBinary())
            out->write(getNumChildren());

        for (int i = 0; i <= lastChild; i++) {
            action->pushCurPath(i);
            action->traverse(getChild(i));
            action->popCurPath(pc);
        }

        // Write post-children stuff
        writeFooter(out);
    }
}
int mainClass::parseDataToXLSX() { QDir currDir(tempDir); lxw_workbook *workbook = workbook_new(outputFile.toUtf8().constData()); //Parse all tables to the Excel file for (int pos = 0; pos <= tables.count()-1; pos++) { if (tables[pos].islookup == false) { QString sourceFile; sourceFile = currDir.absolutePath() + currDir.separator() + tables[pos].name + ".xml"; pt::ptree tree; pt::read_xml(sourceFile.toUtf8().constData(), tree); BOOST_FOREACH(boost::property_tree::ptree::value_type const&db, tree.get_child("mysqldump") ) { const boost::property_tree::ptree & aDatabase = db.second; // value (or a subnode) BOOST_FOREACH(boost::property_tree::ptree::value_type const&ctable, aDatabase.get_child("") ) { const std::string & key = ctable.first.data(); if (key == "table_data") { const boost::property_tree::ptree & aTable = ctable.second; //Here we need to create the sheet QString tableDesc; tableDesc = tables[pos].desc; if (tableDesc == "") tableDesc = tables[pos].name; lxw_worksheet *worksheet = workbook_add_worksheet(workbook,getSheetDescription(tableDesc)); int rowNo = 1; bool inserted = false; BOOST_FOREACH(boost::property_tree::ptree::value_type const&row, aTable.get_child("") ) { const boost::property_tree::ptree & aRow = row.second; //Here we need to append a row int colNo = 0; BOOST_FOREACH(boost::property_tree::ptree::value_type const&field, aRow.get_child("") ) { const std::string & fkey = field.first.data(); if (fkey == "field") { const boost::property_tree::ptree & aField = field.second; std::string fname = aField.get<std::string>("<xmlattr>.name"); std::string fvalue = aField.data(); QString desc; QString valueType; int size; int decSize; QString fieldName = QString::fromStdString(fname); QString fieldValue = QString::fromStdString(fvalue); getFieldData(tables[pos].name,fieldName,desc,valueType,size,decSize); if (desc != "NONE") { inserted = true; if (rowNo == 1) worksheet_write_string(worksheet,0, colNo, fieldName.toUtf8().constData(), NULL); 
worksheet_write_string(worksheet,rowNo, colNo, fieldValue.toUtf8().constData(), NULL); colNo++; } } } if (inserted) rowNo++; } } } } } }
////////////////////////////////////////////////////////////////////////
//
// Copies the fields and hidden children of another SoUnknownNode into
// this one.  Invoked by the copy machinery after this instance has
// been created.
//
////////////////////////////////////////////////////////////////////////

void SoUnknownNode::copyContents(const SoFieldContainer *fromFC, SbBool copyConnections)
{
    // Make sure the copy has the correct class name
    const SoUnknownNode *fromUnk = (const SoUnknownNode *) fromFC;
    setClassName(fromUnk->className);

    // For each field in the original node, create a new field and add
    // it to the new node
    // NOTE: We can't use SoNode::copyContents() to copy the field
    // data, since that uses SoFieldData::overlay(), which assumes the
    // fields have the same offsets in both nodes. Instead, we just
    // copy the field values ourselves.
    const SoFieldData *fromData = fromUnk->getFieldData();
    SoFieldData *toData = (SoFieldData *) getFieldData();
    int i;

    for (i = 0; i < fromData->getNumFields(); i++) {
        // Instantiate a field of the same concrete type as the source field.
        SoField *fromField = fromData->getField(fromUnk, i);
        const SbName fieldName = fromData->getFieldName(i);
        SoType fieldType = fromField->getTypeId();
        SoField *toField = (SoField *) (fieldType.createInstance());

        // Attach the new field while notification is suppressed so that
        // setting it up does not trigger notification of this node.
        toField->enableNotify(FALSE);
        toField->setContainer(this);
        toField->setDefault(TRUE);
        toField->enableNotify(TRUE);

        toData->addField(this, fieldName.getString(), toField);
        toField->setContainer(this);

        // Copy the value and the per-field flags from the source.
        toField->copyFrom(*fromField);
        toField->setIgnored(fromField->isIgnored());
        toField->setDefault(fromField->isDefault());
        toField->fixCopy(copyConnections);

        if (fromField->isConnected() && copyConnections)
            toField->copyConnection(fromField);
    }

    // Copy the kids
    for (i = 0; i < fromUnk->hiddenChildren.getLength(); i++) {

        // If this node is being copied, it must be "inside" (see
        // SoNode::copy() for details.) Therefore, all of its children
        // must be inside, as well, since our addToCopyDict() takes
        // care of that.
        SoNode *fromKid = fromUnk->getChild(i);
        SoNode *kidCopy = (SoNode *) findCopy(fromKid, copyConnections);

#ifdef DEBUG
        if (kidCopy == NULL)
            SoDebugError::post("(internal) SoUnknownNode::copyContents", "Child %d has not been copied yet", i);
#endif /* DEBUG */

        hiddenChildren.append(kidCopy);
    }

    // No need to copy the override flag, since this flag will have no
    // effect on an unknown node, and it is not read from or written
    // to files.
}