// Collects the total temperature at a Gauss point: the element's prescribed
// IP temperatures plus, if a temperature field is registered with the field
// manager, the external field value evaluated at the point's global coordinates.
// answer - receives the resulting temperature vector (cleared first)
// gp     - integration point to evaluate at
// tStep  - time step for the evaluation
void SimpleCrossSection :: giveTemperatureVector(FloatArray &answer, GaussPoint *gp, TimeStep *tStep)
{
    Element *elem = gp->giveElement();
    answer.clear();
    // Sum up all prescribed temperatures over the element.
    StructuralElement *selem = dynamic_cast< StructuralElement * >(elem);
    // Fix: the cast result was dereferenced unconditionally; a non-structural
    // element would crash with a null-pointer dereference.
    if ( !selem ) {
        OOFEM_ERROR("element %d is not a StructuralElement", elem->giveNumber());
    }
    selem->computeResultingIPTemperatureAt(answer, tStep, gp, VM_Total);

    /* add external source, if provided */
    FieldManager *fm = this->domain->giveEngngModel()->giveContext()->giveFieldManager();
    FieldPtr tf;
    if ( ( tf = fm->giveField(FT_Temperature) ) ) {
        // temperature field registered
        FloatArray gcoords, et2;
        int err;
        elem->computeGlobalCoordinates( gcoords, gp->giveNaturalCoordinates() );
        if ( ( err = tf->evaluateAt(et2, gcoords, VM_Total, tStep) ) ) {
            OOFEM_ERROR("tf->evaluateAt failed, element %d, error code %d", elem->giveNumber(), err);
        }
        if ( et2.isNotEmpty() ) {
            if ( answer.isEmpty() ) {
                answer = et2;
            } else {
                // Only the first component is superposed; presumably a scalar
                // temperature — confirm for multi-component fields.
                answer.at(1) += et2.at(1);
            }
        }
    }
}
// Builds a one-field document: field "f" holds docText, analyzed at index
// time, not stored, with norms omitted.
DocumentPtr makeDocument(const String& docText)
{
    DocumentPtr document = newLucene<Document>();
    FieldPtr bodyField = newLucene<Field>(L"f", docText, Field::STORE_NO, Field::INDEX_ANALYZED);
    bodyField->setOmitNorms(true);
    document->add(bodyField);
    return document;
}
// Creates a document with ten unanalyzed, unstored fields f0..f9 holding
// v0..v9; all fields share a single boost drawn from nextNorm().
DocumentPtr newDoc()
{
    DocumentPtr document = newLucene<Document>();
    double sharedBoost = nextNorm();
    for (int32_t fieldNo = 0; fieldNo < 10; ++fieldNo) {
        FieldPtr field = newLucene<Field>(L"f" + StringUtils::toString(fieldNo),
                                          L"v" + StringUtils::toString(fieldNo),
                                          Field::STORE_NO, Field::INDEX_NOT_ANALYZED);
        field->setBoost(sharedBoost);
        document->add(field);
    }
    return document;
}
// Recursively prints a message's fields to stdout, one "<type> <name> = value"
// line per field, indenting nested messages by widening the prefix.
void print_introspection(MessagePtr message, const std::string& prefix)
{
    if (!message->hasInstance()) {
        std::cout << "No instance!" << std::endl;
    }
    for (Message::const_iterator iter = message->begin(); iter != message->end(); ++iter) {
        FieldPtr field = *iter;
        std::cout << prefix << std::string(field->getDataType()) << " "
                  << std::string(field->getName()) << " = ";
        bool isContainer = field->isContainer();
        if (isContainer) {
            std::cout << "[";
        }
        if (field->isMessage()) {
            // Nested message(s): recurse with a wider prefix.
            std::cout << std::endl;
            for (std::size_t idx = 0; idx < field->size(); ++idx) {
                MessagePtr child = field->expand(idx);
                if (!child) {
                    std::cout << prefix << " (unknown)" << std::endl;
                } else {
                    print_introspection(child, prefix + " ");
                }
            }
            std::cout << prefix;
        } else {
            // Scalar or container of scalars: print comma-separated values.
            for (std::size_t idx = 0; idx < field->size(); ++idx) {
                if (idx > 0) {
                    std::cout << ", ";
                }
                std::cout << field->as<std::string>(idx);
            }
        }
        if (isContainer) {
            std::cout << "]";
        }
        std::cout << std::endl;
    }
}
// Fills fldInfo with the metadata of one recordset column: name, defined
// size, ADO type, attribute flags and (when not at EOF) the actual data size.
// Returns FALSE only if fldInfo is NULL.
BOOL CADORecordset::GetFieldInfo(FieldPtr pField, CADOFieldInfo* fldInfo)
{
	if(fldInfo == NULL)
		return FALSE;
	memset(fldInfo, 0, sizeof(CADOFieldInfo));
	// Fix: bounded copy — the previous strcpy could overrun m_strName on a
	// long column name. memset above guarantees NUL termination.
	strncpy(fldInfo->m_strName, (LPCTSTR)pField->GetName(), sizeof(fldInfo->m_strName) - 1);
	fldInfo->m_lDefinedSize = pField->GetDefinedSize();
	fldInfo->m_nType = pField->GetType();
	fldInfo->m_lAttributes = pField->GetAttributes();
	// ActualSize is only meaningful while positioned on a record.
	if(!IsEof())
		fldInfo->m_lSize = pField->GetActualSize();
	return TRUE;
}
void TestTable::withoutRowid() { Table tt("testtable"); FieldPtr f = FieldPtr(new Field("a", "integer")); f->setAutoIncrement(true); tt.addField(f); tt.addField(FieldPtr(new Field("b", "integer"))); tt.setRowidColumn("a"); tt.addConstraint({f}, ConstraintPtr(new PrimaryKeyConstraint())); QCOMPARE(tt.sql(), QString("CREATE TABLE `testtable` (\n" "\t`a`\tinteger PRIMARY KEY AUTOINCREMENT,\n" "\t`b`\tinteger\n" ") WITHOUT ROWID;")); }
// Reads a long text field in ChunkSize pieces and concatenates the pieces
// into strValue. Returns FALSE if any chunk read raises a COM error.
BOOL CADORecordset::GetChunk(FieldPtr pField, CString& strValue)
{
	CString accumulated;
	long totalSize = pField->ActualSize;   // total length of the field data
	long bytesRead = 0;
	_variant_t chunk;
	accumulated.Empty();
	while(bytesRead < totalSize)
	{
		try
		{
			chunk = pField->GetChunk(ChunkSize);
			accumulated += chunk.bstrVal;
			bytesRead += ChunkSize;
		}
		catch(_com_error &e)
		{
			dump_com_error(e);
			return FALSE;
		}
	}
	strValue = accumulated;
	return TRUE;
}
void TestTable::notnull() { Table tt("testtable"); FieldPtr f = FieldPtr(new Field("id", "integer")); f->setAutoIncrement(true); FieldPtr fkm = FieldPtr(new Field("km", "integer")); tt.addField(f); tt.addField(FieldPtr(new Field("car", "text", true))); tt.addField(fkm); tt.addConstraint({f}, ConstraintPtr(new PrimaryKeyConstraint())); QCOMPARE(tt.sql(), QString("CREATE TABLE `testtable` (\n" "\t`id`\tinteger PRIMARY KEY AUTOINCREMENT,\n" "\t`car`\ttext NOT NULL,\n" "\t`km`\tinteger\n" ");")); }
// Verifies omit-norms behaviour for every indexed field in DocHelper::fields:
// hasNorms must mirror the field's omitNorms flag and the noNorms list, and
// norm-less fields must come back as fake norms of 1.0 (or no array at all).
void checkNorms(IndexReaderPtr reader)
{
    for (int32_t fieldIdx = 0; fieldIdx < DocHelper::fields.size(); ++fieldIdx)
    {
        FieldPtr field = DocHelper::fields[fieldIdx];
        if (!field->isIndexed())
            continue;
        BOOST_CHECK_EQUAL(reader->hasNorms(field->name()), !field->getOmitNorms());
        BOOST_CHECK_EQUAL(reader->hasNorms(field->name()), !DocHelper::noNorms.contains(field->name()));
        if (!reader->hasNorms(field->name()))
        {
            // Expect fake norms of 1.0 or a null array depending on the flag.
            ByteArray norms = reader->norms(field->name());
            uint8_t encodedOne = DefaultSimilarity::encodeNorm(1.0);
            BOOST_CHECK(!norms);
            norms.resize(reader->maxDoc());
            reader->norms(field->name(), norms, 0);
            for (int32_t docNo = 0; docNo < reader->maxDoc(); ++docNo)
                BOOST_CHECK_EQUAL(norms[docNo], encodedOne);
        }
    }
}
// Extracts the value of the "IDENT" column from the supplied recordset
// (used to read back an @@IDENTITY-style query result).
// Returns 0 when the recordset is empty, the field is missing or NULL,
// or any exception occurs (reported through ErrorManager).
__int64 ADOConnection::_GetIdentityFromRS(_RecordsetPtr pRS) const
{
	try
	{
		if (pRS->GetRecordCount() == 0)
			return 0;
		_variant_t vaField;
		FieldsPtr pFields;
		FieldPtr pField;
		HRESULT hr;
		hr = pRS->get_Fields( &pFields );
		// Look up the column by name.
		vaField = "IDENT";
		pFields->get_Item( vaField, &pField );
		vaField.Clear();
		if (pField)
		{
			pField->get_Value(&vaField);
			switch (vaField.vt)
			{
				case VT_NULL:
					return 0;
				case VT_I4:
					return vaField.intVal;
				default:
					// NOTE(review): for non-I4 variants the sign is taken from the
					// DECIMAL view while the magnitude is read from the CURRENCY
					// view (cyVal.int64) — presumably the server returns
					// VT_DECIMAL/VT_CY here; confirm against the actual column type.
					if (vaField.decVal.sign == DECIMAL_NEG)
						return -vaField.cyVal.int64;
					else
						return vaField.cyVal.int64;
			}
		}
		return 0;
	}
	catch (...)
	{
		ErrorManager::Instance()->ReportError(ErrorManager::High, 5030, "ADOConnection::_GetIdentityFromRS", "Error while determening @@IDENTITY");
	}
	return 0;
}
// Modifies a field in the collection and keeps m_bibtexFieldDict (the map
// from bibtex property name to field) in sync: the old bibtex key is removed
// and the new one inserted. Returns false on a null field or lookup failure.
bool BibtexCollection::modifyField(Tellico::Data::FieldPtr newField_) {
  if(!newField_) {
    return false;
  }
//  myDebug();
  bool success = Collection::modifyField(newField_);
  FieldPtr oldField = fieldByName(newField_->name());
  // Fix: fieldByName() can return a null pointer for an unknown field name;
  // the previous code dereferenced it unconditionally.
  if(!oldField) {
    return false;
  }
  QString oldBibtex = oldField->property(QLatin1String("bibtex"));
  QString newBibtex = newField_->property(QLatin1String("bibtex"));
  if(!oldBibtex.isEmpty()) {
    success &= (m_bibtexFieldDict.remove(oldBibtex) != 0);
  }
  if(!newBibtex.isEmpty()) {
    oldField->setProperty(QLatin1String("bibtex"), newBibtex);
    m_bibtexFieldDict.insert(newBibtex, oldField.data());
  }
  return success;
}
void TestTable::sqlOutput() { Table tt("testtable"); FieldPtr f = FieldPtr(new Field("id", "integer")); f->setPrimaryKey(true); FieldPtr fkm = FieldPtr(new Field("km", "integer", false, "", "km > 1000")); fkm->setPrimaryKey(true); tt.addField(f); tt.addField(FieldPtr(new Field("car", "text"))); tt.addField(fkm); QCOMPARE(tt.sql(), QString("CREATE TABLE `testtable` (\n" "\t`id`\tinteger,\n" "\t`car`\ttext,\n" "\t`km`\tinteger CHECK(km > 1000),\n" "\tPRIMARY KEY(id,km)\n" ");")); }
// Attempts to pick up a crystal near the player in response to an ATTACK
// input. Scans the field tiles in a square of radius CRYSTAL_LENGTH around
// the player; the first crystal found is picked up and a pickup effect is
// drawn at its position. If the crystal inventory is already full, the
// camera and the player are first teleported to the boss area.
void PlayerBehavior::pickupCrystal( const CONTROLL& controll ) {
	// Cannot pick up unless this is the controlling player.
	if ( !_controll ) {
		return;
	}
	AppPtr app = App::getTask();
	CrystalsPtr crystals = app->getCrystals();
	if ( !crystals ) {
		return;
	}
	if ( controll.action == CONTROLL::ATTACK ) {
		int crystal_num = 0;   // NOTE(review): written but never used
		FieldPtr field = app->getField( );
		Vector pos = _parent->getPos( );
		// Scan the surrounding tiles for a crystal object.
		for ( int i = -CRYSTAL_LENGTH; i < CRYSTAL_LENGTH; i++ ) {
			for ( int j = -CRYSTAL_LENGTH; j < CRYSTAL_LENGTH; j++ ) {
				ObjectPtr object = field->getTarget( ( int )pos.x + i, ( int )pos.y + j );
				CrystalPtr crystal = std::dynamic_pointer_cast< Crystal >( object );
				if ( !crystal ) {
					continue;
				}
				// Inventory full (the "big crystal" case): jump the camera and
				// the player to the boss map position before collecting.
				if ( crystals->getCrystalNum( ) >= Crystals::MAX_CRYSTAL_NUM ) {
					CameraPtr camera = Camera::getTask();
					PlayerCameraPtr p_camera = std::dynamic_pointer_cast< PlayerCamera >( camera );
					Vector boss_map_pos = Vector( Ground::BOSS_X * Ground::CHIP_WIDTH + 2, Ground::BOSS_Y * Ground::CHIP_HEIGHT + 2, 0 );
					p_camera->setPos( Vector( boss_map_pos.x, boss_map_pos.y - 250, 20 ) );
					boss_map_pos -= _keep_pos;
					_parent->move( boss_map_pos );
				}
				crystal->pickup( );
				// Play the pickup visual effect slightly below the crystal.
				Effect effect;
				int id = effect.setEffect( Effect::EFFECT_PICKUP_CRYSTAL );
				effect.drawEffect( id, Vector( 0.5, 0.5, 0.5 ), crystal->getPos( ) - Vector( 0, 0, 0.3 ), Vector( 0, 0, 1 ) );
				return;   // only one crystal per ATTACK press
			}
		}
	}
}
//读Buffer bool CRsDupRgn::LoadBufferField(CBaseDBEntity *pGoods, const char* szFiledName, _RecordsetPtr &rs) { if(pGoods == NULL) return false; try { if(rs->GetadoEOF()) { ReleaseRs(rs); return false; } long lSize=0; FieldsPtr fldPtr = rs->GetFields(); FieldPtr itemPtr = fldPtr->GetItem(szFiledName); lSize = itemPtr->ActualSize; if(lSize > 0) { _variant_t varBLOB; varBLOB = itemPtr->GetChunk(lSize); BYTE *pBuf=NULL; SafeArrayAccessData(varBLOB.parray,(void**)&pBuf); SafeArrayUnaccessData(varBLOB.parray); // 清空SKILL CEntityProperty* ep = pGoods->GetDataEntityManager().GetEntityProperty(string(szFiledName)); if(ep) ep->SetBufAttr(0, pBuf, lSize); } } catch(_com_error e) { PrintErr(CStringReading::LoadString(IDS_DBS_RSDUPRGN,STR_DBS_DUPRGN_LOADRGNOBJERR), e); return false; } return true; }
// Appends nBytes of raw data from lpData to a long binary field. The bytes
// are marshalled through a VT_UI1 safe array wrapped in a variant.
// Returns TRUE on success, FALSE on any allocation, copy, or COM failure.
BOOL CADORecordset::AppendChunk(FieldPtr pField, LPVOID lpData, UINT nBytes)
{
	HRESULT hr;
	_variant_t varChunk;
	long lngOffset = 0;
	UCHAR chData;
	SAFEARRAY FAR *psa = NULL;
	SAFEARRAYBOUND rgsabound[1];
	try
	{
		// Create a safe array to hold the bytes.
		rgsabound[0].lLbound = 0;
		rgsabound[0].cElements = nBytes;
		psa = SafeArrayCreate(VT_UI1, 1, rgsabound);
		if(psa == NULL)   // fix: allocation failure was not checked
			return FALSE;
		while(lngOffset < (long)nBytes)
		{
			chData = ((UCHAR*)lpData)[lngOffset];
			hr = SafeArrayPutElement(psa, &lngOffset, &chData);
			if(FAILED(hr))
			{
				// Fix: the original leaked psa on this early exit; the array is
				// not yet owned by the variant at this point.
				SafeArrayDestroy(psa);
				return FALSE;
			}
			lngOffset++;
		}
		lngOffset = 0;
		// Hand the array to the variant; its destructor now owns and frees psa.
		varChunk.vt = VT_ARRAY | VT_UI1;
		varChunk.parray = psa;
		hr = pField->AppendChunk(varChunk);
		if(SUCCEEDED(hr))
			return TRUE;
	}
	catch(_com_error &e)
	{
		dump_com_error(e);
		return FALSE;
	}
	return FALSE;
}
// Plays a BLOB voice prompt on telephony channel nChan. The field's entire
// contents are read in one chunk, copied into this object's buffer, and
// handed to SsmPlayMem. When bTermDtmf is set, an incoming DTMF digit stops
// playback. An E_PROC_PlayEnd user event is posted immediately when the BLOB
// is empty or when SsmPlayMem reports a (non-zero) result.
int CDialerChannel::PlayBlob(int nChan, FieldPtr xPlyBlob, BOOL bTermDtmf)
{
	SsmSetDtmfStopPlay( nChan, bTermDtmf );
	SsmClearRxDtmfBuf( nChan );
	LONG lDataSize = xPlyBlob->ActualSize;
	if ( lDataSize <= 0 )
		return SsmPutUserEvent( E_PROC_PlayEnd, nChan, 0 );
	// Read the whole BLOB as one chunk into a safe array.
	_variant_t TheValue = xPlyBlob->GetChunk(lDataSize);
	LPTSTR *pBuf = NULL;
	SafeArrayAccessData(TheValue.parray, (void **)&pBuf);
	// NOTE(review): SetBuffer presumably copies the bytes into BufferPtr —
	// confirm, since the safe array is unlocked immediately afterwards.
	this->SetBuffer( pBuf, lDataSize );
	SafeArrayUnaccessData( TheValue.parray );
	if ( SsmPlayMem( nChan, 6, (LPBYTE)BufferPtr, BufferSize, 0, BufferSize - 1 ) )
		return SsmPutUserEvent( E_PROC_PlayEnd, nChan, 0 );
	return 0;
}
// Reads a long binary field chunk by chunk into the caller-supplied buffer
// lpData. The caller must provide at least pField->ActualSize bytes.
// Returns FALSE on a COM error.
BOOL CADORecordset::GetChunk(FieldPtr pField, LPVOID lpData)
{
	long lngSize, lngOffSet = 0;
	_variant_t varChunk;
	UCHAR chData;
	HRESULT hr;
	long lBytesCopied = 0;
	lngSize = pField->ActualSize;
	while(lngOffSet < lngSize)
	{
		try
		{
			varChunk = pField->GetChunk(ChunkSize);
			// Fix: copy only up to the actual size of the field. The original
			// loop always copied a full ChunkSize, overrunning lpData on the
			// last, partial chunk.
			long lChunkBytes = ChunkSize;
			if(lngSize - lngOffSet < lChunkBytes)
				lChunkBytes = lngSize - lngOffSet;
			for(long lIndex = 0; lIndex < lChunkBytes; lIndex++)
			{
				hr = SafeArrayGetElement(varChunk.parray, &lIndex, &chData);
				if(SUCCEEDED(hr))
				{
					// Take the data BYTE by BYTE and advance the memory location.
					((UCHAR*)lpData)[lBytesCopied] = chData;
					lBytesCopied++;
				}
				else
					break;
			}
			lngOffSet += ChunkSize;
		}
		catch(_com_error &e)
		{
			dump_com_error(e);
			return FALSE;
		}
	}
	lngOffSet = 0;
	return TRUE;
}
// Appends every record of the source recordset IRecordS to the destination
// recordset IRecordD, copying values field by field using the field names of
// the source. Returns TRUE when the source is empty or all rows copied,
// FALSE on a null recordset or any COM error (reported via ExceptionInfo).
BOOL CFoxBase::CopyData(_RecordsetPtr &IRecordS, _RecordsetPtr &IRecordD)
{
	_variant_t TempValue;
	short Item;
	FieldsPtr IFields;
	FieldPtr IField;
	if(IRecordS==NULL || IRecordD==NULL)
	{
		ExceptionInfo(_T("Source Recordset or destination Recordset cann't be empty"));
		return FALSE;
	}
	// Source recordset empty (EOF and BOF both set): nothing to copy.
	if(IRecordS->adoEOF && IRecordS->BOF)
	{
		return TRUE;
	}
	// If the destination already has rows, position at its last record so
	// AddNew appends after the existing data.
	if(!IRecordD->adoEOF || !IRecordD->BOF)
	{
		try
		{
			IRecordD->MoveLast();
		}
		catch(_com_error &e)
		{
			ExceptionInfo(e);
			return FALSE;
		}
	}
	try
	{
		IRecordS->MoveFirst();
	}
	catch(_com_error &e)
	{
		ExceptionInfo(e);
		return FALSE;
	}
	try
	{
		// Copy row by row; each row is committed with Update() before moving on.
		while(!IRecordS->adoEOF)
		{
			IRecordS->get_Fields(&IFields);
			IRecordD->AddNew();
			for(Item=0;Item<IFields->GetCount();Item++)
			{
				IFields->get_Item(_variant_t(Item),&IField);
				TempValue=IRecordS->GetCollect(_variant_t(IField->GetName()));
				IRecordD->PutCollect(_variant_t(IField->GetName()),TempValue);
				IField.Release();
			}
			IRecordD->Update();
			IFields.Release();
			IRecordS->MoveNext();
		}
	}
	catch(_com_error &e)
	{
		ExceptionInfo(e);
		return FALSE;
	}
	return TRUE;
}
// Builds a "CREATE TABLE <pTableName>(...)" statement whose column list is
// derived from the fields of the supplied recordset, mapping ADO data types
// to SQL types. Returns an empty string on a null recordset/table name or
// when a field has an unsupported type (reported via ExceptionInfo).
CString CFoxBase::CreateTableSQL(_RecordsetPtr &IRecord,LPCTSTR pTableName)
{
	short Item;
	CString CreateSQL;
	CString DefList;
	FieldsPtr IFields;
	FieldPtr IField;
	CreateSQL=_T("");
	if(IRecord==NULL)
	{
		ExceptionInfo(_T("Recordset interface cann't be NULL"));
		return CreateSQL;
	}
	if(pTableName==NULL)
	{
		ExceptionInfo(_T("Table name cann't be NULL"));
		return CreateSQL;
	}
	IRecord->get_Fields(&IFields);
	DefList=_T("");
	// Build the "<name> <sqltype>" list, one entry per recordset field.
	for(Item=0;Item<IFields->GetCount();Item++)
	{
		IFields->get_Item(_variant_t(Item),&IField);
		DefList+=IField->GetName();
		DefList+=_T(" ");
		// Map the ADO column type to its SQL type name.
		switch(IField->GetType())
		{
			case adVarWChar:
			{
				CString Temp;
				Temp.Format(_T("varchar(%d)"),IField->GetDefinedSize());
				DefList+=Temp;
				break;
			}
			case adLongVarWChar:
				DefList+=_T("text");
				break;
			case adVarBinary:
			{
				CString Temp;
				Temp.Format(_T("varbinary(%d)"),IField->GetDefinedSize());
				DefList+=Temp;
				break;
			}
//			case adNumeric:
//			case adGUID:
//			case adLongVarBinary:
//			{
//				ExceptionInfo(_T("不支持LongVarBinary"));
//				return CreateSQL;
//			}
//			break;
			case adInteger:
				DefList+=_T("int");
				break;
			case adUnsignedTinyInt:
			case adSmallInt:
				DefList+=_T("smallint");
				break;
			case adSingle:
			case adDouble:
				DefList+=_T("float");
				break;
			case adDBTimeStamp:
			case adDate:
				DefList+=_T("date");
				break;
//			case adBoolean:
//			break;
			default:
			{
				// Message text means "this type is not supported".
				ExceptionInfo(_T("不支持此类型"));
				return CreateSQL;
			}
		}
		if(Item < IFields->GetCount()-1)
		{
			DefList+=_T(",");
		}
		IField.Release();
	}
	CreateSQL.Format(_T("CREATE TABLE %s(%s)"),pTableName,DefList);
	return CreateSQL;
}
// Builds the default field set for a bibtex collection. Every field carries a
// "bibtex" property naming the bibtex key it maps to; fields are grouped into
// the General, Publishing and Classification (misc) categories.
// Returns the ordered list of freshly created fields.
Tellico::Data::FieldList BibtexCollection::defaultFields() {
  FieldList list;
  FieldPtr field;

/******************* General ****************************/
  field = createDefaultField(TitleField);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("title"));
  list.append(field);

  // The fixed set of bibtex entry types, offered as a choice list.
  QStringList types;
  types << QLatin1String("article") << QLatin1String("book")
        << QLatin1String("booklet") << QLatin1String("inbook")
        << QLatin1String("incollection") << QLatin1String("inproceedings")
        << QLatin1String("manual") << QLatin1String("mastersthesis")
        << QLatin1String("misc") << QLatin1String("phdthesis")
        << QLatin1String("proceedings") << QLatin1String("techreport")
        << QLatin1String("unpublished") << QLatin1String("periodical")
        << QLatin1String("conference");
  field = new Field(QLatin1String("entry-type"), i18n("Entry Type"), types);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("entry-type"));
  field->setCategory(i18n(bibtex_general));
  field->setFlags(Field::AllowGrouped | Field::NoDelete);
  field->setDescription(i18n("These entry types are specific to bibtex. See the bibtex documentation."));
  list.append(field);

  field = new Field(QLatin1String("author"), i18n("Author"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("author"));
  field->setCategory(i18n(bibtex_general));
  field->setFlags(Field::AllowCompletion | Field::AllowMultiple | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatName);
  list.append(field);

  field = new Field(QLatin1String("bibtex-key"), i18n("Bibtex Key"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("key"));
  field->setCategory(i18n("General"));
  field->setFlags(Field::NoDelete);
  list.append(field);

  field = new Field(QLatin1String("booktitle"), i18n("Book Title"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("booktitle"));
  field->setCategory(i18n(bibtex_general));
  field->setFormatType(FieldFormat::FormatTitle);
  list.append(field);

  field = new Field(QLatin1String("editor"), i18n("Editor"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("editor"));
  field->setCategory(i18n(bibtex_general));
  field->setFlags(Field::AllowCompletion | Field::AllowMultiple | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatName);
  list.append(field);

  field = new Field(QLatin1String("organization"), i18n("Organization"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("organization"));
  field->setCategory(i18n(bibtex_general));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

//  field = new Field(QLatin1String("institution"), i18n("Institution"));
//  field->setProperty(QLatin1String("bibtex"), QLatin1String("institution"));
//  field->setCategory(i18n(bibtex_general));
//  field->setFlags(Field::AllowDelete);
//  field->setFormatType(FieldFormat::FormatTitle);
//  list.append(field);

/******************* Publishing ****************************/
  field = new Field(QLatin1String("publisher"), i18n("Publisher"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("publisher"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("address"), i18n("Address"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("address"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("edition"), i18n("Edition"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("edition"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowCompletion);
  list.append(field);

  // don't make it a number, it could have latex processing commands in it
  field = new Field(QLatin1String("pages"), i18n("Pages"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("pages"));
  field->setCategory(i18n(bibtex_publishing));
  list.append(field);

  field = new Field(QLatin1String("year"), i18n("Year"), Field::Number);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("year"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("isbn"), i18n("ISBN#"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("isbn"));
  field->setCategory(i18n(bibtex_publishing));
  field->setDescription(i18n("International Standard Book Number"));
  list.append(field);

  field = new Field(QLatin1String("journal"), i18n("Journal"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("journal"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("doi"), i18n("DOI"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("doi"));
  field->setCategory(i18n(bibtex_publishing));
  field->setDescription(i18n("Digital Object Identifier"));
  list.append(field);

  // could make this a string list, but since bibtex import could have funky values
  // keep it an editbox
  field = new Field(QLatin1String("month"), i18n("Month"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("month"));
  field->setCategory(i18n(bibtex_publishing));
  field->setFlags(Field::AllowCompletion);
  list.append(field);

  field = new Field(QLatin1String("number"), i18n("Number"), Field::Number);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("number"));
  field->setCategory(i18n(bibtex_publishing));
  list.append(field);

  field = new Field(QLatin1String("howpublished"), i18n("How Published"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("howpublished"));
  field->setCategory(i18n(bibtex_publishing));
  list.append(field);

//  field = new Field(QLatin1String("school"), i18n("School"));
//  field->setProperty(QLatin1String("bibtex"), QLatin1String("school"));
//  field->setCategory(i18n(bibtex_publishing));
//  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
//  list.append(field);

/******************* Classification ****************************/
  field = new Field(QLatin1String("chapter"), i18n("Chapter"), Field::Number);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("chapter"));
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  field = new Field(QLatin1String("series"), i18n("Series"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("series"));
  field->setCategory(i18n(bibtex_misc));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatTitle);
  list.append(field);

  field = new Field(QLatin1String("volume"), i18nc("A number field in a bibliography", "Volume"), Field::Number);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("volume"));
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  field = new Field(QLatin1String("crossref"), i18n("Cross-Reference"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("crossref"));
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

//  field = new Field(QLatin1String("annote"), i18n("Annotation"));
//  field->setProperty(QLatin1String("bibtex"), QLatin1String("annote"));
//  field->setCategory(i18n(bibtex_misc));
//  list.append(field);

  field = new Field(QLatin1String("keyword"), i18n("Keywords"));
  field->setProperty(QLatin1String("bibtex"), QLatin1String("keywords"));
  field->setCategory(i18n(bibtex_misc));
  field->setFlags(Field::AllowCompletion | Field::AllowMultiple | Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("url"), i18n("URL"), Field::URL);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("url"));
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  field = new Field(QLatin1String("abstract"), i18n("Abstract"), Field::Para);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("abstract"));
  list.append(field);

  field = new Field(QLatin1String("note"), i18n("Notes"), Field::Para);
  field->setProperty(QLatin1String("bibtex"), QLatin1String("note"));
  list.append(field);

  field = createDefaultField(IDField);
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  field = createDefaultField(CreatedDateField);
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  field = createDefaultField(ModifiedDateField);
  field->setCategory(i18n(bibtex_misc));
  list.append(field);

  return list;
}
// NOTE(review): this span begins and ends mid-function — the assertions below
// are the tail of a regression test that starts outside this view, and the
// final TEST_F is continued outside it as well.
   FieldPtr after = from.getField(Variable::T, 0);
   ASSERT_EQ(10, after->getNumLat());
   ASSERT_EQ(10, after->getNumLon());
   ASSERT_EQ(1, after->getNumEns());
   EXPECT_FLOAT_EQ(0.3, (*after)(5,2,0));
   EXPECT_FLOAT_EQ(0.3, (*after)(5,9,0));
   EXPECT_FLOAT_EQ(0.3, (*after)(0,9,0));
}
// First-order regression: values become intercept + slope * raw value.
TEST_F(TestCalibratorRegression, 10x10_1order) {
   FileArome from("testing/files/10x10.nc");
   ParameterFileText par(Options("file=testing/files/regression1order.txt"));
   CalibratorRegression cal = CalibratorRegression(Variable::T, Options());
   cal.calibrate(from, &par);
   FieldPtr after = from.getField(Variable::T, 0);
   ASSERT_EQ(10, after->getNumLat());
   ASSERT_EQ(10, after->getNumLon());
   ASSERT_EQ(1, after->getNumEns());
   EXPECT_FLOAT_EQ(361.5, (*after)(5,2,0)); // 0.3 + 1.2*301
   EXPECT_FLOAT_EQ(365.1, (*after)(5,9,0));
   EXPECT_FLOAT_EQ(384.3, (*after)(0,9,0));
}
// Second-order regression test (assertions continue outside this view).
TEST_F(TestCalibratorRegression, 10x10_2order) {
   FileArome from("testing/files/10x10.nc");
   ParameterFileText par(Options("file=testing/files/regression2order.txt"));
   CalibratorRegression cal = CalibratorRegression(Variable::T, Options());
   cal.calibrate(from, &par);
   FieldPtr after = from.getField(Variable::T, 0);
    // Returns true as soon as any field in m_fields is flagged AUTOINCREMENT.
    // NOTE(review): fragment — the enclosing function begins and ends outside
    // this view.
    foreach(FieldPtr f, m_fields) {
        if(f->autoIncrement())
            return true;
    }
// Create store in object field ValueBuilder BlockBuilder::createStore(ValueBuilder thisValue, FieldPtr field, ValueBuilder value) { llvm::Value* offset = offsetField(thisValue, field); new llvm::StoreInst(value.getValue(), offset, "", m_block); return ValueBuilder(*this, value.getValue(), field->getFieldType()); }
// Builds the default field set for a wine collection, grouped into the
// General and Personal categories, plus images, free-text paragraphs and the
// standard ID/created/modified bookkeeping fields.
// Returns the ordered list of freshly created fields.
Tellico::Data::FieldList WineCollection::defaultFields() {
  FieldList list;
  FieldPtr field;

  // The title is derived from vintage/producer/varietal via a template.
  field = createDefaultField(TitleField);
  field->setProperty(QLatin1String("template"), QLatin1String("%{vintage} %{producer:1} %{varietal:1}"));
  field->setFlags(Field::NoDelete | Field::Derived);
  field->setFormatType(FieldFormat::FormatNone);
  list.append(field);

  field = new Field(QLatin1String("producer"), i18nc("Wine Producer", "Producer"));
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("appellation"), i18n("Appellation"));
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("varietal"), i18n("Varietal"));
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("vintage"), i18n("Vintage"), Field::Number);
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowGrouped);
  list.append(field);

  // Fixed choice list for the wine type.
  QStringList type;
  type << i18n("Red Wine") << i18n("White Wine") << i18n("Sparkling Wine");
  field = new Field(QLatin1String("type"), i18n("Type"), type);
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("country"), i18n("Country"));
  field->setCategory(i18n(wine_general));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  field->setFormatType(FieldFormat::FormatPlain);
  list.append(field);

  field = new Field(QLatin1String("pur_date"), i18n("Purchase Date"));
  field->setCategory(i18n(wine_personal));
  field->setFormatType(FieldFormat::FormatDate);
  list.append(field);

  field = new Field(QLatin1String("pur_price"), i18n("Purchase Price"));
  field->setCategory(i18n(wine_personal));
  list.append(field);

  field = new Field(QLatin1String("location"), i18n("Location"));
  field->setCategory(i18n(wine_personal));
  field->setFlags(Field::AllowCompletion | Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("quantity"), i18n("Quantity"), Field::Number);
  field->setCategory(i18n(wine_personal));
  list.append(field);

  field = new Field(QLatin1String("drink-by"), i18n("Drink By"), Field::Number);
  field->setCategory(i18n(wine_personal));
  field->setFlags(Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("rating"), i18n("Rating"), Field::Rating);
  field->setCategory(i18n(wine_personal));
  field->setFlags(Field::AllowGrouped);
  list.append(field);

  field = new Field(QLatin1String("gift"), i18n("Gift"), Field::Bool);
  field->setCategory(i18n(wine_personal));
  list.append(field);

  field = new Field(QLatin1String("label"), i18n("Label Image"), Field::Image);
  list.append(field);

  field = new Field(QLatin1String("description"), i18n("Description"), Field::Para);
  list.append(field);

  field = new Field(QLatin1String("comments"), i18n("Comments"), Field::Para);
  list.append(field);

  list.append(createDefaultField(IDField));
  list.append(createDefaultField(CreatedDateField));
  list.append(createDefaultField(ModifiedDateField));

  return list;
}
// Create load from object field ValueBuilder BlockBuilder::createLoad(ValueBuilder thisValue, FieldPtr field) { llvm::Value* offset = offsetField(thisValue, field); llvm::Value* loadInst = new llvm::LoadInst(offset, "", m_block); return ValueBuilder(*this, loadInst, field->getFieldType()); }
// Applies kriging-based bias correction to every gridpoint/time/ensemble
// member of iFile, using point biases from iParameterFile. The obs-to-obs
// covariance matrix is inverted once; per-gridpoint weights are then the
// inverse times the (sparse) gridpoint-to-obs covariance vector, and the
// correction is the weighted sum of station biases, optionally scaled by an
// auxiliary-variable weight and applied with the configured operator.
// Returns false (skipping calibration) when no gridpoint has valid
// lat/lon/elev; calls Util::error on a non-spatial parameter file.
bool CalibratorKriging::calibrateCore(File& iFile, const ParameterFile* iParameterFile) const {
   int nLat = iFile.getNumLat();
   int nLon = iFile.getNumLon();
   int nEns = iFile.getNumEns();
   int nTime = iFile.getNumTime();
   vec2 lats = iFile.getLats();
   vec2 lons = iFile.getLons();
   vec2 elevs = iFile.getElevs();

   // Check if this method can be applied
   bool hasValidGridpoint = false;
   for(int i = 0; i < nLat; i++) {
      for(int j = 0; j < nLon; j++) {
         if(Util::isValid(lats[i][j]) && Util::isValid(lons[i][j]) && Util::isValid(elevs[i][j])) {
            hasValidGridpoint = true;
         }
      }
   }
   if(!hasValidGridpoint) {
      Util::warning("There are no gridpoints with valid lat/lon/elev values. Skipping kriging...");
      return false;
   }

   // Precompute weights from auxillary variable
   std::vector<std::vector<std::vector<std::vector<float> > > > auxWeights;
   if(mAuxVariable != Variable::None) {
      // Initialize [lat][lon][ens][time], all zeros.
      auxWeights.resize(nLat);
      for(int i = 0; i < nLat; i++) {
         auxWeights[i].resize(nLon);
         for(int j = 0; j < nLon; j++) {
            auxWeights[i][j].resize(nEns);
            for(int e = 0; e < nEns; e++) {
               auxWeights[i][j][e].resize(nTime, 0);
            }
         }
      }
      // Load auxillarcy variable
      std::vector<FieldPtr> auxFields;
      auxFields.resize(nTime);
      for(int t = 0; t < nTime; t++) {
         auxFields[t] = iFile.getField(mAuxVariable, t);
      }
      // Compute auxillary weights: fraction of valid values inside the
      // +/- mWindow time window that fall within the threshold band.
      for(int t = 0; t < nTime; t++) {
         #pragma omp parallel for
         for(int i = 0; i < nLat; i++) {
            for(int j = 0; j < nLon; j++) {
               for(int e = 0; e < nEns; e++) {
                  float total = 0;
                  int start = std::max(t-mWindow,0);
                  int end = std::min(nTime-1,t+mWindow);
                  int numValid = 0;
                  for(int tt = start; tt <= end; tt++) {
                     float aux = (*auxFields[tt])(i,j,e);
                     if(Util::isValid(aux)) {
                        if(aux >= mLowerThreshold && aux <= mUpperThreshold) {
                           total++;
                        }
                        numValid++;
                     }
                  }
                  int windowSize = end - start + 1;   // NOTE(review): unused
                  if(numValid == 0)
                     auxWeights[i][j][e][t] = 1;
                  else
                     auxWeights[i][j][e][t] += total / numValid;
               }
            }
         }
      }
   }

   if(!iParameterFile->isLocationDependent()) {
      std::stringstream ss;
      ss << "Kriging requires a parameter file with spatial information";
      Util::error(ss.str());
   }

   std::vector<Location> obsLocations = iParameterFile->getLocations();

   // General proceedure for a given gridpoint:
   // S = matrix * weights
   // weights = (matrix)^-1 * S
   // gridpoint_bias = weights' * bias (scalar)
   // where
   // matrix: The obs-to-obs covariance matrix (NxN)
   // S: The obs-to-current_grid_point covariance (Nx1)
   // bias: The bias at each obs location (Nx1)
   //
   // Note that when computing the weights, we can take a shortcut since most values in
   // S are zero. However, the weights will still have the length of all stations (not the
   // number of nearby stations), since when computing the bias we still need to
   // weight in far away biases (because they can covary with the nearby stations)

   // Compute obs-obs covariance-matrix once
   vec2 matrix;
   int N = obsLocations.size();
   std::cout << " Point locations: " << N << std::endl;
   matrix.resize(N);
   for(int ii = 0; ii < N; ii++) {
      matrix[ii].resize(N,0);
   }
   for(int ii = 0; ii < N; ii++) {
      Location iloc = obsLocations[ii];
      // The diagonal is 1, since the distance from a point to itself
      // is 0, therefore its weight is 1.
      matrix[ii][ii] = 1;
      // The matrix is symmetric, so only compute one of the halves
      for(int jj = ii+1; jj < N; jj++) {
         Location jloc = obsLocations[jj];
         // Improve conditioning of matrix when you have two or more stations
         // that are very close
         float factor = 0.414 / 0.5;
         float R = calcCovar(iloc, jloc)*factor;
         // Store the number in both halves
         matrix[ii][jj] = R;
         matrix[jj][ii] = R;
      }
   }

   // Compute (matrix)^-1
   std::cout << " Precomputing inverse of obs-to-obs covariance matrix: ";
   std::cout.flush();
   double s1 = Util::clock();
   vec2 inverse = Util::inverse(matrix);
   double e1 = Util::clock();
   std::cout << e1 - s1 << " seconds" << std::endl;

   // Compute grid-point to obs-point covariances
   std::cout << " Precomputing gridpoint-to-obs covariances: ";
   std::cout.flush();
   double s2 = Util::clock();
   // Store the covariances of each gridpoint to every obs-point. To save memory,
   // only store values that are above 0. Store the index of the obs-point.
   // This means that Sindex does not have the same size for every gridpoint.
   std::vector<std::vector<std::vector<float> > > S; // lat, lon, obspoint
   std::vector<std::vector<std::vector<int> > > Sindex;
   S.resize(nLat);
   Sindex.resize(nLat);
   for(int i = 0; i < nLat; i++) {
      S[i].resize(nLon);
      Sindex[i].resize(nLon);
   }
   #pragma omp parallel for
   for(int i = 0; i < nLat; i++) {
      for(int j = 0; j < nLon; j++) {
         float lat = lats[i][j];
         float lon = lons[i][j];
         float elev = elevs[i][j];
         const Location gridPoint(lat, lon, elev);
         for(int ii = 0; ii < N; ii++) {
            Location obsPoint = obsLocations[ii];
            float covar = calcCovar(obsPoint, gridPoint);
            if(covar > 0) {
               S[i][j].push_back(covar);
               Sindex[i][j].push_back(ii);
            }
         }
      }
   }
   double e2 = Util::clock();
   std::cout << e2 - s2 << " seconds" << std::endl;

   // Loop over offsets
   for(int t = 0; t < nTime; t++) {
      FieldPtr field = iFile.getField(mVariable, t);
      // NOTE(review): accum and weights (the FieldPtrs) are never used below;
      // the inner loop declares its own std::vector<float> weights.
      FieldPtr accum = iFile.getEmptyField(0);
      FieldPtr weights = iFile.getEmptyField(0);

      // Arrange all the biases for all stations into one vector
      std::vector<float> bias(N,0);
      for(int k = 0; k < obsLocations.size(); k++) {
         Location loc = obsLocations[k];
         Parameters parameters = iParameterFile->getParameters(t, loc, false);
         if(parameters.size() > 0) {
            float currBias = parameters[0];
            if(Util::isValid(currBias)) {
               // For * and /, operate on the flucuations areound a mean of 1
               if(mOperator == Util::OperatorMultiply) {
                  currBias = currBias - 1;
               }
               else if(mOperator == Util::OperatorDivide) {
                  currBias = currBias - 1;
               }
            }
            bias[k] = currBias;
         }
      }

      #pragma omp parallel for
      for(int i = 0; i < nLat; i++) {
         for(int j = 0; j < nLon; j++) {
            std::vector<float> currS = S[i][j];
            std::vector<int> currI = Sindex[i][j];
            int currN = currS.size();
            // No correction if there are no nearby stations
            if(currN == 0)
               continue;
            // Don't use the nearest station when cross validating
            float maxCovar = Util::MV;
            int ImaxCovar = Util::MV;
            if(mCrossValidate) {
               for(int ii = 0; ii < currS.size(); ii++) {
                  if(!Util::isValid(ImaxCovar) || currS[ii] > maxCovar) {
                     ImaxCovar = ii;
                     maxCovar = currS[ii];
                  }
               }
               currS[ImaxCovar] = 0;
               vec2 cvMatrix = matrix;
               for(int ii = 0; ii < currN; ii++) {
                  cvMatrix[ImaxCovar][ii] = 0;
                  cvMatrix[ii][ImaxCovar] = 0;
               }
               cvMatrix[ImaxCovar][ImaxCovar] = 1;
               // NOTE(review): this writes the shared `inverse` from inside an
               // OpenMP parallel loop — data race when mCrossValidate is set;
               // a loop-local copy would be safe. Confirm and fix separately.
               inverse = Util::inverse(cvMatrix);
            }
            // Compute weights (matrix-vector product)
            std::vector<float> weights;
            weights.resize(N, 0);
            for(int ii = 0; ii < N; ii++) {
               // Only loop over non-zero values in the vector
               for(int jj = 0; jj < currN; jj++) {
                  int JJ = currI[jj];
                  weights[ii] += inverse[ii][JJ] * currS[jj];
               }
            }
            // Set the weight of the nearest location to 0 when cross-validating
            if(mCrossValidate) {
               weights[ImaxCovar] = 0;
            }
            // Compute final bias (dot product of bias and weights)
            float finalBias = 0;
            for(int ii = 0; ii < N; ii++) {
               float currBias = bias[ii];
               if(!Util::isValid(currBias)) {
                  finalBias = Util::MV;
                  break;
               }
               finalBias += bias[ii]*weights[ii];
            }
            if(Util::isValid(finalBias)) {
               // Reconstruct the factor/divisor by adding the flucuations
               // onto the mean of 1
               if(mOperator == Util::OperatorMultiply)
                  finalBias = finalBias + 1;
               else if(mOperator == Util::OperatorDivide)
                  finalBias = finalBias - 1;
               // Apply bias to each ensemble member
               for(int e = 0; e < nEns; e++) {
                  float rawValue = (*field)(i,j,e);   // NOTE(review): unused
                  // Adjust bias based on auxillary weight
                  if(mAuxVariable != Variable::None) {
                     float weight = auxWeights[i][j][e][t];
                     if(mOperator == Util::OperatorAdd || mOperator == Util::OperatorSubtract) {
                        finalBias = finalBias * weight;
                     }
                     else {
                        finalBias = pow(finalBias, weight);
                     }
                  }
                  if(mOperator == Util::OperatorAdd) {
                     (*field)(i,j,e) += finalBias;
                  }
                  else if(mOperator == Util::OperatorSubtract) {
                     (*field)(i,j,e) -= finalBias;
                  }
                  else if(mOperator == Util::OperatorMultiply) {
                     // TODO: How do we ensure that the matrix is positive definite in this
                     // case?
                     (*field)(i,j,e) *= finalBias;
                  }
                  else if(mOperator == Util::OperatorDivide) {
                     // TODO: How do we ensure that the matrix is positive definite in this
                     // case?
                     (*field)(i,j,e) /= finalBias;
                  }
                  else {
                     Util::error("Unrecognized operator in CalibratorKriging");
                  }
               }
            }
         }
      }
   }
   return true;
}