//---------------------------------------------- // get track info from URI //---------------------------------------------- TRef TLSpotify::TSession::FindTrack(const TString& URI) { THeapArray<char> UriString; URI.GetAnsi( UriString ); // find link sp_link *link = sp_link_create_from_string( UriString.GetData() ); if (!link) return TRef(); // get track from the link sp_track* pTrack = sp_link_as_track( link ); if ( !pTrack ) { TDebugString Debug_String; Debug_String << "URI " << URI << " is not a track"; TLDebug_Print( Debug_String ); return TRef(); } // create new track info TRef TrackRef = AddTrack( *pTrack ); const char* TrackName = sp_track_name( pTrack ); // The create function will have increased the reference count for us so release sp_link_release(link); return TrackRef; }
void CHemlockLooperModel::AddDailyResult(const StringVector& header, const StringVector& data) { enum TInputAllStage{ E_NAME, E_ID, E_YEAR, E_MONTH, E_DAY, E_JDAY, E_CUMUL_L1, E_CUMUL_L2, E_CUMUL_L3, E_CUMUL_L4, E_CUMUL_PUPA, E_CUMUL_ADULT, NB_INPUT, NB_COLUMN = NB_INPUT + 7 }; enum TInputPupasion{ P_NAME, P_ID, P_YEAR, P_MONTH, P_DAY, P_NB_DAYS, P_N, NB_INPUT_PUPAISON }; CTRef TRef(ToInt(data[E_YEAR]), ToSizeT(data[E_MONTH]) - 1, ToSizeT(data[E_DAY]) - 1); if (header.size() == NB_COLUMN) { std::vector<double> obs(NB_INPUT - E_CUMUL_L1); for (size_t i = 0; i < obs.size(); i++) obs[i] = ToDouble(data[E_CUMUL_L1 + i]); m_SAResult.push_back(CSAResult(TRef, obs)); } else if (header.size() == NB_INPUT_PUPAISON) { std::vector<double> obs(2); obs[PUP_NB_DAYS] = ToDouble(data[P_NB_DAYS]); obs[PUP_N] = ToDouble(data[P_N]); m_SAResult.push_back(CSAResult(TRef, obs)); } }
void CWhitePineWeevilModel::AddDailyResult(const StringVector& header, const StringVector& data) { enum TInputAllStage{ E_ID, E_YEAR, E_MONTH, E_DAY, E_DD, E_EGG, E_EGG_CUMUL, E_L1, E_CUMUL_L1, E_L2, E_CUMUL_L2, E_L3, E_CUMUL_L3, E_L4, E_CUMUL_L4, E_PUPA, E_CUMUL_PUPA, E_ADULT, E_CUMUL_ADULT, NB_INPUTS_STAGE }; enum TInputDD{ P_YEAR, P_MONTH, P_DAY, P_DD, NB_INPUTS_DD }; if (header.size() == NB_INPUTS_STAGE) { CTRef TRef(ToInt(data[E_YEAR]), ToSizeT(data[E_MONTH]) - 1, ToSizeT(data[E_DAY]) - 1); std::vector<double> obs(NB_STAGES,-999); for (size_t i = 0; i < 7 && E_EGG_CUMUL + 2 * i<data.size(); i++) obs[i] = !data[E_EGG_CUMUL + 2 * i].empty()?ToDouble(data[E_EGG_CUMUL + 2 * i]):-999; m_SAResult.push_back(CSAResult(TRef, obs)); } /*else if (header.size() == NB_INPUTS_DD) { //CTRef TRef(ToInt(data[P_YEAR]), ToSizeT(data[P_MONTH]) - 1, ToSizeT(data[P_DAY]) - 1); std::vector<double> obs(2); obs[PUP_NB_DAYS] = ToDouble(data[P_NB_DAYS]); obs[PUP_N] = ToDouble(data[P_N]); m_SAResult.push_back(CSAResult(TRef, obs)); }*/ }
void TLMenu::TMenuController::Update() { if(m_QueuedCommand.GetRef().IsValid()) { if(m_QueuedCommand.GetRef() == "Open") { // Open new menu OpenMenu( m_QueuedCommand.GetTypeRef() ); } else if(m_QueuedCommand.GetRef() == "close") { CloseMenu(); } // Invalidate m_QueuedCommand.SetRef(TRef()); m_QueuedCommand.SetTypeRef(TRef()); } }
//--------------------------------------------------------------------------- result_readTransportProperties readTransportProperties( const volVectorFieldHolder& U, const surfaceScalarFieldHolder& phi, singlePhaseTransportModelHolder& laminarTransport ) { laminarTransport = singlePhaseTransportModelHolder( U, phi ); // Thermal expansion coefficient [1/K] dimensionedScalar beta(laminarTransport->lookup("beta")); // Reference temperature [K] dimensionedScalar TRef(laminarTransport->lookup("TRef")); // Laminar Prandtl number dimensionedScalar Pr(laminarTransport->lookup("Pr")); // Turbulent Prandtl number dimensionedScalar Prt(laminarTransport->lookup("Prt")); return result_readTransportProperties( beta, TRef, Pr, Prt ); }
//simulated annaling void CClimaticModel::AddSAResult(const StringVector& header, const StringVector& data) { if (header.size() == 12) { std::vector<double> obs(4); CTRef TRef(ToShort(data[2]), ToShort(data[3]) - 1, ToShort(data[4]) - 1, ToShort(data[5])); for (int i = 0; i < 4; i++) obs[i] = ToDouble(data[i + 6]); ASSERT(obs.size() == 4); m_SAResult.push_back(CSAResult(TRef, obs)); } /*if( header.size()==26) { std::vector<double> obs(24); for(int h=0; h<24; h++) obs[h] = data[h+2].ToDouble(); ASSERT( obs.size() == 24 ); m_SAResult.push_back( CSAResult(CTRef(), obs ) ); } else if( header.size()==13) { std::vector<double> obs(7); CTRef TRef(data[2].ToShort(),data[3].ToShort()-1,data[4].ToShort()-1,data[5].ToShort()); for(int c=0; c<7; c++) obs[c] = data[c+6].ToDouble(); ASSERT( obs.size() == 7 ); m_SAResult.push_back( CSAResult(TRef, obs ) ); } else if( header.size()==12) { std::vector<double> obs(7); CTRef TRef(data[2].ToShort(),data[3].ToShort()-1,data[4].ToShort()-1); for(int c=0; c<7; c++) obs[c] = data[c+5].ToDouble(); ASSERT( obs.size() == 7 ); m_SAResult.push_back( CSAResult(TRef, obs ) ); } else if( header.size()==11) { std::vector<double> obs(7); CTRef TRef(data[2].ToShort(),data[3].ToShort()-1); for(int c=0; c<7; c++) obs[c] = data[c+4].ToDouble(); ASSERT( obs.size() == 7 ); m_SAResult.push_back( CSAResult(TRef, obs ) ); }*/ }
//	Stop playback: unload whatever track is loaded in the spotify player
//	and forget which track we thought was current.
void TLSpotify::TSession::UnloadTrack()
{
	sp_session_player_unload( m_pSession );

	//	invalidate our record of the playing track
	m_CurrentTrack = TRef();
}
//	setFieldsABL-style utility: reads the existing solution fields and the
//	setFieldsABLDict, building the inputs needed to (re)initialise an
//	atmospheric-boundary-layer case.
//	NOTE(review): this main() is truncated in this view - the remainder of
//	the function (and its closing brace) is not visible here.
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"
#   include "createMesh.H"

    // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

    // Read in the existing solution files.
    Info << "Reading field U" << endl;
    volVectorField U
    (
        IOobject
        (
            "U",
            runTime.timeName(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        ),
        mesh
    );

    Info << "Reading field T" << endl;
    volScalarField T
    (
        IOobject
        (
            "T",
            runTime.timeName(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        ),
        mesh
    );

    Info << "Reading field p_rgh" << endl;
    volScalarField p_rgh
    (
        IOobject
        (
            "p_rgh",
            runTime.timeName(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        ),
        mesh
    );

    // Compute the velocity flux at the faces.  This is needed
    // by the laminar transport model.
    Info<< "Creating/Calculating face flux field, phi..." << endl;
    surfaceScalarField phi
    (
        IOobject
        (
            "phi",
            runTime.timeName(),
            mesh,
            IOobject::READ_IF_PRESENT,
            IOobject::AUTO_WRITE
        ),
        linearInterpolate(U) & mesh.Sf()
    );

    // Read the gravitational acceleration.  This is needed
    // for calculating dp/dn on boundaries.
    Info << "Reading gravitational acceleration..." << endl;
    uniformDimensionedVectorField g
    (
        IOobject
        (
            "g",
            runTime.constant(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // Read the value of TRef in the transportProperties file.
    singlePhaseTransportModel laminarTransport(U, phi);
    dimensionedScalar TRef(laminarTransport.lookup("TRef"));

    // Use Tref and the T field to compute rhok, which is needed
    // to calculate dp/dn on boundaries.
    Info<< "Creating the kinematic density field, rhok..." << endl;
    volScalarField rhok
    (
        IOobject
        (
            "rhok",
            runTime.timeName(),
            mesh
        ),
        1.0 - (T - TRef)/TRef
    );

    // Get access to the input dictionary.
    IOdictionary setFieldsABLDict
    (
        IOobject
        (
            "setFieldsABLDict",
            runTime.time().system(),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // Read in the setFieldsABLDict entries.
    // How U and T are initialised, and which table interpolation to use.
    word velocityInitType(setFieldsABLDict.lookup("velocityInitType"));
    word temperatureInitType(setFieldsABLDict.lookup("temperatureInitType"));
    word tableInterpTypeU(setFieldsABLDict.lookupOrDefault<word>("tableInterpTypeU","linear"));
    word tableInterpTypeT(setFieldsABLDict.lookupOrDefault<word>("tableInterpTypeT","linear"));
    // Initial velocity perturbation amplitudes and periods.
    scalar deltaU(setFieldsABLDict.lookupOrDefault<scalar>("deltaU",1.0));
    scalar deltaV(setFieldsABLDict.lookupOrDefault<scalar>("deltaV",1.0));
    scalar zPeak(setFieldsABLDict.lookupOrDefault<scalar>("zPeak",0.03));
    scalar Uperiods(setFieldsABLDict.lookupOrDefault<scalar>("Uperiods",4));
    scalar Vperiods(setFieldsABLDict.lookupOrDefault<scalar>("Vperiods",4));
    // Domain extents [m].
    scalar xMin(setFieldsABLDict.lookupOrDefault<scalar>("xMin",0.0));
    scalar yMin(setFieldsABLDict.lookupOrDefault<scalar>("yMin",0.0));
    scalar zMin(setFieldsABLDict.lookupOrDefault<scalar>("zMin",0.0));
    scalar xMax(setFieldsABLDict.lookupOrDefault<scalar>("xMax",3000.0));
    scalar yMax(setFieldsABLDict.lookupOrDefault<scalar>("yMax",3000.0));
    scalar zMax(setFieldsABLDict.lookupOrDefault<scalar>("zMax",1000.0));
    scalar zRef(setFieldsABLDict.lookupOrDefault<scalar>("zRef",600.0));
    bool useWallDistZ(setFieldsABLDict.lookupOrDefault<bool>("useWallDistZ",false));
    bool scaleVelocityWithHeight(setFieldsABLDict.lookupOrDefault<bool>("scaleVelocityWithHeight",false));
    // Capping inversion, geostrophic wind and temperature profile parameters.
    scalar zInversion(setFieldsABLDict.lookupOrDefault<scalar>("zInversion",600.0));
    scalar Ug(setFieldsABLDict.lookupOrDefault<scalar>("Ug",15.0));
    scalar UgDir(setFieldsABLDict.lookupOrDefault<scalar>("UgDir",270.0));
    scalar Tbottom(setFieldsABLDict.lookupOrDefault<scalar>("Tbottom",300.0));
    scalar Ttop(setFieldsABLDict.lookupOrDefault<scalar>("Ttop",304.0));
    scalar dTdz(setFieldsABLDict.lookupOrDefault<scalar>("dTdz",0.003));
    scalar widthInversion(setFieldsABLDict.lookupOrDefault<scalar>("widthInversion",80.0));
    scalar TPrimeScale(setFieldsABLDict.lookupOrDefault<scalar>("TPrimeScale",0.0));
    // Surface roughness and von Karman constant.
    scalar z0(setFieldsABLDict.lookupOrDefault<scalar>("z0",0.016));
    scalar kappa(setFieldsABLDict.lookupOrDefault<scalar>("kappa",0.40));
    // z/U/V/T profile table and which parts of the fields to update.
    List<List<scalar> > profileTable(setFieldsABLDict.lookup("profileTable"));
    bool updateInternalFields(setFieldsABLDict.lookupOrDefault<bool>("updateInternalFields",true));
    bool updateBoundaryFields(setFieldsABLDict.lookupOrDefault<bool>("updateBoundaryFields",true));

    // Change the table profiles from scalar lists to scalar fields
    // (columns: 0 = z, 1 = U, 2 = V, 3 = T).
    scalarField zProfile(profileTable.size(),0.0);
    scalarField UProfile(profileTable.size(),0.0);
    scalarField VProfile(profileTable.size(),0.0);
    scalarField TProfile(profileTable.size(),0.0);
    forAll(zProfile,i)
    {
        zProfile[i] = profileTable[i][0];
        UProfile[i] = profileTable[i][1];
        VProfile[i] = profileTable[i][2];
        TProfile[i] = profileTable[i][3];
    }
//	Post-processing utility: computes specific enthalpies of moist air from
//	the T (temperature) and q (humidity) fields, using constants read from
//	the transportProperties dictionary.  Writes hda/hdv/hmoist, or only
//	prints their min/max when -noWrite was given.
void Foam::calc(const argList& args, const Time& runTime, const fvMesh& mesh)
{
    bool writeResults = !args.optionFound("noWrite");

    // Headers for the fields we need; existence is checked below.
    IOobject Theader
    (
        "T",
        runTime.timeName(),
        mesh,
        IOobject::MUST_READ
    );

    IOobject qheader
    (
        "q",
        runTime.timeName(),
        mesh,
        IOobject::MUST_READ
    );

    IOdictionary transportProperties
    (
        IOobject
        (
            "transportProperties",
            runTime.constant(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // Specific heats of dry air and of vapour, and latent heat.
    dimensionedScalar Cpa
    (
        transportProperties.lookup("Cpa")
    );

    dimensionedScalar Cpv
    (
        transportProperties.lookup("Cpv")
    );

    dimensionedScalar lambda
    (
        transportProperties.lookup("lambda")
    );

    // Fluid density
    dimensionedScalar rho
    (
        transportProperties.lookup("rho")
    );

    // Reference temperature and humidity for the enthalpy datum.
    dimensionedScalar TRef
    (
        transportProperties.lookup("TRef")
    );

    dimensionedScalar qRef
    (
        transportProperties.lookup("qRef")
    );

    if (qheader.headerOk() && Theader.headerOk())
    {
        Info<< " Reading q" << endl;
        volScalarField q(qheader, mesh);

        Info<< " Reading T" << endl;
        volScalarField T(Theader, mesh);

        // specific enthalpy of dry air hda - ASHRAE 1.8
        volScalarField hda("hda", rho*Cpa*T-rho*Cpa*TRef);

        // specific enthalpy of dry vapor
        volScalarField hdv("hdv", rho*q*(lambda + Cpv*T) - rho*qRef*(lambda + Cpv*TRef));

        // specific enthalpy for moist air hmoist - ASHRAE 1.8
        volScalarField hmoist("hmoist", hda + hdv);

        if (writeResults)
        {
            hda.write();
            hdv.write();
            hmoist.write();
        }
        else
        {
            Info<< " Min hda : " << min(hda).value() << " [J/m3]"
                << "\n Max hda : "<< max(hda).value() << " [J/m3]" << endl;
            Info<< " Min hdv : " << min(hdv).value() << " [J/m3]"
                << "\n Max hdv : "<< max(hdv).value() << " [J/m3]" << endl;
            Info<< " Min hmoist : " << min(hmoist).value() << " [J/m3]"
                << "\n Max hmoist : "<< max(hmoist).value() << " [J/m3]" << endl;
        }

        // NOTE: a commented-out block previously printed per-patch boundary
        // integrals of hda (and conductive/turbulent/total heat fluxes); it
        // was disabled and has been condensed to this note.
    }
    else
    {
        Info<< " No q or No T" << endl;
    }

    Info<< "\nEnd\n" << endl;
}
//	Import a collada <geometry> tag: reads the source/vertices data arrays,
//	then builds a TLAsset::TMesh by resolving each <triangles> block's
//	index tuples into shared vertices.  Returns FALSE on any malformed input.
Bool TLCollada::TGeometry::Import(TXmlTag& Tag,TPtrArray<TLCollada::TMaterial>& Materials)
{
	//	import names
	m_pGeometryName = Tag.GetProperty("name");	//	optional

	//	require ID
	const TString* pGeometryID = Tag.GetProperty("id");
	if ( !pGeometryID )
	{
		TLDebug_Break("<geometry> missing ID");
		return FALSE;
	}

	//	copy ID and prefix with # to make it easier to match
	m_GeometryID = "#";
	m_GeometryID.Append( *pGeometryID );

	//	get the "mesh" child
	TXmlTag* pMeshTag = Tag.GetChild("mesh");
	if ( !pMeshTag )
	{
		TLDebug_Break("<mesh> tag expected");
		return FALSE;
	}

	//	import the geometry data first
	TLDebug_Print("Collada: Importing geometry source data...");
	TPtrArray<TXmlTag> DataTags;
	pMeshTag->GetChildren("source", DataTags );
	pMeshTag->GetChildren("vertices", DataTags );
	for ( u32 i=0; i<DataTags.GetSize(); i++ )
	{
		TXmlTag* pDataTag = DataTags[i];
		TPtr<TGeometryData> pGeometryData = new TGeometryData();
		if ( !pGeometryData->Import( *pDataTag ) )
		{
			TLDebug_Break("failed to import geometry data");
			return FALSE;
		}

		//	add to list
		m_GeometryData.Add( pGeometryData );
	}

	//	create a mesh to import the geometry into
	//	gr: fix - "name" is optional so m_pGeometryName may be NULL; fall back to
	//	the (required) id instead of dereferencing a NULL pointer
	const TString& MeshName = m_pGeometryName ? *m_pGeometryName : *pGeometryID;
	m_pMesh = new TLAsset::TMesh( TRef(MeshName) );

	//	import triangles into the mesh
	TLDebug_Print("Collada: Importing geometry triangles...");
	TPtrArray<TXmlTag> TriangleTags;
	pMeshTag->GetChildren("triangles", TriangleTags );
	for ( u32 t=0; t<TriangleTags.GetSize(); t++ )
	{
		TXmlTag* pTriangleTag = TriangleTags[t];

		//	get material
		//	NOTE(review): collada attributes are conventionally lowercase ("material") -
		//	presumably GetProperty is case-insensitive; confirm
		const TString* pMaterialID = pTriangleTag->GetProperty("Material");
		TPtr<TLCollada::TMaterial>* ppMaterial = pMaterialID ? Materials.Find( *pMaterialID ) : NULL;
		TLCollada::TMaterial* pMaterial = ppMaterial ? (*ppMaterial).GetObjectPointer() : NULL;

		//	get all the different mappings of triangle data -> vertex data
		u32 TriangleDataStep = 1;	//	step is the biggest offset from the inputs, + 1

		//	gr: store relavant inputs - dont need all of them (maybe for future expansion)
		TPtr<TLCollada::TriangleInput> pPositionInput;
		//TPtr<TLCollada::TriangleInput> pNormalInput;
		TPtr<TLCollada::TriangleInput> pTexCoordInput;

		TLDebug_Print("Collada: Importing geometry triangle inputs...");
		TPtrArray<TXmlTag> InputTags;
		pTriangleTag->GetChildren("input",InputTags);
		for ( u32 i=0; i<InputTags.GetSize(); i++ )
		{
			TPtr<TLCollada::TriangleInput> pInput = new TLCollada::TriangleInput();
			if ( !pInput->Import( *InputTags[i], m_GeometryData ) )
			{
				TLDebug_Break("failed to import <triangles> input tag");
				return FALSE;
			}

			//	gr: make sure we take ALL offsets into account to get the correct stride of the triangle data
			if ( pInput->m_Offset+1 > TriangleDataStep )
				TriangleDataStep = pInput->m_Offset+1;

			//	store input
			if ( pInput->m_Semantic == "Vertex" )
				pPositionInput = pInput;
			//else if ( pInput->m_Semantic == "Normal" )
			//	pNormalInput = pInput;
			else if ( pInput->m_Semantic == "TexCoord" )
				pTexCoordInput = pInput;
			//	else unwanted input
		}

		//	pull out triangle data
		TXmlTag* pDataTag = pTriangleTag->GetChild("p");
		if ( !pDataTag )
		{
			TLDebug_Break("<triangles> tag is missing data tag <p>");
			return FALSE;
		}

		//	get vertex's colour
		TColour* pVertexColour = pMaterial ? &pMaterial->m_Colour : NULL;

		TLDebug_Print("Collada: Importing geometry triangle's triangles...");
		u32 DataIndex = 0;				//	which bit of data are we on (ie. index in the array)
		u32 TriangleComponentIndex = 0;	//	which of the 3 parts of the triangle are we on

		//	keep reading through all the data and get each index of data
		s32 InputIndex = -1;
		u32 CharIndex = 0;
		TLCollada::VertexMap CurrentTriangleVertexMap;
		TLAsset::TMesh::Triangle CurrentTriangle;
		while ( TLString::ReadNextInteger( pDataTag->GetDataString(), CharIndex, InputIndex ) )
		{
			u32 Offset = DataIndex % TriangleDataStep;
			DataIndex++;

			//	set the vertex index for each kind of input at this offset
			if ( pPositionInput && pPositionInput->m_Offset == Offset )
				CurrentTriangleVertexMap.m_Position = InputIndex;

			if ( pTexCoordInput && pTexCoordInput->m_Offset == Offset )
				CurrentTriangleVertexMap.m_TexCoord = InputIndex;

			//	more components to read of the current vertex map
			if ( Offset != TriangleDataStep-1 )
				continue;

			//	all parts of the vertex map have been read, get vertex
			//	see if the vertex map configuration already exists (which means vertex has already been created)
			TLCollada::VertexMap* pExistingVertexMap = m_VertexMap.Find( CurrentTriangleVertexMap );

			//	doesnt already exist, create vertex and entry
			if ( !pExistingVertexMap )
			{
				//	get vertex's data
				const float3* pPosition = pPositionInput ? pPositionInput->m_pData->GetData<float3>( CurrentTriangleVertexMap.m_Position ) : NULL;
				const float2* pTexCoord = pTexCoordInput ? pTexCoordInput->m_pData->GetData<float2>( CurrentTriangleVertexMap.m_TexCoord ) : NULL;
				if ( !pPosition )
				{
					TLDebug_Break("Missing vertex position");
					return FALSE;
				}

				//	create vertex
				CurrentTriangleVertexMap.m_VertexIndex = m_pMesh->AddVertex( *pPosition, pVertexColour, pTexCoord );

				//	add to list
				s32 MapIndex = m_VertexMap.Add( CurrentTriangleVertexMap );
				pExistingVertexMap = &m_VertexMap[MapIndex];
			}

			//	add to triangle's data
			if ( pExistingVertexMap->m_VertexIndex == -1 )
			{
				TLDebug_Break("Missing vertex index from mapping");
				return FALSE;
			}

			//	update triangle
			CurrentTriangle[TriangleComponentIndex] = pExistingVertexMap->m_VertexIndex;

			//	more of the triangle to fetch?
			if ( TriangleComponentIndex < 2 )
			{
				TriangleComponentIndex++;
				continue;
			}

			//	triangle complete! add to mesh
			m_pMesh->GetTriangles().Add( CurrentTriangle );
			m_pMesh->OnPrimitivesChanged();

			//	reset temporary triangle for debugging
			CurrentTriangle.Set( 0xffff, 0xffff, 0xffff );
			TriangleComponentIndex = 0;
		}

		//	clean map for next set of triangles
		m_VertexMap.Empty();
	}

	return TRUE;
}