// Construct from components
eulerianScalarField::eulerianScalarField
(
    const dictionary&   dict,
    cfdemCloud&         sm,
    word                modelType,
    int                 modelID
)
:
    dict_(dict),
    particleCloud_(sm),
    fieldName_(modelType),
    cpVolumetricFieldName_(dict_.lookupOrDefault<word>("cpVolumetricFieldName", "na")),
    cpVolumetric_(dict_.lookupOrDefault<scalar>("cpVolumetric", 0.0)),
    updateMixtureProperties_(dict_.lookupOrDefault<bool>("updateMixtureProperties", false)),
    rho_(dict_.lookupOrDefault<scalar>("rho"+fieldName_, -1)),
    rhoCarrier_(dict_.lookupOrDefault<scalar>("rhoCarrier", -1)),
    cp_(dict_.lookupOrDefault<scalar>("cp"+fieldName_, -1)),
    cpCarrier_(dict_.lookupOrDefault<scalar>("cpCarrier", -1)),
    m_
    (   IOobject
        (
            fieldName_,
            sm.mesh().time().timeName(),
            sm.mesh(),
            IOobject::MUST_READ,
            IOobject::AUTO_WRITE
        ),
        sm.mesh()
    ),
    mSource_
    (   IOobject
        (
            fieldName_+"Source",
            sm.mesh().time().timeName(),
            sm.mesh(),
            IOobject::MUST_READ,
            IOobject::AUTO_WRITE
        ),
        sm.mesh()
    ),
    mSourceKImpl_
    (   IOobject
        (
            fieldName_+"SourceKImpl",
            sm.mesh().time().timeName(),
            sm.mesh(),
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        0.0*mSource_ /( m_ + dimensionedScalar("dummy", m_.dimensions(), 1e-32) ) // initialise with zero
    ),
    fieldType_("undefined")
    #ifndef versionExt32
    ,fvOptions_(sm.mesh())
    #endif
{


    if ( m_.dimensions() == dimensionSet(0, 0, 0, 1, 0) )
    {
        speciesID_ = -1;
        fieldType_ = "temperature";
        Info << "eulerianScalarField:: found a Temperature field! " << endl;
    }
    else
    {
        speciesID_ = modelID;
        fieldType_ = "species";
        Info << "eulerianScalarField:: found a species field, will assign speciesID: " 
             << speciesID_
             << endl;
    }

    #ifndef versionExt32
    fvOptions_.reset(dict.subDict("fvOptions"+fieldName_));
    #endif

    if( (cpVolumetricFieldName_=="na"||!updateMixtureProperties_) && cpVolumetric_<=0.0)
        FatalError << "You did not specify a cpVolumetricFieldName (or you do not update the mixture properties) and cpVolumetric is zero or negative. Either provide the field name or set cpVolumetric to a reasonable value. \n"
                   << abort(FatalError);

    if(speciesID_>-1 && updateMixtureProperties_ && (rho_<=0 || cp_<=0) )
        FatalError << "You want to update the phase properties, but density and cp of the eulerianScalarField with name '"
                   << fieldName_
                   << "' are not specified or are zero. \n"
                   << abort(FatalError);

    if(speciesID_>-1 && updateMixtureProperties_ && (rhoCarrier_<=0 || cpCarrier_<=0) )
        FatalError << "You want to update the phase properties, but density and cp of the carrier phase are not specified or are zero. \n"
                   << abort(FatalError);
                   
    // Report options for cp
    if(fieldType_=="temperature")
    {
        if(cpVolumetric_!=0.0 && cpVolumetricFieldName_!="na")
            FatalError << "eulerianScalarField:: You have specified both 'cpVolumetric' and 'cpVolumetricFieldName'. Please remove one of these two inputs to avoid ambiguity. \n"
                       << abort(FatalError);

        if(cpVolumetricFieldName_=="na" || !updateMixtureProperties_) // also used if mixture properties are not updated
            Info << "eulerianScalarField:: will use the following FIXED VOLUMETRIC HEAT CAPACITY: " 
                 << cpVolumetric_ << " [J/K/m³]" << endl;
        else
            Info << "eulerianScalarField:: will use a SPATIALLY-VARIABLE VOLUMETRIC HEAT CAPACITY field with name: " << cpVolumetricFieldName_ << endl;
    }
}
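A minimal standalone sketch, not taken from the code above, of the dictionary entries this constructor looks up; the field name "C", the entry values, and the toy main() are assumptions for illustration only.

#include "dictionary.H"
#include "IStringStream.H"

using namespace Foam;

int main()
{
    // Hypothetical entries matching the lookupOrDefault calls above
    IStringStream entries
    (
        "cpVolumetric            1200;"     // fixed volumetric heat capacity [J/K/m^3]
        "updateMixtureProperties true;"
        "rhoC                    2000;"     // density of the species field "C"
        "cpC                     840;"      // heat capacity of the species field "C"
        "rhoCarrier              1.2;"
        "cpCarrier               1007;"
    );
    dictionary scalarFieldDict(entries);

    // The same kind of lookups the constructor performs
    scalar cpVol = scalarFieldDict.lookupOrDefault<scalar>("cpVolumetric", 0.0);
    bool update = scalarFieldDict.lookupOrDefault<bool>("updateMixtureProperties", false);

    // With the entries above: cpVol == 1200, update == true
    return (cpVol > 0 && update) ? 0 : 1;
}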
Example #2
Foam::specie::specie(const dictionary& dict)
:
//    name_(dict.dictName()),
    nMoles_(readScalar(dict.subDict("specie").lookup("nMoles"))),
    molWeight_(readScalar(dict.subDict("specie").lookup("molWeight")))
{}
// Construct from components
polyVelocityBounded::polyVelocityBounded
(
    Time& t,
    polyMoleculeCloud& molCloud,
    const dictionary& dict
)
:
    polyStateController(t, molCloud, dict),
    propsDict_(dict.subDict(typeName + "Properties")),
    molIds_(),
    bb_(),
//     avMass_(0.0),
    lambda_(readScalar(propsDict_.lookup("lambda"))),
    deltaT_(t.deltaT().value()), 
    accumulatedTime_(0.0),
    startTime_(0.0),
    endTime_(GREAT),
    X_(false),
    Y_(false),
    Z_(false)
{
    writeInTimeDir_ = false;
    writeInCase_ = false;

//     singleValueController() = true;

    setBoundBox();
    
    molIds_.clear();

    selectIds ids
    (
        molCloud_.cP(),
        propsDict_
    );

    molIds_ = ids.molIds();

    velocity_ = propsDict_.lookup("velocity");

    if (propsDict_.found("componentControl"))
    {
        componentControl_ = Switch(propsDict_.lookup("componentControl"));

        if(componentControl_)
        {
            if (propsDict_.found("X"))
            {
                X_ = Switch(propsDict_.lookup("X"));
            }
    
            if (propsDict_.found("Y"))
            {
                Y_ = Switch(propsDict_.lookup("Y"));
            }
    
            if (propsDict_.found("Z"))
            {
                Z_ = Switch(propsDict_.lookup("Z"));
            }

            if(!X_ && !Y_ && !Z_)
            {
                FatalErrorIn("polyVelocityBounded::polyVelocityBounded()")
                    << "At least one component (X, Y, Z) should be chosen " << nl << "in: "
                    << time_.time().system()/"controllersDict"
                    << exit(FatalError);
            }
        }
    }
    
    if (propsDict_.found("startAtTime"))
    {    
        startTime_ = readScalar(propsDict_.lookup("startAtTime"));
    }
    
    if (propsDict_.found("endAtTime"))
    {
        endTime_ = readScalar(propsDict_.lookup("endAtTime"));
    }    
}
Foam::searchableSurfaces::searchableSurfaces
(
    const IOobject& io,
    const dictionary& topDict
)
    :
    PtrList<searchableSurface>(topDict.size()),
    names_(topDict.size()),
    regionNames_(topDict.size()),
    allSurfaces_(identity(topDict.size()))
{
    label surfI = 0;
    forAllConstIter(dictionary, topDict, iter)
    {
        const word& key = iter().keyword();

        if (!topDict.isDict(key))
        {
            FatalErrorIn
            (
                "searchableSurfaces::searchableSurfaces"
                "( const IOobject&, const dictionary&)"
            )   << "Found non-dictionary entry " << iter()
                << " in top-level dictionary " << topDict
                << exit(FatalError);
        }

        const dictionary& dict = topDict.subDict(key);

        names_[surfI] = key;
        dict.readIfPresent("name", names_[surfI]);

        // Make IOobject with correct name
        autoPtr<IOobject> namedIO(io.clone());
        // Note: we would like to e.g. register triSurface 'sphere.stl' as
        // 'sphere'. Unfortunately there is no support for reading an object
        // from a location different from its object name. Maybe have an
        // stlTriSurfaceMesh which appends .stl when reading/writing?
        namedIO().rename(key);  // names_[surfI]

        // Create and hook surface
        set
        (
            surfI,
            searchableSurface::New
            (
                dict.lookup("type"),
                namedIO(),
                dict
            )
        );
        const searchableSurface& s = operator[](surfI);

        // Construct default region names by prepending surface name
        // to region name.
        const wordList& localNames = s.regions();

        wordList& rNames = regionNames_[surfI];
        rNames.setSize(localNames.size());

        forAll(localNames, regionI)
        {
            rNames[regionI] = names_[surfI] + '_' + localNames[regionI];
        }

        // See if dictionary provides any global region names.
        if (dict.found("regions"))
        {
            const dictionary& regionsDict = dict.subDict("regions");

            forAllConstIter(dictionary, regionsDict, iter)
            {
                const word& key = iter().keyword();

                if (regionsDict.isDict(key))
                {
                    // Get the dictionary for region iter.keyword()
                    const dictionary& regionDict = regionsDict.subDict(key);

                    label index = findIndex(localNames, key);

                    if (index == -1)
                    {
                        FatalErrorIn
                        (
                            "searchableSurfaces::searchableSurfaces"
                            "( const IOobject&, const dictionary&)"
                        )   << "Unknown region name " << key
                            << " for surface " << s.name() << endl
                            << "Valid region names are " << localNames
                            << exit(FatalError);
                    }

                    rNames[index] = word(regionDict.lookup("name"));
                }
            }
        }

        surfI++;
    }
Example #5
Foam::C10H22::C10H22(const dictionary& dict)
:
    liquidProperties(dict),
    rho_(dict.subDict("rho")),
    pv_(dict.subDict("pv")),
    hl_(dict.subDict("hl")),
    Cp_(dict.subDict("Cp")),
    h_(dict.subDict("h")),
    Cpg_(dict.subDict("Cpg")),
    B_(dict.subDict("B")),
    mu_(dict.subDict("mu")),
    mug_(dict.subDict("mug")),
    K_(dict.subDict("K")),
    Kg_(dict.subDict("Kg")),
    sigma_(dict.subDict("sigma")),
    D_(dict.subDict("D"))
{}
Example #6
Foam::Leonov::Leonov
(
    const word& name,
    const volScalarField& alpha,
    const volVectorField& U,
    const surfaceScalarField& phi,
    const dictionary& dict
)
:
    viscoelasticLaw(name, alpha, U, phi),
    sigma_
    (
        IOobject
        (
            "sigma" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::MUST_READ,
            IOobject::AUTO_WRITE
        ),
        U.mesh()
    ),
    tau_
    (
        IOobject
        (
            "tau" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        U.mesh(),
        dimensionedSymmTensor
        (
            "zero",
            dimensionSet(1, -1, -2, 0, 0, 0, 0),
            symmTensor::zero
        )
    ),
    I_
    (
        dimensionedSymmTensor
        (
            "I",
            dimensionSet(0, 0, 0, 0, 0, 0, 0),
            symmTensor
            (
                1, 0, 0,
                   1, 0,
                      1
            )
        )
    ),
    rho1_(dict.subDict("phase1").lookup("rho")),
    rho2_(dict.subDict("phase2").lookup("rho")),
    etaS1_(dict.subDict("phase1").lookup("etaS")),
    etaS2_(dict.subDict("phase2").lookup("etaS")),
    etaP1_(dict.subDict("phase1").lookup("etaP")),
    etaP2_(dict.subDict("phase2").lookup("etaP")),
    lambda1_(dict.subDict("phase1").lookup("lambda")),
    lambda2_(dict.subDict("phase2").lookup("lambda"))
{}
bool Foam::combustionModel::read(const dictionary& combustionProps)
{
    coeffs_ = combustionProps.subDict(type() + "Coeffs");

    return true;
}
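A derived model would typically chain to this base read() before pulling its own coefficients. The sketch below is hypothetical: the class name myCombustionModel, the keyword "C", the member C_, and direct access to coeffs_ are all assumptions.

// Hypothetical derived model: refresh the base coefficients, then read own entries
bool Foam::myCombustionModel::read(const dictionary& combustionProps)
{
    if (combustionModel::read(combustionProps))
    {
        // coeffs_ now holds the refreshed "<type>Coeffs" sub-dictionary
        coeffs_.lookup("C") >> C_;
        return true;
    }

    return false;
}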
Example #8
Foam::phaseModel::phaseModel
(
    const fvMesh& mesh,
    const dictionary& transportProperties,
    const word& phaseName
)
:
    dict_
    (
        transportProperties.subDict("phase" + phaseName)
    ),
    name_(phaseName),
    d_
    (
        dict_.lookup("d")
    ),
    nu_
    (
        dict_.lookup("nu")
    ),
    rho_
    (
        dict_.lookup("rho")
    ),
    U_
    (
        IOobject
        (
            "U" + phaseName,
            mesh.time().timeName(),
            mesh,
            IOobject::MUST_READ,
            IOobject::AUTO_WRITE
        ),
        mesh
    )
{
    const word phiName = "phi" + phaseName;

    IOobject phiHeader
    (
        phiName,
        mesh.time().timeName(),
        mesh,
        IOobject::NO_READ
    );

    if (phiHeader.headerOk())
    {
        Info<< "Reading face flux field " << phiName << endl;

        phiPtr_.reset
        (
            new surfaceScalarField
            (
                IOobject
                (
                    phiName,
                    mesh.time().timeName(),
                    mesh,
                    IOobject::MUST_READ,
                    IOobject::AUTO_WRITE
                ),
                mesh
            )
        );
    }
    else
    {
        Info<< "Calculating face flux field " << phiName << endl;

        wordList phiTypes
        (
            U_.boundaryField().size(),
            calculatedFvPatchScalarField::typeName
        );

        for (label i=0; i<U_.boundaryField().size(); i++)
        {
            if (isA<fixedValueFvPatchVectorField>(U_.boundaryField()[i]))
            {
                phiTypes[i] = fixedValueFvPatchScalarField::typeName;
            }
        }

        phiPtr_.reset
        (
            new surfaceScalarField
            (
                IOobject
                (
                    phiName,
                    mesh.time().timeName(),
                    mesh,
                    IOobject::NO_READ,
                    IOobject::AUTO_WRITE
                ),
                fvc::interpolate(U_) & mesh.Sf(),
                phiTypes
            )
        );
    }
}
Example #9
// from components
Feta_PTT::Feta_PTT
(
    const word& name,
    const volScalarField& alpha,
    const volVectorField& U,
    const surfaceScalarField& phi,
    const dictionary& dict
)
:
    viscoelasticLaw(name, alpha, U, phi),
    tau_
    (
        IOobject
        (
            "tau" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::MUST_READ,
            IOobject::AUTO_WRITE
        ),
        U.mesh()
    ),
    etaS1_(dict.subDict("phase1").lookup("etaS")),
    etaS2_(dict.subDict("phase2").lookup("etaS")),
    etaP1_(dict.subDict("phase1").lookup("etaP")),
    etaP2_(dict.subDict("phase2").lookup("etaP")),
    lambda1_(dict.subDict("phase1").lookup("lambda")),
    lambda2_(dict.subDict("phase2").lookup("lambda")),
    epsilon1_(dict.subDict("phase1").lookup("epsilon")),
    epsilon2_(dict.subDict("phase2").lookup("epsilon")),
    zeta1_(dict.subDict("phase1").lookup("zeta")),
    zeta2_(dict.subDict("phase2").lookup("zeta")),
    A1_(dict.subDict("phase1").lookup("A")),
    A2_(dict.subDict("phase2").lookup("A")),
    a1_(dict.subDict("phase1").lookup("a")),
    a2_(dict.subDict("phase2").lookup("a")),
    b1_(dict.subDict("phase1").lookup("b")),
    b2_(dict.subDict("phase2").lookup("b")),
    alpha1f_
    (
        IOobject
        (
            "alpha1f" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::NO_READ,
            IOobject::NO_WRITE
        ),
        min(max(alpha, scalar(0)), scalar(1))
    ),
    etaPEff_
    (
        IOobject
        (
            "etaPEff" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
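        // Volume-fraction (alpha1f_) weighted blend of the two phases'
        // effective polymer viscosities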
        (
            etaP1_
           /Foam::pow
            (
                scalar(1)
              + A1_*Foam::pow
                (
                    0.5*(Foam::sqr(tr(tau_)) - tr(tau_ & tau_))
                   *Foam::sqr(lambda1_)/Foam::sqr(etaP1_),
                    a1_
                ),
                b1_
            )
        )*alpha1f_
      + (
            etaP2_
           /Foam::pow
            (
                scalar(1)
              + A2_*Foam::pow
                (
                    0.5*(Foam::sqr(tr(tau_)) - tr(tau_ & tau_))
                   *Foam::sqr(lambda2_)/Foam::sqr(etaP2_),
                    a2_
                ),
                b2_
            )
        )*(scalar(1) - alpha1f_)
    ),
    lambdaEff_
    (
        IOobject
        (
            "lambdaEff" + name,
            U.time().timeName(),
            U.mesh(),
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
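        // Volume-fraction weighted blend of the two phases' effective
        // relaxation times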
        (lambda1_/(scalar(1) + epsilon1_*lambda1_*tr(tau_)/etaP1_))*alpha1f_
      + (lambda2_/(scalar(1) + epsilon2_*lambda2_*tr(tau_)/etaP2_))*(scalar(1) - alpha1f_)
    )
{}
Foam::radiation::greyMeanAbsorptionEmission::greyMeanAbsorptionEmission
(
    const dictionary& dict,
    const fvMesh& mesh
)
:
    absorptionEmissionModel(dict, mesh),
    coeffsDict_((dict.subDict(typeName + "Coeffs"))),
    speciesNames_(0),
    specieIndex_(0),
    lookUpTable_
    (
        fileName(coeffsDict_.lookup("lookUpTableFileName")),
        mesh.time().constant(),
        mesh
    ),
    thermo_(mesh.lookupObject<basicThermo>("thermophysicalProperties")),
    EhrrCoeff_(readScalar(coeffsDict_.lookup("EhrrCoeff"))),
    Yj_(nSpecies_)
{
    label nFunc = 0;
    const dictionary& functionDicts = dict.subDict(typeName + "Coeffs");

    forAllConstIter(dictionary, functionDicts, iter)
    {
        // safety:
        if (!iter().isDict())
        {
            continue;
        }
        const word& key = iter().keyword();
        speciesNames_.insert(key, nFunc);
        const dictionary& dict = iter().dict();
        coeffs_[nFunc].initialise(dict);
        nFunc++;
    }

    // Check that all the species in the dictionary are present in the
    // look-up table and save the corresponding look-up table indices

    label j = 0;
    forAllConstIter(HashTable<label>, speciesNames_, iter)
    {
        if (mesh.foundObject<volScalarField>("ft"))
        {
            if (lookUpTable_.found(iter.key()))
            {
                label index = lookUpTable_.findFieldIndex(iter.key());

                Info<< "specie: " << iter.key() << " found on look-up table "
                    << " with index: " << index << endl;

                specieIndex_[iter()] = index;
            }
            else if (mesh.foundObject<volScalarField>(iter.key()))
            {
                volScalarField& Y =
                    const_cast<volScalarField&>
                    (
                        mesh.lookupObject<volScalarField>(iter.key())
                    );
                Yj_.set(j, &Y);
                specieIndex_[iter()] = 0;
                j++;
                Info<< "specie: " << iter.key() << " is being solved" << endl;
            }
            else
            {
                FatalErrorIn
                (
                    "Foam::radiation::greyMeanAbsorptionEmission(const "
                    "dictionary& dict, const fvMesh& mesh)"
                )   << "specie: " << iter.key()
                    << " is neither in look-up table: "
                    << lookUpTable_.tableName()
                    << " nor is being solved" << nl
                    << exit(FatalError);
            }
        }
        else
        {
            FatalErrorIn
            (
                "Foam::radiation::greyMeanAbsorptionEmission(const "
                "dictionary& dict, const fvMesh& mesh)"
            )   << "specie ft is not present " << nl
                << exit(FatalError);

        }
    }
}
void Foam::multiSolver::buildDictionary
(
    dictionary& outputDict,
    const dictionary& inputDict,
    const word& solverDomainName
)
{
    outputDict.remove("sameAs");
    outputDict.remove("multiLoad");
    wordList alreadyMerged(0);
    wordList mergeMe(0);
    if (inputDict.found("default"))
    {
        if (outputDict.found("multiLoad"))
        {
            mergeMe = wordList(outputDict.lookup("multiLoad"));
            outputDict.remove("multiLoad");
        }
        if (outputDict.found("sameAs"))
        {
            label newIndex(mergeMe.size());
            mergeMe.setSize(newIndex + 1);
            mergeMe[newIndex] = word(outputDict.lookup("sameAs"));
            outputDict.remove("sameAs");
        }
    }
    
    // We load solverDomain last, but we need its sameAs / multiLoad now
    if (inputDict.found(solverDomainName))
    {
        const dictionary& solverDict(inputDict.subDict(solverDomainName));
        if (solverDict.found("multiLoad"))
        {
            mergeMe.append(wordList(solverDict.lookup("multiLoad")));
        }
        if (solverDict.found("sameAs"))
        {
            label newIndex(mergeMe.size());
            mergeMe.setSize(newIndex + 1);
            mergeMe[newIndex] = word(solverDict.lookup("sameAs"));
        }
    }
    
    label mergeI(-1);
    while ((mergeI + 1) < mergeMe.size())
    {
        mergeI++;
        bool skipMe(false);
        forAll(alreadyMerged, alreadyI)
        {
            if (mergeMe[mergeI] == alreadyMerged[alreadyI])
            {
                skipMe = true;
                break;
            }
        }
        if (skipMe)
        {
            break;
        }
        outputDict.merge(inputDict.subDict(mergeMe[mergeI]));
        
        // Add to alreadyMerged
        label mergedIndex(alreadyMerged.size());
        alreadyMerged.setSize(mergedIndex + 1);
        alreadyMerged[mergedIndex] = mergeMe[mergeI];
        
        // Recursive search
        if (outputDict.found("multiLoad"))
        {
            mergeMe.append(wordList(outputDict.lookup("multiLoad")));
            outputDict.remove("multiLoad");
        }
        if (outputDict.found("sameAs"))
        {
            label newMergeMeIndex(mergeMe.size());
            mergeMe.setSize(newMergeMeIndex + 1);
            mergeMe[newMergeMeIndex] = word(outputDict.lookup("sameAs"));
            outputDict.remove("sameAs");
        }
    }
    
    // Merge the solverDomain dictionary, even if it has already been merged
    if (inputDict.found(solverDomainName))
    {
        outputDict.merge(inputDict.subDict(solverDomainName));

        // These have already been handled
        outputDict.remove("sameAs");
        outputDict.remove("multiLoad");
    }
}
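The sameAs/multiLoad handling above rests on dictionary::merge, which adds entries and overwrites duplicates; below is a minimal standalone sketch with assumed entry names and values.

#include "dictionary.H"
#include "IStringStream.H"

using namespace Foam;

int main()
{
    IStringStream defaultIs("solver PBiCG; tolerance 1e-06;");
    IStringStream domainIs("tolerance 1e-08; relTol 0;");

    dictionary outputDict(defaultIs);

    // Entries from the merged dictionary overwrite duplicates and add new
    // keys, leaving: solver PBiCG; tolerance 1e-08; relTol 0;
    outputDict.merge(dictionary(domainIs));

    return 0;
}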
pythonInterpreterWrapper::pythonInterpreterWrapper
(
    const objectRegistry& obr,
    const dictionary& dict,
    bool forceToNamespace
):
    generalInterpreterWrapperCRTP<pythonInterpreterWrapper>(
        obr,
        dict,
        forceToNamespace,
        "python"
    ),
    pythonState_(NULL),
    useNumpy_(dict.lookupOrDefault<bool>("useNumpy",true)),
    useIPython_(dict.lookupOrDefault<bool>("useIPython",true)),
    triedIPython_(false),
    oldIPython_(false)
{
    if(generalInterpreterWrapper::debug>debug) {
        debug=1;
    }

    Pbug << "Starting constructor" << endl;

    syncParallel();

#ifdef FOAM_HAS_LOCAL_DEBUGSWITCHES
    debug=dict.lookupOrDefault<label>("debugPythonWrapper",debug());
#else
    debug=dict.lookupOrDefault<label>("debugPythonWrapper",debug);
#endif

    if(!dict.found("useNumpy")) {
        WarningIn("pythonInterpreterWrapper::pythonInterpreterWrapper")
            << "Switch 'useNumpy' not found in " << dict.name() << nl
                << "Assuming it to be 'true'. If that is not what you want, "
                << "set it explicitly (setting it also silences this warning)"
                << endl;
    }

    if(!dict.found("useIPython")) {
        WarningIn("pythonInterpreterWrapper::pythonInterpreterWrapper")
            << "Switch 'useIPython' not found in " << dict.name() << nl
                << "Assuming it to be 'true'. If that is not what you want, "
                << "set it explicitly (setting it also silences this warning)"
                << endl;
    }

    if(interpreterCount==0) {
        Pbug << "Initializing Python" << endl;

        Py_Initialize();

        if(debug) {
            PyThreadState *current=PyGILState_GetThisThreadState();
            Pbug << "GIL-state before thread" << getHex(current) << endl;
        }
        PyEval_InitThreads();
        if(debug) {
            PyThreadState *current=PyGILState_GetThisThreadState();
            Pbug << "GIL-state after thread" << getHex(current) << endl;
        }
        // importLib("scipy.stats","stats"); - OK
        mainThreadState = PyEval_SaveThread();
        // importLib("scipy.stats","stats"); - segFault

        Pbug << "Main thread state: " << getHex(mainThreadState) << endl;
        // PyRun_SimpleString("import IPython\n" // here it works as expected
        // "IPython.embed()\n");
    }

    if(Pstream::parRun()) {
        Pbug << "This is a parallel run" << endl;

        parallelMasterOnly_=readBool(dict.lookup("parallelMasterOnly"));
    }

    if(parallelNoRun(true)) {
        Pbug << "Getting out because of 'parallelNoRun'" << endl;
        return;
    }

    interpreterCount++;

    Pbug << "Getting new interpreter" << endl;
    pythonState_=Py_NewInterpreter();
    Pbug << "Interpreter state: " << getHex(pythonState_) << endl;

    //    interactiveLoop("Clean");

    initIPython();

    Pbug << "Currently " << interpreterCount
        << " Python interpreters (created one)" << endl;

    if(
        interactiveAfterExecute_
        ||
        interactiveAfterException_
    ) {
    } else {
        if(useIPython_) {
            WarningIn("pythonInterpreterWrapper::pythonInterpreterWrapper")
                << "'useIPython' not needed in " << dict.name()
                    << " if there is no interactivity"
                    << endl;
        }
    }

    if(useNumpy_) {
        Dbug << "Attempting to import numpy" << endl;

        static bool warnedNumpy=false;
        if(!warnedNumpy) {
            if(getEnv("FOAM_SIGFPE")!="false") {
                WarningInFunction
                    << "Attempting to import numpy. On some platforms that will raise a "
                        << "(harmless) floating point exception. To avoid this, switch it off "
                        << "by setting the environment variable 'FOAM_SIGFPE' to 'false'"
                        << endl;
            }
            warnedNumpy=true;
        }

        int fail=!importLib("numpy");
        if(fail) {
            FatalErrorIn("pythonInterpreterWrapper::pythonInterpreterWrapper")
                << "Problem during import of numpy." << nl
                    << "Switch it off with 'useNumpy false;' if it is not needed"
                    << endl
                    << exit(FatalError);
        }
        fail=PyRun_SimpleString(
            // "def _swak_wrapOpenFOAMField_intoNumpy(address,typestr,size,nr=None):\n"
            // "   class iWrap(object):\n"
            // "      def __init__(self):\n"
            // "         self.__array_interface__={}\n"
            // "         self.__array_interface__['data']=(int(address,16),False)\n"
            // "         if nr:\n"
            // "             self.__array_interface__['shape']=(size,nr)\n"
            // "         else:\n"
            // "             self.__array_interface__['shape']=(size,)\n"
            // "         self.__array_interface__['version']=3\n"
            // "         self.__array_interface__['typestr']=typestr\n"
            // "   return numpy.asarray(iWrap())\n"
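            // The Python below defines a numpy.ndarray subclass that wraps a
            // raw OpenFOAM field (data address, size, typestr) without copying,
            // via numpy's __array_interface__ protocol, and, if component names
            // are given, attaches each named component as an attribute.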
            "class OpenFOAMFieldArray(numpy.ndarray):\n"
            "   def __new__(cls,address,typestr,size,nr=None,names=None):\n"
            "      obj=type('Temporary',(object,),{})\n"
            "      obj.__array_interface__={}\n"
            "      obj.__array_interface__['data']=(address,False)\n"
            "      if nr:\n"
            "          obj.__array_interface__['shape']=(size,nr)\n"
            "      else:\n"
            "          obj.__array_interface__['shape']=(size,)\n"
            //            "      obj.__array_interface__['descr']=[('x',typestr)]\n"
            "      obj.__array_interface__['version']=3\n"
            "      obj.__array_interface__['typestr']=typestr\n"
            "      obj=numpy.asarray(obj).view(cls)\n"
            "      if names:\n"
            "         for i,n in enumerate(names):\n"
            "            def f(ind):\n"
            "               return obj[:,ind]\n"
            "            setattr(obj,n,f(i))\n"
            "      return obj\n"
            "   def __array_finalize__(self,obj):\n"
            "      if obj is None: return\n"
        );
    }

    if(dict.found("importLibs")) {
        const dictionary &libList=dict.subDict("importLibs");
        forAllConstIter(dictionary,libList,iter) {
            word as=(*iter).keyword();
            word full((*iter).stream());
            if(full=="") {
                full=as;
            }
            importLib(full,as,true);
        }
    } else {
Example #13
// Construct from dictionary
Foam::engineValve::engineValve
(
    const word& name,
    const polyMesh& mesh,
    const dictionary& dict
)
:
    name_(name),
    mesh_(mesh),
    engineDB_(refCast<const engineTime>(mesh_.time())),
    csPtr_
    (
        coordinateSystem::New
        (
            "coordinateSystem",
            dict.subDict("coordinateSystem")
        )
    ),
    bottomPatch_(dict.lookup("bottomPatch"), mesh.boundaryMesh()),
    poppetPatch_(dict.lookup("poppetPatch"), mesh.boundaryMesh()),
    stemPatch_(dict.lookup("stemPatch"), mesh.boundaryMesh()),
    curtainInPortPatch_
    (
        dict.lookup("curtainInPortPatch"),
        mesh.boundaryMesh()
    ),
    curtainInCylinderPatch_
    (
        dict.lookup("curtainInCylinderPatch"),
        mesh.boundaryMesh()
    ),
    detachInCylinderPatch_
    (
        dict.lookup("detachInCylinderPatch"),
        mesh.boundaryMesh()
    ),
    detachInPortPatch_
    (
        dict.lookup("detachInPortPatch"),
        mesh.boundaryMesh()
    ),
    detachFaces_(dict.lookup("detachFaces")),
    liftProfile_
    (
        "theta",
        "lift",
        name_,
        IFstream
        (
            mesh.time().path()/mesh.time().constant()/
            word(dict.lookup("liftProfileFile"))
        )()
    ),
    liftProfileStart_(min(liftProfile_.x())),
    liftProfileEnd_(max(liftProfile_.x())),
    minLift_(readScalar(dict.lookup("minLift"))),
    minTopLayer_(readScalar(dict.lookup("minTopLayer"))),
    maxTopLayer_(readScalar(dict.lookup("maxTopLayer"))),
    minBottomLayer_(readScalar(dict.lookup("minBottomLayer"))),
    maxBottomLayer_(readScalar(dict.lookup("maxBottomLayer"))),
    diameter_(readScalar(dict.lookup("diameter")))
{}
// Construct from dictionary
Foam::dirichletNeumannFriction::dirichletNeumannFriction
(
    const word& name,
    const fvPatch& patch,
    const dictionary& dict,
    const label masterPatchID,
    const label slavePatchID,
    const label masterFaceZoneID,
    const label slaveFaceZoneID
)
:
  frictionContactModel
  (
      name,
      patch,
      dict,
      masterPatchID,
      slavePatchID,
      masterFaceZoneID,
      slaveFaceZoneID
      ),
  frictionContactModelDict_(dict.subDict(name+"FrictionModelDict")),
  frictionLawPtr_(NULL),
  mesh_(patch.boundaryMesh().mesh()),
  slaveDisp_(mesh().boundaryMesh()[slavePatchID].size(), vector::zero),
  oldSlaveDisp_(slaveDisp_),
  oldSlip_(slaveDisp_.size(), vector::zero),
  slaveTraction_(mesh().boundaryMesh()[slavePatchID].size(), vector::zero),
  oldSlaveTraction_(slaveTraction_),
  slaveValueFrac_(mesh_.boundaryMesh()[slavePatchID].size(), symmTensor::zero),
  oldSlaveValueFrac_(slaveValueFrac_),
  relaxationFactor_
  (readScalar(frictionContactModelDict_.lookup("relaxationFactor"))),
  contactIterNum_(0),
  infoFreq_(readInt(frictionContactModelDict_.lookup("infoFrequency"))),
  oscillationCorr_(frictionContactModelDict_.lookup("oscillationCorrection")),
  smoothingSteps_(readInt(frictionContactModelDict_.lookup("smoothingSteps"))),
  oldStickSlip_(slaveDisp_.size(), 0.0),
  contactFilePtr_(NULL)
{
  // create friction law
  frictionLawPtr_ = frictionLaw::New(
                     frictionContactModelDict_.lookup("frictionLaw"),
                     frictionContactModelDict_
                     ).ptr();

  // the master processor opens the contact info file
  if (Pstream::master())
    {
      word masterName = mesh_.boundary()[masterPatchID].name();
      word slaveName = mesh_.boundary()[slavePatchID].name();
      contactFilePtr_ =
          new OFstream
          (fileName("frictionContact_"+masterName+"_"+slaveName+".txt"));
      OFstream& contactFile = *contactFilePtr_;
      int width = 20;
      contactFile << "time";
      contactFile.width(width);
      contactFile << "iterNum";
      contactFile.width(width);
      contactFile << "relaxationFactor";
      contactFile.width(width);
      contactFile << "slipFaces";
      contactFile.width(width);
      contactFile << "stickFaces";
      contactFile.width(width);
      contactFile << "maxMagSlaveTraction" << endl;
    }
}