Example No. 1
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
Rim3dOverlayInfoConfig::HistogramData Rim3dOverlayInfoConfig::histogramData()
{
    auto eclipseView = dynamic_cast<RimEclipseView*>(m_viewDef.p());
    auto geoMechView = dynamic_cast<RimGeoMechView*>(m_viewDef.p());
    
    if (eclipseView) return histogramData(eclipseView);
    if (geoMechView) return histogramData(geoMechView);
    return HistogramData();
}
Example No. 2
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
void Rim3dOverlayInfoConfig::updateGeoMech3DInfo(RimGeoMechView * geoMechView)
{
    HistogramData histData;

    if (m_showResultInfo() || m_showHistogram())
    {
        histData = histogramData(geoMechView);
    }

    // Compose text

    QString infoText;

    if (m_showCaseInfo())
    {
        infoText = caseInfoText(geoMechView);
    }

    if (m_showResultInfo())
    {
        infoText += resultInfoText(histData, geoMechView);
    }

    if (!infoText.isEmpty())
    {
        geoMechView->viewer()->setInfoText(infoText);
    }

    // Populate histogram

    if (m_showHistogram())
    {
        RimGeoMechCase* geoMechCase = geoMechView->geoMechCase();
        RigGeoMechCaseData* caseData = geoMechCase ? geoMechCase->geoMechData() : nullptr;
        bool isResultsInfoRelevant = caseData && geoMechView->hasUserRequestedAnimation() && geoMechView->cellResultResultDefinition()->hasResult();

        if (isResultsInfoRelevant && histData.histogram)
        {
            geoMechView->viewer()->showHistogram(true);
            geoMechView->viewer()->setHistogram(histData.min, histData.max, *histData.histogram);
            geoMechView->viewer()->setHistogramPercentiles(histData.p10, histData.p90, histData.mean);
        }
    }
}
Example No. 3
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
void Rim3dOverlayInfoConfig::updateEclipse3DInfo(RimEclipseView * eclipseView)
{
    HistogramData histData;

    if (m_showHistogram() || m_showResultInfo())
    {
        histData = histogramData();
    }

    QString infoText;

    if (m_showCaseInfo())
    {
        infoText = caseInfoText();
    }

    if (m_showResultInfo())
    {
        infoText += resultInfoText(histData);
    }

    if (!infoText.isEmpty())
    {
        eclipseView->viewer()->setInfoText(infoText);
    }

    if (m_showHistogram())
    {
        bool isResultsInfoRelevant = eclipseView->hasUserRequestedAnimation() && eclipseView->cellResult()->hasResult();
        
        if (isResultsInfoRelevant && histData.histogram)
        {
            eclipseView->viewer()->showHistogram(true);
            eclipseView->viewer()->setHistogram(histData.min, histData.max, *histData.histogram);
            eclipseView->viewer()->setHistogramPercentiles(histData.p10, histData.p90, histData.mean);
        }
    }
}
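The three examples above read min, max, p10, p90, mean and histogram off a HistogramData value without showing its definition. A minimal sketch of the aggregate they appear to assume, inferred only from those accesses (the field types, and modelling histogram as a pointer to a vector of bin counts that setHistogram() dereferences, are assumptions, not the actual ResInsight declaration):

#include <cstddef>
#include <vector>

// Hypothetical reconstruction of the HistogramData aggregate used in the snippets above.
struct HistogramData
{
    double min  = 0.0;
    double max  = 0.0;
    double p10  = 0.0;
    double p90  = 0.0;
    double mean = 0.0;
    const std::vector<size_t>* histogram = nullptr;  // bin counts; dereferenced before being handed to the viewer
};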
TripMetricsReference::TripMetricsReference( const std::vector< TripMetrics >& input,
                                           long binsForHistograms,
                                           const TripMetricsReference& reference ):
m_histograms(),
m_binsForHistograms( binsForHistograms ),
m_meanValues(),
m_stdValues(),
m_pca( 0 )
{
    // Create the vectors to feed the histograms
    if ( input.size() == 0 ) {
        throw std::runtime_error( "TripMetricsReference::TripMetricsReference : 0 size input given." );
    }
    
    const size_t numberOfHistograms = input.front().values().size();
    
    std::vector< std::vector<double> > allValues( numberOfHistograms, std::vector<double>() );
    
    // Loop over the trip metrics and fill in the valarrays, minimum and maximum values
    for ( size_t iMetric = 0; iMetric < input.size(); ++iMetric ) {
        
        const std::vector<double>& metricValues = input[iMetric].values();

        for ( size_t iValue = 0; iValue < numberOfHistograms; ++iValue ) {
            if ( std::isnan(metricValues[iValue])) continue;
            double currentValue = metricValues[iValue];
            allValues[iValue].push_back( currentValue );
        }
    }
    
    
    // For each vector create the corresponding histogram
    for ( size_t iValue = 0; iValue < numberOfHistograms; ++iValue ) {

        std::vector<double>& valuesForMetric = allValues[iValue];

        // Reuse the bin edges from the reference histograms
        double lowEdge = reference.m_histograms[iValue]->lowEdge();
        double highEdge = reference.m_histograms[iValue]->highEdge();

        // Create the histogram
        m_histograms.push_back( new Histogram( valuesForMetric,
                                              binsForHistograms,
                                              lowEdge,
                                              highEdge ) );
        
    }

    // Re-collect the per-sample metric vectors; NaN samples are dropped and the rest normalised below
    allValues.clear();
    allValues.reserve( input.size() );
    for ( std::vector< TripMetrics >::const_iterator iSample = input.begin(); iSample != input.end(); ++iSample )
        allValues.push_back( iSample->values() );

    const long nBinaryVariables = TripMetrics::numberOfBinaryMetrics();
    const long numberOfFeatures = numberOfHistograms - nBinaryVariables;
    
    std::vector< std::vector< double > > cleanData;
    cleanData.reserve( allValues.size() );
    
    m_meanValues = reference.m_meanValues;
    m_stdValues = reference.m_stdValues;
    for ( size_t iSample = 0; iSample < allValues.size(); ++iSample ) {
        bool nanFound = false;
        const std::vector<double>& metricValues = allValues[iSample];
        for ( size_t iMetric = 0; iMetric < metricValues.size(); ++iMetric ) {
            if ( std::isnan( metricValues[iMetric]) ) {
                nanFound = true;
                break;
            }
        }
        if ( nanFound ) continue;
        std::vector<double> sampleValues = std::vector<double>( metricValues.begin() + nBinaryVariables, metricValues.end() );
        // Normalise using previously calculated mean and std values.
        for ( size_t iFeature = 0; iFeature < numberOfFeatures; ++iFeature ) {
            sampleValues[iFeature] = ( sampleValues[iFeature] - m_meanValues[iFeature] ) / m_stdValues[iFeature];
        }
        cleanData.push_back( sampleValues );
    }
    
    // Transform the clean data using the reference PCA object and create the corresponding histograms
    m_pca = new PCA( *(reference.m_pca ) );

    for ( std::vector< std::vector< double > >::iterator iData = cleanData.begin(); iData != cleanData.end(); ++iData )
        *iData = m_pca->transform( *iData );
    
    if ( cleanData.empty() ) {
        throw std::runtime_error( "TripMetricsReference::TripMetricsReference : all samples contain NaN values." );
    }
    const size_t nPrincipalComponents = cleanData.front().size();
    const size_t numberOfSamples = cleanData.size();
    m_histogramsPCA.reserve( nPrincipalComponents );
    for ( size_t iComponent = 0; iComponent < nPrincipalComponents; ++iComponent ) {
        std::vector< double > histogramData( numberOfSamples, 0.0 );
        for ( size_t iSample = 0; iSample < numberOfSamples; ++iSample ) {
            histogramData[iSample] = cleanData[iSample][iComponent];
        }
        
        // Create the histogram
        m_histogramsPCA.push_back( new Histogram( histogramData,
                                                 m_binsForHistograms,
                                                 reference.m_histogramsPCA[iComponent]->lowEdge(),
                                                 reference.m_histogramsPCA[iComponent]->highEdge() ) );
    }
}
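The constructor above (and performPCA() below) only touch the Histogram class through its four-argument constructor and the lowEdge()/highEdge() accessors. A self-contained sketch of that interface, written as an assumption reconstructed from the call sites rather than the project's real class (the counts() accessor is an extra convenience not used by the original code):

#include <cstddef>
#include <vector>

// Hypothetical stand-in for the Histogram class used above: equal-width binning of a
// value list between lowEdge and highEdge into nBins bins.
// Assumes nBins > 0 and lowEdge < highEdge.
class Histogram
{
public:
    Histogram( const std::vector<double>& values, long nBins, double lowEdge, double highEdge )
        : m_lowEdge( lowEdge ), m_highEdge( highEdge ), m_counts( static_cast<size_t>( nBins ), 0 )
    {
        const double binSize = ( highEdge - lowEdge ) / nBins;
        for ( double v : values ) {
            if ( v < lowEdge || v >= highEdge ) continue;  // out-of-range values are ignored
            size_t bin = static_cast<size_t>( ( v - lowEdge ) / binSize );
            if ( bin >= m_counts.size() ) bin = m_counts.size() - 1;  // guard against rounding at the upper edge
            ++m_counts[bin];
        }
    }

    double lowEdge() const  { return m_lowEdge; }
    double highEdge() const { return m_highEdge; }
    const std::vector<size_t>& counts() const { return m_counts; }

private:
    double m_lowEdge;
    double m_highEdge;
    std::vector<size_t> m_counts;
};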
void
TripMetricsReference::performPCA( const std::vector< std::vector<double> >& input )
{
    const long nBinaryVariables = TripMetrics::numberOfBinaryMetrics();
    
    // Get rid of empty values.
    std::vector< std::vector< double > > cleanData;
    cleanData.reserve( input.size() );
    for ( size_t iSample = 0; iSample < input.size(); ++iSample ) {
        bool nanFound = false;
        const std::vector<double>& metricValues = input[iSample];
        for ( size_t iMetric = nBinaryVariables; iMetric < metricValues.size(); ++iMetric ) {
            if ( std::isnan( metricValues[iMetric]) ) {
                nanFound = true;
                break;
            }
            
        }
        if ( nanFound ) continue;
        cleanData.push_back( std::vector<double>( metricValues.begin() + nBinaryVariables, metricValues.end() ) );
    }
    
    // Get rid of the extreme values
    const double percentageToKeep = 99.8;
    std::vector<std::vector<double> > cleanDataNoExtremes = tripExtremesFromColumns( cleanData, percentageToKeep );
    
    
    // Then normalize the values. Store the mean and std to be used when scoring
    const size_t numberOfFeatures = cleanDataNoExtremes.front().size();
    const size_t numberOfSamples = cleanDataNoExtremes.size();
    m_meanValues = std::vector< double >( numberOfFeatures, 0.0 );
    m_stdValues = std::vector< double >( numberOfFeatures, 0.0 );
    
    for ( size_t iFeature = 0; iFeature < numberOfFeatures; ++iFeature ) {
        // calculate mean and standard deviation
        double sx = 0;
        double sxx = 0;
        for ( size_t iSample = 0; iSample < cleanDataNoExtremes.size(); ++iSample ) {
            const double x = cleanDataNoExtremes[iSample][iFeature];
            sx += x;
            sxx += x*x;
        }
        
        const double mx = sx / numberOfSamples;
        const double stdx  = std::sqrt( (sxx / numberOfSamples) - mx*mx );
        
        m_meanValues[iFeature] = mx;
        m_stdValues[iFeature] = stdx;

        // normalise the extreme-free values in place
        for ( size_t iSample = 0; iSample < cleanDataNoExtremes.size(); ++iSample ) {
            const double x = cleanDataNoExtremes[iSample][iFeature];
            const double xnew = (x - mx) / stdx;
            cleanDataNoExtremes[iSample][iFeature] = xnew;
        }

    }
    
    if ( m_pca ) delete m_pca;
    m_pca = new PCA;
    
    // Find the principal components using the clean sample without the extremes
    m_pca->fit( cleanDataNoExtremes );
    
    
    // Transform the clean data to identify the histogram edges
    for ( std::vector< std::vector< double > >::iterator iData = cleanDataNoExtremes.begin(); iData != cleanDataNoExtremes.end(); ++iData )
        *iData = m_pca->transform( *iData );
    const size_t nPrincipalComponents = cleanDataNoExtremes.front().size();
    std::vector<double> minValues = cleanDataNoExtremes.front();
    std::vector<double> maxValues = minValues;
    for ( size_t iComponent = 0; iComponent < nPrincipalComponents; ++iComponent ) {
        for (size_t i = 0; i < cleanDataNoExtremes.size(); ++i ) {
            const double value = cleanDataNoExtremes[i][iComponent];
            if ( value < minValues[iComponent] ) minValues[iComponent] = value;
            if ( value > maxValues[iComponent] ) maxValues[iComponent] = value;
        }
    }
    
    // Normalise and transform the full reference data
    for ( std::vector< std::vector< double > >::iterator iData = cleanData.begin(); iData != cleanData.end(); ++iData ) {
        std::vector<double>& sampleData = *iData;
        // Normalise
        for ( size_t iFeature = 0; iFeature < numberOfFeatures; ++iFeature )
            sampleData[iFeature] = (sampleData[iFeature] - m_meanValues[iFeature] ) / m_stdValues[iFeature];
        // Transform
        sampleData = m_pca->transform( sampleData );
    }
    
    // Create the histograms with the principal component values
    m_histogramsPCA.reserve( nPrincipalComponents );
    
    for ( size_t iComponent = 0; iComponent < nPrincipalComponents; ++iComponent ) {
        std::vector< double > histogramData( numberOfSamples, 0.0 );
        double minValue = minValues[iComponent];
        double maxValue = maxValues[iComponent];
        for ( size_t iSample = 0; iSample < cleanData.size(); ++iSample ) {
            const double value = cleanData[iSample][iComponent];
            histogramData[iSample] = value;
        }
        
        // Determine the edges, the bins and create the corresponding histogram.
        double binSize = ( maxValue - minValue ) / m_binsForHistograms;
        maxValue += 0.01 * binSize;
        
        // Create the histogram
        m_histogramsPCA.push_back( new Histogram( histogramData,
                                                 m_binsForHistograms,
                                                 minValue,
                                                 maxValue ) );
    }
}
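Both functions above drive the PCA helper only through fit(), transform() and its copy constructor; its header is not part of this listing. The contract they assume, written down as a declaration-only sketch (the signatures are reconstructions from the call sites, not the actual class):

#include <vector>

// Hypothetical declaration of the PCA helper assumed by TripMetricsReference.
class PCA
{
public:
    PCA() = default;
    PCA( const PCA& other ) = default;  // copied when a new reference reuses an existing fit

    // Learn the principal components from row-major samples (one normalised feature vector per row).
    void fit( const std::vector< std::vector<double> >& samples );

    // Project a single normalised feature vector onto the learned components.
    std::vector<double> transform( const std::vector<double>& sample ) const;
};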