TEST(MatrixProfile, testMul1000)
{
    PreciseTimer start;
    Matrix * input[POLUTING_INPUTS];

    const static int  TEST_H_SIZE = 1000;
    const static int  TEST_W_SIZE = TEST_H_SIZE;

    for (unsigned i = 0; i < POLUTING_INPUTS; i++)
    {
        input[i] = new Matrix(TEST_H_SIZE ,TEST_W_SIZE);
        auto touch = [](int i, int j, double &el) -> void { el = ((i+1) * (j + 1)) + ((j + 1) / 5.0); };
        input[i]->touchOperationElementwize(touch);
    }

    SYNC_PRINT(("Profiling     Simple Approach:"));
    start = PreciseTimer::currentTime();
    for (unsigned i = 0; i < LIMIT; i++) {
        Matrix &A = *input[i % POLUTING_INPUTS];
        Matrix B = A * A;
    }
    uint64_t delaySimple = start.usecsToNow();
    SYNC_PRINT(("%8" PRIu64 "us %8" PRIu64 "ms SP: %8" PRIu64 "us\n", delaySimple, delaySimple / 1000, delaySimple / LIMIT));

    for (unsigned i = 0; i < POLUTING_INPUTS; i++) {
        delete_safe(input[i]);
    }
}
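
Every example in this listing relies on the same PreciseTimer idiom: remember a start point with PreciseTimer::currentTime() and read the elapsed time in microseconds with usecsToNow(). Below is a minimal sketch of that pattern, assuming only the PreciseTimer class used above; runWorkload() is a hypothetical placeholder for the code being measured.

/* Minimal sketch of the timing idiom shared by the examples in this listing.
 * PreciseTimer is assumed to be the class used throughout; runWorkload()
 * is a hypothetical placeholder. */
#include <cstdint>
#include <cstdio>
#include <cinttypes>

void profileWorkload()
{
    PreciseTimer start = PreciseTimer::currentTime();   // remember the start point

    runWorkload();                                       // the code being measured

    uint64_t delay = start.usecsToNow();                 // elapsed microseconds
    printf("done in %8" PRIu64 "us (%" PRIu64 "ms)\n", delay, delay / 1000);
}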
Example #2
ImageCaptureInterface::FramePair RTSPCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    FramePair result = fcb.dequeue();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    emit newStatisticsReady(stats);

    if (!mIsPaused)
    {        
        frame_data_t frameData;
        frameData.timestamp = fcb.secondFrameTimestamp();
        //SYNC_PRINT(("RTSPCapture::getFrame(): sending notification ts = %d\n", frameData.timestamp));
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("RTSPCapture::getFrame(): Paused\n"));
    }

    return result;
}
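
The capture interfaces in this listing fill their CaptureStatistics record the same way: decoding time comes from a PreciseTimer started at the top of getFrame(), and the inter-frame delay comes from a member timer that is only read after it has been initialised once. The condensed sketch below is hypothetical; it assumes the CaptureStatistics and PreciseTimer classes seen above, and SketchCapture merely stands in for the real interface classes.

/* Hedged sketch of the statistics bookkeeping shared by the getFrame()
 * implementations in this listing. CaptureStatistics and PreciseTimer are
 * assumed to exist as in the examples; SketchCapture is hypothetical. */
class SketchCapture
{
    PreciseTimer mLastFrameTime;    // default-constructed means "not set yet"

    void reportFrameStatistics()
    {
        CaptureStatistics stats;
        PreciseTimer start = PreciseTimer::currentTime();

        /* ... dequeue and decode the frame here ... */

        stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

        /* Skip the inter-frame delay on the very first frame. */
        if (mLastFrameTime.usecsTo(PreciseTimer()) != 0) {
            stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
        }
        mLastFrameTime = PreciseTimer::currentTime();

        /* notify listeners, e.g. emit newStatisticsReady(stats) or
         * imageInterfaceReceiver->newStatisticsReadyCallback(stats) */
    }
};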
Example #3
void testRadialInversion(int scale)
{
    RGB24Buffer *image = new RGB24Buffer(250 * scale, 400 * scale);

    auto operation = [](int i, int j, RGBColor *pixel)
    {
        i = i / 100;
        j = j / 200;
        if ( (i % 2) &&  (j % 2))   *pixel = RGBColor::Green();
        if (!(i % 2) &&  (j % 2))   *pixel = RGBColor::Yellow();
        if ( (i % 2) && !(j % 2))   *pixel = RGBColor::Red();
        if (!(i % 2) && !(j % 2))   *pixel = RGBColor::Blue();
    };
    touchOperationElementwize(image, operation);

    LensDistortionModelParameters deformator;
    deformator.setPrincipalX(image->w / 2);
    deformator.setPrincipalY(image->h / 2);

    deformator.setTangentialX(0.000001);
    deformator.setTangentialY(0.000001);

    deformator.setAspect(1.0);
    deformator.setScale(1.0);

    deformator.mKoeff.push_back( 0.0001);
    deformator.mKoeff.push_back(-0.00000002);
    deformator.mKoeff.push_back( 0.00000000000003);

    RadialCorrection T(deformator);
    PreciseTimer timer;

    cout << "Starting deformation... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *deformed = image->doReverseDeformationBlTyped<RadialCorrection>(&T);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    /* */
    cout << "Starting invertion... " << flush;
    RadialCorrection invert = T.invertCorrection(image->h, image->w, 30);
    cout << "done" << endl;

    cout << "Starting backprojection... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *backproject = deformed->doReverseDeformationBlTyped<RadialCorrection>(&invert);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;
    cout << "done" << endl;

    BMPLoader().save("input.bmp"      , image);
    BMPLoader().save("forward.bmp"    , deformed);
    BMPLoader().save("backproject.bmp", backproject);

    delete_safe(image);
    delete_safe(deformed);
    delete_safe(backproject);
}
TEST(SSEWrappers, profileSSEWrapper)
{
#ifdef WITH_SSE
    uint64_t LIMIT = 10000000;

    PreciseTimer start;
    uint64_t delay0, delay1;

    start = PreciseTimer::currentTime();
    __m128i acc0 = _mm_set1_epi32(128);
    for (uint64_t i = 0 ; i < LIMIT; i++ )
    {
        int a = i * 32 / LIMIT;
        __m128i a0 = _mm_set1_epi32(a);
        __m128i b0 = _mm_set1_epi32(i % 2);
        __m128i c0 = _mm_sll_epi32(a0,b0);
        __m128i d0 = _mm_slli_epi32(a0,1);

        acc0 = _mm_add_epi32(acc0,d0);
        acc0 = _mm_add_epi32(acc0,c0);
        acc0 = _mm_sub_epi32(acc0,a0);
    }

    delay0 = start.usecsToNow();
    printf("Dirty Oldstyle :%8" PRIu64 "us \n", delay0);

    /* Same with new style */

    start = PreciseTimer::currentTime();
    Int32x4 acc1(128);
    for (uint64_t i = 0 ; i < LIMIT; i++ )
    {
        int a = i * 32 / LIMIT;
        Int32x4 a1(a);
        Int32x4 b1((uint32_t)i % 2);
        Int32x4 c1 = a1 << b1;
        Int32x4 d1 = a1 << 1;

        acc1 += d1;
        acc1 += c1;
        acc1 -= a1;
    }

    delay1 = start.usecsToNow();
    printf("Cool new style :%8" PRIu64 "us \n", delay1);
    printf("Diff is  :%8" PRIi64 "us \n", delay1 - delay0);

    printf("Results are %s\n",sse32(acc0, 0) ==  sse32(acc1.data, 0) ? "equal" : "different");

    CORE_ASSERT_TRUE(sse32(acc0, 0) == sse32(acc1.data, 0), "Oops... arithmetic flaw");
#endif
}
Example #5
UEyeCaptureInterface::FramePair UEyeCaptureInterface::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();
    FramePair result( NULL, NULL);

//    printf("Called getFrame\n");

    protectFrame.lock();
        decodeData(&leftCamera , currentLeft,  &(result.bufferLeft));
        decodeData(&rightCamera, currentRight, &(result.bufferRight));

        result.leftTimeStamp  = currentLeft->usecsTimeStamp();
        result.rightTimeStamp = currentRight->usecsTimeStamp();

        stats.framesSkipped = skippedCount > 0 ? skippedCount - 1 : 0;
        skippedCount = 0;

        stats.triggerSkipped = triggerSkippedCount;
        triggerSkippedCount = 0;

        int64_t internalDesync =  currentLeft->internalTimestamp - currentRight->internalTimestamp;
    protectFrame.unlock();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();
    stats.values[CaptureStatistics::INTERFRAME_DELAY] = frameDelay;

    int64_t desync =  result.leftTimeStamp - result.rightTimeStamp;
    stats.values[CaptureStatistics::DESYNC_TIME] = desync > 0 ? desync : -desync;
    stats.values[CaptureStatistics::INTERNAL_DESYNC_TIME] = internalDesync > 0 ? internalDesync : -internalDesync;

    /* Get temperature data */
    stats.temperature[0] = leftCamera.getTemperature();
    stats.temperature[1] = rightCamera.getTemperature();

    //stats.values[CaptureStatistics::DATA_SIZE] = currentLeft.bytesused;

    emit newStatisticsReady(stats);

//    printf("Finished getFrame\n");
    return result;
}
/* ========================== TRY TO DO NEW POWERFUL CLUSTERING ========================== */
void Clustering3D::_clusterStarting(Statistics& stat)
{
    mClusters.clear();
    std::sort(mCloud->begin(), mCloud->end(), SortSwarmPointTexY());
    Cloud::iterator first, second;
    first  = mCloud->begin();
    second = mCloud->begin();
    for (unsigned i = 0; i < mCloud->size(); i++)
    {
        if ((*first).texCoor.y() != (*second).texCoor.y())
        {
            std::sort(first, second - 1, SortSwarmPointTexX());
            first = second;
        }
        second++;
    }

    PreciseTimer time = PreciseTimer::currentTime();
    this->_clustering(640, 480);
    stat.setTime("_clustering", time.usecsToNow());

    /* colour for clusters */
    CloudCluster::iterator it;
    mCloud->clear();
    int i = 1;
    vector<CloudCluster *>::iterator it2;
    for (it2 = mpClusters.begin(); it2 < mpClusters.end(); it2++)
    {
        for (it = (*it2)->begin(); it < (*it2)->end(); it++)
        {
            (*it).cluster = i;
            mCloud->push_back(*it);
        }
        i++;
    }
}
Example #7
V4L2CaptureInterface::FramePair V4L2CaptureInterface::getFrameRGB24()
{
    CaptureStatistics  stats;

    PreciseTimer start = PreciseTimer::currentTime();

    protectFrame.lock();

    FramePair result;

    RGB24Buffer **results[MAX_INPUTS_NUMBER] = {
        &result.buffers[LEFT_FRAME  ].rgbBuffer,
        &result.buffers[RIGHT_FRAME ].rgbBuffer,
        &result.buffers[THIRD_FRAME ].rgbBuffer,
        &result.buffers[FOURTH_FRAME].rgbBuffer
    };

    for (int i = 0; i < MAX_INPUTS_NUMBER; i++)
    {
        decodeDataRGB24(&camera[i],  &currentFrame[i],  results[i]);

        if ((*results[i]) == NULL) {
            printf("V4L2CaptureInterface::getFrameRGB24(): Precrash condition at %d (%s)\n", i, getFrameSourceName((FrameSourceId)i));
        }
    }

    if (result.rgbBufferLeft() != NULL) {
        result.setBufferLeft  ( result.rgbBufferLeft() ->toG12Buffer() ); // FIXME
    }
    if (result.rgbBufferRight() != NULL) {
        result.setBufferRight ( result.rgbBufferRight()->toG12Buffer() );
    }

    if (currentFrame[LEFT_FRAME].isFilled)
        result.setTimeStampLeft  (currentFrame[LEFT_FRAME].usecsTimeStamp());

    if (currentFrame[RIGHT_FRAME].isFilled)
        result.setTimeStampRight (currentFrame[RIGHT_FRAME].usecsTimeStamp());

    if (skippedCount == 0)
    {
   //     SYNC_PRINT(("Warning: Requested same frames twice. Is this by design?\n"));
    }

    stats.framesSkipped = skippedCount > 0 ? skippedCount - 1 : 0;
    skippedCount = 0;
    protectFrame.unlock();
    stats.values[CaptureStatistics::DECODING_TIME]    = start.usecsToNow();
    stats.values[CaptureStatistics::INTERFRAME_DELAY] = frameDelay;

    int64_t desync =  currentFrame[LEFT_FRAME ].usecsTimeStamp() -
                      currentFrame[RIGHT_FRAME].usecsTimeStamp();

    stats.values[CaptureStatistics::DESYNC_TIME] = CORE_ABS(desync);
    stats.values[CaptureStatistics::DATA_SIZE]   = currentFrame[LEFT_FRAME].bytesused;
    if (imageInterfaceReceiver != NULL) {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    } else {
        SYNC_PRINT(("Warning:  V4L2CaptureInterface::getFrameRGB24(): imageInterfaceReceiver is NULL\n"));
    }

    return result;
}
Example #8
V4L2CaptureInterface::FramePair V4L2CaptureInterface::getFrame()
{

//    SYNC_PRINT(("V4L2CaptureInterface::getFrame(): called\n"));
    CaptureStatistics  stats;

    PreciseTimer start = PreciseTimer::currentTime();
    FramePair result;

    protectFrame.lock();
    G12Buffer **results[MAX_INPUTS_NUMBER] = {
            &result.buffers[LEFT_FRAME  ].g12Buffer,
            &result.buffers[RIGHT_FRAME ].g12Buffer,
            &result.buffers[THIRD_FRAME ].g12Buffer,
            &result.buffers[FOURTH_FRAME].g12Buffer
    };

    result.setRgbBufferRight(NULL);
    result.setRgbBufferLeft (NULL);

    //SYNC_PRINT(("LF:%s RF:%s\n",
    //           currentFrame[Frames::LEFT_FRAME ].isFilled ? "filled" : "empty" ,
    //           currentFrame[Frames::RIGHT_FRAME].isFilled ? "filled" : "empty"));

    for (int i = 0; i < MAX_INPUTS_NUMBER; i++)
    {
        decodeData(&camera[i],  &currentFrame[i],  results[i]);

        if ((*results[i]) == NULL) {
            SYNC_PRINT(("V4L2CaptureInterface::getFrame(): Precrash condition\n"));
        }
    }

    if (currentFrame[LEFT_FRAME].isFilled)
        result.setTimeStampLeft (currentFrame[LEFT_FRAME].usecsTimeStamp());

    if (currentFrame[RIGHT_FRAME].isFilled)
        result.setTimeStampRight   (currentFrame[RIGHT_FRAME].usecsTimeStamp());

    if (skippedCount == 0)
    {
        //SYNC_PRINT(("Warning: Requested same frames twice. Is this by design?\n"));
    }

    stats.framesSkipped = skippedCount > 0 ? skippedCount - 1 : 0;
    skippedCount = 0;
    protectFrame.unlock();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();
    stats.values[CaptureStatistics::INTERFRAME_DELAY] = frameDelay;

    int64_t desync =  currentFrame[LEFT_FRAME].usecsTimeStamp() - currentFrame[RIGHT_FRAME].usecsTimeStamp();
    stats.values[CaptureStatistics::DESYNC_TIME] = desync > 0 ? desync : -desync;
    stats.values[CaptureStatistics::DATA_SIZE] = currentFrame[LEFT_FRAME].bytesused;

    if (imageInterfaceReceiver != NULL) {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    }

    return result;
}
Example #9
ALIGN_STACK_SSE void DirectShowCaptureInterface::memberCallback(DSCapDeviceId dev, FrameData data)
{
    //SYNC_PRINT(("Received new frame in a member %d\n", dev));
    mProtectFrame.lock();

    DirectShowCameraDescriptor *camera = NULL;
    if (mCameras[0].deviceHandle == dev) camera = &mCameras[0];
    else
    if (mCameras[1].deviceHandle == dev) camera = &mCameras[1];
    else
        goto exit;

    {
        PreciseTimer timer = PreciseTimer::currentTime();
        camera->gotBuffer = true;
        camera->timestamp = (data.timestamp + 5) / 10;
        delete_safe (camera->buffer);
        delete_safe (camera->buffer24);

        if (data.format.type == CAP_YUV)
        {
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, false);
                camera->buffer24->fillWithYUYV((uint8_t *)data.data);
            }
            else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                camera->buffer->fillWithYUYV((uint16_t *)data.data);
            }
        }
        else if (data.format.type == CAP_MJPEG)
        {
            MjpegDecoderLazy *lazyDecoder = new MjpegDecoderLazy;   // don't place it on the stack, it's too large!
            if (mIsRgb)
                camera->buffer24 = lazyDecoder->decodeRGB24((uchar *)data.data);
            else
                camera->buffer   = lazyDecoder->decode((uchar *)data.data);
            delete lazyDecoder;
        }
        else if (data.format.type == CAP_RGB)
        {
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, true);
                int w = camera->buffer24->w;
                int h = camera->buffer24->h;
                for (int i = 0; i < h; i++) {
                    uint8_t  *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    RGBColor *rgb24Data = &(camera->buffer24->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *rgb24Data++ = rgb;
                        rgbData += 3;
                    }
                }
            }
            else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                int w = camera->buffer->w;
                int h = camera->buffer->h;
                for (int i = 0; i < h; i++) {
                    uint8_t  *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    uint16_t *greyData = &(camera->buffer->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *greyData++ = rgb.luma12();
                        rgbData += 3;
                    }
                }
            }
        }
        else {
            camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
        }

        camera->decodeTime = timer.usecsToNow();
        /* If both frames are in place */

        if (mCameras[0].gotBuffer && mCameras[1].gotBuffer)
        {
            mCameras[0].gotBuffer = false;
            mCameras[1].gotBuffer = false;

            CaptureStatistics stats;
            int64_t desync = mCameras[0].timestamp - mCameras[1].timestamp;
            stats.values[CaptureStatistics::DESYNC_TIME] = desync > 0 ? desync : -desync;
            stats.values[CaptureStatistics::DECODING_TIME] = mCameras[0].decodeTime + mCameras[1].decodeTime;
            if (lastFrameTime.usecsTo(PreciseTimer()) != 0)
            {
                stats.values[CaptureStatistics::INTERFRAME_DELAY] = lastFrameTime.usecsToNow();
            }
            lastFrameTime = PreciseTimer::currentTime();

            frame_data_t frameData;
            frameData.timestamp = mCameras[0].timestamp / 2 + mCameras[1].timestamp / 2;
            newFrameReady(frameData);
            newStatisticsReady(stats);
        }
        else {
            frame_data_t frameData;
            frameData.timestamp = mCameras[0].timestamp;
            newFrameReady(frameData);
            //newStatisticsReady(stats);
            skippedCount++;
        }
    }
exit:
    mProtectFrame.unlock();
}
void TestbedMainWindow::preprocessImage(void)
{
    if (mImage == NULL) {
        return;
    }

    L_INFO_P("Starting to preprocess");
    PointScene *scene = new PointScene();
    //scene->scene.push_back(PointScene::Point(Vector3dd(1.0, 1.0, 1.0)));
    scene->showBuffer(mImage);
    m3DHist->setNewScenePointer(QSharedPointer<Scene3D>(scene), CloudViewDialog::MAIN_SCENE);

    Vector3dd mean(0.0);

    for (int i = 0; i < mImage->h; i++)
    {
        for (int j = 0; j < mImage->w; j++)
        {
            mean += mImage->element(i,j).toDouble();
        }
    }
    mean /= (mImage->h * mImage->w);

    EllipticalApproximationUnified<Vector3dd> ellip;
    for (int i = 0; i < mImage->h; i++)
    {
        for (int j = 0; j < mImage->w; j++)
        {
            ellip.addPoint(mImage->element(i,j).toDouble() - mean);
        }
    }
    ellip.getEllipseParameters();
    qDebug() << "Size is: "<< ellip.mAxes.size();

    EllApproxScene *sceneEl = new EllApproxScene(mean, ellip);
    m3DHist->setNewScenePointer(QSharedPointer<Scene3D>(sceneEl), CloudViewDialog::ADDITIONAL_SCENE);
    L_INFO_P("Color distribution analyzed");
    L_INFO_P("Preparing HSV presentation");

    delete_safe(mHComp);
    delete_safe(mSComp);
    delete_safe(mVComp);
    mHComp = new G8Buffer(mImage->getSize(), false);
    mSComp = new G8Buffer(mImage->getSize(), false);
    mVComp = new G8Buffer(mImage->getSize(), false);

    for (int i = 0; i < mImage->h; i++)
    {
        for (int j = 0; j < mImage->w; j++)
        {
            RGBColor &color = mImage->element(i,j);
            mHComp->element(i,j) = color.hue() * 255 / 360;
            mSComp->element(i,j) = color.saturation();
            mVComp->element(i,j) = color.value();
        }
    }
    L_INFO_P("Preparing edges");
    G12Buffer *tempBuffer = G8Buffer::toG12Buffer(mVComp);
    delete_safe(mEdges);
    delete_safe(mCannyEdges);

    CannyParameters *cannyParams = mUi->cannyParametersWidget->createParameters();

    DerivativeBuffer *derivativeBuffer = NULL;
    mCannyEdges = CannyFilter::doFilter(tempBuffer, *cannyParams, &derivativeBuffer);
    mEdges = derivativeBuffer->gradientMagnitudeBuffer(10.0);

    delete_safe(derivativeBuffer);

    delete_safe(cannyParams);
    delete_safe(tempBuffer);


    L_INFO_P("Preparing projected buffer");
    Vector3dd mainDirection = ellip.mAxes[0];
    mainDirection.normalise();
    L_INFO << "Principal component is:" << mainDirection;
    delete_safe(mPrincipal);
    mPrincipal = projectToDirection(mImage, mainDirection);

    Vector3dd secondaryDirection = ellip.mAxes[1];
    secondaryDirection.normalise();
    L_INFO << "Secondary component is:" << secondaryDirection;
    delete_safe(mPrincipal2);
    mPrincipal2 = projectToDirection(mImage, secondaryDirection);

    Vector3dd thirdDirection = ellip.mAxes[2];
    thirdDirection.normalise();
    L_INFO << "Third component is:" << thirdDirection;
    delete_safe(mPrincipal3);
    mPrincipal3 = projectToDirection(mImage, thirdDirection);

    PreciseTimer timer;
    L_INFO_P("Preparing local histogram buffer");
    timer = PreciseTimer::currentTime();

    delete_safe(mHistBuffer);
    mHistBuffer = new AbstractBuffer<LocalHistogram>(mPrincipal->getSize());
    int bound = mUi->histRadiusSpinBox->value();
    for (int i = bound; i < mPrincipal->h - bound; i++)
    {
        for (int j = bound; j < mPrincipal->w - bound; j++)
        {
            for (int dy = -bound; dy <= bound; dy++)
            {
                for (int dx = -bound; dx <= bound; dx++)
                {
                    mHistBuffer->element(i, j).inc(mPrincipal->element(i + dy, j + dx));
                }
            }

        }
    }
    L_INFO_P("  Done in %d", timer.usecsToNow());


    L_INFO_P("Preparing local histogram2D buffer");
    timer = PreciseTimer::currentTime();

    delete_safe(mHist2DBuffer);
    mHist2DBuffer = new Histogram2DBuffer(mPrincipal->getSize());
    bound = mUi->histRadiusSpinBox->value();
    for (int i = bound; i < mPrincipal->h - bound; i++)
    {
        for (int j = bound; j < mPrincipal->w - bound; j++)
        {
            LocalHistogram2D *hist = &mHist2DBuffer->element(i, j);
            hist->isSet = true;
            for (int dy = -bound; dy <= bound; dy++)
            {
                for (int dx = -bound; dx <= bound; dx++)
                {
                    hist->inc(mPrincipal->element(i + dy, j + dx), mPrincipal2->element(i + dy, j + dx));
                }
            }

        }
    }
    L_INFO_P("  Done in %d", timer.usecsToNow());


    updateViewImage();

}
TEST(MatrixProfile, testMulSize3)
{
//    int  sizes    [] = { 1024, 2048, 4096, 16384 };

    int  sizes    [] = { 1000, 2000, 4000, 16000 };

    int  polca    [] = {   10,   20,    5,     1 };
    int  runs     [] = {   10,    5,    2,     2 };

    bool runsimple[] = { true, false, false, false };
    bool runslow  [] = { true, true , false, false };
    bool runour   [] = { true, true ,  true, false };
    bool runfast  [] = { true, true ,  true, false };   // 16K x 16K - skipped entirely


    printHeader();

    for (size_t testnum = 0; testnum < CORE_COUNT_OF(sizes); testnum++)
    {
        int       TEST_H_SIZE     = sizes[testnum] /* /128*/;
        int       TEST_W_SIZE     = TEST_H_SIZE;
        unsigned  POLUTING_INPUTS = polca[testnum];
        unsigned  LIMIT           = runs[testnum];

        double mem    = 2.0 * sizeof(double) * (double)TEST_H_SIZE * TEST_H_SIZE;
        double flop   = 2.0 * (double)TEST_H_SIZE * TEST_H_SIZE * TEST_H_SIZE;
        double gflop  = flop / 1000000.0 / 1000.0;


        PreciseTimer start;
        Matrix ** input1 = new Matrix*[POLUTING_INPUTS]; // Unfortunately VS2013 does not support C99 VLAs
        Matrix ** input2 = new Matrix*[POLUTING_INPUTS];
        Matrix AB(1,1);

        for (unsigned i = 0; i < POLUTING_INPUTS; i++)
        {
            input1[i] = new Matrix(TEST_H_SIZE ,TEST_W_SIZE);
            input2[i] = new Matrix(TEST_H_SIZE ,TEST_W_SIZE);

           // auto touch1 = [](int i, int j, double &el) -> void { el = ((i+1) * (j + 1)) + ((j + 1) / 5.0); };
            auto touch1 = [](int i, int j, double &el) -> void
            {
                uint16_t semirand = (uint16_t )(i * 1237657 + j * 235453);
                el = ((double)semirand - 32768) / 65536.0;
            };
            input1[i]->touchOperationElementwize(touch1);

           // auto touch2 = [](int i, int j, double &el) -> void { el = ((i+4) * (j + 1)) + ((i + 1) / 5.0); };
            auto touch2 = [](int i, int j, double &el) -> void
            {
                uint16_t semirand = (uint16_t )(i * 54657 + j * 2517);
                el = ((double)semirand - 32768) / 65536.0;
            };
            input2[i]->touchOperationElementwize(touch2);
        }

        if (runsimple[testnum])
        {
            printName("Simple", TEST_H_SIZE, mem, LIMIT);
            start = PreciseTimer::currentTime();
            for (unsigned i = 0; i < LIMIT; i++) {
                Matrix &A = *input1[i % POLUTING_INPUTS];
                Matrix &B = *input2[i % POLUTING_INPUTS];
                AB = Matrix::multiplyHomebrew(A, B, false, false);
            }
            uint64_t delaySimple = start.usecsToNow();
            printResult(gflop, delaySimple, LIMIT);
        }

        /*if (!AB.isFinite()) {
            SYNC_PRINT(("Matrix is not finite\n"));
        } else {
            SYNC_PRINT(("Matrix is finite - ok\n"));
        }*/


        if (runslow[testnum])
        {
            printName("TBB", TEST_H_SIZE, mem, LIMIT);
            start = PreciseTimer::currentTime();
            for (unsigned i = 0; i < LIMIT; i++) {
                Matrix &A = *input1[i % POLUTING_INPUTS];
                Matrix &B = *input2[i % POLUTING_INPUTS];
                AB = Matrix::multiplyHomebrew(A, B, true, false);
            }
            uint64_t delayTBB = start.usecsToNow();
            printResult(gflop, delayTBB, LIMIT);

#ifdef WITH_SSE
#ifdef WITH_AVX
            printName("AVX/SSE", TEST_H_SIZE, mem, LIMIT);
#else
            printName("---/SSE", TEST_H_SIZE, mem, LIMIT);
#endif
            start = PreciseTimer::currentTime();
            for (unsigned i = 0; i < LIMIT; i++) {
                Matrix &A = *input1[i % POLUTING_INPUTS];
                Matrix &B = *input2[i % POLUTING_INPUTS];
                AB = Matrix::multiplyHomebrew(A, B, false, true);
            }
            uint64_t delayVector = start.usecsToNow();
            printResult(gflop, delayVector, LIMIT);
#endif
        }

        if (runour[testnum])
        {
            printName("All On", TEST_H_SIZE, mem, LIMIT);
            start = PreciseTimer::currentTime();
            for (unsigned i = 0; i < LIMIT; i++) {
                Matrix &A = *input1[i % POLUTING_INPUTS];
                Matrix &B = *input2[i % POLUTING_INPUTS];
                AB = Matrix::multiplyHomebrew(A, B, true, true);
            }
            uint64_t delayHome = start.usecsToNow();
            printResult(gflop, delayHome, LIMIT);

        }

        if (runfast[testnum])
        {
#ifdef WITH_BLAS
            printName("OpenBLAS", TEST_H_SIZE, mem, LIMIT);
            start = PreciseTimer::currentTime();
            for (unsigned i = 0; i < LIMIT; i++) {
                Matrix &A = *input1[i % POLUTING_INPUTS];
                Matrix &B = *input2[i % POLUTING_INPUTS];
                AB = Matrix::multiplyBlas(A, B);
            }
            uint64_t delayBlas = start.usecsToNow();
            printResult(gflop, delayBlas, LIMIT);
#endif // WITH_BLAS
        }

        for (unsigned i = 0; i < POLUTING_INPUTS; i++) {
            delete_safe(input1[i]);
            delete_safe(input2[i]);
        }
        delete[] input1;
        delete[] input2;
    }
}
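
For reference, the mem/gflop bookkeeping in the test above uses the standard operation count for a dense N x N matrix product: about 2*N^3 floating-point operations (one multiplication and one addition per inner-product term). The small sketch below shows how GFLOP/s can be derived from the measured delay; it is illustration only and does not claim to reproduce what printResult() prints.

/* Hedged sketch: deriving GFLOP/s from a measured delay, using the same
 * operation count as the test above (2*N^3 flops per N x N multiplication). */
#include <cstdint>
#include <cstdio>

void reportGflops(int n, uint64_t delayUsecs, unsigned runs)
{
    double flop  = 2.0 * (double)n * n * n;                 // flops per multiplication
    double gflop = flop / 1000000.0 / 1000.0;               // same scaling as the test above
    double secs  = (double)delayUsecs / 1000000.0 / runs;   // average seconds per run
    printf("%6d: %10.2f ms/run, %8.2f GFLOP/s\n", n, secs * 1000.0, gflop / secs);
}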
void Clustering3D::clusterStartRecursive(SortingType sortingType)
{
    mClustersCenter.clear();
    mClustersTexCenter.clear();
    mClustersFlow.clear();
    mClusterSize.clear();
    mCluster6DSize.clear();
    mHeadSize.clear();

    PreciseTimer stat = PreciseTimer::currentTime();
    switch (sortingType)
    {
        case SORT_X:
            std::sort((*mCloud).begin(), (*mCloud).end(), SortSwarmPointX());
            break;
        case SORT_Y:
            std::sort((*mCloud).begin(), (*mCloud).end(), SortSwarmPointY());
            break;
        case SORT_Z:
            std::sort((*mCloud).begin(), (*mCloud).end(), SortSwarmPointZ());
            break;
        case NONE:
            break;
    }
    mSortingTime = stat.usecsTo(PreciseTimer::currentTime());

    for (unsigned i = 0; i < mCloud->size(); i++)
    {
        if ((*mCloud)[i].cluster != 0) {
            continue;
        }

        mIndexes.clear();
        mIndexes.push_back((int)i);
        clusteringRecursive(mDepth, 0, sortingType);
        if (mClusters.back().size() > mSize )
        {
            bool isGood = true;
            int sz = (int)mClustersCenter.size();
            mClusters.back().getStat();
            for (int i = 0; i < sz; i++)
            {
                // check if new cluster not down from previous
                if (fabs(mClusters.back().mClusterInfo.point.z() - mClustersCenter[i].z()) < mHeadArea &&
                    fabs(mClusters.back().mClusterInfo.point.x() - mClustersCenter[i].x()) < mHeadArea )
                    isGood = false;
            }
            if (isGood)
            {
                mClustersTexCenter.push_back(mClusters.back().mClusterInfo.texCoor);
                mClustersCenter.push_back(mClusters.back().mClusterInfo.point);
                mClustersFlow.push_back(mClusters.back().mClusterInfo.speed);
                mClusterSize.push_back((int)mClusters.back().size());
                mCluster6DSize.push_back(mClusters.back().m6Dpoints);
                vector<double> tmpVect = mClusters.back().mEllipse.mValues;
                double forMax = 0;
                for (unsigned i = 0; i < tmpVect.size(); i++)
                {
                    forMax = forMax < tmpVect[i] ? tmpVect[i] : forMax;
                }
                mHeadSize.push_back(forMax);
            }
            if (mClustersCenter.size() == mHeadNumber) break;
        }
    }
    mClusteringTime = stat.usecsTo(PreciseTimer::currentTime());
}
Example #13
ImageCaptureInterface::FramePair AviCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    //SYNC_PRINT(("AviCapture::getFrame(): called\n"));
    //mProtectFrame.lock();
        FramePair result(NULL, NULL);
        int res;
        while ( (res = av_read_frame(mFormatContext, &mPacket)) >= 0)
        {
            if (mPacket.stream_index == mVideoStream)
            {
                int frame_finished;
                avcodec_decode_video2(mCodecContext, mFrame, &frame_finished, &mPacket);
                av_free_packet(&mPacket);
                if (frame_finished) {
//                    SYNC_PRINT(("AviCapture::getFrame(): Frame ready\n"));
                    break;
                }
            } else {
                av_free_packet(&mPacket);
            }
        }

        if (res >= 0)
        {            
            if (mFrame->format == AV_PIX_FMT_YUV420P ||
                mFrame->format == AV_PIX_FMT_YUVJ420P)
            {
                result.setRgbBufferLeft(new RGB24Buffer(mFrame->height, mFrame->width));
                result.setBufferLeft   (new G12Buffer  (mFrame->height, mFrame->width));
                for (int i = 0; i < mFrame->height; i++)
                {
                    for (int j = 0; j < mFrame->width; j++)
                    {
                        uint8_t y = (mFrame->data[0])[i * mFrame->linesize[0] + j];

                        uint8_t u = (mFrame->data[1])[(i / 2) * mFrame->linesize[1] + (j / 2)];
                        uint8_t v = (mFrame->data[2])[(i / 2) * mFrame->linesize[2] + (j / 2)];

                        result.rgbBufferLeft()->element(i,j) = RGBColor::FromYUV(y,u,v);
                        result.bufferLeft()   ->element(i,j) = (int)y << 4;
                    }
                }

                result.setRgbBufferRight (new RGB24Buffer(result.rgbBufferLeft()));
                result.setBufferRight    (new G12Buffer(result.bufferLeft()));
             } else if (mFrame->format == AV_PIX_FMT_YUV422P ) {
                SYNC_PRINT(("AviCapture::getFrame(): format AV_PIX_FMT_YUV422P \n"));

                return result;
             } else {
                SYNC_PRINT(("AviCapture::getFrame(): Not supported format %d\n", mFrame->format));
                return result;
             }
        } else {
            SYNC_PRINT(("AviCapture::getFrame(): av_read_frame failed with %d", res));
        }


        result.setTimeStampLeft (count * 10);
        result.setTimeStampRight(count * 10);

    //mProtectFrame.unlock();
    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    if (imageInterfaceReceiver != NULL)
    {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    }

    if (!mIsPaused)
    {
        //SYNC_PRINT(("AviCapture::getFrame(): New notification sending\n"));
        count++;
        frame_data_t frameData;
        frameData.timestamp = (count * 10);
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("AviCapture::getFrame(): Paused\n"));
    }

    //count++;

    return result;
}
Example #14
void testRadialApplication(int scale)
{
    cout << "Starting test: testRadialApplication ()" << endl;
    RGB24Buffer *image = new RGB24Buffer(250 * scale, 400 * scale);

    auto operation = [](int i, int j, RGBColor *pixel)
    {
        i = i / 100;
        j = j / 200;
        if ( (i % 2) &&  (j % 2))   *pixel = RGBColor::Green();
        if (!(i % 2) &&  (j % 2))   *pixel = RGBColor::Yellow();
        if ( (i % 2) && !(j % 2))   *pixel = RGBColor::Red();
        if (!(i % 2) && !(j % 2))   *pixel = RGBColor::Blue();
    };
    touchOperationElementwize(image, operation);
    LensDistortionModelParameters deformator;
    deformator.setPrincipalX(image->w / 2);
    deformator.setPrincipalY(image->h / 2);

    deformator.setTangentialX(0.000001);
    deformator.setTangentialY(0.000001);

    deformator.setAspect(1.0);
    deformator.setScale(1.0);

    deformator.mKoeff.push_back( 0.0001);
    deformator.mKoeff.push_back(-0.00000002);
    deformator.mKoeff.push_back( 0.00000000000003);

    RadialCorrection T(deformator);
    PreciseTimer timer;

    /**
     * 1. Compute the reverse image
     *
     *  Radial correction stores the transformation from the real image to the ideal one.
     *  However, to transform a buffer we need the inverse image of each pixel.
     *
     *  We can either compute the inverse with the "analytical method" -
     *  the example is in testRadialInversion() -
     *  or cache the inverse, as we do here.
     *
     **/
    cout << "Starting deformation inversion... " << flush;

    timer = PreciseTimer::currentTime();
    DisplacementBuffer *inverse = DisplacementBuffer::CacheInverse(&T,
        image->h, image->w,
        0.0,0.0,
        (double)image->w, (double)image->h, 0.5
    );
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    cout << "Applying deformation inversion... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *deformed = image->doReverseDeformationBlTyped<DisplacementBuffer>(inverse);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    /**
     * 2. There are several ways to invert the transform; two are shown below:
     *   1. Apply T directly
     *   2. Create a displacement buffer that caches the inverse
     *
     **/

    /*2.1*/
    cout << "Applying forward deformation... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *corrected21 = deformed->doReverseDeformationBlTyped<RadialCorrection>(&T);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    RGB24Buffer *diff21 = RGB24Buffer::diff(image, corrected21);

    /*2.2*/
    cout << "Preparing forward deformation cache... " << flush;
    timer = PreciseTimer::currentTime();
    DisplacementBuffer *forward = new DisplacementBuffer(&T, image->h, image->w, false);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    cout << "Applying forward deformation cache... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *corrected22 = deformed->doReverseDeformationBlTyped<DisplacementBuffer>(forward);
    cout << "done in: " << timer.usecsToNow()  << "us"  << endl;

    RGB24Buffer *diff22 = RGB24Buffer::diff(image, corrected22);


    BMPLoader().save("input.bmp"                , image);
    BMPLoader().save("forward.bmp"              , deformed);
    BMPLoader().save("backward-direct.bmp"      , corrected21);
    BMPLoader().save("backward-direct-diff.bmp" , diff21);

    BMPLoader().save("backward-cached.bmp"      , corrected22);
    BMPLoader().save("backward-cached-diff.bmp" , diff22);

    delete_safe(image);
    delete_safe(deformed);
    delete_safe(forward);
    delete_safe(inverse);
    delete_safe(diff21);
    delete_safe(diff22);
    delete_safe(corrected21);
    delete_safe(corrected22);

}
Example #15
void testRadialInversion(int scale)
{
    RGB24Buffer *image = new RGB24Buffer(250 * scale, 400 * scale);

    auto operation = [](int i, int j, RGBColor *pixel)
    {
        i = i / 100;
        j = j / 200;
        if ( (i % 2) &&  (j % 2))   *pixel = RGBColor::Green();
        if (!(i % 2) &&  (j % 2))   *pixel = RGBColor::Yellow();
        if ( (i % 2) && !(j % 2))   *pixel = RGBColor::Red();
        if (!(i % 2) && !(j % 2))   *pixel = RGBColor::Blue();
    };
    touchOperationElementwize(image, operation);

#if 0
    LensDistortionModelParameters deformator;
    deformator.setPrincipalX(image->w / 2);
    deformator.setPrincipalY(image->h / 2);
    deformator.setNormalizingFocal(deformator.principalPoint().l2Metric());

    deformator.setTangentialX(0.001);
    deformator.setTangentialY(0.001);

    deformator.setAspect(1.0);
    deformator.setScale(1.0);

    deformator.mKoeff.push_back( 0.1);
    deformator.mKoeff.push_back(-0.2);
    deformator.mKoeff.push_back( 0.3);
#else
    LensDistortionModelParameters deformator;
    deformator.setMapForward(false);
    deformator.setPrincipalX(480);
    deformator.setPrincipalY(360);
    deformator.setNormalizingFocal(734.29999999999995);

    deformator.setTangentialX(0.00);
    deformator.setTangentialY(0.00);

    deformator.setShiftX(0.00);
    deformator.setShiftY(0.00);


    deformator.setAspect(1.0);
    deformator.setScale (1.0);

    deformator.mKoeff.clear();
    deformator.mKoeff.push_back( 0);
    deformator.mKoeff.push_back( -0.65545);
    deformator.mKoeff.push_back( 0);
    deformator.mKoeff.push_back( 8.2439);
//    deformator.mKoeff.push_back( 0);
//    deformator.mKoeff.push_back( 8.01);
#endif


    RadialCorrection T(deformator);
    PreciseTimer timer;

    cout << "Initial deformation... " << endl;
    cout << T.mParams << flush;

    cout << "Starting deformation... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *deformed = image->doReverseDeformationBlTyped<RadialCorrection>(&T);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;

    /* */
    int inversionGridStep = 30;

    cout << "Starting invertion... " << flush;
    RadialCorrection invert = T.invertCorrection(image->h, image->w, inversionGridStep);
    cout << "done" << endl;

    cout << "Starting backprojection... " << flush;
    timer = PreciseTimer::currentTime();
    RGB24Buffer *backproject = deformed->doReverseDeformationBlTyped<RadialCorrection>(&invert);
    cout << "done in: " << timer.usecsToNow() << "us" << endl;
    cout << "done" << endl;


    RGB24Buffer *debug = new RGB24Buffer(image->getSize());
    /* Show visual */
    double dh = (double)image->h / (inversionGridStep - 1);
    double dw = (double)image->w / (inversionGridStep - 1);
    for (int i = 0; i < inversionGridStep; i++)
    {
        for (int j = 0; j < inversionGridStep; j++)
        {
             Vector2dd point(dw * j, dh * i);
             debug->drawCrosshare1(point, RGBColor::Yellow());
             Vector2dd deformed    = T.mapToUndistorted(point); /* this could be cached */
             Vector2dd backproject = invert.mapToUndistorted(deformed);

             debug->drawCrosshare1(backproject, RGBColor::Green());
        }
    }

    BMPLoader().save("input.bmp"      , image);
    BMPLoader().save("debug.bmp"      , debug);
    BMPLoader().save("forward.bmp"    , deformed);
    BMPLoader().save("backproject.bmp", backproject);

    delete_safe(image);
    delete_safe(debug);
    delete_safe(deformed);
    delete_safe(backproject);
}