int svlOSGImage::Process( svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput )
{
    // Pass-through filter: forwards the input sample unchanged and, as a side
    // effect, wraps video channel 0 of the incoming image into an osg::Image
    // so it can be displayed by the OSG scene graph.
    //
    // \param procInfo   multi-thread bookkeeping (unused here; work is not split)
    // \param syncInput  incoming sample, expected to be an svlSampleImage
    // \param syncOutput set to syncInput (this filter does not modify pixels)
    // \return SVL_OK on success
    syncOutput = syncInput;
    _SkipIfAlreadyProcessed( syncInput, syncOutput );
    _SkipIfDisabled();

    svlSampleImage* img = dynamic_cast<svlSampleImage*>(syncInput);

    // NOTE(review): the original code queried GetVideoChannels() into an
    // unused local; removed. Only channel 0 is mirrored into OSG — if
    // multi-channel support is intended, this needs a loop. TODO confirm.
    // The pixel buffer is assumed to be 8-bit BGR, 3 bytes/pixel.
    osg::ref_ptr<osg::Image> osgimg = new osg::Image;
    osgimg->setImage( img->GetWidth( 0 ),
                      img->GetHeight( 0 ),
                      1,                        // 2D image: depth of 1
                      3,                        // internal format: 3 components
                      GL_BGR,
                      GL_UNSIGNED_BYTE,
                      img->GetUCharPointer( 0 ),
                      osg::Image::NO_DELETE );  // svl retains buffer ownership
    image->SetImage( osgimg );

    return SVL_OK;
}
int svlFilterImageBlobTracker::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // The image sample itself passes through untouched; blob data arrives on
    // the auxiliary "blobs" input and (once tracking is implemented) leaves
    // on the "blobs" output.
    syncOutput = syncInput;
    _SkipIfAlreadyProcessed(syncInput, syncOutput);
    _SkipIfDisabled();

    svlSampleBlobs *pulled_blobs = 0;

    _OnSingleThread(procInfo)
    {
        // Try to fetch the newest blob sample without blocking (0.0 timeout).
        svlFilterInput *blob_input = GetInput("blobs");
        if (blob_input) {
            pulled_blobs = dynamic_cast<svlSampleBlobs*>(blob_input->PullSample(true, 0.0));
        }
        if (pulled_blobs) {
            // TO DO: implement tracking
        }
    }

    _SynchronizeThreads(procInfo);

    _OnSingleThread(procInfo)
    {
        // Forward the (currently untracked) blob sample downstream, if any.
        if (pulled_blobs) GetOutput("blobs")->PushSample(pulled_blobs);
    }

    return SVL_OK;
}
int svlFilterImageColorSegmentation::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // Color-segments the input image into the filter-owned OutputImage buffer;
    // note the output sample is OutputImage, NOT the input sample.
    syncOutput = OutputImage;
    _SkipIfAlreadyProcessed(syncInput, syncOutput);
    _SkipIfDisabled();

    svlSampleImage* img = dynamic_cast<svlSampleImage*>(syncInput);
    unsigned int videochannels = img->GetVideoChannels();
    unsigned int idx;

    // Distribute video channels across the worker threads; each thread
    // segments whole channels independently.
    _ParallelLoop(procInfo, idx, videochannels)
    {
        ComputeSegmentation(img, idx);
    }
int svlFilterImageExposureCorrection::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // In-place exposure correction: the input sample is modified and passed
    // through as the output.
    syncOutput = syncInput;
    _SkipIfAlreadyProcessed(syncInput, syncOutput);
    _SkipIfDisabled();

    svlSampleImage* img = dynamic_cast<svlSampleImage*>(syncInput);
    const unsigned int videochannels = img->GetVideoChannels();
    unsigned int vch;

    // One video channel per worker thread; Brightness/Contrast/Gamma are
    // shared filter settings, Exposure[vch] is per-channel scratch/state.
    _ParallelLoop(procInfo, vch, videochannels)
    {
        svlImageProcessing::SetExposure(img, vch, Brightness, Contrast, Gamma, Exposure[vch]);
    }
int svlFilterSplitter::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // The trunk output receives the sample untouched.
    syncOutput = syncInput;
    _SkipIfDisabled();

    _OnSingleThread(procInfo)
    {
        // Every asynchronous branch gets the same input sample pushed into
        // its own buffer; missing (null) outputs are simply skipped.
        const unsigned int outputCount = static_cast<unsigned int>(AsyncOutputs.size());
        for (unsigned int ch = 0; ch < outputCount; ch ++) {
            if (!AsyncOutputs[ch]) continue;
            AsyncOutputs[ch]->PushSample(syncInput);
        }
    }

    return SVL_OK;
}
int svlFilterImageWindow::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // Displays the incoming image in the window(s) managed by WindowManager;
    // the sample itself passes through unchanged.
    syncOutput = syncInput;
    _SkipIfDisabled();

    svlSampleImage* img = dynamic_cast<svlSampleImage*>(syncInput);
    unsigned int videochannels = img->GetVideoChannels();
    unsigned int idx;

    // Lock the display buffers once (single thread), then wait for all
    // threads before they start copying pixel data in parallel.
    _OnSingleThread(procInfo)
    {
        WindowManager->LockBuffers();
    }
    _SynchronizeThreads(procInfo);

    // Each worker thread hands one channel's pixel buffer to the window manager.
    _ParallelLoop(procInfo, idx, videochannels)
    {
        WindowManager->SetImageBuffer(img->GetUCharPointer(idx), img->GetDataSize(idx), idx);
    }
int svlFilterVideoFileWriter::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // Writes incoming video frames to file(s), one codec per video channel.
    // Pass-through filter: the sample is forwarded unchanged.
    //
    // Threading protocol (order is load-bearing — do not reorder):
    //   1. single thread latches pending record/pause commands and enters CS;
    //   2. all threads decide together whether this frame is recorded;
    //   3. frames are written (multi-threaded codec or per-channel split);
    //   4. single thread writes timestamps, leaves CS, handles errors.
    // Every early return below must be preceded by CS.Leave() on the single
    // thread, since CS.Enter() was done on the single thread above.
    syncOutput = syncInput;
    _SkipIfDisabled();

    _OnSingleThread(procInfo)
    {
        ErrorInProcess = false;
        // Latch an asynchronous start/stop request into the working state.
        if (Action) {
            CaptureLength = TargetCaptureLength;
            ActionTime = TargetActionTime;
            Action = false;
        }
        CS.Enter();
    }
    _SynchronizeThreads(procInfo);

    if (CaptureLength == 0) {
        // Paused: frames newer than the stop time are dropped.
        if (ActionTime < syncInput->GetTimestamp()) {
            IsRecording = false;
            _OnSingleThread(procInfo) CS.Leave();
            return SVL_OK;
        }
        // Process remaining samples in the buffer when paused
    }
    else {
        // Drop frames when restarted
        if (ActionTime > syncInput->GetTimestamp()) {
            _OnSingleThread(procInfo) CS.Leave();
            return SVL_OK;
        }
    }
    IsRecording = true;

    svlSampleImage* img = dynamic_cast<svlSampleImage*>(syncOutput);
    const unsigned int videochannels = img->GetVideoChannels();
    unsigned int idx;

    if (CodecsMultithreaded) {
        // Codecs are multithreaded, so it's worth
        // splitting work between all threads
        for (idx = 0; idx < videochannels; idx ++) {
            // Codec is responsible for thread synchronzation
            if (Codec[idx] && Codec[idx]->Write(procInfo, *img, idx) != SVL_OK) {
                CMN_LOG_CLASS_INIT_ERROR << "Process: failed to write video frame on channel: " << idx
                                         << ", in thread: " << procInfo->ID << std::endl;
                ErrorOnChannel[idx] = true;
                ErrorInProcess = true;
            }
        }
    }
    else {
        // Codecs are not multithreaded, so assigning
        // each video channel to a single thread
        _ParallelLoop(procInfo, idx, videochannels)
        {
            if (Codec[idx] && Codec[idx]->Write(0, *img, idx) != SVL_OK) {
                CMN_LOG_CLASS_INIT_ERROR << "Process: failed to write video frame on channel: " << idx << std::endl;
                ErrorOnChannel[idx] = true;
                ErrorInProcess = true;
            }
        }
    }
    _SynchronizeThreads(procInfo);

    _OnSingleThread(procInfo)
    {
        for (idx = 0; idx < videochannels; idx ++) {
            if (Codec[idx]) {
                FramesWritten[idx] ++;

                if (TimestampsFile[idx]) {
                    // Timestamps are stored relative to the first written frame.
                    double time = syncInput->GetTimestamp();
                    if (FramesWritten[idx] > 1) {
                        time -= FirstTimestamp[idx];
                    }
                    else {
                        FirstTimestamp[idx] = time;
                    }
                    // Line format: "<frame_index> <seconds>\r\n", 4 decimals.
                    std::stringstream ts;
                    ts << (FramesWritten[idx] - 1) << " " << std::fixed << std::setprecision(4) << time << "\r\n";
                    long long int len = ts.str().length();
                    if (TimestampsFile[idx]->Write(ts.str().c_str(), len) < len) {
                        CMN_LOG_CLASS_INIT_WARNING << "Process: failed to write timestamp on channel: " << idx << std::endl;
                    }
                }
            }
        }

        CS.Leave();

        if (ErrorInProcess) {
            // A failed write poisons the file: close every channel that errored.
            for (idx = 0; idx < videochannels; idx ++) {
                if (ErrorOnChannel[idx]) {
                    CMN_LOG_CLASS_INIT_ERROR << "Process: attempting to close video file on channel: " << idx << std::endl;
                    CloseFile(idx);
                }
            }
            IsRecording = false;
        }

        // CaptureLength < 0 means record indefinitely; a positive value counts
        // down the remaining frames to capture.
        if (CaptureLength > 0) {
            IsRecording = true;
            CaptureLength --;
        }
    }

    return SVL_OK;
}
int svlFilterImageCenterFinder::Process(svlProcInfo* procInfo, svlSample* syncInput, svlSample* &syncOutput)
{
    // Locates a bright region's center and extent per video channel by
    // thresholding pixels and analyzing horizontal/vertical projections.
    // Pass-through filter: results go into the *Internal member arrays
    // (and optionally into the fitted Ellipse), not into the sample.
    syncOutput = syncInput;
    _SkipIfAlreadyProcessed(syncInput, syncOutput);
    _SkipIfDisabled();

    svlSampleImage* image = dynamic_cast<svlSampleImage*>(syncInput);
    const unsigned int videochannels = image->GetVideoChannels();
    unsigned int a, i, j, x, y, rx, ry, width, height, vch, hsum, vsum, pix, stride;
    unsigned int *h, *h2, *v, *v2;
    unsigned char *img;
    // Per-pixel threshold on the sum of the 3 color components.
    const unsigned int thrsh = ThresholdLevel * 3;

    // One video channel per worker thread.
    _ParallelLoop(procInfo, vch, videochannels)
    {
        width  = image->GetWidth(vch);
        height = image->GetHeight(vch);
        // NOTE(review): 'stride' is assigned but not used in the visible part
        // of this function — possibly used further down (view is truncated).
        stride = width * 3 - 2;

        img = image->GetUCharPointer(vch);
        v = ProjectionV[vch].Pointer();
        h = ProjectionH[vch].Pointer();
        memset(v, 0, sizeof(unsigned int) * width);

        // Generate projections: for each pixel whose component sum reaches
        // the threshold, add 255 to its column (v) and row (h) accumulators;
        // afterwards normalize columns by height and rows by width.
        for (j = 0; j < height; j ++) {
            v2 = v;
            hsum = 0;
            for (i = 0; i < width; i ++) {
                pix  = *img; img ++;   // component sum of one 3-byte pixel
                pix += *img; img ++;
                pix += *img; img ++;
                if (pix >= thrsh) {
                    hsum += 255;
                    *v2 += 255;
                }
                v2 ++;
            }
            *h = hsum / width;         // average row intensity
            h ++;
        }
        for (i = 0; i < width; i ++) {
            *v /= height;              // average column intensity
            v ++;
        }

        // Find center of weight (weighted mean of the projections);
        // fall back to the previous center when a projection is all zero.
        v = ProjectionV[vch].Pointer();
        h = ProjectionH[vch].Pointer();

        x = vsum = 0;
        for (i = 0; i < width; i ++) {
            a = *v; v ++;
            vsum += a;
            x += a * i;
        }
        if (vsum != 0) x /= vsum;
        else x = CenterXInternal[vch];

        y = hsum = 0;
        for (j = 0; j < height; j ++) {
            a = *h; h ++;
            hsum += a;
            y += a * j;
        }
        if (hsum != 0) y /= hsum;
        else y = CenterYInternal[vch];

        // Find radii: expand symmetrically from the center until MassRatio
        // percent of the total projection mass is covered. (The original
        // comment said 60% — that is presumably MassRatio's default value.)
        v = v2 = ProjectionV[vch].Pointer() + x;
        h = h2 = ProjectionH[vch].Pointer() + y;
        hsum = hsum * MassRatio / 100;
        vsum = vsum * MassRatio / 100;

        a = rx = 0;
        while (a < vsum) {
            a += *v + *v2;
            v ++; v2 --;
            rx ++;
        }
        a = ry = 0;
        while (a < hsum) {
            a += *h + *h2;
            h ++; h2 --;
            ry ++;
        }

        // Smoothing results: exponential blend with the previous frame's
        // values (weight 'Smoothing' on the old value, 1 on the new one).
        if (FrameCounter > 0 && Smoothing > 0.0) {
            x  = static_cast<int>((Smoothing * CenterXInternal[vch] + x ) / (1.0 + Smoothing));
            y  = static_cast<int>((Smoothing * CenterYInternal[vch] + y ) / (1.0 + Smoothing));
            rx = static_cast<int>((Smoothing * RadiusXInternal[vch] + rx) / (1.0 + Smoothing));
            ry = static_cast<int>((Smoothing * RadiusYInternal[vch] + ry) / (1.0 + Smoothing));
        }

        // Storing results
        CenterXInternal[vch] = x;
        CenterYInternal[vch] = y;
        RadiusXInternal[vch] = rx;
        RadiusYInternal[vch] = ry;

        // Optional refinement: fit an ellipse around the projection center and
        // override center/radii from its (margin-shrunk) bounding box.
        if (EllipseFittingEnabled) {
            if (FindEllipse(image, vch, x, y, Ellipse[vch])) {
                // Adjust with margin
                Ellipse[vch].rx = std::max(0, Ellipse[vch].rx - EllipseMargin);
                Ellipse[vch].ry = std::max(0, Ellipse[vch].ry - EllipseMargin);

                svlRect bounding;
                Ellipse[vch].GetBoundingRect(bounding);

                CenterXInternal[vch] = Ellipse[vch].cx;
                CenterYInternal[vch] = Ellipse[vch].cy;
                RadiusXInternal[vch] = (bounding.right - bounding.left) / 2;
                RadiusYInternal[vch] = (bounding.bottom - bounding.top) / 2;

                if (EllipseFittingDrawEllipse) svlDraw::Ellipse(image, vch, Ellipse[vch], svlRGB(255, 255, 255));
                if (EllipseMaskEnabled) UpdateMaskImage(vch, Ellipse[vch]);
            }
        }
    }