Example #1
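Frame getter of a WebVfx-based MLT transition: the A-frame image is requested in mlt_image_rgb24 format and used as both source and output, the B-frame image is passed in as the target, and the WebVfx service manager renders the composited result in place.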
static int transitionGetImage(mlt_frame aFrame, uint8_t **image, mlt_image_format *format, int *width, int *height, int /*writable*/) {
    int error = 0;

    mlt_frame bFrame = mlt_frame_pop_frame(aFrame);
    mlt_transition transition = (mlt_transition)mlt_frame_pop_service(aFrame);

    mlt_position position = mlt_transition_get_position(transition, aFrame);
    mlt_position length = mlt_transition_get_length(transition);

    // Get the aFrame image; we will write our output to it
    *format = mlt_image_rgb24;
    if ((error = mlt_frame_get_image(aFrame, image, format, width, height, 1)) != 0)
        return error;
    // Get the bFrame image; we won't write to it
    uint8_t *bImage = NULL;
    int bWidth = 0, bHeight = 0;
    if ((error = mlt_frame_get_image(bFrame, &bImage, format, &bWidth, &bHeight, 0)) != 0)
        return error;

    { // Scope the lock
        MLTWebVfx::ServiceLocker locker(MLT_TRANSITION_SERVICE(transition));
        if (!locker.initialize(*width, *height))
            return 1;

        MLTWebVfx::ServiceManager* manager = locker.getManager();
        WebVfx::Image renderedImage(*image, *width, *height,
                                    *width * *height * WebVfx::Image::BytesPerPixel);
        manager->setImageForName(manager->getSourceImageName(), &renderedImage);
        WebVfx::Image targetImage(bImage, bWidth, bHeight,
                                  bWidth * bHeight * WebVfx::Image::BytesPerPixel);
        manager->setImageForName(manager->getTargetImageName(), &targetImage);
        manager->render(&renderedImage, position, length);
    }

    return error;
}
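For reference, a minimal standalone sketch of the packed-RGB buffer arithmetic the example relies on; treating a pixel as 3 bytes is an assumption that matches mlt_image_rgb24 and stands in for WebVfx::Image::BytesPerPixel:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    // Assumption: mlt_image_rgb24 frames are packed 24-bit RGB, i.e. 3 bytes per
    // pixel, which is the value the example passes as WebVfx::Image::BytesPerPixel.
    const int width = 640, height = 480;
    const int bytesPerPixel = 3;
    const std::size_t byteCount =
        static_cast<std::size_t>(width) * height * bytesPerPixel; // size handed to WebVfx::Image

    std::vector<std::uint8_t> frame(byteCount, 0);

    // Pixel (x, y) starts at byte offset (y * width + x) * bytesPerPixel.
    const int x = 10, y = 20;
    const std::size_t offset = (static_cast<std::size_t>(y) * width + x) * bytesPerPixel;
    frame[offset + 0] = 255; // R
    frame[offset + 1] = 0;   // G
    frame[offset + 2] = 0;   // B

    std::printf("frame buffer: %zu bytes, pixel (%d,%d) at byte offset %zu\n",
                byteCount, x, y, offset);
    return 0;
}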
Example #2
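A command-line tool that walks one (or two) image directories and writes each pair of images joined side by side into a "<path>_joined" directory; the -inv option swaps the left and right images.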
int main(int argc, char * argv[])
{
	if(argc < 2)
	{
		showUsage();
	}

	bool inv = false;
	for(int i=1; i<argc-1; ++i)
	{
		if(strcmp(argv[i], "-inv") == 0)
		{
			inv = true;
			printf(" Inversing option activated...\n");
			continue;
		}
		if(argc > 3)
		{
			printf(" Unrecognized option: \"%s\"\n", argv[i]);
			showUsage();
		}
	}

	std::string path, pathRight;

	if(argc == 3 && !inv)
	{
		//two paths
		path = argv[1];
		pathRight = argv[2];

		printf(" Path left = %s\n", path.c_str());
		printf(" Path right = %s\n", pathRight.c_str());
	}
	else
	{
		path = argv[argc-1];
		printf(" Path = %s\n", path.c_str());
	}

	UDirectory dir(path, "jpg bmp png tiff jpeg");
	UDirectory dirRight(pathRight, "jpg bmp png tiff jpeg");
	if(!dir.isValid() || (!pathRight.empty() && !dirRight.isValid()))
	{
		printf("Path invalid!\n");
		exit(-1);
	}

	std::string targetDirectory = path+"_joined";
	UDirectory::makeDir(targetDirectory);
	printf(" Creating directory \"%s\"\n", targetDirectory.c_str());


	std::string fileNameA = dir.getNextFilePath();
	std::string fileNameB;
	if(dirRight.isValid())
	{
		fileNameB = dirRight.getNextFilePath();
	}
	else
	{
		fileNameB = dir.getNextFilePath();
	}

	int i=1;
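	// Join each pair of images side by side until one of the directories runs out of files.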
	while(!fileNameA.empty() && !fileNameB.empty())
	{
		if(inv)
		{
			std::string tmp = fileNameA;
			fileNameA = fileNameB;
			fileNameB = tmp;
		}

		std::string ext = UFile::getExtension(fileNameA);

		std::string targetFilePath = targetDirectory+UDirectory::separator()+uNumber2Str(i++)+"."+ext;

		cv::Mat imageA = cv::imread(fileNameA.c_str());
		cv::Mat imageB = cv::imread(fileNameB.c_str());

		fileNameA.clear();
		fileNameB.clear();

		if(!imageA.empty() && !imageB.empty())
		{
			cv::Size sizeA = imageA.size();
			cv::Size sizeB = imageB.size();
			cv::Size targetSize(0,0);
			targetSize.width = sizeA.width + sizeB.width;
			targetSize.height = sizeA.height > sizeB.height ? sizeA.height : sizeB.height;
			// Zero-initialize so any area not covered by the images stays black.
			cv::Mat targetImage = cv::Mat::zeros(targetSize, imageA.type());

			cv::Mat roiA(targetImage, cv::Rect( 0, 0, sizeA.width, sizeA.height ));
			imageA.copyTo(roiA);
			cv::Mat roiB( targetImage, cv::Rect( sizeA.width, 0, sizeB.width, sizeB.height ) );
			imageB.copyTo(roiB);

			if(!cv::imwrite(targetFilePath.c_str(), targetImage))
			{
				printf("Error : saving to \"%s\" goes wrong...\n", targetFilePath.c_str());
			}
			else
			{
				printf("Saved \"%s\" \n", targetFilePath.c_str());
			}

			fileNameA = dir.getNextFilePath();
			if(dirRight.isValid())
			{
				fileNameB = dirRight.getNextFilePath();
			}
			else
			{
				fileNameB = dir.getNextFilePath();
			}
		}
		else
		{
			printf("Error: loading images failed!\n");
		}
	}
	printf("%d files processed\n", i-1);

	return 0;
}
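A minimal, self-contained sketch of the side-by-side composition used above, with placeholder file names (left.png, right.png, joined.png); it assumes both inputs are ordinary 8-bit BGR images as returned by cv::imread:

#include <algorithm>
#include <opencv2/opencv.hpp>

int main()
{
	cv::Mat left  = cv::imread("left.png");   // placeholder input names
	cv::Mat right = cv::imread("right.png");
	if(left.empty() || right.empty())
		return 1;

	// The canvas is as wide as both images together and as tall as the taller one;
	// zero-initialized so the strip below the shorter image stays black.
	cv::Size targetSize(left.cols + right.cols, std::max(left.rows, right.rows));
	cv::Mat joined = cv::Mat::zeros(targetSize, left.type());

	left.copyTo(joined(cv::Rect(0, 0, left.cols, left.rows)));
	right.copyTo(joined(cv::Rect(left.cols, 0, right.cols, right.rows)));

	return cv::imwrite("joined.png", joined) ? 0 : 1;
}

When the two images have the same height and type, a single call to cv::hconcat(left, right, joined) gives the same result.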
Example #3
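An RTAB-Map camera test program: it grabs frames from the RGB-D or stereo driver selected on the command line, shows the image and depth (or left/right) windows plus a registered point cloud when calibration is available, and can optionally record the stereo stream with -save_stereo.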
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	//ULogger::setPrintTime(false);
	//ULogger::setPrintWhere(false);

	int driver = 0;
	std::string stereoSavePath;
	float rate = 0.0f;
	std::string fourcc = "MJPG";
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		for(int i=1; i<argc; ++i)
		{
			if(strcmp(argv[i], "-rate") == 0)
			{
				++i;
				if(i < argc)
				{
					rate = uStr2Float(argv[i]);
					if(rate < 0.0f)
					{
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-save_stereo") == 0)
			{
				++i;
				if(i < argc)
				{
					stereoSavePath = argv[i];
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-fourcc") == 0)
			{
				++i;
				if(i < argc)
				{
					fourcc = argv[i];
					if(fourcc.size() != 4)
					{
						UERROR("fourcc should be 4 characters.");
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0)
			{
				showUsage();
			}
			else if(i < argc-1)
			{
				printf("Unrecognized option \"%s\"\n", argv[i]);
				showUsage();
			}

			// last argument is the camera driver index
			driver = atoi(argv[i]);
			if(driver < 0 || driver > 8)
			{
				UERROR("driver should be between 0 and 8.");
				showUsage();
			}
		}
	}
	UINFO("Using driver %d", driver);

	rtabmap::Camera * camera = 0;
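	// Drivers 0-5 are RGB-D cameras; drivers 6-8 are stereo cameras (DC1394, FlyCapture2, ZED).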
	if(driver < 6)
	{
		if(!stereoSavePath.empty())
		{
			UWARN("-save_stereo option cannot be used with RGB-D drivers.");
			stereoSavePath.clear();
		}

		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni();
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2();
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect();
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeColor2DepthSD);
		}
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
	}
	else if(driver == 8)
	{
		if(!rtabmap::CameraStereoZed::available())
		{
			UERROR("Not built with ZED sdk support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoZed(0);
	}
	else
	{
		UFATAL("Unknown camera driver %d", driver);
	}

	if(!camera->init())
	{
		printf("Camera init failed! Please select another driver (see \"--help\").\n");
		delete camera;
		exit(1);
	}

	rtabmap::SensorData data = camera->takeImage();
	if(data.imageRaw().cols != data.depthOrRightRaw().cols || data.imageRaw().rows != data.depthOrRightRaw().rows)
	{
		UWARN("RGB (%d/%d) and depth (%d/%d) frames are not the same size! The registered cloud cannot be shown.",
				data.imageRaw().cols, data.imageRaw().rows, data.depthOrRightRaw().cols, data.depthOrRightRaw().rows);
	}
	pcl::visualization::CloudViewer * viewer = 0;
	if(!data.stereoCameraModel().isValidForProjection() && (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UWARN("Camera not calibrated! The registered cloud cannot be shown.");
	}
	else
	{
		viewer = new pcl::visualization::CloudViewer("cloud");
	}
	rtabmap::Transform t(1, 0, 0, 0,
						 0, -1, 0, 0,
						 0, 0, -1, 0);

	cv::VideoWriter videoWriter;
	UDirectory dir;
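	// Set up optional recording of the stereo stream: either a single side-by-side AVI via cv::VideoWriter, or separate left/right image directories.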
	if(!stereoSavePath.empty() &&
	   !data.imageRaw().empty() &&
	   !data.rightRaw().empty())
	{
		if(UFile::getExtension(stereoSavePath).compare("avi") == 0)
		{
			if(data.imageRaw().size() == data.rightRaw().size())
			{
				if(rate <= 0)
				{
					UERROR("You should set the input rate when saving stereo images to a video file.");
					showUsage();
				}
				cv::Size targetSize = data.imageRaw().size();
				targetSize.width *= 2;
				UASSERT(fourcc.size() == 4);
				videoWriter.open(
						stereoSavePath,
						CV_FOURCC(fourcc.at(0), fourcc.at(1), fourcc.at(2), fourcc.at(3)),
						rate,
						targetSize,
						data.imageRaw().channels() == 3);
			}
			else
			{
				UERROR("Images not the same size, cannot save stereo images to the video file.");
			}
		}
		else if(UDirectory::exists(stereoSavePath))
		{
			UDirectory::makeDir(stereoSavePath+"/"+"left");
			UDirectory::makeDir(stereoSavePath+"/"+"right");
		}
		else
		{
			UERROR("Directory \"%s\" doesn't exist.", stereoSavePath.c_str());
			stereoSavePath.clear();
		}
	}

	// to catch the ctrl-c
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	int id=1;
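	// Main capture loop: display the frames, show the registered cloud when possible, and record if requested.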
	while(!data.imageRaw().empty() && (viewer==0 || !viewer->wasStopped()) && running)
	{
		cv::Mat rgb = data.imageRaw();
		if(!data.depthRaw().empty() && (data.depthRaw().type() == CV_16UC1 || data.depthRaw().type() == CV_32FC1))
		{
			// depth
			cv::Mat depth = data.depthRaw();
			if(depth.type() == CV_32FC1)
			{
				depth = rtabmap::util2d::cvtDepthFromFloat(depth);
			}

			if(rgb.cols == depth.cols && rgb.rows == depth.rows &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromDepthRGB(
						rgb, depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
			else if(!depth.empty() &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::cloudFromDepth(
						depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}

			// Scale the depth image to 8 bits for display.
			cv::Mat tmp;
			unsigned short min = 0, max = 2048;
			uMinMax((unsigned short*)depth.data, depth.rows*depth.cols, min, max);
			depth.convertTo(tmp, CV_8UC1, max > 0 ? 255.0/max : 1.0);

			cv::imshow("Video", rgb); // show frame
			cv::imshow("Depth", tmp);
		}
		else if(!data.rightRaw().empty())
		{
			// stereo
			cv::Mat right = data.rightRaw();
			cv::imshow("Left", rgb); // show frame
			cv::imshow("Right", right);

			if(rgb.cols == right.cols && rgb.rows == right.rows && data.stereoCameraModel().isValidForProjection())
			{
				if(right.channels() == 3)
				{
					cv::cvtColor(right, right, CV_BGR2GRAY);
				}
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromStereoImages(
						rgb, right,
						data.stereoCameraModel());
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
		}

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		if(videoWriter.isOpened())
		{
			cv::Mat left = data.imageRaw();
			cv::Mat right = data.rightRaw();
			if(left.size() == right.size())
			{
				cv::Size targetSize = left.size();
				targetSize.width *= 2;
				cv::Mat targetImage(targetSize, left.type());
				if(right.type() != left.type())
				{
					cv::Mat tmp;
					cv::cvtColor(right, tmp, left.channels()==3?CV_GRAY2BGR:CV_BGR2GRAY);
					right = tmp;
				}
				UASSERT(left.type() == right.type());

				cv::Mat roiA(targetImage, cv::Rect( 0, 0, left.size().width, left.size().height ));
				left.copyTo(roiA);
				cv::Mat roiB( targetImage, cv::Rect( left.size().width, 0, left.size().width, left.size().height ) );
				right.copyTo(roiB);

				videoWriter.write(targetImage);
				printf("Saved frame %d to \"%s\"\n", id, stereoSavePath.c_str());
			}
			else
			{
				UERROR("Left and right images are not the same size!?");
			}
		}
		else if(!stereoSavePath.empty())
		{
			cv::imwrite(stereoSavePath+"/"+"left/"+uNumber2Str(id) + ".jpg", data.imageRaw());
			cv::imwrite(stereoSavePath+"/"+"right/"+uNumber2Str(id) + ".jpg", data.rightRaw());
			printf("Saved frames %d to \"%s/left\" and \"%s/right\" directories\n", id, stereoSavePath.c_str(), stereoSavePath.c_str());
		}
		++id;
		data = camera->takeImage();
	}
	printf("Closing...\n");
	if(viewer)
	{
		delete viewer;
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
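As a side note, the depth display scaling used in the loop above can be reproduced with plain OpenCV calls; a minimal sketch, with "depth.png" as a placeholder for a 16-bit depth image:

#include <opencv2/opencv.hpp>

int main()
{
	// Load a 16-bit depth image (placeholder file name).
	cv::Mat depth = cv::imread("depth.png", cv::IMREAD_ANYDEPTH);
	if(depth.empty() || depth.type() != CV_16UC1)
		return 1;

	// Same idea as the uMinMax()/convertTo() pair above: scale by the maximum
	// depth value so the full range maps into 8 bits for cv::imshow().
	double minVal = 0.0, maxVal = 0.0;
	cv::minMaxLoc(depth, &minVal, &maxVal);

	cv::Mat depth8U;
	depth.convertTo(depth8U, CV_8UC1, maxVal > 0.0 ? 255.0/maxVal : 1.0);

	cv::imshow("Depth", depth8U);
	cv::waitKey(0);
	return 0;
}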