Example #1
	ImageDataWriteThread::DataCache ImageDataWriteThread::getData4Write()
	{
		DataCache cache;
		m_writeMutex.lock();
		if(m_data4Write.empty())
		{
			if(m_totalCaches.size() < MAX_DATA_CACHE_LEN)
			{
				assert(m_bufferSize != 0); // m_bufferSize must be set before allocating!

				// Check whether the allocation failed.
				cache.buffer = new (std::nothrow) unsigned char[m_bufferSize];

				if(cache.buffer == nullptr)
				{
					CGE_LOG_ERROR("Fatal Error: out of memory, buffer allocation failed!");
				}
				else
				{
					m_totalCaches.push_back(cache);
					CGE_LOG_INFO("Cache grow: %d", (int)m_totalCaches.size());
				}
			}
			else
			{
				cache.buffer = nullptr;
				CGE_LOG_INFO("write data hungry, cache size: %d", (int)m_totalCaches.size());
			}
		}
		else
		{
			cache = m_data4Write.front();
			m_data4Write.pop();
		}
		m_writeMutex.unlock();
		return cache;
	}
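For context, this pool is one half of a producer/consumer pair: the GL thread borrows an empty cache via getData4Write(), fills it with glReadPixels, and hands it back through putData4Read() (see Example #7), while the writer thread drains the filled caches into the encoder. Below is a minimal standalone sketch of the same two-queue idea; the struct and method names are hypothetical, not the library's actual ImageDataWriteThread internals.

	#include <mutex>
	#include <queue>

	// Hypothetical illustration of the double-queue buffer pool: writers borrow
	// from freeQueue, the encoder thread drains readyQueue.
	struct BufferPoolSketch
	{
		std::queue<unsigned char*> freeQueue;   // caches available for writing
		std::queue<unsigned char*> readyQueue;  // filled caches awaiting encoding
		std::mutex mutex;

		unsigned char* borrowForWrite()
		{
			std::lock_guard<std::mutex> lock(mutex);
			if(freeQueue.empty())
				return nullptr; // the "write data hungry" case above
			unsigned char* buf = freeQueue.front();
			freeQueue.pop();
			return buf;
		}

		void submitForRead(unsigned char* buf)
		{
			std::lock_guard<std::mutex> lock(mutex);
			readyQueue.push(buf); // the writer thread picks this up
		}
	};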
Example #2
	CGEFrameRecorder::~CGEFrameRecorder()
	{
		CGE_LOG_INFO("CGEFrameRecorder::~CGEFrameRecorder");

		endRecording(false);

		if(m_recordThread != nullptr)
		{
			CGE_LOG_INFO("m_recordThread kill before...");

			bool bShouldWait = true;

			// The offscreen GL context must be destroyed on the thread where it
			// is current, so the deletion is posted to the record thread itself.
			m_recordThread->run(CGEThreadPool::Work([&](void*){
				CGE_LOG_INFO("Delete offscreen context...");
				delete m_offscreenContext;
				m_offscreenContext = nullptr;
				bShouldWait = false;
			}));

			while(bShouldWait || m_recordThread->isActive())
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(1));
			}

			CGE_LOG_INFO("m_recordThread kill after...");

			m_recordThread->quit();

			delete m_recordThread;
			m_recordThread = nullptr;
		}
	}
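The 1 ms polling loop above works because the destructor thread keeps bShouldWait alive until the posted work runs. A condition variable expresses the same wait without polling; this is an illustrative standalone sketch, not the CGEThreadPool API:

	#include <condition_variable>
	#include <functional>
	#include <mutex>
	#include <thread>

	// Run `work` on a worker thread and block the caller until it completes.
	void runAndWait(const std::function<void()>& work)
	{
		std::mutex m;
		std::condition_variable cv;
		bool done = false;

		std::thread worker([&]{
			work(); // e.g. delete the offscreen context on its owning thread
			std::lock_guard<std::mutex> lock(m);
			done = true;
			cv.notify_one();
		});

		std::unique_lock<std::mutex> lock(m);
		cv.wait(lock, [&]{ return done; });
		worker.join();
	}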
Example #3
	bool CGEFrameRecorder::startRecording(int fps, const char* filename, int bitRate)
	{
		delete m_encoder;
		m_encoder = new CGEVideoEncoderMP4();
		m_encoder->setRecordDataFormat(CGEVideoEncoderMP4::FMT_RGBA8888);
		if(!m_encoder->init(filename, fps, m_dstSize.width, m_dstSize.height, true, bitRate))
		{
			delete m_encoder;
			m_encoder = nullptr;
			CGE_LOG_ERROR("CGEFrameRecorder::startRecording - start recording failed!");
			return false;
		}

		CGE_LOG_INFO("encoder created!");

		if(m_offscreenContext == nullptr || m_recordThread == nullptr)
			_createOffscreenContext(); // the offscreen context will be created on another thread

		int bufferLen = m_dstSize.width * m_dstSize.height * 4;

		m_recordImageThread = new ImageDataWriteThread();

		m_recordImageThread->setBufferAllocSize(bufferLen);

		m_recordImageThread->setTask([&](const ImageDataWriteThread::DataCache& data) {

			// auto tm = getCurrentTimeMillis();

			CGEVideoEncoderMP4::ImageData imageData;
			imageData.width = m_dstSize.width;
			imageData.height = m_dstSize.height;
			imageData.linesize[0] = m_dstSize.width * 4;
			imageData.data[0] = data.buffer;
			imageData.pts = data.pts;

			// CGE_LOG_ERROR("PTS: %d", (int)data.pts);

			if(!m_encoder->record(imageData))
			{
				CGE_LOG_ERROR("record frame failed!");
			}

			// CGE_LOG_ERROR("pts sequence: %d, time: %g", (int)data.pts, getCurrentTimeMillis() - tm);
		});

		m_recordFPS = fps;
		m_currentPTS = -1;
		m_isRecording = true;
		m_isRecordingPaused = false;
		m_recordingTimestamp = 0.0;

		CGE_LOG_INFO("CGEFrameRecorder::startRecording...");

		return true;
	}
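A hypothetical call site for the function above; the fps, path, and bit rate values are illustrative. It is assumed to run on the GL thread, since startRecording may call _createOffscreenContext (Example #4), which requires a current EGL context:

	// Illustrative usage only; "recorder" is a hypothetical, fully initialized instance.
	void beginRecordingDemo(CGEFrameRecorder* recorder)
	{
		if(!recorder->startRecording(30, "/sdcard/output.mp4", 1000000))
		{
			CGE_LOG_ERROR("startRecording failed!");
			return;
		}
		// Each render pass now feeds frames through runProc() (Example #7);
		// call recorder->endRecording(true) later to save the mp4 (Example #6).
	}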
Example #4
	void CGEFrameRecorder::_createOffscreenContext()
	{
		EGLContext sharedContext = eglGetCurrentContext();

		if(sharedContext == EGL_NO_CONTEXT)
		{
			CGE_LOG_ERROR("Context creation must be in the GL thread!");
			return;
		}

		if(m_recordThread == nullptr)
			m_recordThread = new CGEThreadPool();

		m_recordThread->run(CGEThreadPool::Work([&](void*){

			delete m_offscreenContext;
			m_offscreenContext = CGESharedGLContext::create(sharedContext, m_dstSize.width, m_dstSize.height, CGESharedGLContext::RECORDABLE_ANDROID);
			if(m_offscreenContext == nullptr)
			{
				CGE_LOG_ERROR("CGESharedGLContext : RECORDABLE_ANDROID is not supported!");
				m_offscreenContext = CGESharedGLContext::create(sharedContext, m_dstSize.width, m_dstSize.height, CGESharedGLContext::PBUFFER);
				if(m_offscreenContext == nullptr)
					CGE_LOG_ERROR("Fatal Error: Create Context Failed!");
			}

			if(m_offscreenContext != nullptr)
			{
				glViewport(0, 0, m_dstSize.width, m_dstSize.height);
				CGE_LOG_INFO("Info from offscreen context thread (begin)....");
				cgePrintGLInfo();
				CGE_LOG_INFO("Info from offscreen context thread (end)....");
			}
			else
			{
				CGE_LOG_ERROR("创建 OpenGL 子线程失败, 当前设备在录制视频时可能性能较差!");
			}
		}));

		while(m_recordThread->isActive())
			std::this_thread::sleep_for(std::chrono::milliseconds(1));

		// If creating the shared context failed, the OpenGL worker thread will not be used.
		if(m_offscreenContext == nullptr)
		{
			m_recordThread->quit();
			delete m_recordThread; // free the pool as well, matching the destructor in Example #2
			m_recordThread = nullptr;
		}
	}
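For reference, a minimal sketch of what the PBUFFER fallback path of CGESharedGLContext::create presumably does: create an EGL context that shares objects with sharedContext, plus a pbuffer surface to make it current on the worker thread. This is standard EGL usage, assumed rather than taken from the library's sources; the display is assumed to be initialized already by the main GL context.

	#include <EGL/egl.h>

	// Hypothetical helper: returns a context sharing objects with `sharedContext`
	// and stores a matching pbuffer surface in *outSurface.
	EGLContext createSharedPbufferContext(EGLContext sharedContext, int width, int height, EGLSurface* outSurface)
	{
		EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);

		const EGLint configAttribs[] = {
			EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
			EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
			EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, EGL_ALPHA_SIZE, 8,
			EGL_NONE
		};
		EGLConfig config;
		EGLint numConfigs = 0;
		if(!eglChooseConfig(display, configAttribs, &config, 1, &numConfigs) || numConfigs == 0)
			return EGL_NO_CONTEXT;

		const EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
		EGLContext context = eglCreateContext(display, config, sharedContext, contextAttribs);

		const EGLint surfaceAttribs[] = { EGL_WIDTH, width, EGL_HEIGHT, height, EGL_NONE };
		*outSurface = eglCreatePbufferSurface(display, config, surfaceAttribs);
		return context;
	}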
Example #5
    JNIEXPORT jboolean JNICALL Java_org_wysaid_nativePort_CGEFFmpegNativeLibrary_nativeGenerateVideoWithFilter(JNIEnv *env, jclass cls, jstring outputFilename, jstring inputFilename, jstring filterConfig, jfloat filterIntensity, jobject blendImage, jint blendMode, jfloat blendIntensity, jboolean mute)
    {
        CGE_LOG_INFO("##### nativeGenerateVideoWithFilter!!!");
        
        if(outputFilename == nullptr || inputFilename == nullptr)
            return false;
        
        CGESharedGLContext* glContext = CGESharedGLContext::create(2048, 2048); // max supported video size: 2048 x 2048
        
        if(glContext == nullptr)
        {
            CGE_LOG_ERROR("Create GL Context Failed!");
            return false;
        }
        
        glContext->makecurrent();
        
        CGETextureResult texResult = {0};
        
        jclass nativeLibraryClass = env->FindClass("org/wysaid/nativePort/CGENativeLibrary");
        
        if(blendImage != nullptr)
            texResult = cgeLoadTexFromBitmap_JNI(env, nativeLibraryClass, blendImage);
        
        const char* outFilenameStr = env->GetStringUTFChars(outputFilename, 0);
        const char* inFilenameStr = env->GetStringUTFChars(inputFilename, 0);
        const char* configStr = filterConfig == nullptr ? nullptr : env->GetStringUTFChars(filterConfig, 0);

        CGETexLoadArg texLoadArg;
        texLoadArg.env = env;
        texLoadArg.cls = nativeLibraryClass; // reuse the class looked up above
        
        bool retStatus = CGE::cgeGenerateVideoWithFilter(outFilenameStr, inFilenameStr, configStr, filterIntensity, texResult.texID, (CGETextureBlendMode)blendMode, blendIntensity, mute, &texLoadArg);
        
        env->ReleaseStringUTFChars(outputFilename, outFilenameStr);
        env->ReleaseStringUTFChars(inputFilename, inFilenameStr);
        
        if(configStr != nullptr)
            env->ReleaseStringUTFChars(filterConfig, configStr);
        
        CGE_LOG_INFO("generate over!\n");
        
        delete glContext;
        
        return retStatus;
    }
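The function above releases each UTF string on every path by hand. A small RAII wrapper makes that automatic; this helper is hypothetical, not part of the library:

	#include <jni.h>

	// Pairs GetStringUTFChars with ReleaseStringUTFChars so early returns cannot leak.
	class ScopedUTFChars
	{
	public:
		ScopedUTFChars(JNIEnv* env, jstring str) : m_env(env), m_str(str),
			m_chars(str == nullptr ? nullptr : env->GetStringUTFChars(str, nullptr)) {}

		~ScopedUTFChars()
		{
			if(m_chars != nullptr)
				m_env->ReleaseStringUTFChars(m_str, m_chars);
		}

		const char* c_str() const { return m_chars; } // nullptr if str was null

	private:
		JNIEnv* m_env;
		jstring m_str;
		const char* m_chars;
	};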
Example #6
	bool CGEFrameRecorder::endRecording(bool shouldSave)
	{
		m_isRecording = false;

		CGE_LOG_INFO("Waiting for the recording threads...");

		// wait for the recording threads to finish
		while((m_recordThread != nullptr && m_recordThread->isActive()) || (m_recordImageThread != nullptr && m_recordImageThread->isActive())) 
			std::this_thread::sleep_for(std::chrono::milliseconds(1));

		delete m_recordImageThread;
		m_recordImageThread = nullptr;

		CGE_LOG_INFO("threads sync.");

		if(m_encoder == nullptr)
			return false;

		bool ret = true;
		CGE_LOG_INFO("CGEFrameRecorder::endRecording...");
		
		if(shouldSave)
		{
			ret = m_encoder->save();
		}
		else
		{
			m_encoder->drop();
		}

		CGE_LOG_INFO("delete encoder...");

		delete m_encoder;
		m_encoder = nullptr;

		CGE_LOG_INFO("CGEFrameRecorder::endRecording OK...");
		return ret;
	}
Example #7
	void CGEFrameRecorder::runProc()
	{	
		// processingFilters may change targetTextureID and bufferTextureID; lock so that other threads can use them safely.
		std::unique_lock<std::mutex> uniqueLock(m_resultMutex);

		if(m_globalFilter != nullptr)
		{
			m_frameHandler->processingWithFilter(m_globalFilter);
		}

		m_frameHandler->processingFilters();

		if(isRecordingStarted() && !m_isRecordingPaused)
		{

			// the first frame is always recorded
			if(m_recordingTimestamp == 0.0)
			{
				m_recordingTimestamp = 0.0001; // set to 0.0001 ms to mark that recording has started
				m_lastRecordingTime = getCurrentTimeMillis();
				CGE_LOG_INFO("first frame...");
			}
			else
			{
				double currentTime = getCurrentTimeMillis();
				m_recordingTimestamp += currentTime - m_lastRecordingTime;
				m_lastRecordingTime = currentTime;
				// CGE_LOG_INFO("time stamp %g...", m_recordingTimestamp);
			}

			int ptsInFact = m_recordingTimestamp * (m_recordFPS / 1000.0);

			if(ptsInFact < m_currentPTS)
			{
				CGE_LOG_INFO("帧速过快, 丢弃帧...");
				return ;
			}
			else if(ptsInFact > m_currentPTS + 3)
			{
				CGE_LOG_INFO("帧速较慢, 填补帧...");
				m_currentPTS = ptsInFact;
			}
			else
			{
				// CGE_LOG_INFO("帧速合适的很...");
				if(m_currentPTS == ptsInFact)
					m_currentPTS = ptsInFact + 1;
				else
					m_currentPTS = ptsInFact;
			}

			if(m_recordThread != nullptr)
			{
				m_frameHandler->useImageFBO();
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_frameHandler->getBufferTextureID(), 0);

				glViewport(0, 0, m_dstSize.width, m_dstSize.height);
				m_cacheDrawer->drawTexture(m_frameHandler->getTargetTextureID());
				glFinish();
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_frameHandler->getTargetTextureID(), 0);

				if(m_recordThread->isActive() && m_recordThread->totalWorks() != 0)
					return;

				m_recordThread->run(CGEThreadPool::Work(m_recordingWork, (void*)m_currentPTS));
			}
			else
			{
				auto bufferCache = m_recordImageThread->getData4Write();

				if(bufferCache.buffer != nullptr)
				{
					// auto tm = getCurrentTimeMillis();

					m_frameHandler->useImageFBO();

					// CGE_LOG_ERROR("draw texture 时间: %g", (getCurrentTimeMillis() - tm));

					glReadPixels(0, 0, m_dstSize.width, m_dstSize.height, GL_RGBA, GL_UNSIGNED_BYTE, bufferCache.buffer);

					// CGE_LOG_ERROR("录制readpixel时间: %g", (getCurrentTimeMillis() - tm));
					bufferCache.pts = m_currentPTS;
					m_recordImageThread->putData4Read(bufferCache);
				}
			}
		}
	}
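To make the drop/pad logic above concrete, here is the PTS arithmetic worked through with an assumed m_recordFPS of 30 (values illustrative):

	// One PTS tick corresponds to 1000 / 30 ≈ 33.3 ms of wall-clock time.
	int ptsForTimestamp(double timestampMillis, int fps)
	{
		return (int)(timestampMillis * (fps / 1000.0));
	}
	// ptsForTimestamp(100.0, 30) == 3   (100 * 0.03 = 3.0)
	// ptsForTimestamp(250.0, 30) == 7   (250 * 0.03 = 7.5, truncated)
	//
	// A frame whose pts falls behind m_currentPTS is dropped; a jump of more
	// than 3 ticks snaps m_currentPTS forward to catch up; otherwise the pts
	// advances by at least 1 so no two encoded frames share a timestamp.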
Example #8
	bool CGEVideoDecodeHandler::open(const char* filename)
	{
		if(avformat_open_input(&m_context->pFormatCtx, filename, nullptr, nullptr)!=0 ||
			avformat_find_stream_info(m_context->pFormatCtx, nullptr)<0)
		{
			return false; // failed to open the input or read its stream info
		}

		av_dump_format(m_context->pFormatCtx, 0, filename, 0);
		m_context->videoStreamIndex = -1;
		m_context->audioStreamIndex = -1;

		for(unsigned int i = 0; i < m_context->pFormatCtx->nb_streams; i++)  
		{
			if(m_context->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
			{  
				m_context->videoStreamIndex = i;
				m_context->pVideoStream = m_context->pFormatCtx->streams[i];
				m_context->pVideoCodecCtx = m_context->pFormatCtx->streams[i]->codec;
			}
			else if(m_context->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
			{
				m_context->audioStreamIndex = i;
				m_context->pAudioStream = m_context->pFormatCtx->streams[i];
				m_context->pAudioCodecCtx = m_context->pFormatCtx->streams[i]->codec;
			}
		}

		if(m_context->videoStreamIndex == -1)
		{
			return false; // no video stream found
		}

		if(m_context->audioStreamIndex == -1)
		{
			CGE_LOG_INFO("未找到音频流, 视频将处于静音状态...\n");
		}

		//////////////////////////////////////////////////////////////////////////

		m_context->pVideoCodec = avcodec_find_decoder(m_context->pVideoCodecCtx->codec_id);

		if(m_context->pVideoCodec == nullptr || avcodec_open2(m_context->pVideoCodecCtx, m_context->pVideoCodec, nullptr) < 0)
		{
			return false; // failed to find or open the video decoder
		}

		if(m_context->audioStreamIndex != -1)
		{
			m_context->pAudioCodec = avcodec_find_decoder(m_context->pAudioCodecCtx->codec_id);

			if(m_context->pAudioCodec == nullptr || avcodec_open2(m_context->pAudioCodecCtx, m_context->pAudioCodec, nullptr) < 0)
			{
				CGE_LOG_ERROR("音频解码失败! 静音处理...\n");
				m_context->audioStreamIndex = -1;
				m_context->pAudioCodec = nullptr;
				m_context->pAudioCodecCtx = nullptr;
			}
		}

		m_width = m_context->pVideoCodecCtx->width;
		m_height = m_context->pVideoCodecCtx->height;

		m_context->pVideoFrame = av_frame_alloc();
		m_context->pAudioFrame = av_frame_alloc();

		av_init_packet(&m_context->packet);
		m_context->packet.data = nullptr;
		m_context->packet.size = 0;

		return m_context->pVideoFrame != nullptr && m_context->pAudioFrame != nullptr;// && initFrameRGB();
	}
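A minimal sketch of the matching read/decode loop, using the same deprecated FFmpeg API generation (stream->codec, avcodec_decode_video2) that open() relies on. The context struct mirrors the m_context fields used above; this is an assumption about how the handler consumes frames, not the library's actual implementation:

	extern "C" {
	#include <libavformat/avformat.h>
	}

	// Mirrors the m_context fields referenced in open().
	struct DecodeContextSketch
	{
		AVFormatContext* pFormatCtx;
		AVCodecContext* pVideoCodecCtx;
		AVFrame* pVideoFrame;
		AVPacket packet;
		int videoStreamIndex;
	};

	// Read packets until one video frame has been decoded into pVideoFrame.
	bool decodeNextVideoFrame(DecodeContextSketch* ctx)
	{
		while(av_read_frame(ctx->pFormatCtx, &ctx->packet) >= 0)
		{
			int gotFrame = 0;
			if(ctx->packet.stream_index == ctx->videoStreamIndex)
				avcodec_decode_video2(ctx->pVideoCodecCtx, ctx->pVideoFrame, &gotFrame, &ctx->packet);

			av_free_packet(&ctx->packet); // av_packet_unref on newer FFmpeg

			if(gotFrame)
				return true; // a decoded frame is now in ctx->pVideoFrame
		}
		return false; // end of file or read error
	}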