Example #1
	bool CGEFrameRecorder::startRecording(int fps, const char* filename, int bitRate)
	{
		delete m_encoder;
		m_encoder = new CGEVideoEncoderMP4();
		m_encoder->setRecordDataFormat(CGEVideoEncoderMP4::FMT_RGBA8888);
		if(!m_encoder->init(filename, fps, m_dstSize.width, m_dstSize.height, true, bitRate))
		{
			delete m_encoder;
			m_encoder = nullptr;
			CGE_LOG_ERROR("CGEFrameRecorder::startRecording - start recording failed!");
			return false;
		}

		CGE_LOG_INFO("encoder created!");

		if(m_offscreenContext == nullptr || m_recordThread == nullptr)
			_createOffscreenContext(); // the offscreen context is created on a separate thread

		int bufferLen = m_dstSize.width * m_dstSize.height * 4;

		m_recordImageThread = new ImageDataWriteThread();

		m_recordImageThread->setBufferAllocSize(bufferLen);

		m_recordImageThread->setTask([&](const ImageDataWriteThread::DataCache& data) {

			// auto tm = getCurrentTimeMillis();

			CGEVideoEncoderMP4::ImageData imageData;
			imageData.width = m_dstSize.width;
			imageData.height = m_dstSize.height;
			imageData.linesize[0] = m_dstSize.width * 4;
			imageData.data[0] = data.buffer;
			imageData.pts = data.pts;

			// CGE_LOG_ERROR("PTS: %d", (int)data.pts);

			if(!m_encoder->record(imageData))
			{
				CGE_LOG_ERROR("record frame failed!");
			}

			// CGE_LOG_ERROR("pts sequence: %d, time: %g", (int)data.pts, getCurrentTimeMillis() - tm);
		});

		m_recordFPS = fps;
		m_currentPTS = -1;
		m_isRecording = true;
		m_isRecordingPaused = false;
		m_recordingTimestamp = 0.0;

		CGE_LOG_INFO("CGEFrameRecorder::startRecording...");

		return true;
	}
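
A hedged usage sketch (not from the library) of how a caller might drive startRecording. The helper name, output path, and bitrate are illustrative placeholders; only CGEFrameRecorder and startRecording come from the code above.

	// Illustrative caller: kick off a 30 fps recording.
	bool beginCapture(CGEFrameRecorder* recorder)
	{
		// The path and the 1650000 bps bitrate are arbitrary examples.
		if(!recorder->startRecording(30, "/sdcard/output.mp4", 1650000))
		{
			// Encoder init failed; startRecording already cleaned up m_encoder.
			return false;
		}

		// From now on, rendered frames are handed to the ImageDataWriteThread
		// task, which feeds CGEVideoEncoderMP4 on a worker thread.
		return true;
	}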
Example #2
	void CGEFrameRecorder::_createOffscreenContext()
	{
		EGLContext sharedContext = eglGetCurrentContext();

		if(sharedContext == EGL_NO_CONTEXT)
		{
			CGE_LOG_ERROR("Context creation must be in the GL thread!");
			return;
		}

		if(m_recordThread == nullptr)
			m_recordThread = new CGEThreadPool();

		m_recordThread->run(CGEThreadPool::Work([&](void*){

			delete m_offscreenContext;
			m_offscreenContext = CGESharedGLContext::create(sharedContext, m_dstSize.width, m_dstSize.height, CGESharedGLContext::RECORDABLE_ANDROID);
			if(m_offscreenContext == nullptr)
			{
				CGE_LOG_ERROR("CGESharedGLContext : RECORDABLE_ANDROID is not supported!");
				m_offscreenContext = CGESharedGLContext::create(sharedContext, m_dstSize.width, m_dstSize.height, CGESharedGLContext::PBUFFER);
				if(m_offscreenContext == nullptr)
					CGE_LOG_ERROR("Fatal Error: Create Context Failed!");
			}

			if(m_offscreenContext != nullptr)
			{
				glViewport(0, 0, m_dstSize.width, m_dstSize.height);
				CGE_LOG_INFO("Info from offscreen context thread (begin)....");
				cgePrintGLInfo();
				CGE_LOG_INFO("Info from offscreen context thread (end)....");
			}
			else
			{
				CGE_LOG_ERROR("创建 OpenGL 子线程失败, 当前设备在录制视频时可能性能较差!");
			}
		}));

		// Block until the worker thread has finished creating the context.
		while(m_recordThread->isActive())
			std::this_thread::sleep_for(std::chrono::milliseconds(1));

		// If the shared context could not be created, the OpenGL worker thread will not be used.
		if(m_offscreenContext == nullptr)
		{
			m_recordThread->quit();
			m_recordThread = nullptr;
		}
	}
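
For context, CGESharedGLContext::create with the PBUFFER flag presumably builds a pbuffer-backed EGL context that shares objects with the render thread's context. Below is a minimal sketch of what that involves, assuming EGL 1.4 and GLES2; it is illustrative only, and the library's actual implementation may differ.

	#include <EGL/egl.h>

	// Create a pbuffer-backed context sharing objects with `sharedContext`.
	// Error checks trimmed for brevity.
	EGLContext createPbufferContext(EGLContext sharedContext, int width, int height)
	{
		EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
		eglInitialize(display, nullptr, nullptr);

		const EGLint configAttribs[] = {
			EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
			EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
			EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8,
			EGL_BLUE_SIZE, 8, EGL_ALPHA_SIZE, 8,
			EGL_NONE
		};
		EGLConfig config;
		EGLint numConfigs = 0;
		eglChooseConfig(display, configAttribs, &config, 1, &numConfigs);

		const EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
		EGLContext context = eglCreateContext(display, config, sharedContext, contextAttribs);

		const EGLint pbufferAttribs[] = { EGL_WIDTH, width, EGL_HEIGHT, height, EGL_NONE };
		EGLSurface surface = eglCreatePbufferSurface(display, config, pbufferAttribs);

		// Bind the new context on the calling (worker) thread.
		eglMakeCurrent(display, surface, surface, context);
		return context;
	}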
Example #3
	ImageDataWriteThread::DataCache ImageDataWriteThread::getData4Write()
	{
		DataCache cache;
		m_writeMutex.lock();
		if(m_data4Write.empty())
		{
			if(m_totalCaches.size() < MAX_DATA_CACHE_LEN)
			{
				assert(m_bufferSize != 0); // m_bufferSize must be set!

				// Guard against allocation failure.
				cache.buffer = new (std::nothrow) unsigned char[m_bufferSize];

				if(cache.buffer == nullptr)
				{
					CGE_LOG_ERROR("Fatal Error: out of memory, buffer allocation failed!");
				}
				else
				{
					m_totalCaches.push_back(cache);
					CGE_LOG_INFO("Cache grow: %d", (int)m_totalCaches.size());
				}
			}
			else
			{
				cache.buffer = nullptr;
				CGE_LOG_INFO("No free buffer available for writing, cache size: %d", (int)m_totalCaches.size());
			}
		}
		else
		{
			cache = m_data4Write.front();
			m_data4Write.pop();
		}
		m_writeMutex.unlock();
		return cache;
	}
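
getData4Write() is one half of a buffer-recycling scheme: a bounded pool of caches cycles between a free-for-writing queue and a ready-for-reading queue drained by the encoder task. Below is a self-contained sketch of that pattern; the class and method names are illustrative, not the library's API.

	#include <mutex>
	#include <queue>
	#include <vector>

	// Two-queue buffer recycling (illustrative): producers take free buffers,
	// fill them, and push them to the ready queue; the consumer drains ready
	// buffers and returns them to the free queue.
	class BufferPool
	{
	public:
		BufferPool(size_t bufferSize, size_t maxBuffers)
			: m_bufferSize(bufferSize), m_maxBuffers(maxBuffers) {}

		unsigned char* acquireForWrite()
		{
			std::lock_guard<std::mutex> lock(m_mutex);
			if(!m_free.empty())
			{
				unsigned char* buf = m_free.front();
				m_free.pop();
				return buf;
			}
			if(m_all.size() < m_maxBuffers) // grow the pool lazily, up to a cap
			{
				m_all.emplace_back(m_bufferSize);
				return m_all.back().data();
			}
			return nullptr; // pool exhausted: the caller drops this frame
		}

		void commitForRead(unsigned char* buf)    { push(m_ready, buf); }
		void recycleAfterRead(unsigned char* buf) { push(m_free, buf); }

	private:
		void push(std::queue<unsigned char*>& q, unsigned char* buf)
		{
			std::lock_guard<std::mutex> lock(m_mutex);
			q.push(buf);
		}

		size_t m_bufferSize, m_maxBuffers;
		std::mutex m_mutex;
		std::queue<unsigned char*> m_free, m_ready;
		std::vector<std::vector<unsigned char>> m_all;
	};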
Example #4
	void CGEFrameRecorder::setGlobalFilterIntensity(float intensity)
	{
		if(m_globalFilter != nullptr)
		{
			m_globalFilter->setIntensity(intensity);
		}
		else
		{
			CGE_LOG_ERROR("You must set a global filter first!\n");
		}
	}
	JNIEXPORT jlong JNICALL Java_org_wysaid_nativePort_CGENativeLibrary_cgeCreateCustomNativeFilter
	(JNIEnv *env, jclass cls, jint index, jfloat intensity)
	{
		if(index < 0 || index >= CGE_CUSTOM_FILTER_TOTAL_NUMBER)
		{
			CGE_LOG_ERROR("Invalid filter index!");
			return 0;
		}

		return (jlong)cgeCreateCustomFilter((CustomFilterType)index, intensity);
	}
	JNIEXPORT jobject JNICALL Java_org_wysaid_nativePort_CGENativeLibrary_cgeFilterImageWithCustomFilter
	(JNIEnv *env, jclass cls, jobject bmp, jint index, jfloat intensity, jboolean hasContext)
	{
		if(index < 0 || index >= CGE_CUSTOM_FILTER_TOTAL_NUMBER)
		{
			CGE_LOG_ERROR("Invalid filter index!");
			return bmp;
		}

		return cgeFilterImage_CustomFilters(env, bmp, (CustomFilterType)index, intensity, hasContext);
	}
    JNIEXPORT jboolean JNICALL Java_org_wysaid_nativePort_CGEFFmpegNativeLibrary_nativeGenerateVideoWithFilter(JNIEnv *env, jclass cls, jstring outputFilename, jstring inputFilename, jstring filterConfig, jfloat filterIntensity, jobject blendImage, jint blendMode, jfloat blendIntensity, jboolean mute)
    {
        CGE_LOG_INFO("##### nativeGenerateVideoWithFilter!!!");
        
        if(outputFilename == nullptr || inputFilename == nullptr)
            return false;
        
        CGESharedGLContext* glContext = CGESharedGLContext::create(2048, 2048); // Maximum supported video resolution: 2048x2048.
        
        if(glContext == nullptr)
        {
            CGE_LOG_ERROR("Create GL Context Failed!");
            return false;
        }
        
        glContext->makecurrent();
        
        CGETextureResult texResult = {0};
        
        jclass nativeLibraryClass = env->FindClass("org/wysaid/nativePort/CGENativeLibrary");
        
        if(blendImage != nullptr)
            texResult = cgeLoadTexFromBitmap_JNI(env, nativeLibraryClass, blendImage);
        
        const char* outFilenameStr = env->GetStringUTFChars(outputFilename, 0);
        const char* inFilenameStr = env->GetStringUTFChars(inputFilename, 0);
        const char* configStr = filterConfig == nullptr ? nullptr : env->GetStringUTFChars(filterConfig, 0);

        CGETexLoadArg texLoadArg;
        texLoadArg.env = env;
        texLoadArg.cls = nativeLibraryClass; // reuse the class looked up above
        
        bool retStatus = CGE::cgeGenerateVideoWithFilter(outFilenameStr, inFilenameStr, configStr, filterIntensity, texResult.texID, (CGETextureBlendMode)blendMode, blendIntensity, mute, &texLoadArg);
        
        env->ReleaseStringUTFChars(outputFilename, outFilenameStr);
        env->ReleaseStringUTFChars(inputFilename, inFilenameStr);
        
        if(configStr != nullptr)
            env->ReleaseStringUTFChars(filterConfig, configStr);
        
        CGE_LOG_INFO("generate over!\n");
        
        delete glContext;
        
        return retStatus;
    }
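
Each GetStringUTFChars above must be paired with a ReleaseStringUTFChars, even on early-exit paths. A small RAII wrapper makes that pairing automatic; this is an illustrative sketch, not part of the CGE sources.

	#include <jni.h>

	// RAII wrapper for GetStringUTFChars/ReleaseStringUTFChars (illustrative).
	class ScopedUTFChars
	{
	public:
		ScopedUTFChars(JNIEnv* env, jstring str)
			: m_env(env), m_str(str)
			, m_chars(str != nullptr ? env->GetStringUTFChars(str, nullptr) : nullptr)
		{
		}

		~ScopedUTFChars()
		{
			if(m_chars != nullptr)
				m_env->ReleaseStringUTFChars(m_str, m_chars);
		}

		const char* get() const { return m_chars; }

	private:
		JNIEnv* m_env;
		jstring m_str;
		const char* m_chars;
	};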
Example #8
	void CGEFrameRecorder::pauseRecording()
	{
		// Not available for now.
		// m_isRecordingPaused = true;
		CGE_LOG_ERROR("The pause function is not implemented yet!");
	}
	const CGEAudioFrameBufferData* CGEVideoDecodeHandler::getCurrentAudioFrame()
	{
		if(m_context->pSwrCtx == nullptr)
		{
			if(m_context->pAudioStream->codec->sample_fmt != AV_SAMPLE_FMT_S16)
			{
				m_context->pSwrCtx = swr_alloc();
				if(m_context->pSwrCtx == nullptr)
				{
					CGE_LOG_ERROR("Allocate resampler context failed!\n");
					return nullptr;
				}

				auto ctx = m_context->pSwrCtx;
				auto c = m_context->pAudioStream->codec;

				av_opt_set_int       (ctx, "in_channel_count",  c->channels,       0);
				av_opt_set_int       (ctx, "in_sample_rate",    c->sample_rate,    0);
				av_opt_set_sample_fmt(ctx, "in_sample_fmt",     c->sample_fmt,     0);
				av_opt_set_int       (ctx, "out_channel_count", 1,                 0);
				av_opt_set_int       (ctx, "out_sample_rate",   c->sample_rate,    0);
				av_opt_set_sample_fmt(ctx, "out_sample_fmt",    AV_SAMPLE_FMT_S16, 0);

				int ret;

				if ((ret = swr_init(ctx)) < 0)
				{
					CGE_LOG_ERROR("Failed to initialize the resampling context: %d\n", ret);
					return nullptr;
				}

				m_context->maxDstNbSamples = (c->codec->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE) ? 10000 : c->frame_size;

				ret = av_samples_alloc_array_and_samples(&m_context->dstSampleData, &m_context->dstSamplesLinesize, c->channels, m_context->maxDstNbSamples, c->sample_fmt, 0);

				if (ret < 0)
				{
					CGE_LOG_ERROR("Could not allocate destination samples\n");
					return nullptr;
				}

				m_context->dstSamplesSize = av_samples_get_buffer_size(NULL, c->channels, m_context->maxDstNbSamples, c->sample_fmt, 0);

			}
			else
			{
				// The input is already AV_SAMPLE_FMT_S16, but no passthrough path
				// exists; bail out instead of reaching the swr_convert() call below
				// with a null resampler context.
				CGE_LOG_ERROR("Unhandled case: audio is already AV_SAMPLE_FMT_S16!");
				return nullptr;
			}
		}

		int ret = swr_convert(m_context->pSwrCtx, m_context->dstSampleData, m_context->maxDstNbSamples, (const uint8_t**)m_context->pAudioFrame->data, m_context->pAudioFrame->nb_samples); // out_count is in samples per channel, not bytes

		if(ret <= 0)
			return nullptr;

		m_cachedAudioFrame.timestamp = av_frame_get_best_effort_timestamp(m_context->pAudioFrame);
		m_cachedAudioFrame.data = m_context->dstSampleData[0];
		m_cachedAudioFrame.nbSamples = ret; // swr_convert() returns the number of samples actually written
		m_cachedAudioFrame.channels = 1; // resampled to mono above
		m_cachedAudioFrame.bytesPerSample = 2;
		m_cachedAudioFrame.linesize = m_context->dstSamplesSize;
		m_cachedAudioFrame.format = CGE_SAMPLE_FMT_S16;
		return &m_cachedAudioFrame;
	}
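
In the function above the input and output sample rates are equal, so the input frame's nb_samples bounds the output. In the general case, FFmpeg's documented way to size the output accounts for the resampler's internal delay; a hedged sketch using the real libswresample/libavutil helpers, not code from this file:

	extern "C" {
	#include <libswresample/swresample.h>
	#include <libavutil/mathematics.h>
	}

	// Upper bound on the samples swr_convert() may emit for one input frame:
	// the resampler's buffered delay plus the rescaled input count.
	static int maxOutputSamples(SwrContext* swrCtx, int inSampleRate, int outSampleRate, int inNbSamples)
	{
		int64_t delay = swr_get_delay(swrCtx, inSampleRate);
		return (int)av_rescale_rnd(delay + inNbSamples, outSampleRate, inSampleRate, AV_ROUND_UP);
	}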
	bool CGEVideoDecodeHandler::open(const char* filename)
	{
		if(avformat_open_input(&m_context->pFormatCtx, filename, nullptr, nullptr)!=0 ||
			avformat_find_stream_info(m_context->pFormatCtx, nullptr)<0)
		{
			return false; // failed to open the input or probe the streams
		}

		av_dump_format(m_context->pFormatCtx, 0, filename, 0);
		m_context->videoStreamIndex = -1;
		m_context->audioStreamIndex = -1;

		for(unsigned int i = 0; i < m_context->pFormatCtx->nb_streams; i++)  
		{
			if(m_context->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
			{  
				m_context->videoStreamIndex = i;
				m_context->pVideoStream = m_context->pFormatCtx->streams[i];
				m_context->pVideoCodecCtx = m_context->pFormatCtx->streams[i]->codec;
			}
			else if(m_context->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
			{
				m_context->audioStreamIndex = i;
				m_context->pAudioStream = m_context->pFormatCtx->streams[i];
				m_context->pAudioCodecCtx = m_context->pFormatCtx->streams[i]->codec;
			}
		}

		if(m_context->videoStreamIndex == -1)
		{
			return false; // no video stream found
		}

		if(m_context->audioStreamIndex == -1)
		{
			CGE_LOG_INFO("未找到音频流, 视频将处于静音状态...\n");
		}

		//////////////////////////////////////////////////////////////////////////

		m_context->pVideoCodec = avcodec_find_decoder(m_context->pVideoCodecCtx->codec_id);

		if(m_context->pVideoCodec == nullptr || avcodec_open2(m_context->pVideoCodecCtx, m_context->pVideoCodec, nullptr) < 0)
		{
			return false; // failed to open the video decoder
		}

		if(m_context->audioStreamIndex != -1)
		{
			m_context->pAudioCodec = avcodec_find_decoder(m_context->pAudioCodecCtx->codec_id);

			if(m_context->pAudioCodec == nullptr || avcodec_open2(m_context->pAudioCodecCtx, m_context->pAudioCodec, nullptr) < 0)
			{
				CGE_LOG_ERROR("音频解码失败! 静音处理...\n");
				m_context->audioStreamIndex = -1;
				m_context->pAudioCodec = nullptr;
				m_context->pAudioCodecCtx = nullptr;
			}
		}

		m_width = m_context->pVideoCodecCtx->width;
		m_height = m_context->pVideoCodecCtx->height;

		m_context->pVideoFrame = av_frame_alloc();
		m_context->pAudioFrame = av_frame_alloc();

		av_init_packet(&m_context->packet);
		m_context->packet.data = nullptr;
		m_context->packet.size = 0;

		return m_context->pVideoFrame != nullptr && m_context->pAudioFrame != nullptr;// && initFrameRGB();
	}
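
After a successful open(), a caller would typically drive decoding with the classic av_read_frame() loop of this FFmpeg generation (the code above uses the pre-3.0 stream->codec API). A hedged sketch follows; the variable names stand in for the m_context fields above, and the loop is not part of this file.

	// Illustrative read/decode loop for the legacy avcodec_decode_* API.
	// Error handling trimmed for brevity.
	AVPacket packet;
	av_init_packet(&packet);

	while(av_read_frame(formatCtx, &packet) >= 0)
	{
		int gotFrame = 0;

		if(packet.stream_index == videoStreamIndex)
		{
			avcodec_decode_video2(videoCodecCtx, videoFrame, &gotFrame, &packet);
			if(gotFrame)
			{
				// videoFrame holds a decoded picture; hand it to the renderer.
			}
		}
		else if(packet.stream_index == audioStreamIndex)
		{
			avcodec_decode_audio4(audioCodecCtx, audioFrame, &gotFrame, &packet);
			if(gotFrame)
			{
				// audioFrame is ready for resampling (see getCurrentAudioFrame above).
			}
		}

		av_free_packet(&packet);
	}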