Example #1
////////////////////////////////////////////////////////////////
// Start video capture
//
// Arguments:	filename	output file name
//				sw			screen width
//				sh			screen height
//				vrate		frame rate (fps)
//				arate		audio sampling rate (Hz)
//				bpp			color depth (16, 24, 32)
// Returns:		bool		true: success, false: failure
////////////////////////////////////////////////////////////////
bool AVI6::StartAVI( const char *filename, int sw, int sh, int vrate, int arate, int bpp )
{
#ifndef NOAVI
	cCritical::Lock();
	Init();
	
	ABPP = bpp;

	// Create the audio buffer
	ABuf.InitBuffer( arate / vrate * 2 );
	
	// Create the output context
	avformat_alloc_output_context2(&oc, NULL, NULL, filename);
	if (!oc) {
		cCritical::UnLock();
		return false;
	}

	fmt = oc->oformat;

	// Create the audio and video streams
	if (fmt->video_codec != AV_CODEC_ID_NONE) {
		// VP9 as the video codec produces corrupted images, so force VP8 as a stopgap.
		fmt->video_codec = AV_CODEC_ID_VP8;
		AddStream(&video_st, oc, &video_codec, fmt->video_codec, sw, sh);
	}
	if (fmt->audio_codec != AV_CODEC_ID_NONE) {
		// Opus as the audio codec crashes, so force Vorbis as a stopgap.
		fmt->audio_codec = AV_CODEC_ID_VORBIS;
		AddStream(&audio_st, oc, &audio_codec, fmt->audio_codec, sw, sh);
	}

	OpenVideo(oc, video_codec, &video_st, opt);
	OpenAudio(oc, audio_codec, &audio_st, opt, arate);

	av_dump_format(oc, 0, filename, 1);

	int ret = 0;
	// Open the output file
	if (!(fmt->flags & AVFMT_NOFILE)) {
		ret = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
		if (0 > ret) {
			cCritical::UnLock();
			return false;
		}
	}

	// Write the stream header
	ret = avformat_write_header(oc, &opt);
	if (0 > ret) {
		cCritical::UnLock();
		return false;
	}

	isAVI = true;
	cCritical::UnLock();
	return true;
#else
	return false;
#endif
}
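Example #1 opens the output file and writes the container header, but the matching teardown is not shown anywhere in this listing. Purely as a sketch, assuming the same oc/fmt/isAVI members and the pre-5.0 FFmpeg API used above, a hypothetical StopAVI could finalize the file like this (encoder contexts opened in OpenVideo/OpenAudio would also need closing, which is omitted here):

////////////////////////////////////////////////////////////////
// Stop video capture (hypothetical counterpart to StartAVI above)
////////////////////////////////////////////////////////////////
void AVI6::StopAVI( void )
{
#ifndef NOAVI
	cCritical::Lock();
	if (isAVI) {
		av_write_trailer(oc);                    // flush delayed packets and write the index
		if (!(fmt->flags & AVFMT_NOFILE))
			avio_closep(&oc->pb);                // close the output file
		avformat_free_context(oc);               // free the streams and the context
		oc = NULL;
		isAVI = false;
	}
	cCritical::UnLock();
#endif
}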
Example #2
OpenNi2Video::OpenNi2Video(const std::string& device_uri)
{
    InitialiseOpenNI();

    const int dev_id = AddDevice(device_uri);
    AddStream(OpenNiStreamMode( OpenNiDepth_1mm, ImageDim(), 30, dev_id) );
    AddStream(OpenNiStreamMode( OpenNiRgb,   ImageDim(), 30, dev_id) );

    SetupStreamModes();
    Start();
}
Example #3
BOOL CStreamMuxerImp1::AddAudioStream(int samplerate, int bitrate, unsigned __int8 *extradata, int extradata_size)
{
    if(m_pAudioStream)
    {
        return FALSE;
    }
    AudioSettings settings;
    settings.samplerate = samplerate;
    settings.bitrate = bitrate;
    settings.extra_data = extradata;
    settings.extradata_size = extradata_size;

    m_pAudioStream = new CAudioStream(this, &settings);
    if(!m_pAudioStream->OpenChannelWriter())
    {
        delete m_pAudioStream;
        m_pAudioStream = NULL;
        Sleep(1);
        return FALSE;
    }
    BOOL bRet = AddStream(m_pAudioStream);
    if(!bRet)
    {
        delete m_pAudioStream;
        m_pAudioStream = NULL;
        Sleep(1);
        return FALSE;
    }
    return bRet;
}
Example #4
static int Send(sout_stream_t *stream, sout_stream_id_sys_t *id,
                block_t *block)
{
    sout_stream_sys_t *sys = stream->p_sys;

    for (block_t *next = NULL; block != NULL; block = next)
    {
        next = block->p_next; /* remember the rest of the chain before the block is consumed */
        block->p_next = NULL;

        /* FIXME: deal with key frames properly */
        while (sys->clock(block) >= sys->next->offset)
        {
            DelStream(stream);
            AddStream(stream, sys->next->chain);

            sys->next->offset += sys->period;
            sys->next = sys->next->next;
            if (sys->next == NULL)
                sys->next = sys->start;
        }

        if (sys->stream != NULL)
            sout_StreamIdSend(sys->stream, id->id, block);
        else
            block_Release(block);
    }
    return VLC_SUCCESS;
}
Example #5
HTTPLiveStream::HTTPLiveStream(QString srcFile, uint16_t width, uint16_t height,
                               uint32_t bitrate, uint32_t abitrate,
                               uint16_t maxSegments, uint16_t segmentSize,
                               uint32_t aobitrate, int32_t srate)
  : m_writing(false),
    m_streamid(-1),              m_sourceFile(srcFile),
    m_sourceWidth(0),            m_sourceHeight(0),
    m_segmentSize(segmentSize),  m_maxSegments(maxSegments),
    m_segmentCount(0),           m_startSegment(0),
    m_curSegment(0),
    m_height(height),            m_width(width),
    m_bitrate(bitrate),
    m_audioBitrate(abitrate),    m_audioOnlyBitrate(aobitrate),
    m_sampleRate(srate),
    m_created(MythDate::current()),
    m_lastModified(MythDate::current()),
    m_percentComplete(0),
    m_status(kHLSStatusUndefined)
{
    if ((m_width == 0) && (m_height == 0))
        m_width = 640;

    if (m_bitrate == 0)
        m_bitrate = 800000;

    if (m_audioBitrate == 0)
        m_audioBitrate = 64000;

    if (m_segmentSize == 0)
        m_segmentSize = 10;

    if (m_audioOnlyBitrate == 0)
        m_audioOnlyBitrate = 32000;

    m_sourceHost = gCoreContext->GetHostName();

    QFileInfo finfo(m_sourceFile);
    m_outBase = finfo.fileName() +
        QString(".%1x%2_%3kV_%4kA").arg(m_width).arg(m_height)
                .arg(m_bitrate/1000).arg(m_audioBitrate/1000);

    SetOutputVars();

    m_fullURL     = m_httpPrefix + m_outBase + ".m3u8";
    m_relativeURL = m_httpPrefixRel + m_outBase + ".m3u8";

    StorageGroup sgroup("Streaming", gCoreContext->GetHostName());
    m_outDir = sgroup.GetFirstDir();
    QDir outDir(m_outDir);

    if (!outDir.exists() && !outDir.mkdir(m_outDir))
    {
        LOG(VB_RECORD, LOG_ERR, "Unable to create HTTP Live Stream output "
            "directory, Live Stream will not be created");
        return;
    }

    AddStream();
}
Example #6
instruction *GenChoice(void)
{
    instruction *ins;

    ins = NewIns( INS_CHOICE );
    AddStream( LastIns, ins );
    return( ins );
}
Example #7
void GenSemCall( unsigned num )
{
    instruction *ins;

    ins = NewIns( INS_SEMANTIC );
    ins->operand = num;
    CheckLong( ins );
    AddStream( LastIns, ins );
}
Example #8
void GenLblCall( instruction *lbl )
{
    instruction *ins;

    ins = NewIns( INS_CALL );
    ins->ptr = lbl;
    lbl->operand++;
    AddStream( LastIns, ins );
}
Example #9
void GenSetResult( unsigned value )
{
    instruction *ins;

    ins = NewIns( INS_SET_RESULT );
    ins->operand = value;
    CheckLong( ins );
    AddStream( LastIns, ins );
}
Example #10
void GenSetParm( unsigned value )
{
    instruction *ins;

    ins = NewIns( INS_SET_PARM );
    ins->operand = value;
    CheckLong( ins );
    AddStream( LastIns, ins );
}
Example #11
void GenJump( instruction *lbl )
{
    instruction *ins;

    ins = NewIns( INS_JUMP );
    ins->ptr = lbl;
    lbl->operand++;
    AddStream( LastIns, ins );
}
Example #12
void GenError( unsigned value )
{
    instruction *ins;

    ins = NewIns( INS_ERROR );
    ins->operand = value;
    CheckLong( ins );
    AddStream( LastIns, ins );
}
Example #13
void GenOutput( unsigned value )
{
    instruction *ins;

    ins = NewIns( INS_OUTPUT );
    ins->operand = value;
    CheckLong( ins );
    AddStream( LastIns, ins );
}
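Examples #6 through #13 all follow the same pattern: allocate an instruction with NewIns, fill in its operand or target, and hand it to AddStream( LastIns, ins ). The AddStream helper itself is not part of this listing; purely to illustrate that calling convention, a hypothetical version could append the new node after the current tail and advance it:

/* Illustrative sketch only -- not the real helper. The node layout and the
   LastIns global are assumptions inferred from the callers above. */
typedef struct ins_node {
    struct ins_node *next;
    unsigned        operand;
} ins_node;

static ins_node *LastIns;                     /* assumed tail of the instruction stream */

static void AddStreamSketch( ins_node *after, ins_node *ins )
{
    ins->next = NULL;
    if( after != NULL )
        after->next = ins;                    /* chain after the previous instruction */
    LastIns = ins;                            /* the new node becomes the tail */
}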
Example #14
const bool CTsNetworkSender::InputMedia(CMediaData *pMediaData, const DWORD dwInputIndex)
{
	CBlockLock Lock(&m_DecoderLock);

	if (m_bEnableQueueing) {
		AddStream(pMediaData->GetData(), pMediaData->GetSize());
	}

	return true;
}
Example #15
OpenNi2Video::OpenNi2Video(ImageDim dim, int fps)
{
    InitialiseOpenNI();

    openni::Array<openni::DeviceInfo> deviceList;
    openni::OpenNI::enumerateDevices(&deviceList);

    if (deviceList.getSize() < 1) {
        throw VideoException("No OpenNI Devices available. Ensure your camera is plugged in.");
    }

    for(int i = 0 ; i < deviceList.getSize(); i ++) {
        const char*  device_uri = deviceList[i].getUri();
        const int dev_id = AddDevice(device_uri);
        AddStream(OpenNiStreamMode( OpenNiDepth_1mm, dim, fps, dev_id) );
        AddStream(OpenNiStreamMode( OpenNiRgb, dim, fps, dev_id) );
    }

    SetupStreamModes();
    Start();
}
Example #16
OpenNi2Video::OpenNi2Video(const std::string& device_uri, std::vector<OpenNiStreamMode> &stream_modes)
{
    InitialiseOpenNI();

    AddDevice(device_uri);

    for(size_t i=0; i < stream_modes.size(); ++i) {
        OpenNiStreamMode& mode = stream_modes[i];
        AddStream(mode);
    }

    SetupStreamModes();
    Start();
}
Example #17
VideoEncoder* Muxer::AddVideoEncoder(const QString& codec_name, const std::vector<std::pair<QString, QString> >& codec_options,
									 unsigned int bit_rate, unsigned int width, unsigned int height, unsigned int frame_rate) {
	AVCodec *codec = FindCodec(codec_name);
	AVStream *stream = AddStream(codec);
	VideoEncoder *encoder;
	AVDictionary *options = NULL;
	try {
		VideoEncoder::PrepareStream(stream, codec, &options, codec_options, bit_rate, width, height, frame_rate);
		m_encoders[stream->index] = encoder = new VideoEncoder(this, stream, codec, &options);
		av_dict_free(&options);
	} catch(...) {
		av_dict_free(&options);
		throw;
	}
	return encoder;
}
Example #18
AudioEncoder* Muxer::AddAudioEncoder(const QString& codec_name, const std::vector<std::pair<QString, QString> >& codec_options,
									 unsigned int bit_rate, unsigned int channels, unsigned int sample_rate) {
	AVCodec *codec = FindCodec(codec_name);
	AVStream *stream = AddStream(codec);
	AudioEncoder *encoder;
	AVDictionary *options = NULL;
	try {
		AudioEncoder::PrepareStream(stream, codec, &options, codec_options, bit_rate, channels, sample_rate);
		m_encoders[stream->index] = encoder = new AudioEncoder(this, stream, codec, &options);
		av_dict_free(&options);
	} catch(...) {
		av_dict_free(&options);
		throw;
	}
	return encoder;
}
Example #19
bool Multiplexer::CreateAndRestoreSavedStreamProperties()
{
    DataStream *dstr = new DataStream( this,
                                       port_save,
                                       baud_rate_save,
                                       port_type_save,
                                       priority_save,
                                       bGarmin_GRMN_mode_save
                                     );
    dstr->SetInputFilter(input_sentence_list_save);
    dstr->SetInputFilterType(input_sentence_list_type_save);
    dstr->SetOutputFilter(output_sentence_list_save);
    dstr->SetOutputFilterType(output_sentence_list_type_save);
    dstr->SetChecksumCheck(bchecksum_check_save);

    AddStream(dstr);

    return true;
}
Example #20
void Multiplexer::StartAllStreams( void )
{
    for ( size_t i = 0; i < g_pConnectionParams->Count(); i++ )
    {
        ConnectionParams *cp = g_pConnectionParams->Item(i);
        if( cp->bEnabled ) {

#ifdef __WXGTK__
            if( cp->GetDSPort().Contains(_T("Serial"))) {
                if( ! g_bserial_access_checked ){
                    if( !CheckSerialAccess() ){
                    }
                    g_bserial_access_checked = true;
                }
            }
#endif

            dsPortType port_type = cp->IOSelect;
            DataStream *dstr = new DataStream( this,
                                               cp->Type,
                                               cp->GetDSPort(),
                                               wxString::Format(wxT("%i"),cp->Baudrate),
                                               port_type,
                                               cp->Priority,
                                               cp->Garmin
                                               );
            dstr->SetInputFilter(cp->InputSentenceList);
            dstr->SetInputFilterType(cp->InputSentenceListType);
            dstr->SetOutputFilter(cp->OutputSentenceList);
            dstr->SetOutputFilterType(cp->OutputSentenceListType);
            dstr->SetChecksumCheck(cp->ChecksumCheck);

            cp->b_IsSetup = true;

            AddStream(dstr);
        }
    }

}
Example #21
VideoEncoder* Muxer::AddVideoEncoder(const QString& codec_name, const std::vector<std::pair<QString, QString> >& codec_options,
									 unsigned int bit_rate, unsigned int width, unsigned int height, unsigned int frame_rate) {
	AVCodec *codec = FindCodec(codec_name);
	AVCodecContext *codec_context = NULL;
	AVStream *stream = AddStream(codec, &codec_context);
	VideoEncoder *encoder;
	AVDictionary *options = NULL;
	try {
		VideoEncoder::PrepareStream(stream, codec_context, codec, &options, codec_options, bit_rate, width, height, frame_rate);
		m_encoders[stream->index] = encoder = new VideoEncoder(this, stream, codec_context, codec, &options);
#if SSR_USE_AVSTREAM_CODECPAR
		if(avcodec_parameters_from_context(stream->codecpar, codec_context) < 0) {
			Logger::LogError("[Muxer::AddVideoEncoder] " + Logger::tr("Error: Can't copy parameters to stream!"));
			throw LibavException();
		}
#endif
		av_dict_free(&options);
	} catch(...) {
		av_dict_free(&options);
		throw;
	}
	return encoder;
}
Example #22
int CStreamMuxerImp1::PushStream(BYTE* pData, int len, bool isSyncPoint, LONGLONG ts)
{
    HRESULT hr = S_OK;
    if(!m_hSSFMux)
    {
        Sleep(1);
        return TRUE;
    }
    BOOL ret = m_pVideoStream->AddFrame(pData,len,isSyncPoint,ts);
    if(m_pVideoStream->GetState() == NEED_TO_BE_ADDED)
    {
        if(!m_pVideoStream->OpenChannelWriter())
        {
            return FALSE;
        }
        if(AddStream(m_pVideoStream))
        {
            // Once more: the first pass was only analysis.
            m_pVideoStream->InitStart();
            ret = m_pVideoStream->AddFrame(pData,len,isSyncPoint,ts);
        }
    }
    return ret;
}
Example #23
OpenNi2Video::OpenNi2Video(std::vector<OpenNiStreamMode>& stream_modes)
{
    InitialiseOpenNI();

    openni::Array<openni::DeviceInfo> deviceList;
    openni::OpenNI::enumerateDevices(&deviceList);

    if (deviceList.getSize() < 1) {
        throw VideoException("OpenNI2: No devices available. Ensure your camera is plugged in.");
    }

    for(int i = 0 ; i < deviceList.getSize(); i ++) {
        const char*  device_uri = deviceList[i].getUri();
        AddDevice(device_uri);
    }

    for(size_t i=0; i < stream_modes.size(); ++i) {
        OpenNiStreamMode& mode = stream_modes[i];
        AddStream(mode);
    }

    SetupStreamModes();
    Start();
}
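The OpenNi2Video constructors in Examples #2, #15, #16 and #23 differ only in how device URIs and stream modes are chosen. A hypothetical call site for the Example #23 overload, passing explicit modes for a single device, might look like this (the header path is an assumption; the OpenNiStreamMode arguments mirror the ones the constructors above pass to AddStream):

#include <pangolin/video/drivers/openni2.h>   // assumed header location
#include <vector>

void OpenFirstDeviceWithExplicitModes()
{
    using namespace pangolin;

    // Depth and RGB from device 0, default resolution, 30 fps.
    std::vector<OpenNiStreamMode> modes;
    modes.push_back(OpenNiStreamMode(OpenNiDepth_1mm, ImageDim(), 30, 0));
    modes.push_back(OpenNiStreamMode(OpenNiRgb,       ImageDim(), 30, 0));

    OpenNi2Video video(modes);                // Example #23 overload
}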
Example #24
bool CNscContext::ReadNextLine (bool fInComment)
{

	//
	// Read the next line
	//
try_again:;
	for (;;)
	{
		m_pStreamTop ->nLine++;
		if (m_pStreamTop ->pStream ->ReadLine (
			m_pStreamTop ->pszLine, Max_Line_Length) == NULL)
		{
			if (fInComment || m_pStreamTop ->pNext == NULL)
				return false;
			RemoveTopStream ();
		}
		else
			break;
	}

	//
	// If we aren't in a comment, then test for preprocessor
	//

	if (!fInComment)
	{

		//
		// Search for the first non-white character
		//

		char *p = m_pStreamTop ->pszLine;
		while (*p != 0 && (*p <= ' ' || *p > 126))
			p++;

		//
		// If this is a pre-processor statement
		//

		if (*p == '#')
		{

			//
			// If we have an include
			//

			if (strncmp (p, "#include", 8) == 0)
			{
				//
				// Force a new line read on this stream on return
				//

				m_pStreamTop ->pszNextTokenPos = NULL;

				//
				// Extract the name
				//

				p += 8;
				while (*p && *p != '"')
					p++;
				p++;
				char *pszNameStart = p;
				while (*p && *p != '"')
					p++;
				int nCount = (int) (p - pszNameStart);
				char *pszTemp = (char *) alloca (nCount + 5);
				memmove (pszTemp, pszNameStart, nCount);
				pszTemp [nCount] = 0;
				//
				// Remove any extension
				//
                                
				p = strchr (pszTemp, '.');
				if (p)
					*p = 0;
				
				// make sure we use lower case
				for (p = pszTemp; *p != '\0'; p++)
					*p = (char)tolower(*p);

				//
				// Search the current list of included files and see
				// if we have already done it
				//

				size_t i;
				for (i = 0; i < m_asFiles .GetCount (); i++)
				{
					if (stricmp (m_asFiles [i] .strName .c_str (), pszTemp) == 0)
						break;
				}

				//
				// If this isn't a duplicate
				//

				if (i >= m_asFiles .GetCount ())
				{

					//
					// Try to load the resource
					//

					bool fAllocated = false;
					UINT32 ulSize = 0;
					unsigned char *pauchData = NULL;

					if (m_pLoader)
					{
						pauchData = m_pLoader ->LoadResource (
							pszTemp, NwnResType_NSS, &ulSize, 
							&fAllocated);
					}
					if (pauchData == NULL)
					{
						GenerateError ("Unable to open the "
							"include file \"%s\"", pszTemp);
						return false;
					}

					//
					// Add stream
					//

					strcat (pszTemp, ".nss");
					CNwnStream *pStream = new CNwnMemoryStream (
						pszTemp, pauchData, ulSize, fAllocated);
					AddStream (pStream);
				}

				//
				// Read the next line
				//
				goto try_again;
			}

			//
			// If we have a define
			//

			if (strncmp (p, "#define", 7) == 0)
			{
				p += 7;

				//
				// Get the first parameter
				//

				while (*p && (*p <= ' ' || *p > 126))
					p++;
				if (*p == 0)
				{
					GenerateError ("#define syntax");
					goto try_again;
				}
				char *pszDefine = p;
				while (*p && (*p > ' ' && *p <= 126))
					p++;
				char *pszDefineEnd = p;

				//
				// Get the second parameter
				//

				while (*p && (*p <= ' ' || *p > 126))
					p++;
				if (*p == 0)
				{
					GenerateError ("#define syntax");
					goto try_again;
				}
				char *pszValue = p;
				while (*p && (*p > ' ' && *p <= 126))
					p++;
				char *pszValueEnd = p;

				//
				// Make sure there isn't anything at the end
				//

				while (*p && (*p <= ' ' || *p > 126))
					p++;
				if (*p != 0)
				{
					GenerateError ("#define syntax");
					goto try_again;
				}

				//
				// Copy the two strings into temp arrays
				//

				int nDefine = (int) (pszDefineEnd - pszDefine);
				char *pszDTmp = (char *) alloca (nDefine + 1);
				memcpy (pszDTmp, pszDefine, nDefine);
				pszDTmp [nDefine] = 0;

				int nValue = (int) (pszValueEnd - pszValue);
				char *pszVTmp = (char *) alloca (nValue + 1);
				memcpy (pszVTmp, pszValue, nValue);
				pszVTmp [nValue] = 0;

				//
				// If this is the number of engine structures
				//

				if (strcmp (pszDTmp, "ENGINE_NUM_STRUCTURES") == 0)
				{
					// Bah, we ignore this
				}
				
				//
				// If this is an engine structure
				//

				else if (strncmp (pszDTmp, "ENGINE_STRUCTURE_", 17) == 0)
				{
					p = &pszDTmp [17];
					int nIndex = atol (p);
					g_astrNscEngineTypes [nIndex] = pszVTmp;
					if (g_sNscReservedWords .Find (pszVTmp) == NULL)
					{
						NscSymbol *pSymbol = g_sNscReservedWords .Add (
							pszVTmp, NscSymType_Token);
						pSymbol ->nToken = ENGINE_TYPE;
						pSymbol ->nEngineObject = nIndex;
					}
				}

				//
				// Otherwise, unknown
				//

				else 
				{
					GenerateError ("#define unknown or invalid definition");
				}
				goto try_again;
			}
		}
	}

	//
	// Set the starting pointer
	//

	m_pStreamTop ->pszNextTokenPos = m_pStreamTop ->pszLine;
	return true;
}
Example #25
int main(){
	auto pLogger = CB::Log::CLogger::GetInstance();
	pLogger->AddStream(CB::IO::File::Open(L"main.log", CB::IO::File::AccessType::WriteOnly, CB::IO::File::OpenAction::AlwaysCreate).Cast<CB::IO::IStream>());
	pLogger->AddStream(CB::IO::Console::Create().Cast<CB::IO::IStream>(), CB::Log::CTextFormatter::Create(CB::String::Encoding::ANSI).Cast<CB::Log::IEntryFormatter>());
	pLogger->SetDebugMode(true);

	try{
		auto pWinDriver = CB::Window::LoadDriver(L"MSWindowDriver");
		auto pGraphicDriver = CB::Graphic::LoadDriver(L"OGLGraphicDriver");
		{
			auto pWinManager = pWinDriver->CreateManager();
			auto pGraphicManager = pGraphicDriver->CreateManager(pWinManager);
			
			CB::Math::CSize outSize(640, 480);
			auto pWindow = pWinManager->CreateWindow(L"GraphicTest", CB::Window::Style::Single, outSize);

			auto pGraphicAdapter = pGraphicManager->GetDefaultAdapter();

			CB::Graphic::CDisplayMode dispMode(pWindow->GetSize(), 0, CB::Graphic::BufferFormat::B8G8R8X8);
			CB::Graphic::CDeviceDesc devDesc(pWindow, dispMode, CB::Graphic::BufferFormat::D24S8, false);

			CB::Collection::CList<CB::Graphic::FeatureLevel> featureLevels;
			featureLevels.Add(CB::Graphic::FeatureLevel::Level_1);

			auto pGraphicDevice = pGraphicAdapter->CreateDevice(pWindow, devDesc, featureLevels);

			pWindow->OnClose += CB::Signals::CFunc<const bool, CB::CRefPtr<CB::Window::IWindow>>(CloseEvent);

			pWindow->SetVisible(true);

			CB::Graphic::CDepthStencilStateDesc depthDesc;
			depthDesc.bDepthTestEnabled = true;
			depthDesc.uDepthFunction = CB::Graphic::CompareFunc::LessEqual;
			auto pDepthState = pGraphicDevice->CreateState(depthDesc);
			pGraphicDevice->SetState(pDepthState.Cast<CB::Graphic::IDeviceState>());

			CB::Graphic::CRasterizerStateDesc rastDesc;
			rastDesc.uCullMode = CB::Graphic::CullMode::None;
			auto pRastState = pGraphicDevice->CreateState(rastDesc);
			pGraphicDevice->SetState(pRastState.Cast<CB::Graphic::IDeviceState>());

			CB::Graphic::CBlendStateDesc blendDesc;
			blendDesc.ColorBlend.uDestOperand = CB::Graphic::BlendOption::OneMinusSourceAlpha;
			blendDesc.ColorBlend.uSourceOperand = CB::Graphic::BlendOption::SourceAlpha;
			blendDesc.ColorBlend.uOperation = CB::Graphic::BlendOperation::Add;
			blendDesc.AlphaBlend.uDestOperand = CB::Graphic::BlendOption::OneMinusSourceAlpha;
			blendDesc.AlphaBlend.uSourceOperand = CB::Graphic::BlendOption::SourceAlpha;
			blendDesc.AlphaBlend.uOperation = CB::Graphic::BlendOperation::Add;
			blendDesc.bEnabled[0] = true;
			auto pBlendState = pGraphicDevice->CreateState(blendDesc);
			pGraphicDevice->SetState(pBlendState.Cast<CB::Graphic::IDeviceState>());

			auto pFontManager = CB::Font::CManager::Create();

			auto pFontStream = CB::IO::File::Open(L"Assets/font.ttf").Cast<CB::IO::IStream>();
			auto pFont = pFontManager->Load(pFontStream);
			
			pFont->SelectFace(0);
			pFont->SetSize(24);

			CB::Collection::CList<CB::Tools::CFontCharDesc> charDescs;
			CB::Tools::CFontTextureGenerator fontGen(pGraphicDevice);

			fontGen.MaxTextureSize.Set(512, 512);
			auto pTexture = fontGen.Generate(pFont, charDescs);

			CB::Tools::CTextMeshGenerator textGen(charDescs);
			CB::Tools::CMeshRawIVT textMesh;

			textGen.Generate(L"Marek	M³ynarski!", textMesh);

			CB::Collection::CList<CB::Graphic::CVertexElement> vEls;
			vEls.Add(CB::Graphic::CVertexElement(0, L"vinput.vPosition", CB::Graphic::VertexType::Float, 3, 0));
			vEls.Add(CB::Graphic::CVertexElement(1, L"vinput.vTexCoord", CB::Graphic::VertexType::Float, 2, 0));

			GraphicTest::CShaderLoader shaders(pGraphicDevice, L"Shaders/TextureShader.cg");

			auto pTextDecl = pGraphicDevice->CreateVertexDeclaration(shaders.pVertexShader, vEls);

			auto pTextVertexBuffer = pGraphicDevice->CreateBuffer(CB::Graphic::BufferType::Vertex, CB::Graphic::BufferUsage::Dynamic, CB::Graphic::BufferAccess::Write, textMesh.Vertices);
			auto pTextTCoordBuffer = pGraphicDevice->CreateBuffer(CB::Graphic::BufferType::Vertex, CB::Graphic::BufferUsage::Dynamic, CB::Graphic::BufferAccess::Write, textMesh.TexCoords);
			auto pTextIndexBuffer = pGraphicDevice->CreateBuffer(CB::Graphic::BufferType::Index, CB::Graphic::BufferUsage::Dynamic, CB::Graphic::BufferAccess::Write, textMesh.Indices);

			float32 fAspect = (float32)outSize.Width / (float32)outSize.Height;
			CB::Math::CMatrix mProj = CB::Math::CMatrix::GetPerspective(fAspect, 60.0f, 1.0f, 100.0f);
			CB::Math::CMatrix mView = CB::Math::CMatrix::GetTranslation(-4.0f, 0.0f, -3.4f);
			CB::Math::CMatrix mModel = CB::Math::CMatrix::GetIdentity();

			shaders.pFragmentShader->SetSampler(L"texDiffuse", pTexture.Cast<CB::Graphic::IBaseTexture>());
			pTexture->SetFilters(CB::Graphic::TextureFilter::Linear, CB::Graphic::TextureFilter::Linear, CB::Graphic::TextureFilter::Linear);
			pTexture->SetAnisotropy(8);
			//g_pTexture = texture.pTexture;			

			while(g_bRun){
				pGraphicDevice->Clear(1.0f, 1);
				pGraphicDevice->Clear(CB::Math::CColor(1.0f, 0.5f, 0.0f, 1.0f));

				pGraphicDevice->BeginRender();

				pGraphicDevice->SetShader(shaders.pVertexShader);
				pGraphicDevice->SetShader(shaders.pFragmentShader);

				static float32 fV = 0.0f;
				fV += 20 * g_Timer.GetTimeDelta();
				mModel = CB::Math::CMatrix::GetRotation(CB::Math::AxisOrientation::AxisX, fV);

				shaders.pVertexShader->SetUniform(L"vinput.mProj", mProj);
				shaders.pVertexShader->SetUniform(L"vinput.mView", mView);
				shaders.pVertexShader->SetUniform(L"vinput.mModel", mModel);

				pGraphicDevice->SetVertexDeclaration(pTextDecl);
				pGraphicDevice->SetVertexBuffer(0, pTextVertexBuffer);
				pGraphicDevice->SetVertexBuffer(1, pTextTCoordBuffer);
				pGraphicDevice->SetIndexBuffer(pTextIndexBuffer);

				pGraphicDevice->RenderIndexed(textMesh.uNumberOfPolygons);

				pGraphicDevice->EndRender();

				g_Timer.Update();
				pWinManager->ProcessEvents();

				float fFPS = 1.0f / (g_Timer.GetTimeDelta() == 0.0f ? 1.0f : g_Timer.GetTimeDelta());
				uint32 uFPS = (uint32)fFPS;

				textMesh.Clear();
				textGen.Generate(L"FPS: " + CB::String::ToString(uFPS), textMesh);

				pTextVertexBuffer->LoadData(textMesh.Vertices);
				pTextTCoordBuffer->LoadData(textMesh.TexCoords);
				pTextIndexBuffer->LoadData(textMesh.Indices);

				pGraphicDevice->Swap();
			}

			g_pTexture.Release();
		}
	}
	catch(CB::Exception::CException& Exception){
		CB::Log::Write(Exception, CB::Log::LogLevel::Fatal);
		CB::Message::Show(Exception, CB::Message::Icon::Error);
	}
	return 0;
}
Example #26
PUBLIC Stream *
Open(
     Object *	object,
     string	name,
     word	mode )
{
	word rc = Err_Null;
	Stream *stream = NULL;
	MCB *mcb;
	IOCReply1 *rep;
	word stlen;
	Port reply;

#ifdef SYSDEB
	SysDebug(ioc)("Open(%O,%N,%x)",object,name,mode);
#endif

	if( CheckObject(object,C_Locate) != Err_Null ) return Null(Stream);

	reply = NewPort();

	mcb = NewMsgBuf(0);
	rep = (IOCReply1 *)mcb->Control;

	InitMCB(mcb,MsgHdr_Flags_preserve,
		MyTask->IOCPort,reply,FC_GSP|FG_Open|object->FnMod);

	MarshalCommon(mcb,object,name);

	MarshalWord(mcb,mode);

	if( (rc = IOCMsg(mcb,NULL)) < Err_Null ) goto Done;

	stlen = sizeof(Stream) + (word)strlen(mcb->Data+rep->Pathname) + SafetyMargin;

	stream = (Stream *)Malloc(stlen);

	if( stream == NULL ) 
	{
		rc = EC_Error|SS_SysLib|EG_NoMemory|EO_Stream;
		goto Done;
	}
	else memclr( (void *)stream, (int)stlen );

	if( SetupStream( stream, mcb ) )
	{
		stream->Flags |= mode&Flags_SaveMode;
		InitSemaphore( &stream->Mutex, 1 );
		stream->Pos = 0;
	}

	AddStream( stream );	

	rc = Err_Null;
	
	if( mode & Flags_Append ) Seek(stream, S_End, 0);
    Done:
#ifdef SYSDEB
	SysDebug(ioc)("Open: %E stream: %S",rc,stream);
#endif
	FreeMsgBuf(mcb);

	if( rc < Err_Null ) FreePort(reply);

	object->Result2 = rc;
	return stream;
}
Example #27
DemuxPacket* CDVDDemuxFFmpeg::Read()
{
  AVPacket pkt;
  DemuxPacket* pPacket = NULL;
  // in some cases where the received packet is invalid we need to return an empty packet (0 length), otherwise the main loop (in CDVDPlayer)
  // would consider this the end of stream and stop.
  bool bReturnEmpty = false;
  Lock();
  if (m_pFormatContext)
  {
    // assume we are not eof
    if(m_pFormatContext->pb)
      m_pFormatContext->pb->eof_reached = 0;

    // timeout reads after 100ms
    g_urltimeout = GetTickCount() + 100;
    //g_urltimeout = 0;
    int result = 0;
    try
    {
      result = m_dllAvFormat.av_read_frame(m_pFormatContext, &pkt);
    }
    catch(const win32_exception &e)
    {
      e.writelog(__FUNCTION__);
      result = AVERROR(EFAULT);
    }
    g_urltimeout = 0;

    if (result == AVERROR(EINTR) || result == AVERROR(EAGAIN))
    {
      // timeout, probably no real error, return empty packet
      bReturnEmpty = true;
    }
    else if (result < 0)
    {
      Flush();
    }
    else
    {
      // XXX, in some cases ffmpeg returns a negative packet size
      if (pkt.size < 0 || pkt.stream_index >= MAX_STREAMS)
      {
        CLog::Log(LOGERROR, "CDVDDemuxFFmpeg::Read() no valid packet");
        bReturnEmpty = true;
      }
      else
      {
        AVStream *stream = m_pFormatContext->streams[pkt.stream_index];

        if (m_pFormatContext->nb_programs)
        {
          /* check so packet belongs to selected program */
          for (unsigned int i = 0; i < m_pFormatContext->programs[m_program]->nb_stream_indexes; i++)
          {
            if(pkt.stream_index == (int)m_pFormatContext->programs[m_program]->stream_index[i])
            {
              pPacket = CDVDDemuxUtils::AllocateDemuxPacket(pkt.size);
              break;
            }
          }

          if (!pPacket)
            bReturnEmpty = true;
        }
        else
          pPacket = CDVDDemuxUtils::AllocateDemuxPacket(pkt.size);

        if (pPacket)
        {
          // lavf sometimes bugs out and gives 0 dts/pts instead of no dts/pts
          // since this should only happen on the initial frame under normal
          // circumstances, let's assume it is wrong all the time
          if(pkt.dts == 0)
            pkt.dts = AV_NOPTS_VALUE;
          if(pkt.pts == 0)
            pkt.pts = AV_NOPTS_VALUE;

          if(m_bMatroska && stream->codec && stream->codec->codec_type == CODEC_TYPE_VIDEO)
          { // matroska can store different timestamps
            // for different formats, for native stored
            // stuff it is pts, but for ms compatibility
            // tracks, it is really dts. sadly ffmpeg
            // sets these two timestamps equal all the
            // time, so we select it here instead
            if(stream->codec->codec_tag == 0)
              pkt.dts = AV_NOPTS_VALUE;
            else
              pkt.pts = AV_NOPTS_VALUE;
          }

          // copy contents into our own packet
          pPacket->iSize = pkt.size;

          // maybe we can avoid a memcpy here by detecting where pkt.destruct is pointing to?
          if (pkt.data)
            memcpy(pPacket->pData, pkt.data, pPacket->iSize);

          pPacket->pts = ConvertTimestamp(pkt.pts, stream->time_base.den, stream->time_base.num);
          pPacket->dts = ConvertTimestamp(pkt.dts, stream->time_base.den, stream->time_base.num);
          pPacket->duration =  DVD_SEC_TO_TIME((double)pkt.duration * stream->time_base.num / stream->time_base.den);

          // used to guess streamlength
          if (pPacket->dts != DVD_NOPTS_VALUE && (pPacket->dts > m_iCurrentPts || m_iCurrentPts == DVD_NOPTS_VALUE))
            m_iCurrentPts = pPacket->dts;


          // check if stream has passed full duration, needed for live streams
          if(pkt.dts != (int64_t)AV_NOPTS_VALUE)
          {
              int64_t duration;
              duration = pkt.dts;
              if(stream->start_time != (int64_t)AV_NOPTS_VALUE)
                duration -= stream->start_time;

              if(duration > stream->duration)
              {
                stream->duration = duration;
                duration = m_dllAvUtil.av_rescale_rnd(stream->duration, stream->time_base.num * AV_TIME_BASE, stream->time_base.den, AV_ROUND_NEAR_INF);
                if((m_pFormatContext->duration == (int64_t)AV_NOPTS_VALUE && m_pFormatContext->file_size > 0)
                || (m_pFormatContext->duration != (int64_t)AV_NOPTS_VALUE && duration > m_pFormatContext->duration))
                  m_pFormatContext->duration = duration;
              }
          }

          // check if the stream seems to have grown since start
          if(m_pFormatContext->file_size > 0 && m_pFormatContext->pb)
          {
            if(m_pFormatContext->pb->pos > m_pFormatContext->file_size)
              m_pFormatContext->file_size = m_pFormatContext->pb->pos;
          }

          pPacket->iStreamId = pkt.stream_index; // XXX just for now
        }
      }
      av_free_packet(&pkt);
    }
  }
  Unlock();

  if (bReturnEmpty && !pPacket)
  {
      pPacket = CDVDDemuxUtils::AllocateDemuxPacket(0);
      if(pPacket)
      {
        pPacket->dts = DVD_NOPTS_VALUE;
        pPacket->pts = DVD_NOPTS_VALUE;
        pPacket->iStreamId = -1;
      }
  }

  if (!pPacket) return NULL;

  // check streams, can we make this a bit more simple?
  if (pPacket && pPacket->iStreamId >= 0 && pPacket->iStreamId < MAX_STREAMS)
  {
    if (!m_streams[pPacket->iStreamId] ||
        m_streams[pPacket->iStreamId]->pPrivate != m_pFormatContext->streams[pPacket->iStreamId] ||
        m_streams[pPacket->iStreamId]->codec != m_pFormatContext->streams[pPacket->iStreamId]->codec->codec_id)
    {
      // content has changed, or stream did not yet exist
      AddStream(pPacket->iStreamId);
    }
    // we already check for a valid m_streams[pPacket->iStreamId] above
    else if (m_streams[pPacket->iStreamId]->type == STREAM_AUDIO)
    {
      if (((CDemuxStreamAudio*)m_streams[pPacket->iStreamId])->iChannels != m_pFormatContext->streams[pPacket->iStreamId]->codec->channels ||
          ((CDemuxStreamAudio*)m_streams[pPacket->iStreamId])->iSampleRate != m_pFormatContext->streams[pPacket->iStreamId]->codec->sample_rate)
      {
        // content has changed
        AddStream(pPacket->iStreamId);
      }
    }
    else if (m_streams[pPacket->iStreamId]->type == STREAM_VIDEO)
    {
      if (((CDemuxStreamVideo*)m_streams[pPacket->iStreamId])->iWidth != m_pFormatContext->streams[pPacket->iStreamId]->codec->width ||
          ((CDemuxStreamVideo*)m_streams[pPacket->iStreamId])->iHeight != m_pFormatContext->streams[pPacket->iStreamId]->codec->height)
      {
        // content has changed
        AddStream(pPacket->iStreamId);
      }
    }
  }
  return pPacket;
}
Example #28
bool CDVDDemuxFFmpeg::Open(CDVDInputStream* pInput)
{
  AVInputFormat* iformat = NULL;
  std::string strFile;
  m_iCurrentPts = DVD_NOPTS_VALUE;
  m_speed = DVD_PLAYSPEED_NORMAL;

  if (!pInput) return false;

  if (!m_dllAvUtil.Load() || !m_dllAvCodec.Load() || !m_dllAvFormat.Load())  {
    CLog::Log(LOGERROR,"CDVDDemuxFFmpeg::Open - failed to load ffmpeg libraries");
    return false;
  }

  // register codecs
  m_dllAvFormat.av_register_all();
  m_dllAvFormat.url_set_interrupt_cb(interrupt_cb);

  // could be used for interrupting ffmpeg while opening a file (eg internet streams)
  // url_set_interrupt_cb(NULL);

  m_pInput = pInput;
  strFile = m_pInput->GetFileName();

  bool streaminfo = true; /* set to true if we want to look for streams before playback*/

  if( m_pInput->GetContent().length() > 0 )
  {
    std::string content = m_pInput->GetContent();

    /* check if we can get a hint from content */
    if( content.compare("audio/aacp") == 0 )
      iformat = m_dllAvFormat.av_find_input_format("aac");
    else if( content.compare("audio/aac") == 0 )
      iformat = m_dllAvFormat.av_find_input_format("aac");
    else if( content.compare("audio/mpeg") == 0  )  
      iformat = m_dllAvFormat.av_find_input_format("mp3");
    else if( content.compare("video/mpeg") == 0 )
      iformat = m_dllAvFormat.av_find_input_format("mpeg");
    else if( content.compare("video/flv") == 0 )
      iformat = m_dllAvFormat.av_find_input_format("flv");
    else if( content.compare("video/x-flv") == 0 )
      iformat = m_dllAvFormat.av_find_input_format("flv");

    /* these are likely pure streams, and as such we don't */
    /* want to try to look for streaminfo before playback */
    if( iformat )
      streaminfo = false;
  }

  if( m_pInput->IsStreamType(DVDSTREAM_TYPE_FFMPEG) )
  {
    g_urltimeout = GetTickCount() + 10000;

    // special stream type that makes avformat handle file opening
    // allows internal ffmpeg protocols to be used
    if( m_dllAvFormat.av_open_input_file(&m_pFormatContext, strFile.c_str(), iformat, FFMPEG_FILE_BUFFER_SIZE, NULL) < 0 )
    {
      CLog::Log(LOGDEBUG, "Error, could not open file %s", strFile.c_str());
      Dispose();
      return false;
    }
  }
  else
  {
    g_urltimeout = 0;

    // initialize url context to be used as filedevice
    URLContext* context = (URLContext*)m_dllAvUtil.av_mallocz(sizeof(struct URLContext) + strFile.length() + 1);
    context->prot = &dvd_file_protocol;
    context->priv_data = (void*)m_pInput;
    context->max_packet_size = FFMPEG_FILE_BUFFER_SIZE;

    if (m_pInput->IsStreamType(DVDSTREAM_TYPE_DVD))
    {
      context->max_packet_size = FFMPEG_DVDNAV_BUFFER_SIZE;
      context->is_streamed = 1;
    }
    if (m_pInput->IsStreamType(DVDSTREAM_TYPE_TV))
    {
      if(m_pInput->Seek(0, SEEK_POSSIBLE) == 0)
        context->is_streamed = 1;

      // this actually speeds up channel changes by almost a second
      // however, it also makes the player not buffer anything, which
      // leads to buffer underruns in the audio renderer
      //if(context->is_streamed)
      //  streaminfo = false;
    }
    else
    {
      if(m_pInput->Seek(0, SEEK_POSSIBLE) == 0)
        context->is_streamed = 1;
    }

#if LIBAVFORMAT_VERSION_INT >= (52<<16)
    context->filename = (char *) &context[1];
#endif

    strcpy(context->filename, strFile.c_str());  

    // open our virtual file device
    if(m_dllAvFormat.url_fdopen(&m_ioContext, context) < 0)
    {
      CLog::Log(LOGERROR, "%s - Unable to init io context", __FUNCTION__);
      m_dllAvUtil.av_free(context);
      Dispose();
      return false;
    }

    if( iformat == NULL )
    {
      // let ffmpeg decide which demuxer we have to open
      AVProbeData pd;
      BYTE probe_buffer[2048];

      // init probe data
      pd.buf = probe_buffer;
      pd.filename = strFile.c_str();

      // read data using avformat's buffers
      pd.buf_size = m_dllAvFormat.get_buffer(m_ioContext, pd.buf, sizeof(probe_buffer));            
      if (pd.buf_size == 0)
      {
        CLog::Log(LOGERROR, "%s - error reading from input stream, %s", __FUNCTION__, strFile.c_str());
        return false;
      }
      // restore position again
      m_dllAvFormat.url_fseek(m_ioContext , 0, SEEK_SET);

      iformat = m_dllAvFormat.av_probe_input_format(&pd, 1);
      if (!iformat)
      {
        CLog::Log(LOGERROR, "%s - error probing input format, %s", __FUNCTION__, strFile.c_str());
        return false;
      }
    }


    // open the demuxer
    if (m_dllAvFormat.av_open_input_stream(&m_pFormatContext, m_ioContext, strFile.c_str(), iformat, NULL) < 0)
    {
      CLog::Log(LOGERROR, "Error, could not open file %s", strFile.c_str());
      Dispose();
      return false;
    }
  }
  
  // we need to know if this is matroska later
  m_bMatroska = strcmp(m_pFormatContext->iformat->name, "matroska") == 0;

  // in combination with libdvdnav seek, av_find_stream_info won't work
  // so we do this for files only
  if (streaminfo)
  {
    if (m_pInput->IsStreamType(DVDSTREAM_TYPE_TV))
    {
      /* to speed up livetv channel changes, only analyse a very short duration */
      if(m_pInput->Seek(0, SEEK_POSSIBLE) == 0)
        m_pFormatContext->max_analyze_duration = 500000;
    }


    CLog::Log(LOGDEBUG, "%s - av_find_stream_info starting", __FUNCTION__);
    int iErr = m_dllAvFormat.av_find_stream_info(m_pFormatContext);
    if (iErr < 0)
    {
      CLog::Log(LOGWARNING,"could not find codec parameters for %s", strFile.c_str());
      if (m_pFormatContext->nb_streams == 1 && m_pFormatContext->streams[0]->codec->codec_id == CODEC_ID_AC3)
      {
        // special case, our codecs can still handle it.
      }
      else
      {
        Dispose();
        return false;
      }
    }
    CLog::Log(LOGDEBUG, "%s - av_find_stream_info finished", __FUNCTION__);
  }
  // reset any timeout
  g_urltimeout = 0;

  // if format can be nonblocking, let's use that
  m_pFormatContext->flags |= AVFMT_FLAG_NONBLOCK;

  // print some extra information
  m_dllAvFormat.dump_format(m_pFormatContext, 0, strFile.c_str(), 0);

  UpdateCurrentPTS();

  // add the ffmpeg streams to our own stream array
  m_program = 0;
  if (m_pFormatContext->nb_programs)
  {
    // discard nonselected programs
    for (unsigned int i = 0; i < m_pFormatContext->nb_programs; i++)
    {
      if(i != m_program)
        m_pFormatContext->programs[m_program]->discard = AVDISCARD_ALL;
    }
    // add streams from selected program
    for (unsigned int i = 0; i < m_pFormatContext->programs[m_program]->nb_stream_indexes; i++)
      AddStream(m_pFormatContext->programs[m_program]->stream_index[i]);
  }
  else
  {
    for (unsigned int i = 0; i < m_pFormatContext->nb_streams; i++)
      AddStream(i);
  }

  return true;
}
Example #29
bool OMXReader::GetStreams()
{
  if(!m_pFormatContext)
    return false;

  unsigned int    m_program         = UINT_MAX;

  ClearStreams();

  if (m_pFormatContext->nb_programs)
  {
    // look for first non empty stream and discard nonselected programs
    for (unsigned int i = 0; i < m_pFormatContext->nb_programs; i++)
    {
      if(m_program == UINT_MAX && m_pFormatContext->programs[i]->nb_stream_indexes > 0)
        m_program = i;

      if(i != m_program)
        m_pFormatContext->programs[i]->discard = AVDISCARD_ALL;
    }
    if(m_program != UINT_MAX)
    {
      // add streams from selected program
      for (unsigned int i = 0; i < m_pFormatContext->programs[m_program]->nb_stream_indexes; i++)
        AddStream(m_pFormatContext->programs[m_program]->stream_index[i]);
    }
  }

  // if there were no programs or they were all empty, add all streams
  if (m_program == UINT_MAX)
  {
    for (unsigned int i = 0; i < m_pFormatContext->nb_streams; i++)
      AddStream(i);
  }

  if(m_video_count)
    SetActiveStreamInternal(OMXSTREAM_VIDEO, 0);

  if(m_audio_count)
    SetActiveStreamInternal(OMXSTREAM_AUDIO, 0);

  if(m_subtitle_count)
    SetActiveStreamInternal(OMXSTREAM_SUBTITLE, 0);

  int i = 0;
  for(i = 0; i < MAX_OMX_CHAPTERS; i++)
  {
    m_chapters[i].name      = "";
    m_chapters[i].seekto_ms = 0;
    m_chapters[i].ts        = 0;
  }

  m_chapter_count = 0;

  if(m_video_index != -1)
  {
    //m_current_chapter = 0;
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(52,14,0)
    m_chapter_count = (m_pFormatContext->nb_chapters > MAX_OMX_CHAPTERS) ? MAX_OMX_CHAPTERS : m_pFormatContext->nb_chapters;
    for(i = 0; i < m_chapter_count; i++)
    {
      if(i > MAX_OMX_CHAPTERS)
        break;

      AVChapter *chapter = m_pFormatContext->chapters[i];
      if(!chapter)
        continue;

      //m_chapters[i].seekto_ms = ConvertTimestamp(chapter->start, chapter->time_base.den, chapter->time_base.num) / 1000;
      m_chapters[i].seekto_ms = ConvertTimestamp(chapter->start, &chapter->time_base) / 1000;
      m_chapters[i].ts        = m_chapters[i].seekto_ms / 1000;

#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(52,83,0)
      AVDictionaryEntry *titleTag = m_dllAvUtil.av_dict_get(m_pFormatContext->chapters[i]->metadata,"title", NULL, 0);
      if (titleTag)
        m_chapters[i].name = titleTag->value;
#else
      if(m_pFormatContext->chapters[i]->title)
        m_chapters[i].name = m_pFormatContext->chapters[i]->title;
#endif
      printf("Chapter : \t%d \t%s \t%8.2f\n", i, m_chapters[i].name.c_str(), m_chapters[i].ts);
    }
#endif
  }

  return true;
}
Example #30
void *InitAviWrite(void *packetqueue, char *filename)
{
	if (!packetqueue)
	{
		return NULL;
	}
	pWPacketQueue = (SWPacketQueue *)packetqueue;
	SWAviWrite *paviwrite = (SWAviWrite *)malloc(sizeof(SWAviWrite));
	
	if (!paviwrite)
	{
		return NULL;
	}
	
	memset(paviwrite,0,sizeof(SWAviWrite));
	av_register_all();
	
	if (NULL == filename)
	{
		filename = "test.avi";
	}
	
	avformat_alloc_output_context2(&paviwrite->pFormatCtx, NULL, NULL, filename);
	
	if (!paviwrite->pFormatCtx)
	{
		return NULL;
	}
	
	AVCodec *acodec, *vcodec;
	paviwrite->pOutFormat = paviwrite->pFormatCtx->oformat;
	paviwrite->pVideoSt = (AVStream *)AddStream(paviwrite->pFormatCtx,
												&vcodec,
												AV_CODEC_ID_MPEG1VIDEO,
												AVMEDIA_TYPE_VIDEO);
													
	paviwrite->pAudioSt = (AVStream *)AddStream(paviwrite->pFormatCtx,
												&acodec,
												AV_CODEC_ID_WMAV2,
												AVMEDIA_TYPE_AUDIO);

	if (!paviwrite->pAudioSt || !paviwrite->pVideoSt)
	{
		avformat_close_input(&paviwrite->pFormatCtx);
		free(paviwrite);
		paviwrite = NULL;
		return NULL;
	}
	
	av_dump_format(paviwrite->pFormatCtx, 0, filename, 1);
	
	if (!(paviwrite->pOutFormat->flags & AVFMT_NOFILE))
	{
		if (avio_open(&paviwrite->pFormatCtx->pb, filename, AVIO_FLAG_WRITE) < 0)
		{
			
			int i;
			for (i = 0; i < paviwrite->pFormatCtx->nb_streams; i++) 
			{
				av_freep(&paviwrite->pFormatCtx->streams[i]->codec);
				av_freep(&paviwrite->pFormatCtx->streams[i]);
			}
			
			if (paviwrite->pOutFormat)
			{
				if (!(paviwrite->pOutFormat->flags & AVFMT_NOFILE))
					/* Close the output file. */
					avio_close(paviwrite->pFormatCtx->pb);
			}
			
			avformat_close_input(&paviwrite->pFormatCtx);
			free(paviwrite);
			paviwrite = NULL;
			return NULL;
		}
	}
	
	avformat_write_header(paviwrite->pFormatCtx, NULL);
	return paviwrite;	
}
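Example #30 (like Example #1) relies on an AddStream helper whose body is not part of this listing. Assuming it follows the stock FFmpeg muxing example and the pre-5.0 API used above, a minimal sketch matching the call in InitAviWrite could look like this (the signature is inferred from the call site and is an assumption, not the project's actual code):

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

/* Hypothetical AddStream: look up an encoder for codec_id, create a new
 * stream on the output context and hand the codec back to the caller. */
static AVStream *AddStream(AVFormatContext *oc, AVCodec **codec,
                           enum AVCodecID codec_id, enum AVMediaType type)
{
	*codec = avcodec_find_encoder(codec_id);
	if (!*codec || (*codec)->type != type)
	{
		return NULL;                     /* no suitable encoder of that kind */
	}

	AVStream *st = avformat_new_stream(oc, *codec);
	if (!st)
	{
		return NULL;
	}
	st->id = oc->nb_streams - 1;         /* stream id is just the index, as in muxing.c */
	return st;
}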