bool	PlaybackHelper::setupDeckLinkOutput()
{
	bool							result = false;
    IDeckLinkDisplayModeIterator*	displayModeIterator = NULL;
    IDeckLinkDisplayMode*			deckLinkDisplayMode = NULL;
	
	m_width = -1;
	
	// set callback
	m_deckLinkOutput->SetScheduledFrameCompletionCallback(this);
	
	// get frame scale and duration for the video mode
    if (m_deckLinkOutput->GetDisplayModeIterator(&displayModeIterator) != S_OK)
		goto bail;
	
    while (displayModeIterator->Next(&deckLinkDisplayMode) == S_OK)
    {
		if (deckLinkDisplayMode->GetDisplayMode() == bmdModeNTSC)
		{
			m_width = deckLinkDisplayMode->GetWidth();
			m_height = deckLinkDisplayMode->GetHeight();
			deckLinkDisplayMode->GetFrameRate(&m_frameDuration, &m_timeScale);
			deckLinkDisplayMode->Release();
			
			break;
		}
		
		deckLinkDisplayMode->Release();
    }
	
    displayModeIterator->Release();
	
	if (m_width == -1)
	{
		fprintf(stderr, "Unable to find requested video mode\n");
		goto bail;
	}
	
	// enable video output
	if (m_deckLinkOutput->EnableVideoOutput(bmdModeNTSC, bmdVideoOutputFlagDefault) != S_OK)
	{
		fprintf(stderr, "Could not enable video output\n");
		goto bail;
	}
	
	// create coloured frames
	if (! createFrames())
		goto bail;
	
	result = true;
	
bail:
	if (! result)
	{
		// release coloured frames
		releaseFrames();
	}
	
	return result;
}
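createFrames() and releaseFrames() are not part of this listing. As a rough sketch only (assumptions: m_videoFrame is a member of type IDeckLinkMutableVideoFrame* that the real class may not have, and the fill pattern is purely illustrative rather than the author's coloured frames), the frame factory could look like this:

bool	PlaybackHelper::createFrames()
{
	void*		buffer = NULL;
	uint32_t*	words = NULL;

	// allocate one mutable 8-bit YUV frame; rowBytes for bmdFormat8BitYUV is width * 2
	if (m_deckLinkOutput->CreateVideoFrame(m_width, m_height, m_width * 2,
			bmdFormat8BitYUV, bmdFrameFlagDefault, &m_videoFrame) != S_OK)
		return false;

	if (m_videoFrame->GetBytes(&buffer) != S_OK)
		return false;

	// fill the frame with a flat UYVY word (roughly black); a real implementation
	// would write the coloured pattern the comment above refers to
	words = (uint32_t*)buffer;
	for (long i = 0; i < (m_width * m_height) / 2; i++)
		words[i] = 0x10801080;

	return true;
}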
Example #2
void createWindow(int flag) {
    // build the frames either with the defaults or from user input,
    // then insert each frame into the window
    if (flag == 0)
        createFrames();
    else
        createFramesFromUser();
    for (int i = 0; i < totalFrame; i++) {
        W.insertFrame(*F[i], i);
    }
}
//virtual
void PixmapFilmstripObject::createFrames(const QSize& frameSize,quint32 numFrames,FrameDirection::Enum direction,const QPoint& startOffset)
{
	if ((numFrames == 0) || (direction == FrameDirection::INVALID) || frameSize.isEmpty() || (!frameSize.isValid()))
	{
		return;
	}
	QPoint adder;
	switch (direction)
	{
	case FrameDirection::North:
		adder = QPoint(0,-frameSize.height());
		break;
	case FrameDirection::South:
		adder = QPoint(0,frameSize.height());
		break;
	case FrameDirection::East:
		adder = QPoint(-frameSize.width(),0);
		break;
	case FrameDirection::West:
		adder = QPoint(frameSize.width(),0);
		break;
	default:
		break;
	}
	QPoint currentOrigin = startOffset;
	QList<QRect> frameCoordinates;
	for (quint32 i = 0;i<numFrames;++i,currentOrigin+=adder)
	{
		frameCoordinates << QRect(currentOrigin,frameSize);
	}
	createFrames(frameCoordinates);
}
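The overload above only computes geometry before delegating to the QList<QRect> overload. For instance, three 32x32 frames laid out FrameDirection::South from offset (0,0) produce the rects (0,0 32x32), (0,32 32x32) and (0,64 32x32). A standalone sketch of that computation (Qt only; nothing from the filmstrip class is reproduced):

#include <QList>
#include <QPoint>
#include <QRect>
#include <QSize>
#include <QtDebug>

int main()
{
	const QSize frameSize(32, 32);
	const QPoint adder(0, frameSize.height());	// equivalent of FrameDirection::South
	QPoint currentOrigin(0, 0);			// startOffset
	QList<QRect> frameCoordinates;

	for (quint32 i = 0; i < 3; ++i, currentOrigin += adder)
		frameCoordinates << QRect(currentOrigin, frameSize);

	// prints QRect(0,0 32x32), QRect(0,32 32x32), QRect(0,64 32x32)
	for (const QRect& r : frameCoordinates)
		qDebug() << r;

	return 0;
}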
Example #4
void PacketReceiver::init()
{
	initAVLibs();
	mainWindow->log("Oeffne Stream...", INFO);
	openStream();
	mainWindow->log("Stream goeffnet", INFO);
	createPictureConverter();
	createFrames();
}
PixmapFilmstripObject::PixmapFilmstripObject(const QList<QRect> frameCoordinates,const QString & fileName, const char * format, Qt::ImageConversionFlags flags)
: PixmapObject(fileName,format,flags)
, m_currentFrameIndex(0)
, m_currentFrameRect(QRect())
{
	createFrames(frameCoordinates);
	if (!m_frames.empty())
	{
		m_currentFrameRect = m_frames[0];
	}
}
PixmapFilmstripObject::PixmapFilmstripObject(const QSize& frameSize,quint32 numFrames,FrameDirection::Enum direction,
						const QString& fileName,
						const char * format, Qt::ImageConversionFlags flags,
						const QPoint& startOffset)
: PixmapObject(fileName,format,flags)
, m_currentFrameIndex(0)
, m_currentFrameRect(QRect())
{
	createFrames(frameSize,numFrames,direction,startOffset);
	if (!m_frames.empty())
	{
		m_currentFrameRect = m_frames[0];
	}
}
Example #7
void Parser::Config()
{
    try
    {
        nbBuffers = av_sample_fmt_is_planar((AVSampleFormat)sampleFormatIn) ? this->nbChannelsIn : 1;

        // create the output context
        CreateContext();

        // set the encoder data
        setStream();
        av_dump_format(fmt_ctx_out, 0, fileName.c_str(), 1);

        createFrames();

        // start the resampler
        InitResampler();
    }
    catch(...)
    {
        throw;
    }
}
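The nbBuffers line above reflects how libavutil distinguishes planar from packed audio: a planar sample format keeps one buffer per channel, a packed format interleaves all channels into a single buffer. A minimal standalone check of that behaviour (the sample-format constants here are examples, not necessarily the formats Parser actually uses):

extern "C" {
#include <libavutil/samplefmt.h>
}
#include <cstdio>

int main()
{
    const int nbChannelsIn = 2;

    // AV_SAMPLE_FMT_S16 is packed (interleaved); AV_SAMPLE_FMT_FLTP is planar
    int packedBuffers = av_sample_fmt_is_planar(AV_SAMPLE_FMT_S16) ? nbChannelsIn : 1;  // -> 1
    int planarBuffers = av_sample_fmt_is_planar(AV_SAMPLE_FMT_FLTP) ? nbChannelsIn : 1; // -> 2

    std::printf("packed: %d buffer(s), planar: %d buffer(s)\n", packedBuffers, planarBuffers);
    return 0;
}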
Example #8
	int main(void)
	{
		int i = 0;
		double (*magPhase)[2];
		double * x;
		
		double (*y)[1024];
		double **outputy;
		double * timeStretched;
		double * previousPhaseFrame;
		double * synthesizedOutput;


		// hard-coded test values: hop, windowSize and the input length
		hop = 256;
		windowSize =1024;
		xlength =  10643328 + hop*3; // account for the hop*3 leading zeros that are prepended below
		step = 7;
		alpha = pow(2,(step/(float)12));
		hopOut = round(alpha*hop);
		numberSlices = (((xlength-windowSize)/hop));
		
		phaseCumulative = malloc(windowSize * sizeof(double));
		x = malloc(xlength * sizeof(double));

		outputy = malloc(numberSlices * sizeof(double *));
		outputy[0] = malloc(numberSlices * windowSize * sizeof(double));

		previousPhaseFrame = malloc(windowSize * sizeof(double));
		magPhase = malloc(2*windowSize*sizeof(double));
		init();
	
		// read the input samples from the file (fopen/fscanf are declared in <stdio.h>)
		FILE *fp;

		fp = fopen("input.txt","r"); //hard coded the test input source
		int index =0;
		for(index = 0;index<hop*3;index++)
		{
			x[index] = 0;
		}
		while (index < xlength && fscanf(fp,"%lg",&x[index]) == 1)
		{
			if(debug == 1)
			{
				printf("%lg\n", x[index]);
			}
			index++;
		}
		fclose(fp);
		
		for(i = 1; i < numberSlices; i++)
			outputy[i] = outputy[0] + i * windowSize;
		
		if(xlength %hop != 0)
			xlength = numberSlices*hop + windowSize;
		y = malloc((xlength/hop)*windowSize*sizeof(double)); // note: the number of rows differs from numberSlices
		
		createFrames(x,y);
		
		for(index = 0;index<windowSize;index++)
		{
			previousPhaseFrame[index] = 0;
		}
		for(index = 0;index<numberSlices;index++)
		{
			analyse(y[index],magPhase);
			process(magPhase,previousPhaseFrame);
			outputy[index] = synthesis(magPhase);	// note: this overwrites the row pointer preallocated above
		}
		
		timeStretched = fusionFrames(outputy);//should check this timeStretched value before linear interpolation, to see what comes out

		// do the linear interpolation
		synthesizedOutput = interpolate(timeStretched);

		for(i = 0;i<interlength;i++)
		{
			printf("%f\n", synthesizedOutput[i]);
		}
		return 0;
	}
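createFrames(x, y), analyse(), process(), synthesis(), fusionFrames() and interpolate() are defined elsewhere in this program. As an assumption about what the slicing step does (not the author's code), a minimal createFrames would copy numberSlices overlapping windows of windowSize samples out of x, advancing hop samples per slice:

	// sketch only: reuses the globals hop, windowSize (== 1024) and numberSlices
	// from main(); any window weighting is omitted
	void createFramesSketch(const double* x, double (*y)[1024])
	{
		for (int slice = 0; slice < numberSlices; slice++)
			for (int n = 0; n < windowSize; n++)
				y[slice][n] = x[slice * hop + n];
	}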