Example #1
void
GTelnet::telnet_do(void)
{
  unsigned char
    buf[4];

//printf("GTelnet::telnet_do(): ");    
  switch(*telnet_input_data)
  {
    case TERMINAL_TYPE :
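      // Peer sent IAC DO TERMINAL-TYPE (RFC 1091): agree with IAC WILL TERMINAL-TYPE,
      // then report the terminal name via IAC SB TERMINAL-TYPE IS <telnet_termid> IAC SE.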
//printf("sending terminal type...\n");    
      buf[0] = IAC;
      buf[1] = WILL;
      buf[2] = TERMINAL_TYPE;
      ProcessOutput(3, buf);
      buf[0] = IAC;
      buf[1] = SB;
      buf[2] = TERMINAL_TYPE;
      buf[3] = 0;
      ProcessOutput(4, buf);
      ProcessOutput(strlen(telnet_termid), (unsigned char *)telnet_termid);
      buf[0] = IAC;
      buf[1] = SE;
      ProcessOutput(2, buf);
    break;
   
    case TRANSMIT_BINARY :
//printf("enabling binary recv mode...\n");    
      telnet_binary_recv = 1;
    break;
  }
  telnet_process_data = 0;
}
Example #2
void
WMFMediaDataDecoder::ProcessDecode(MediaRawData* aSample)
{
  {
    MonitorAutoLock mon(mMonitor);
    if (mIsFlushing) {
      // Skip sample, to be released by runnable.
      return;
    }
  }

  HRESULT hr = mMFTManager->Input(aSample);
  if (FAILED(hr)) {
    NS_WARNING("MFTManager rejected sample");
    mCallback->Error();
    if (!mRecordedError) {
      SendTelemetry(hr);
      mRecordedError = true;
    }
    return;
  }

  mLastStreamOffset = aSample->mOffset;

  ProcessOutput();
}
Example #3
void GTerm::request_param()
{
    char str[40];

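    // Answer DECREQTPARM with a DECREPTPARM terminal-parameters report:
    // ESC [ <sol> ; <parity> ; <nbits> ; <xspeed> ; <rspeed> ; <clkmul> ; <flags> x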
    sprintf(str, "\033[%d;1;1;120;120;1;0x", param[0] + 2);
    ProcessOutput(strlen(str), (unsigned char * )str);
}
Example #4
File: pdf.c Project: db00/reader
void Text_printf(char * data,int length,char * out,int outsize)
{
	z_stream zstrm;
	memset(&zstrm, 0,sizeof(zstrm));

	zstrm.avail_in = length + 1;
	zstrm.avail_out = outsize;
	zstrm.next_in = (Bytef*)data;
	zstrm.next_out = (Bytef*)out;

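	// One-shot zlib decompression: with Z_FINISH, a single inflate() call is
	// expected to consume the whole input and write the full output into 'out'.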
	int rsti = inflateInit(&zstrm);
	if (rsti == Z_OK)
	{
		int rst2 = inflate (&zstrm, Z_FINISH);
		if (rst2 >= 0)
		{
			size_t totout = zstrm.total_out;
			ProcessOutput(stdout, out, totout);
		}else{
			printf("error\n");
		}
	}else{
		printf("error\n");
	}
}
Example #5
void clTernServer::OnTernWorkerThreadDone(const clTernWorkerThread::Reply& reply)
{
    m_workerThread->Stop();
    wxDELETE(m_workerThread);
    RecycleIfNeeded();

    m_entries.clear();
    CL_DEBUGS(reply.json);

    switch(reply.requestType) {
    case clTernWorkerThread::kFunctionTip:
        m_jsCCManager->OnFunctionTipReady(ProcessCalltip(reply.json), reply.filename);
        break;
    case clTernWorkerThread::kCodeCompletion:
        ProcessOutput(reply.json, m_entries);
        m_jsCCManager->OnCodeCompleteReady(m_entries, reply.filename);
        break;
    case clTernWorkerThread::kFindDefinition: {
        clTernDefinition loc;
        if(ProcessDefinitionOutput(reply.json, loc)) {
            m_jsCCManager->OnDefinitionFound(loc);
        }
    }
    break;
    case clTernWorkerThread::kReparse: {
        // TODO ??
    } break;
    case clTernWorkerThread::kReset:
        // TODO ??
        break;
    }
}
Example #6
STDMETHODIMP CDecWMV9::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity)
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  hr = m_pDMO->GetInputStatus(0, &dwStatus);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> GetInputStatus() failed with hr: 0x%x", hr));
    return S_FALSE;
  }
  if (!(dwStatus & DMO_INPUT_STATUSF_ACCEPT_DATA))
    return S_FALSE;
  
  DWORD dwFlags = 0;
  if (bSyncPoint)
    dwFlags |= DMO_INPUT_DATA_BUFFERF_SYNCPOINT;
  if (rtStart != AV_NOPTS_VALUE)
    dwFlags |= DMO_INPUT_DATA_BUFFERF_TIME;
  if (rtStop != AV_NOPTS_VALUE)
    dwFlags |= DMO_INPUT_DATA_BUFFERF_TIMELENGTH;

  if (m_vc1Header && (m_bManualReorder || m_bNeedKeyFrame)) {
    AVPictureType pictype = parse_picture_type(buffer, buflen, m_vc1Header);
    if (m_bManualReorder) {
      if (pictype == AV_PICTURE_TYPE_I || pictype == AV_PICTURE_TYPE_P) {
        if (m_bReorderBufferValid)
          m_timestampQueue.push(m_rtReorderBuffer);
        m_rtReorderBuffer = rtStart;
        m_bReorderBufferValid = TRUE;
      } else {
        m_timestampQueue.push(rtStart);
      }
    }

    if (m_bNeedKeyFrame) {
      if (pictype != AV_PICTURE_TYPE_I) {
        if (m_bManualReorder)
          m_timestampQueue.pop();
        return S_OK;
      } else {
        m_bNeedKeyFrame = FALSE;
        dwFlags |= DMO_INPUT_DATA_BUFFERF_SYNCPOINT;
      }
    }
  }

  CMediaBuffer *pInBuffer = new CMediaBufferDecode(buffer, buflen);
  hr = m_pDMO->ProcessInput(0, pInBuffer, dwFlags, rtStart, rtStop - rtStart);
  SafeRelease(&pInBuffer);

  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessInput failed with hr: 0x%x", hr));
    return E_FAIL;
  }
  if (S_FALSE == hr)
    return S_FALSE;

  return ProcessOutput();
}
Example #7
void GTerm::status_report()
{
    char str[20];

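    // DSR (Device Status Report): param 5 requests operating status (reply ESC[0n = OK),
    // param 6 requests the cursor position (reply ESC[<row>;<col>R, 1-based).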
    if (param[0] == 5)
    {
        char * str = "\033[0n";

        ProcessOutput(strlen(str), (unsigned char * )str);
    }
    else if (param[0] == 6)
    {
        sprintf(str, "\033[%d;%dR", cursor_y + 1, cursor_x + 1);

        ProcessOutput(strlen(str), (unsigned char * )str);
    }
}
Example #8
void
GonkMediaDataDecoder::ProcessDrain()
{
  // Notify decoder input EOS by sending a null data.
  ProcessDecode(nullptr);
  mSignaledEOS = true;
  ProcessOutput();
}
Example #9
STDMETHODIMP CDecodeThread::Decode(IMediaSample *pSample)
{
  CAutoLock decoderLock(this);

  if (!CAMThread::ThreadExists() || !m_pDecoder)
    return E_UNEXPECTED;

  // Wait until the queue is empty
  while(HasSample())
    Sleep(1);

  // Re-init the decoder, if requested
  // Doing this inside the worker thread alone causes problems
  // when switching from non-sync to sync, so ensure we're in sync.
  if (m_bDecoderNeedsReInit) {
    CAMThread::CallWorker(CMD_REINIT);
    while (!m_evEOSDone.Check()) {
      m_evSample.Wait();
      ProcessOutput();
    }
  }

  m_evDeliver.Reset();
  m_evSample.Reset();
  m_evDecodeDone.Reset();

  pSample->AddRef();

  // Send data to worker thread, and wake it (if it was waiting)
  PutSample(pSample);

  // If we don't have thread safe buffers, we need to synchronize
  // with the worker thread and deliver them when they are available
  // and then let it know that we did so
  if (m_bSyncToProcess) {
    while (!m_evDecodeDone.Check()) {
      m_evSample.Wait();
      ProcessOutput();
    }
  }

  ProcessOutput();

  return S_OK;
}
Example #10
STDMETHODIMP CDecWMV9::EndOfStream()
{
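  // Flush any timestamp still held for frame reordering, signal the DMO that the
  // stream is ending, then drain the remaining decoded frames.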
  if (m_bReorderBufferValid)
    m_timestampQueue.push(m_rtReorderBuffer);
  m_bReorderBufferValid = FALSE;
  m_rtReorderBuffer = AV_NOPTS_VALUE;
  m_pDMO->Discontinuity(0);
  ProcessOutput();
  return S_OK;
}
Example #11
void
WMFMediaDataDecoder::ProcessDrain()
{
  // Order the decoder to drain...
  if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) {
    NS_WARNING("Failed to send DRAIN command to MFT");
  }
  // Then extract all available output.
  ProcessOutput();
}
Example #12
STDMETHODIMP CDecWMV9MFT::EndOfStream()
{
  if (m_bReorderBufferValid)
    m_timestampQueue.push(m_rtReorderBuffer);
  m_bReorderBufferValid = FALSE;
  m_rtReorderBuffer = AV_NOPTS_VALUE;

  m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
  ProcessOutput();
  return S_OK;
}
Example #13
void
WMFMediaDataDecoder::ProcessDrain()
{
  if (!mIsFlushing && mMFTManager) {
    // Order the decoder to drain...
    mMFTManager->Drain();
    // Then extract all available output.
    ProcessOutput();
  }
  mCallback->DrainComplete();
}
Example #14
File: NetManager.cpp Project: ff78/son
VOID CNetManager::WaitPacket(VOID)
{
	if(!m_Socket.isValid())
	{
        if (connect_state_ == 1)
        {
            if (show_ == true)
            {
                show_ = false;
                const char* szString = DICTIONARY_CONFIG_MGR::instance()->get_string_by_id(SERVER_DISCONNECT);
                UI_ModalDialogue_Layer::DoModal("",szString,UI_ModalDialogue_Layer::DT_OK,[]()
                {
                    Account_Logic::releaseData();
#if (CC_TARGET_PLATFORM == CC_PLATFORM_WP8) || (CC_TARGET_PLATFORM == CC_PLATFORM_WINRT)
                    MessageBox("You pressed the close button. Windows Store Apps do not implement a close button.","Alert");
                    return;
#endif
                    
                    Director::getInstance()->end();
                    
#if (CC_TARGET_PLATFORM == CC_PLATFORM_IOS)
                    exit(0);
#endif
                }
                );

                //UI_ModalDialogue_Layer::DoModal("",szString,UI_ModalDialogue_Layer::DT_OK,Account_Logic::releaseData);
            }            
        }
        
		return;
	}
	
	// Network data stream processing
	if(!Select() || !ProcessExcept() || !ProcessInput() || !ProcessOutput())
	{
		//CEventSystem::GetMe()->PushEvent(GE_NET_CLOSE);
	}
	
	/*
	int s=Select() ;
	CCLog("--------------------s is %d",s);
	int p1=ProcessExcept();
	CCLog("--------------------p1 is %d",p1);
	int p2=ProcessInput();
	CCLog("--------------------p2 is %d",p2);
	int p3=ProcessOutput();
	CCLog("--------------------p3 is %d",p3);
	*/
	// Packet handling
	ProcessCommands( ) ;

}
Example #15
STDMETHODIMP CDecodeThread::EndOfStream()
{
  CAutoLock decoderLock(this);

  if (!CAMThread::ThreadExists() || !m_pDecoder)
    return E_UNEXPECTED;

  m_evDeliver.Reset();
  m_evSample.Reset();

  CAMThread::CallWorker(CMD_EOS);

  while (!m_evEOSDone.Check()) {
    m_evSample.Wait();
    ProcessOutput();
  }

  ProcessOutput();

  return S_OK;
}
Example #16
void GTerm::pc_begin( void )
{
    //printf("pc_begin...\n");
    set_mode_flag(PC);

    //printf("pc_begin: mode_flags = %x\n", mode_flags);
    ProcessOutput((unsigned int)strlen(pc_machinename) + 1, (unsigned char * )pc_machinename);
    pc_oldWidth = Width();
    pc_oldHeight = Height();
    ResizeTerminal(80, 25);
    update_changes();
}
Example #17
void
WMFMediaDataDecoder::ProcessDecode(mp4_demuxer::MP4Sample* aSample)
{
  HRESULT hr = mMFTManager->Input(aSample);
  if (FAILED(hr)) {
    NS_WARNING("MFTManager rejected sample");
    mCallback->Error();
    return;
  }

  mLastStreamOffset = aSample->byte_offset;

  ProcessOutput();
}
Example #18
void
GonkMediaDataDecoder::ProcessDecode(mp4_demuxer::MP4Sample* aSample)
{
  nsresult rv = mManager->Input(aSample);
  if (rv != NS_OK) {
    NS_WARNING("GonkAudioDecoder failed to input data");
    GMDD_LOG("Failed to input data err: %d",rv);
    mCallback->Error();
    return;
  }
  if (aSample) {
    mLastStreamOffset = aSample->byte_offset;
  }
  ProcessOutput();
}
Example #19
/* remap the supplied data into out, which must be pre-allocated */
void CPCMRemap::Remap(void *data, void *out, unsigned int samples, float gain /*= 1.0f*/)
{
  CheckBufferSize(samples * m_outChannels * sizeof(float));

  //set output buffer to 0
  memset(out, 0, samples * m_outChannels * m_inSampleSize);

  //set intermediate buffer to 0
  memset(m_buf, 0, m_bufsize);

  ProcessInput(data, out, samples, gain);
  AddGain(m_buf, samples * m_outChannels, gain);
  ProcessLimiter(samples, gain);
  ProcessOutput(out, samples, gain);
}
Example #20
void
WMFMediaDataDecoder::ProcessDrain()
{
  bool isFlushing;
  {
    MonitorAutoLock mon(mMonitor);
    isFlushing = mIsFlushing;
  }
  if (!isFlushing && mMFTManager) {
    // Order the decoder to drain...
    mMFTManager->Drain();
    // Then extract all available output.
    ProcessOutput();
  }
  mCallback->DrainComplete();
}
Example #21
void
WMFMediaDataDecoder::ProcessDecode(mp4_demuxer::MP4Sample* aSample)
{
  const uint8_t* data = &aSample->data->front();
  uint32_t length = aSample->data->size();
  HRESULT hr = mDecoder->Input(data, length, aSample->composition_timestamp);
  if (FAILED(hr)) {
    NS_WARNING("WMFAudioDecoder failed to input data");
    mCallback->Error();
    return;
  }

  mLastStreamOffset = aSample->byte_offset;

  ProcessOutput();
}
Example #22
VOID CNetManager::WaitPacket(VOID)
{
	if(!m_Socket.isValid())
	{
		return;
	}

	// Network data stream processing
	if(!Select() || !ProcessExcept() || !ProcessInput() || !ProcessOutput())
	{
		CEventSystem::GetMe()->PushEvent(GE_NET_CLOSE);
	}

	// Packet handling
	ProcessCommands( ) ;
}
Example #23
// FRunnable interface
uint32 FInteractiveProcess::Run()
{
	// control and interact with the process
	StartTime = FDateTime::UtcNow();
	{
		do
		{
			FPlatformProcess::Sleep(SleepTime);

			// Read pipe and redirect it to ProcessOutput function
			ProcessOutput(FPlatformProcess::ReadPipe(ReadPipeParent));

			// Write to process if there is a message
			SendMessageToProcessIf();

			// If wanted to stop program
			if (bCanceling == true)
			{
				FPlatformProcess::TerminateProc(ProcessHandle, bKillTree);
				CanceledDelegate.ExecuteIfBound();

				UE_LOG(LogInteractiveProcess, Log, TEXT("The process is being canceled"));

				return 0;
			}
		} while (FPlatformProcess::IsProcRunning(ProcessHandle) == true);
	}

	// close pipes
	FPlatformProcess::ClosePipe(ReadPipeParent, WritePipeChild);
	ReadPipeParent = WritePipeChild = nullptr;
	FPlatformProcess::ClosePipe(ReadPipeChild, WritePipeParent);
	ReadPipeChild = WritePipeParent = nullptr;

	// get completion status
	if (FPlatformProcess::GetProcReturnCode(ProcessHandle, &ReturnCode) == false)
	{
		ReturnCode = -1;
	}

	EndTime = FDateTime::UtcNow();

	CompletedDelegate.ExecuteIfBound(ReturnCode, bCanceling);

	return 0;
}
Example #24
void
WMFMediaDataDecoder::ProcessDrain()
{
  bool isFlushing;
  {
    MonitorAutoLock mon(mMonitor);
    isFlushing = mIsFlushing;
  }
  if (!isFlushing && mDecoder) {
    // Order the decoder to drain...
    if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) {
      NS_WARNING("Failed to send DRAIN command to MFT");
    }
    // Then extract all available output.
    ProcessOutput();
  }
  mCallback->DrainComplete();
}
Example #25
EncoderOutput EncodeTransform::EncodeData(char *pRGBAData, size_t numBytes)
{	
	VTUNE_TASK(g_pDomain, "EncodeData");

	const size_t numRGBAPixels = numBytes >> 2; // 4 bytes per pixel (TODO:Cleanup)
	
	EncoderOutput etn;

	etn.pEncodedData = NULL;
	etn.numBytes = 0;
	etn.returnCode = S_FALSE;

	size_t yuy2PixelIndex = 0;
	// The first step is to convert to YUY2 by taking pixel pairs (n1 and n2, n3 and n4, ...) and averaging each pair's chroma.

	DWORD *pRGBADataAsDWORD = reinterpret_cast<DWORD*> (pRGBAData);

	for (size_t rgbaIndex = 0; rgbaIndex < numRGBAPixels; rgbaIndex = rgbaIndex + 2)
	{
		//Here we convert RGB to YUY2 and add it to the encode buffer (omitting the intermediate step; see the function for more detail)
		mCompressedBuffer[yuy2PixelIndex++] = ColorConversion::RGBtoYUY2(pRGBADataAsDWORD[rgbaIndex], 
																		pRGBADataAsDWORD[rgbaIndex + 1], 
																		mbEncodeBackgroundPixels,
																		mEncodingThreshold);
	}

	SendStreamEndMessage();

	// Add a sample with the frame timestamp
	HRESULT hr = AddSample(mTimeStamp);

	if (SUCCEEDED(hr))
	{
		etn.returnCode = ProcessOutput(etn);
	}

	mTimeStamp += VIDEO_FRAME_DURATION;

	return etn;
}
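For reference, a minimal sketch of the per-pair RGB-to-YUY2 packing that the loop above relies on. This is an assumption about what ColorConversion::RGBtoYUY2 does (standard BT.601 integer coefficients, chroma averaged over the pixel pair), not the project's actual implementation:

#include <algorithm>
#include <cstdint>

// Hypothetical stand-in for ColorConversion::RGBtoYUY2: converts two RGBA pixels
// (R assumed in the low byte) into one packed YUY2 macropixel laid out as
// Y0 | U | Y1 | V in increasing byte order, sharing the averaged chroma.
static uint32_t PackYUY2(uint32_t rgba0, uint32_t rgba1)
{
    auto toYUV = [](uint32_t p, int& y, int& u, int& v) {
        int r = p & 0xFF, g = (p >> 8) & 0xFF, b = (p >> 16) & 0xFF;
        // BT.601 integer approximation (studio swing)
        y = (( 66 * r + 129 * g +  25 * b + 128) >> 8) + 16;
        u = ((-38 * r -  74 * g + 112 * b + 128) >> 8) + 128;
        v = ((112 * r -  94 * g -  18 * b + 128) >> 8) + 128;
    };
    int y0, u0, v0, y1, u1, v1;
    toYUV(rgba0, y0, u0, v0);
    toYUV(rgba1, y1, u1, v1);
    int u = (u0 + u1) / 2;   // one chroma sample per two pixels
    int v = (v0 + v1) / 2;
    auto c = [](int x) { return (uint32_t)std::min(std::max(x, 0), 255); };
    return c(y0) | (c(u) << 8) | (c(y1) << 16) | (c(v) << 24);
}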
Example #26
uint32 FMonitoredProcess::Run()
{
	// monitor the process
	StartTime = FDateTime::UtcNow();
	{
		do
		{
			FPlatformProcess::Sleep(0.0);

			ProcessOutput(FPlatformProcess::ReadPipe(ReadPipe));

			if (Canceling)
			{
				FPlatformProcess::TerminateProc(ProcessHandle, KillTree);
				CanceledDelegate.ExecuteIfBound();
				Thread = nullptr;

				return 0;
			}
		}
		while (FPlatformProcess::IsProcRunning(ProcessHandle));
	}
	EndTime = FDateTime::UtcNow();

	// close output pipes
	FPlatformProcess::ClosePipe(ReadPipe, WritePipe);
	ReadPipe = WritePipe = nullptr;

	// get completion status
	if (!FPlatformProcess::GetProcReturnCode(ProcessHandle, &ReturnCode))
	{
		ReturnCode = -1;
	}

	CompletedDelegate.ExecuteIfBound(ReturnCode);
	Thread = nullptr;

	return 0;
}
Example #27
File: pcx11e.c Project: cjg/grads
static void
ProcessEvent()
{
    char intext[10];
    KeySym keysym;
    XComposeStatus cs;
    int i, n, count;
    XEvent ev;
    HIST_ENTRY *hist;  /* History entry */
    char is_hist;

    XNextEvent (condpy, &ev);

    is_hist = 0;
    switch (ev.type) {

    case Expose:
        if (ev.xexpose.count == 0)
            Redraw ();
        break;
    case ConfigureNotify:
        width = ev.xconfigure.width;
        height = ev.xconfigure.height;
        Resize (width / font_width, height / font_height);
        break;

    case KeyPress:
//    count = XLookupString (&ev.xkey, intext, 10, &keysym, &cs);
//    intext[count] = 0;
        count = 1;
        intext[0] = ev.xkey.keycode;
        intext[1] = 0;
        if (count == 1) {
            switch (intext[0]) {
            case '\r':
                intext[0] = '\n';
                break;
#if !defined(STNDALN)
            case '':
                hist = previous_history();
                is_hist = 1;
                break;
            case '':
                hist = next_history();
                is_hist = 1;
                break;
#endif
            }
        }

#if !defined(STNDALN)
        /* Process history */
        if ( is_hist ) {
            if(hist) {
                char *dst = text + ncols * row + 4;
                memset(dst,' ',col-3);                /* clean to EOL */
                col = 4;
                PutString(hist->line);
                PutChar (0);
                RedrawRow();
            }
            break;
        }
#endif

#ifdef OBSOLETE               /* as XLookupString is broken */
        else switch (keysym) {
            case XK_Return:
            case XK_Linefeed:
                intext[0] = '\n';
                intext[1] = 0;
                break;
            case XK_Tab:
                intext[0] = '\t';
                intext[1] = 0;
                break;
            case XK_BackSpace:
                intext[0] = '\b';
                intext[1] = 0;
                break;
            case XK_Delete:
                break;
            case XK_Left:
                break;
            case XK_Right:
                break;
            case XK_Down:
                break;
            case XK_Up:
                break;
            }
#endif /* OBSOLETE */

        if (intext[0] ) {
            if (intext[0] == '\n')
            {
                char *dst = text + ncols * row + plen;
                strncpy(grads_cmd,dst,1024);
                got_grads_cmd = 1;    /* signal nxtcmd() that we got a command */
            }
            PutChar (intext[0]);
            RedrawRow();
            strcat (my_stdin_buf, intext);
            if ( intext[0] == '\n' ) {
                ProcessOutput (my_stdin_buf);
                my_stdin_buf[0] = 0;
            }
        }
        break;

    }
}
Example #28
File: NVENCEncoder.cpp Project: FBirth/OBS
bool NVENCEncoder::Encode(LPVOID picIn, List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD timestamp, DWORD &out_pts)
{
    NVENCSTATUS nvStatus;
    int i = -1;

    NV_ENC_PIC_PARAMS picParams = { 0 };
    picParams.version = NV_ENC_PIC_PARAMS_VER;

    OSMutexLocker locker(frameMutex);

    packets.Clear();
    packetTypes.Clear();

    if (picIn)
    {
        mfxFrameSurface1 *inputSurface = (mfxFrameSurface1*)picIn;
        mfxFrameData &data = inputSurface->Data;
        assert(data.MemId);

        NVENCEncoderSurface *surf = &inputSurfaces[(unsigned int)data.MemId - 1];

        if (surf->locked)
        {
            nvStatus = pNvEnc->nvEncUnlockInputBuffer(encoder, surf->inputSurface);
            if (nvStatus != NV_ENC_SUCCESS)
            {
                NvLog(TEXT("Unlocking surface failed with 0x%x"), nvStatus);
                return false;
            }
        }

        for (i = 0; i < maxSurfaceCount; ++i)
        {
            if (!outputSurfaces[i].busy)
                break;
        }

        if (i == maxSurfaceCount)
        {
            NvLog(TEXT("Out of output buffers!"));
            surf->locked = false;
            return false;
        }

        surf->locked = false;
        surf->useCount += 1;

        outputSurfaces[i].timestamp = timestamp;
        outputSurfaces[i].inputTimestamp = data.TimeStamp;
        outputSurfaces[i].inSurf = surf;

        picParams.inputBuffer = surf->inputSurface;
        picParams.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12_PL;
        picParams.inputWidth = width;
        picParams.inputHeight = height;
        picParams.outputBitstream = outputSurfaces[i].outputSurface;
        picParams.completionEvent = 0;
        picParams.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
        picParams.encodePicFlags = 0;
        picParams.inputTimeStamp = data.TimeStamp;
        picParams.inputDuration = 0;
        picParams.codecPicParams.h264PicParams.sliceMode = encodeConfig.encodeCodecConfig.h264Config.sliceMode;
        picParams.codecPicParams.h264PicParams.sliceModeData = encodeConfig.encodeCodecConfig.h264Config.sliceModeData;
        memcpy(&picParams.rcParams, &encodeConfig.rcParams, sizeof(NV_ENC_RC_PARAMS));
    }
    else
    {
        picParams.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
    }

    nvStatus = pNvEnc->nvEncEncodePicture(encoder, &picParams);

    if (picIn && nvStatus == NV_ENC_ERR_NEED_MORE_INPUT)
    {
        outputSurfaceQueue.push(&outputSurfaces[i]);
        outputSurfaces[i].busy = true;
    }

    if (nvStatus != NV_ENC_SUCCESS && nvStatus != NV_ENC_ERR_NEED_MORE_INPUT)
    {
        NvLog(TEXT("nvEncEncodePicture failed with error 0x%x"), nvStatus);
        return false;
    }

    if (nvStatus != NV_ENC_ERR_NEED_MORE_INPUT)
    {
        while (!outputSurfaceQueue.empty())
        {
            NVENCEncoderOutputSurface *qSurf = outputSurfaceQueue.front();
            outputSurfaceQueue.pop();
            outputSurfaceQueueReady.push(qSurf);
        }

        if (picIn)
        {
            outputSurfaceQueueReady.push(&outputSurfaces[i]);
            outputSurfaces[i].busy = true;
        }
    }

    if (!outputSurfaceQueueReady.empty())
    {
        NVENCEncoderOutputSurface *qSurf = outputSurfaceQueueReady.front();
        outputSurfaceQueueReady.pop();

        ProcessOutput(qSurf, packets, packetTypes, out_pts);

        qSurf->busy = false;

        assert(qSurf->inSurf->useCount);
        qSurf->inSurf->useCount -= 1;
    }

    return true;
}
Example #29
STDMETHODIMP CDecWMV9::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  BYTE *pBuffer = GetBuffer(m_pRawBufferSize);
  CMediaBuffer *pOutBuffer = new CMediaBuffer(pBuffer, m_pRawBufferSize, true);
  pOutBuffer->SetLength(0);

  DMO_OUTPUT_DATA_BUFFER OutputBufferStructs[1];
  memset(&OutputBufferStructs[0], 0, sizeof(DMO_OUTPUT_DATA_BUFFER));
  OutputBufferStructs[0].pBuffer  = pOutBuffer;

  hr = m_pDMO->ProcessOutput(0, 1, OutputBufferStructs, &dwStatus);
  if (FAILED(hr)) {
    ReleaseBuffer(pBuffer);
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    return S_FALSE;
  }
  if (hr == S_FALSE) {
    ReleaseBuffer(pBuffer);
    return S_FALSE;
  }

  LAVFrame *pFrame = NULL;
  AllocateFrame(&pFrame);

  BITMAPINFOHEADER *pBMI = NULL;
  videoFormatTypeHandler(mtOut, &pBMI);
  pFrame->width     = pBMI->biWidth;
  pFrame->height    = pBMI->biHeight;
  pFrame->format    = m_OutPixFmt;
  pFrame->key_frame = (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_SYNCPOINT);

  AVRational display_aspect_ratio;
  int64_t num = (int64_t)m_StreamAR.num * pBMI->biWidth;
  int64_t den = (int64_t)m_StreamAR.den * pBMI->biHeight;
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, num, den, 1 << 30);

  BYTE contentType = 0;
  DWORD dwPropSize = 1;
  pOutBuffer->GetProperty(WM_SampleExtensionGUID_ContentType, &contentType, &dwPropSize);
  pFrame->interlaced = !!(contentType & WM_CT_INTERLACED);
  pFrame->repeat     = !!(contentType & WM_CT_REPEAT_FIRST_FIELD);

  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff           = (fo == DeintFieldOrder_Auto) ? !!(contentType & WM_CT_TOP_FIELD_FIRST) : (fo == DeintFieldOrder_TopFieldFirst);

  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  } else {
    if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIME) {
      pFrame->rtStart = OutputBufferStructs[0].rtTimestamp;
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  }

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    AllocLAVFrameBuffers(pFrame);
    size_t ySize = pFrame->width * pFrame->height;
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer+ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      memcpy_plane(pFrame->data[2], pBuffer+ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer+ySize+uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    ReleaseBuffer(pBuffer);
  } else {
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&pOutBuffer);

  if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
Example #30
File: pdf.cpp Project: june-y/pdf_parse
int _tmain(int argc, _TCHAR* argv[])
{
	//Discard existing output:
	FILE* fileo = fopen("c:\\pdf\\output2.txt", "w");
	if (fileo) fclose(fileo);
	fileo = fopen("c:\\pdf\\output2.txt", "a");

	//Open the PDF source file:
	FILE* filei = fopen("c:\\pdf\\somepdf.pdf", "rb");

	if (filei && fileo)
	{
		//Get the file length:
		int fseekres = fseek(filei,0, SEEK_END);   //fseek==0 if ok
		long filelen = ftell(filei);
		fseekres = fseek(filei,0, SEEK_SET);

		//Read the entire file into memory (!):
		char* buffer = new char [filelen]; ZeroMemory(buffer, filelen);
		size_t actualread = fread(buffer, filelen, 1 ,filei);  //must return 1

		bool morestreams = true;

		//Now search the buffer repeatedly for streams of data:
		while (morestreams)
		{
			//Search for stream, endstream. We ought to first check the filter
			//of the object to make sure it is FlateDecode, but skip that for now!
			size_t streamstart = FindStringInBuffer (buffer, "stream", filelen);
			size_t streamend   = FindStringInBuffer (buffer, "endstream", filelen);
			if (streamstart>0 && streamend>streamstart)
			{
				//Skip to beginning and end of the data stream:
				streamstart += 6;

				if (buffer[streamstart]==0x0d && buffer[streamstart+1]==0x0a) streamstart+=2;
				else if (buffer[streamstart]==0x0a) streamstart++;

				if (buffer[streamend-2]==0x0d && buffer[streamend-1]==0x0a) streamend-=2;
				else if (buffer[streamend-1]==0x0a) streamend--;

				//Assume output will fit into 10 times input buffer:
				size_t outsize = (streamend - streamstart)*10;
				char* output = new char [outsize]; ZeroMemory(output, outsize);

				//Now use zlib to inflate:
				z_stream zstrm; ZeroMemory(&zstrm, sizeof(zstrm));

				zstrm.avail_in = streamend - streamstart + 1;
				zstrm.avail_out = outsize;
				zstrm.next_in = (Bytef*)(buffer + streamstart);
				zstrm.next_out = (Bytef*)output;

				int rsti = inflateInit(&zstrm);
				if (rsti == Z_OK)
				{
					int rst2 = inflate (&zstrm, Z_FINISH);
					if (rst2 >= 0)
					{
						//Ok, got something, extract the text:
						size_t totout = zstrm.total_out;
						ProcessOutput(fileo, output, totout);
					}
				}
				delete[] output; output=0;
				buffer+= streamend + 7;
				filelen = filelen - (streamend+7);
			}
			else
			{
				morestreams = false;
			}
		}
		fclose(filei);
	}
	if (fileo) fclose(fileo);
	return 0;
}