Example #1
//-----------------------------------------------------------------------------
// Name: XBUtil_FindMediaFile()
// Desc: Returns a valid path to a media file.
//-----------------------------------------------------------------------------
HRESULT XBUtil_FindMediaFile( CHAR* strPath, const CHAR* strFilename )
{
    // Check for valid arguments
    if( NULL==strFilename || NULL==strPath )
    {
        OUTPUT_DEBUG_STRING( "XBUtil_FindMediaFile(): Invalid arguments\n" );
        return E_INVALIDARG;
    }

    // Default path is the filename itself as a fully qualified path
    strcpy( strPath, strFilename );

    // Check for the ':' character to see if the filename is a fully
    // qualified path. If not, pre-pend the media directory
    if( strFilename[1] != ':' )
        sprintf( strPath, "%s%s", g_strMediaPath, strFilename );

    // Try to open the file
    HANDLE hFile = CreateFile( strPath, GENERIC_READ, FILE_SHARE_READ, NULL, 
                               OPEN_EXISTING, 0, NULL );
    if( INVALID_HANDLE_VALUE == hFile )
    {
        // Return error
        CHAR strBuffer[80];
        sprintf( strBuffer, "XBUtil_FindMediaFile(): Could not find file [%s]\n", 
                            strFilename );
        OUTPUT_DEBUG_STRING( strBuffer );
        return 0x82000004;
    }

    // Found the file. Close the file and return
    CloseHandle( hFile );

    return S_OK;
}
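Note: the examples below call OUTPUT_DEBUG_STRING both with a bare string and with printf-style arguments, so the macro presumably formats its arguments and forwards the result to the debugger. A minimal sketch of such a definition, assuming a Windows build (the real definition is not part of these snippets):

#include <windows.h>
#include <cstdarg>
#include <cstdio>

// Hypothetical helper: format the arguments and hand the result to OutputDebugStringA().
static void OutputDebugFormatted( const CHAR* strFormat, ... )
{
    CHAR strBuffer[1024];
    va_list args;
    va_start( args, strFormat );
    vsnprintf( strBuffer, sizeof(strBuffer), strFormat, args );
    va_end( args );
    OutputDebugStringA( strBuffer );
}

#define OUTPUT_DEBUG_STRING OutputDebugFormatted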
Example #2
void subsessionByeHandler(void* clientData) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
	UsageEnvironment& env = rtspClient->envir(); // alias

	//env << *rtspClient << "Received RTCP \"BYE\" on \"" << *subsession << "\" subsession\n";
	OUTPUT_DEBUG_STRING("Received RTCP \"BYE\" on subsession\n");
	// Now act as if the subsession had closed:
	subsessionAfterPlaying(subsession);
}
Example #3
void setupNextSubsession(RTSPClient* rtspClient) {

	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	UsageEnvironment& env = rtspClient->envir(); // alias
	StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

	scs.subsession = scs.iter->next();
	if (scs.subsession != NULL) {
		if (!scs.subsession->initiate()) {
			//env << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n";
			OUTPUT_DEBUG_STRING("Failed to initiate the \"%p\" subsession: %s \n", scs.subsession, env.getResultMsg());
			setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
		} else {
			OUTPUT_DEBUG_STRING("Initiated the subsession %p (client ports %u-%u)\n", \
				scs.subsession, \
				scs.subsession->clientPortNum(), \
				scs.subsession->clientPortNum() + 1);

			//env << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1 << ")\n";

			// Continue setting up this subsession, by sending a RTSP "SETUP" command:

			Boolean stream_over_tcp = False;

			if (gStream_Over_Protocol == STREAMING_OVER_TCP)
			{
				stream_over_tcp = True;
			}
			else if (gStream_Over_Protocol == STREAMING_OVER_UDP)
			{
				stream_over_tcp = False;
			}
			
			
			rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, stream_over_tcp);
		}
		return;
	}

	// We've finished setting up all of the subsessions.  Now, send a RTSP "PLAY" command to start the streaming:
	if (scs.session->absStartTime() != NULL) {
		// Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
		rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
	} else {
		scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
		rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
	}
}
Example #4
int udp_recv(udp_recv_callback callback, void* args)
{
	char recv_buf[MAX_PACK_NET_SIZE] = {0};
	unsigned long ioctl_argp = 0L;
	int recv_size = 0;
	int count = 0;

	while(true){

		if(ioctlsocket(__socket_impl.socket_, FIONREAD, &ioctl_argp) == 0){

			if(ioctl_argp > 0){

				recv_size = recv(__socket_impl.socket_, recv_buf, sizeof(recv_buf), 0);

				if(recv_size == SOCKET_ERROR){

					OUTPUT_DEBUG_STRING(IDS_STRING143, recv_size);
					break;
				}
				else if(recv_size == 0x0){

					OUTPUT_DEBUG_STRING(IDS_STRING144);
					break;
				}
				else if(recv_size > MAX_PACK_NET_SIZE){

					OUTPUT_DEBUG_STRING(IDS_STRING145);
					break;
				}
				else{

					callback(args, recv_buf, recv_size);
					count += recv_size;
				}
			}
			else
				break;
		}
		else{
			break;
		}
	}

	return count;
}
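Note: udp_recv() drains the socket and hands each datagram to the caller through udp_recv_callback. The callback type is not shown in these snippets; from the call site it appears to take the user argument, the buffer, and the received size. A minimal sketch under that assumption (the typedef and print_packet are hypothetical):

// Assumed signature, inferred from callback(args, recv_buf, recv_size) above.
typedef void (*udp_recv_callback)(void* args, const char* data, int size);

// Example callback: count datagrams and log each one's size.
static void print_packet(void* args, const char* data, int size)
{
	(void)data; // payload not inspected in this sketch
	int* packet_count = (int*)args;
	++(*packet_count);
	OUTPUT_DEBUG_STRING("datagram %d: %d bytes\n", *packet_count, size);
}

// Usage:
//   int packets = 0;
//   int total_bytes = udp_recv(print_packet, &packets);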
Example #5
void RenderingManagerC::DrawPlayer()
{
	//D3DXVECTOR2 playerScale = D3DXVECTOR2(47.0f/64.0f, 77.0f/128.0f);
	D3DXVECTOR2 playerScale = D3DXVECTOR2(1.0f, 1.0f);
	D3DXVECTOR2 flippedScale = D3DXVECTOR2(-1.0f, 1.0f);
	D3DXVECTOR2 playerPos ;
	D3DXVECTOR2 playerBox;
	D3DXVECTOR2 playerHandPos;


	playerPos.x = PhysicsManagerC::GetInstance()->GetPlayerPosition().x * METERS_TO_PIXELS ;
	playerPos.y = PhysicsManagerC::GetInstance()->GetPlayerPosition().y * METERS_TO_PIXELS ;

	playerBox.x = PhysicsManagerC::GetInstance()->GetPlayerPosition().x * METERS_TO_PIXELS ;
	playerBox.y = PhysicsManagerC::GetInstance()->GetPlayerPosition().y * METERS_TO_PIXELS ;

	playerBox.x -= PLAYER_WIDHT/2;
	playerBox.y -= PLAYER_HEIGHT/2;

	//DrawBox(playerBox.x, playerBox.y, playerBox.x + PLAYER_WIDHT, playerBox.y + PLAYER_HEIGHT, 0x00000000, 0xFFFFFFFF);

	playerPos.x -= PLAYER_WIDHT/2;
	playerPos.y -= PLAYER_HEIGHT/2;

	if( (prevX != playerPos.x) || (prevY != playerPos.y))
	{
		char debugStr[200];
		sprintf(debugStr, "Player Position : %f,%f\n", playerPos.x, playerPos.y);
		OUTPUT_DEBUG_STRING(debugStr);
		prevX = playerPos.x;
		prevY = playerPos.y;
	}
	
	b2Vec2 playerPosition = PhysicsManagerC::GetInstance()->GetPlayerPosition();
	b2Vec2 rayCastPosition;

	rayCastPosition.x = playerPosition.x * METERS_TO_PIXELS + (PLAYER_RAYCAST_OFFSET_X - PLAYER_WIDHT / 2) ;
	rayCastPosition.y = playerPosition.y * METERS_TO_PIXELS + (PLAYER_RAYCAST_OFFSET_Y - PLAYER_HEIGHT / 2) ;

	playerHandPos.x = playerPos.x + 15.0f;
	playerHandPos.y = playerPos.y + 10.0f;

	float angleOfRotation = atanf((rayCastPosition.y - InputManagerC::GetInstance()->getCurrentCrossHairPosition()->y) / (rayCastPosition.x - InputManagerC::GetInstance()->getCurrentCrossHairPosition()->x)) ;
	
	if(InputManagerC::GetInstance()->getCurrentCrossHairPosition()->x > playerPos.x)
	{
		player->Draw(g_playerSpriteSheetTexture, &AnimationManagerC::GetInstance()->CurrentSpriteRect , &playerScale, NULL, 0.0f, &playerPos, 0xFFFFFFFF);
		playerHand->Draw(g_playerSpriteSheetTexture, &AnimationManagerC::GetInstance()->PlayerHand, &playerScale, NULL, -angleOfRotation, &playerHandPos, 0xFFFFFFFF);
	}
	else
	{
		playerPos.x = playerPos.x + (AnimationManagerC::GetInstance()->CurrentSpriteRect.right - AnimationManagerC::GetInstance()->CurrentSpriteRect.left);
		playerHandPos.x = playerHandPos.x + 10.0f;
		player->Draw(g_playerSpriteSheetTexture, &AnimationManagerC::GetInstance()->CurrentSpriteRect , &flippedScale, NULL, 0.0f, &playerPos, 0xFFFFFFFF);
		playerHand->Draw(g_playerSpriteSheetTexture, &AnimationManagerC::GetInstance()->PlayerHand, &flippedScale, NULL, -angleOfRotation, &playerHandPos, 0xFFFFFFFF);
	}
}
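Note: the hand rotation above takes atanf() of a slope, which only covers (-pi/2, pi/2) and divides by zero when the crosshair is directly above or below the raycast point; the left/right branch then compensates by flipping the sprite. If a full-quadrant angle were wanted instead, atan2f() avoids both issues. A small sketch of that alternative (not the behavior of the code above):

#include <math.h>

// Hypothetical variant: atan2f() handles every quadrant and a zero x-delta safely.
static float ComputeAimAngle( float targetX, float targetY, float originX, float originY )
{
	return atan2f( targetY - originY, targetX - originX );
}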
Example #6
void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString) {
	Boolean success = False;
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);

	do {
		UsageEnvironment& env = rtspClient->envir(); // alias
		StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

		if (resultCode != 0) {
			//env << *rtspClient << "Failed to start playing session: " << resultString << "\n";
			OUTPUT_DEBUG_STRING("Failed to start playing session: %s \n", resultString);
			break;
		}

		// Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end
		// using a RTCP "BYE").  This is optional.  If, instead, you want to keep the stream active - e.g., so you can later
		// 'seek' back within it and do another RTSP "PLAY" - then you can omit this code.
		// (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.)
		if (scs.duration > 0) {
			unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration.  (This is optional.)
			scs.duration += delaySlop;
			unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
			scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
		}

		//env << *rtspClient << "Started playing session";
		OUTPUT_DEBUG_STRING("Started playing session");
		if (scs.duration > 0) {
			//env << " (for up to " << scs.duration << " seconds)";
			OUTPUT_DEBUG_STRING("(for up to %.1f seconds)", scs.duration);
		}
		//env << "...\n";
		OUTPUT_DEBUG_STRING("...\n");

		success = True;
	} while (0);
	delete[] resultString;

	if (!success) {
		// An unrecoverable error occurred with this stream.
		shutdownStream(rtspClient);
	}
}
Example #7
//-----------------------------------------------------------------------------
// Name: XBUtil_CreatePixelShader()
// Desc: Creates a file-based pixel shader
//-----------------------------------------------------------------------------
HRESULT XBUtil_CreatePixelShader( LPDIRECT3DDEVICE8 pd3dDevice, 
                                  const CHAR* strFilename, DWORD* pdwPixelShader )
{
    HRESULT hr;

    // Find the media file
    CHAR strShaderPath[512];
    if( FAILED( hr = XBUtil_FindMediaFile( strShaderPath, strFilename ) ) )
        return hr;

    // Open the pixel shader file
    HANDLE hFile;
    DWORD dwNumBytesRead;
    hFile = CreateFile(strShaderPath, GENERIC_READ, FILE_SHARE_READ, NULL,
                       OPEN_EXISTING, FILE_ATTRIBUTE_READONLY, NULL);
    if(hFile == INVALID_HANDLE_VALUE)
        return E_FAIL;


    // Load the pre-compiled pixel shader microcode
    D3DPIXELSHADERDEF_FILE psdf;
    
    ReadFile( hFile, &psdf, sizeof(D3DPIXELSHADERDEF_FILE), &dwNumBytesRead, NULL );
    
    // Make sure the pixel shader is valid
    if( psdf.FileID != D3DPIXELSHADERDEF_FILE_ID )
    {
        OUTPUT_DEBUG_STRING( "XBUtil_CreatePixelShader(): Invalid pixel shader file\n" );
        CloseHandle( hFile );
        return E_FAIL;
    }

    // Create the pixel shader
    if( FAILED( hr = pd3dDevice->CreatePixelShader( &(psdf.Psd), pdwPixelShader ) ) )
    {
        OUTPUT_DEBUG_STRING( "XBUtil_CreatePixelShader(): Could not create pixel shader\n" );
        CloseHandle( hFile );
        return hr;
    }

    // cleanup
    CloseHandle( hFile );

    return S_OK;
}
Example #8
void streamTimerHandler(void* clientData) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	ourRTSPClient* rtspClient = (ourRTSPClient*)clientData;
	StreamClientState& scs = rtspClient->scs; // alias

	scs.streamTimerTask = NULL;

	// Shut down the stream:
	shutdownStream(rtspClient);
}
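Note: streamTimerHandler() is the TaskFunc scheduled from continueAfterPLAY(). If the stream is torn down before that timer fires, the pending task should be cancelled, otherwise the scheduler will later call into a client that no longer exists; the shutdown code in these snippets leaves that step commented out. A minimal sketch of the cancellation, assuming scs.streamTimerTask still holds the token returned by scheduleDelayedTask():

// Sketch: cancel a pending stream timer before closing the RTSPClient.
static void cancelStreamTimer(ourRTSPClient* rtspClient) {
	StreamClientState& scs = rtspClient->scs; // alias
	if (scs.streamTimerTask != NULL) {
		// unscheduleDelayedTask() also resets the token to NULL.
		rtspClient->envir().taskScheduler().unscheduleDelayedTask(scs.streamTimerTask);
	}
}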
Example #9
void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL, LostDetectCallBackFuction func) {
	// Begin by creating a "RTSPClient" object.  Note that there is a separate "RTSPClient" object for each stream that we wish
	// to receive (even if more than one stream uses the same "rtsp://" URL).
	RTSPClient* rtspClient = ourRTSPClient::createNew(env, rtspURL);//, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
	if (rtspClient == NULL) {
		//env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
		OUTPUT_DEBUG_STRING("Failed to create a RTSP client for URL \"%s\": %s\n", rtspURL, env.getResultMsg());
		return;
	}
	
	OUTPUT_DEBUG_STRING("open times = %d close times = %d\n", ++open_times, close_times);

	gRtspClient = rtspClient;
	gLostDetectFun = func;
	// Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
	// Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
	// Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
	rtspClient->sendDescribeCommand(continueAfterDESCRIBE); 
}
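Note: openURL() only issues the asynchronous DESCRIBE; the caller is expected to create the UsageEnvironment and run the LIVE555 event loop so the continueAfter* callbacks actually run. A minimal sketch of such a driver, assuming the standard BasicUsageEnvironment classes (runClient and onLoss are hypothetical names):

// Sketch: build the environment, open the URL, and run the event loop.
char eventLoopWatchVariable = 0;

void runClient(const char* rtspURL, LostDetectCallBackFuction onLoss) {
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

	openURL(*env, "rtsp-client", rtspURL, onLoss);

	// Blocks until eventLoopWatchVariable is set non-zero (e.g. after shutdownStream()).
	env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
}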
Example #10
void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	do {
		UsageEnvironment& env = rtspClient->envir(); // alias
		StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

		if (resultCode != 0) {
			//env << *rtspClient << "Failed to get a SDP description: " << resultString << "\n";
			OUTPUT_DEBUG_STRING("Failed to get a SDP description: %s\n", resultString);
			delete[] resultString;
			break;
		}

		char* const sdpDescription = resultString;
		//env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";
		//OUTPUT_DEBUG_STRING("Got a SDP description:\n %s\n", sdpDescription);

		// Create a media session object from this SDP description:
		scs.session = MediaSession::createNew(env, sdpDescription);
		delete[] sdpDescription; // because we don't need it anymore
		if (scs.session == NULL) {
			//env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n";
			OUTPUT_DEBUG_STRING("Failed to create a MediaSession object from the SDP description: %s\n", env.getResultMsg());
			break;
		} else if (!scs.session->hasSubsessions()) {
			//env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
			OUTPUT_DEBUG_STRING("This session has no media subsessions (i.e., no \"m=\" lines)\n");
			break;
		}

		// Then, create and set up our data source objects for the session.  We do this by iterating over the session's 'subsessions',
		// calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
		// (Each 'subsession' will have its own data source.)
		scs.iter = new MediaSubsessionIterator(*scs.session);
		setupNextSubsession(rtspClient);
		return;
	} while (0);

	// An unrecoverable error occurred with this stream.
	shutdownStream(rtspClient);
}
Example #11
unsigned __stdcall input_thread_proc(void* args)
{

	//	WaitForSingleObject(ppi_ptr->parallel_stop_event_, ppi_ptr->waiting_time_);
	PARALLEL_PORT_IMPL_PTR ppi_ptr = (PARALLEL_PORT_IMPL_PTR)args;
	DWORD result = 0x0;
	PARALLEL_OUTPUT_QUEUE_ITEM item;
	LARGE_INTEGER  litmp; 
	LONGLONG       QPart1,QPart2;
	double         minute, freq, tim; 
	int i = 0;

	while(ppi_ptr->is_running_){

		QueryPerformanceFrequency(&litmp); 
		freq = (double)litmp.QuadPart; 
		QueryPerformanceCounter(&litmp); 
		QPart1 = litmp.QuadPart; 

		do{
			result = WaitForSingleObject(ppi_ptr->parallel_stop_event_, 0);
			if(result == WAIT_OBJECT_0)	goto END;

			QueryPerformanceCounter(&litmp); 
			QPart2 = litmp.QuadPart;
			minute = (double)(QPart2-QPart1); 
			tim = minute / freq; 
		}while(tim<0.01);

		for(i = 0; i < 4; ++i){
			if(parallel_output_queue_pop(&item)){
				if(output_data_into_parallel_port(item.buf_, sizeof(item.buf_)) != -1){
					_InterlockedIncrement(&__simple_proc_env.application_environment_->pci_output_num_);
				}
				else{
					OUTPUT_DEBUG_STRING("Parallel port write error!");
				}
			}
		}

	}
END:

	_endthreadex(0);
	return 0;
}
Example #12
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	do {
		UsageEnvironment& env = rtspClient->envir(); // alias
		StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

		static int SessionID = 0;

		if (resultCode != 0) {
			OUTPUT_DEBUG_STRING("Failed to set up the subsession: %s \n", \
				resultString);
			//env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n";
			break;
		}

		OUTPUT_DEBUG_STRING("Set up the subsession (client ports %u-%u)\n", \
			scs.subsession->clientPortNum(), scs.subsession->clientPortNum()+1);
		//env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession (client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1 << ")\n";

		// Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
		// (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
		// after we've sent a RTSP "PLAY" command.)

		scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url());
		// perhaps use your own custom "MediaSink" subclass instead
		if (scs.subsession->sink == NULL) {
			OUTPUT_DEBUG_STRING("Failed to create a data sink for the subsession: %s\n", \
				env.getResultMsg());
			//env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession<< "\" subsession: " << env.getResultMsg() << "\n";
			break;
		}

		OUTPUT_DEBUG_STRING("add = %d, del = %d\n", ++sum_add, sum_del);

		// Install the callbacks on the newly created sink (cast the sink, not the subsession)
		((DummySink *)scs.subsession->sink)->SetCallBackFunction(gAVDataFun, gLostDetectFun, SessionID, rtspClient);
		SessionID++;
		OUTPUT_DEBUG_STRING("Created a data sink for the subsession\n");
		//env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
		scs.subsession->miscPtr = rtspClient; // a hack to let subsession handle functions get the "RTSPClient" from the subsession 
		scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
			subsessionAfterPlaying, scs.subsession);
		// Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
		if (scs.subsession->rtcpInstance() != NULL) {
			scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession);
		}
	} while (0);
	delete[] resultString;

	// Set up the next subsession, if any:
	setupNextSubsession(rtspClient);
}
Example #13
void subsessionAfterPlaying(void* clientData) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

	// Begin by closing this subsession's stream:
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	// Next, check whether *all* subsessions' streams have now been closed:
	MediaSession& session = subsession->parentSession();
	MediaSubsessionIterator iter(session);
	while ((subsession = iter.next()) != NULL) {
		if (subsession->sink != NULL) return; // this subsession is still active
	}

	// All subsessions' streams have now been closed, so shutdown the client:
	shutdownStream(rtspClient);
}
Example #14
bool start_inputting_thread(parallel_inputting_thread_callback callback)
{
	I16 ret_code = 0x0;

	if(callback == NULL){			
		OUTPUT_DEBUG_STRING(IDS_STRING140);
		return false;
	}
	else{
		__parallel_port.parallel_thread_callback_ = callback;
	}

	ResetEvent(__parallel_port.parallel_stop_event_);
	__parallel_port.is_running_ = true;

	//create input thread
	__parallel_port.parallel_input_thread_handler_ = (HANDLE)_beginthreadex(NULL, 0, input_thread_proc, &__parallel_port, 0, NULL);

	return true;
}
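Note: start_inputting_thread() launches input_thread_proc() and relies on parallel_stop_event_ and the is_running_ flag to end it; the matching stop routine is not part of these snippets. A minimal sketch of what it might look like (stop_inputting_thread is a hypothetical name):

// Sketch: signal the worker to exit, then wait for it and release the handle.
void stop_inputting_thread(void)
{
	__parallel_port.is_running_ = false;
	SetEvent(__parallel_port.parallel_stop_event_);

	if(__parallel_port.parallel_input_thread_handler_ != NULL){
		WaitForSingleObject(__parallel_port.parallel_input_thread_handler_, INFINITE);
		CloseHandle(__parallel_port.parallel_input_thread_handler_);
		__parallel_port.parallel_input_thread_handler_ = NULL;
	}
}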
Example #15
//-----------------------------------------------------------------------------
// Name: Create()
// Desc: Loads all the texture resources from the given XPR.
//-----------------------------------------------------------------------------
HRESULT CXBPackedResource::Create( LPDIRECT3DDEVICE8 pd3dDevice, 
                                   const CHAR* strFilename, DWORD dwNumResources,
                                   XBRESOURCE* pResourceTags )
{
    // Find the media file
    CHAR strResourcePath[512];
    if( FAILED( XBUtil_FindMediaFile( strResourcePath, strFilename ) ) )
        return E_FAIL;

    // Open the file
    HANDLE hFile;
    DWORD dwNumBytesRead;
    hFile = CreateFile(strResourcePath, GENERIC_READ, FILE_SHARE_READ, NULL,
                       OPEN_EXISTING, FILE_ATTRIBUTE_READONLY, NULL);
    if(hFile == INVALID_HANDLE_VALUE)
    {
        OUTPUT_DEBUG_STRING( "CXBPackedResource::Create(): ERROR: File not found!\n" );
        return E_FAIL;
    }

   
    // Read in and verify the XPR magic header
    XPR_HEADER xprh;

    ReadFile(hFile, &xprh, sizeof(XPR_HEADER), &dwNumBytesRead, NULL);
    if( xprh.dwMagic != XPR_MAGIC_VALUE )
    {
        OUTPUT_DEBUG_STRING( "Invalid Xbox Packed Resource (.xpr) file" );
        CloseHandle( hFile );
        return E_INVALIDARG;
    }

    // Compute memory requirements
    DWORD dwSysMemDataSize = xprh.dwHeaderSize - sizeof(XPR_HEADER);
    DWORD dwVidMemDataSize = xprh.dwTotalSize - xprh.dwHeaderSize;

    // Allocate memory
    m_pSysMemData = new BYTE[dwSysMemDataSize];
    m_pVidMemData = (BYTE*)D3D_AllocContiguousMemory( dwVidMemDataSize, D3DTEXTURE_ALIGNMENT );

    // Read in the data from the file
    ReadFile(hFile, m_pSysMemData, dwSysMemDataSize, &dwNumBytesRead, NULL);
    ReadFile(hFile, m_pVidMemData, dwVidMemDataSize, &dwNumBytesRead, NULL);

    // Done with the file
    CloseHandle(hFile);
    
    // Loop over resources, calling Register()
    BYTE* pData = m_pSysMemData;

    for( DWORD i = 0; i < dwNumResources; i++ )
    {
        // Check for user data
        if( *((DWORD*)pData) & 0x80000000 )
        {
            DWORD dwType = ((DWORD*)pData)[0];
            DWORD dwSize = ((DWORD*)pData)[1];
            pData += sizeof(DWORD) * 2;

            (VOID)dwType; // not used
            pData += dwSize;
        }
        else
        {
            // Get the resource
            LPDIRECT3DRESOURCE8 pResource = (LPDIRECT3DRESOURCE8)pData;
    
            // Register the resource
            pResource->Register( m_pVidMemData );
        
            // Advance the pointer
            pData += XBResource_SizeOf( pResource );
        }
    }

    // Finally, store number of resources and the resource tags
    m_dwNumResources = dwNumResources;
    m_pResourceTags  = pResourceTags;

    return S_OK;
}
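Note: Create() reads an XPR_HEADER and then splits the file into a system-memory block and a video-memory block. The header layout is not shown in these snippets, but the dwSysMemDataSize/dwVidMemDataSize math implies it carries a magic value, the total file size, and the size of the header-plus-CPU-data region. A sketch of that assumed layout:

// Assumed layout, inferred from the size computations in Create().
struct XPR_HEADER
{
    DWORD dwMagic;      // must equal XPR_MAGIC_VALUE
    DWORD dwTotalSize;  // size of the entire .xpr file
    DWORD dwHeaderSize; // size of the header plus the system-memory (CPU) data
};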
Example #16
bool create_udp_socket(unsigned short listen_port, const char* remote_ip, unsigned short remote_port,
					   unsigned recv_buff_size, unsigned send_buff_size)
{
	bool is_ok = true;
	DWORD bytes_returned = 0;
	BOOL new_behavior = FALSE;
	unsigned long arg = 1;
	BOOL is_broadcast = TRUE;

	SOCKADDR_IN sockAddr;
	memset(&sockAddr,0,sizeof(sockAddr));

	//create a socket
	assert(__socket_impl.socket_ == NULL);
	__socket_impl.socket_ = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
	if(__socket_impl.socket_ != INVALID_SOCKET){

		__socket_impl.listen_port_ = listen_port;
		sockAddr.sin_family = AF_INET;
		sockAddr.sin_addr.s_addr = htonl(INADDR_ANY);
		sockAddr.sin_port = htons((u_short)__socket_impl.listen_port_);

		//bind socket
		bind(__socket_impl.socket_, (SOCKADDR*)&sockAddr, sizeof(sockAddr));

		//set receive buff
		__socket_impl.recv_buffer_size_ = recv_buff_size;
		if(__socket_impl.recv_buffer_size_ != 0)
			setsockopt(__socket_impl.socket_, SOL_SOCKET, SO_RCVBUF, (const char*)&__socket_impl.recv_buffer_size_, 
						sizeof(__socket_impl.recv_buffer_size_));

		//set send buff
		__socket_impl.send_buffer_size_ = send_buff_size;
		if(__socket_impl.send_buffer_size_ != 0)
			setsockopt(__socket_impl.socket_, SOL_SOCKET, SO_SNDBUF, (const char*)&__socket_impl.send_buffer_size_, 
						sizeof(__socket_impl.send_buffer_size_));
		//set io control
		ioctlsocket(__socket_impl.socket_, FIONBIO, &arg);

		// disable  new behavior using IOCTL: SIO_UDP_CONNRESET
		WSAIoctl(__socket_impl.socket_, SIO_UDP_CONNRESET,	&new_behavior, sizeof(new_behavior), 
				NULL, 0, &bytes_returned,	NULL, NULL);

	}
	else{
		OUTPUT_DEBUG_STRING(IDS_STRING142);
		return false;
	}

	//create a broadcast socket
	assert(__socket_impl.broadcast_socket_ == NULL);
	__socket_impl.broadcast_socket_ = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
	if(__socket_impl.broadcast_socket_ != INVALID_SOCKET){

		sockAddr.sin_family = AF_INET;
		sockAddr.sin_addr.s_addr = htonl(INADDR_ANY);
		sockAddr.sin_port = htons(0);

		setsockopt(__socket_impl.broadcast_socket_, SOL_SOCKET, SO_BROADCAST,
					(char*)&is_broadcast, sizeof(BOOL));

		//bind socket
		bind(__socket_impl.broadcast_socket_, (SOCKADDR*)&sockAddr, sizeof(sockAddr));
	}
	
	//set remote ip
	memset(&__socket_impl.remote_ip_, 0, sizeof(__socket_impl.remote_ip_));

	__socket_impl.remote_ip_.sin_family = AF_INET;

	if (remote_ip == NULL){

		__socket_impl.remote_ip_.sin_addr.s_addr = htonl(INADDR_ANY);
	}
	else{

		__socket_impl.remote_ip_.sin_addr.s_addr = inet_addr(remote_ip);
		if (__socket_impl.remote_ip_.sin_addr.s_addr == INADDR_NONE){

			LPHOSTENT lphost;
			lphost = gethostbyname(remote_ip);
			if (lphost != NULL){

				__socket_impl.remote_ip_.sin_addr.s_addr = ((LPIN_ADDR)lphost->h_addr)->s_addr;
			}
			else{

				WSASetLastError(WSAEINVAL);
				return false;
			}
		}
	}

	__socket_impl.remote_ip_.sin_port = htons((u_short)remote_port);
	__socket_impl.remote_port_ = remote_port;
	
	return is_ok;
}
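Note: create_udp_socket() stores the resolved remote address in __socket_impl.remote_ip_, which suggests a send path built on sendto(); that function is not part of these snippets. A minimal sketch of such a sender (udp_send is a hypothetical name):

// Sketch: send one datagram to the remote address resolved in create_udp_socket().
int udp_send(const char* data, int size)
{
	int sent = sendto(__socket_impl.socket_, data, size, 0,
					  (SOCKADDR*)&__socket_impl.remote_ip_, sizeof(__socket_impl.remote_ip_));

	if(sent == SOCKET_ERROR){
		OUTPUT_DEBUG_STRING("udp_send() failed, error = %d\n", WSAGetLastError());
		return -1;
	}

	return sent;
}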
Example #17
void shutdownStream(RTSPClient* rtspClient, int exitCode) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	
	OUTPUT_DEBUG_STRING("open times = %d close times = %d\n", open_times, ++close_times);


	UsageEnvironment& env = rtspClient->envir(); // alias
	StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

	// First, check whether any subsessions have still to be closed:
	if (scs.session != NULL) { 
		Boolean someSubsessionsWereActive = False;
		MediaSubsessionIterator iter(*scs.session);
		MediaSubsession* subsession;

		while ((subsession = iter.next()) != NULL) {
			if (subsession->sink != NULL) {
				//subsession->sink->stopPlaying();
				Medium::close(subsession->sink);
				subsession->sink = NULL;

				OUTPUT_DEBUG_STRING("22 add = %d, del = %d\n", sum_add, ++sum_del);
				
				if (subsession->rtcpInstance() != NULL) {
					subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
				}
				someSubsessionsWereActive = True;
			}
		}

		if (someSubsessionsWereActive) {
			// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
			// Don't bother handling the response to the "TEARDOWN".
			//gEnv->taskScheduler().unscheduleDelayedTask(scs.streamTimerTask);
			rtspClient->sendTeardownCommand(*scs.session, NULL);
			
		}
		
		//Medium::close(scs.session);
		//gEnv->taskScheduler().unscheduleDelayedTask(scs.streamTimerTask);
		Medium::close(rtspClient);
		rtspClient = NULL;
		
		//gEnv->reclaim(); //del by huguohu
		//gEnv = NULL;
		if(gScheduler != NULL){
			delete gScheduler; 
			gScheduler = NULL;	
		}

	}

	//env << *rtspClient << "Closing the stream.\n";
	//Medium::close(scs.session);

	// Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

	// 	if (--rtspClientCount == 0) {
	// 		// The final stream has ended, so exit the application now.
	// 		// (Of course, if you're embedding this code into your own application, you might want to comment this out,
	// 		// and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
	// 		//exit(exitCode);
	// 	}
}
Example #18
// Maximum bet the android (bot) player may place
__int64 CAndroidUserItemSink::GetMaxAndroidScore(int iArea)
{
	if ((iArea < 0) || (iArea>=COUNT_AZIMUTH))
	{
		return 0L;
	}
	__int64 i64UserAllScore = 0; // score this player has already bet
	__int64 i64AllUserAllScore=0;// total bets from all players
	__int64 i64AllLossScore=0;   // banker's total payout
	__int64 i64LoseAreaScore=0;  // bets on the areas the banker loses
	for (int i=0; i<COUNT_AZIMUTH; ++i)
	{
		i64UserAllScore += m_lMeScore[i];
		i64AllUserAllScore += m_lAllScore[i];
	}
	// Robot (android) betting limit
	if (i64AllUserAllScore > 350000000L)
	{
		m_bEnableNote = false;
		return 0L;
	}
	for (int i=1; i<=MAX_SICBO_NUMBER; ++i)
	{
		for (int j=i; j<=MAX_SICBO_NUMBER; ++j)
		{
			for (int h=j; h<=MAX_SICBO_NUMBER; ++h)
			{
				E_SICBO_NUMBER enBuffer[]={(E_SICBO_NUMBER)i,(E_SICBO_NUMBER)j,(E_SICBO_NUMBER)h};
				E_CARD_TYPE enCardType[COUNT_AZIMUTH];
				int iCount = 0;
				m_GameLogic.GetCardType(enBuffer,enCardType, iCount);
				__int64 i64AllLossScoreTemp=0;
				__int64 i64LoseAreaScoreTemp=0;  // bets on the areas the banker loses
				bool bFind = false;
				for (int l=0; l<iCount; ++l)
				{
					if (enCardType_Illegal == enCardType[l])
					{
						continue;
					}
					if (iArea == enCardType[l])
					{
						bFind = true;
					}
				}
				if (!bFind)
				{
					continue;
				}

				// Work out how much the banker loses on this roll
				for (int l=0; l<iCount; ++l)
				{
					if (enCardType_Illegal == enCardType[l])
					{
						continue;
					}
					i64LoseAreaScoreTemp +=m_lAllScore[enCardType[l]];  // bets on the areas the banker loses
					if ((enCardType[l]<enCardType_SicboOne) ||(enCardType[l]>enCardType_SicboSix))
					{
						i64AllLossScoreTemp += (m_lAllScore[enCardType[l]] * m_GameLogic.m_i64Loss_Percent[enCardType[l]]);
					}
					else
					{
						// Convert to the count of matching dice
						E_SICBO_NUMBER enSicboNum = (E_SICBO_NUMBER)(enCardType[l]-enCardType_SicboOne+1);
						int iCountNum = m_GameLogic.GetSicboCountByNumber(enBuffer, enSicboNum);
						int iMuti[]={0,2,3,4};
						i64AllLossScoreTemp += (m_lAllScore[enCardType[l]] * iMuti[iCountNum]);
					}
				}

				if (i64AllLossScoreTemp > i64AllLossScore)
				{
					i64AllLossScore = i64AllLossScoreTemp;
				}
				if (i64LoseAreaScoreTemp>i64LoseAreaScore)
				{
					i64LoseAreaScore = i64LoseAreaScoreTemp;
				}
			}
		}
	}
	//OUTPUT_DEBUG_STRING(_T("client: area %d bets, banker loses at most %I64d"), iArea, i64AllLossScore);

	// Get the basic upper limits
	__int64 lAllMaxScore = m_lAllMaxScore;
	__int64 lUserMaxScore = m_lMeMaxScore;

	// Subtract what has already been bet
	lUserMaxScore -= i64UserAllScore;
	if (lUserMaxScore<=0)
	{
		return 0L;
	}

	OUTPUT_DEBUG_STRING(_T("android max bet %I64d, all players' bets %I64d, banker's current loss %I64d, bet area %d"),
		lAllMaxScore, i64AllUserAllScore, i64AllLossScore, iArea);

	// Money the banker can still pay out
	lAllMaxScore = lAllMaxScore - i64LoseAreaScore + i64AllUserAllScore;
	lAllMaxScore -= i64AllLossScore;
	if ((iArea<enCardType_SicboOne) ||(iArea>enCardType_SicboSix))
	{
		lAllMaxScore = lAllMaxScore/(m_GameLogic.m_i64Loss_Percent[iArea] -1);
	}
	else
	{
		lAllMaxScore = lAllMaxScore/3; // assume the maximum multiplier
	}
	if (lAllMaxScore<=0)
	{
		return 0L;
	}
	lUserMaxScore = min(lAllMaxScore, lUserMaxScore);
	return lUserMaxScore;
}