Example #1
File: server.c  Project: tsu14/HTTP2RTSP
int start_server(const char *url, const char *rtspport)
{
  int mediafd = -1, listenfd, tempfd, maxfd;
  int videofd;
  struct addrinfo *info;
  struct sockaddr_storage remoteaddr;
  socklen_t addrlen = sizeof remoteaddr;
  fd_set readfds, masterfds;
  int nready, i;
  int videosize, videoleft;
  int recvd, sent;
  char urlhost[URLSIZE], urlpath[URLSIZE], tempstr[URLSIZE];
  unsigned char msgbuf[BUFSIZE], sendbuf[BUFSIZE];
  char *temp;
  RTSPMsg rtspmsg;
  Client streamclient;


  /* The current state of the protocol */
  int mediastate = IDLE;
  int quit = 0;

  init_client(&streamclient);

  /* Open a file where the video is to be stored */
  if ((videofd = open("videotemp.mp4", O_RDWR | O_CREAT | O_TRUNC, S_IRWXU)) < 0) {
    fatal_error("Error opening the temporary videofile");
  }

  /* Create the RTSP listening socket */
  resolve_host(NULL, rtspport, SOCK_STREAM, AI_PASSIVE, &info);
  listenfd = server_socket(info);
  maxfd = listenfd;


  FD_ZERO(&readfds);
  FD_ZERO(&masterfds);
  FD_SET(listenfd, &masterfds);


  while (!quit) {

    readfds = masterfds;

    if ((nready = Select(maxfd + 1, &readfds, NULL)) == -1) {
      write_log(logfd, "Select interrupted by a signal\n");
    } 

    for (i = 0; i <= maxfd; i++) {
      if (FD_ISSET(i, &readfds)) {

        nready--;

        /* New connection from a client */
        if (i == listenfd) {
          if ((tempfd = accept(i, (struct sockaddr *)&remoteaddr, &addrlen)) == -1) {
            if (errno != EWOULDBLOCK && errno != ECONNABORTED &&
                errno != EPROTO && errno != EINTR)
            {
              fatal_error("accept");
            }
          }

          /* If we are already serving a client, close the new connection. Otherwise, continue. */
          if (streamclient.state != NOCLIENT) close (tempfd);
          else {
            streamclient.rtspfd = tempfd;
            streamclient.state = CLICONNECTED;
            maxfd = max(2, streamclient.rtspfd, maxfd);
            FD_SET(streamclient.rtspfd, &masterfds);
          }
        }

        /* Data from the media source */
        else if (i == mediafd) {

          switch (mediastate) {

            case GETSENT:
              /* Read ONLY the HTTP message from the socket and store the video size */
              recvd = recv_all(i, msgbuf, BUFSIZE, MSG_PEEK);
              temp = strstr((char *)msgbuf, "\r\n\r\n");
              recvd = recv_all(i, msgbuf, (int)(temp + 4 - (char *)msgbuf), 0);
              temp = strstr((char *)msgbuf, "Content-Length:");
              sscanf(temp, "Content-Length: %d", &videosize);
              videoleft = videosize;
              mediastate = RECVTCP;
              break;

            case RECVTCP:
              if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
                FD_CLR(i, &masterfds);
                close(i);
                printf("Socket closed\n");
              }
              writestr(videofd, msgbuf, recvd);
              videoleft -= recvd;
              if (videoleft <= 0) mediastate = STREAM;
              break;

              /* TODO: Start streaming, currently just exits the program */
            case STREAM:
              /*
                 close(videofd);
                 close(mediafd);
                 close(listenfd);
                 quit = 1;
                 */
              break;

            default: 
              break;
          }
        }

        /* Data from a client ( i == streamclient.rtspfd) */
        else {

          if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
            FD_CLR(i, &masterfds);
            close(i);
            printf("Socket closed\n");
            streamclient.state = NOCLIENT;
          }
          else {
            printf("%s", msgbuf);
            parse_rtsp(&rtspmsg, msgbuf); 
          }

          switch (streamclient.state) {

            case CLICONNECTED:
              if (rtspmsg.type == OPTIONS) {
                sent = rtsp_options(&rtspmsg, sendbuf);
                send_all(i, sendbuf, sent);
              }
              else if (rtspmsg.type == DESCRIBE) {

                /* Start fetching the file from the server */
                parse_url(url, urlhost, urlpath);
                resolve_host(urlhost, "80", SOCK_STREAM, 0, &info);
                mediafd = client_socket(info, 0);
                FD_SET(mediafd, &masterfds);
                maxfd = max(2, maxfd, mediafd);

                /* Send the GET message */
                http_get(url, msgbuf);
                send_all(mediafd, msgbuf, strlen((char *)msgbuf));
                mediastate = GETSENT;

                /* TODO: parse SDP from the media file rather than hardcoding it */
                sent = rtsp_describe(&rtspmsg, sendbuf);
                send_all(i, sendbuf, sent);
                streamclient.state = SDPSENT;
              }
              break;

            case SDPSENT:
              if (rtspmsg.type == SETUP) {
                sent = rtsp_setup(&rtspmsg, sendbuf, 50508, 50509);
                send_all(i, sendbuf, sent);
                write_remote_ip(tempstr, streamclient.rtspfd);
                resolve_host(tempstr, rtspmsg.clirtpport, SOCK_DGRAM, 0, &info);
                streamclient.videofds[0] = client_socket(info, 50508);
                resolve_host(tempstr, rtspmsg.clirtcpport, SOCK_DGRAM, 0, &info);
                streamclient.videofds[1] = client_socket(info, 50509);
                streamclient.state = SETUPSENT;
              }
              break;

            case SETUPSENT:
              if (rtspmsg.type == PLAY) {
              }
              
              break;

            default:
              break;
          }
        }
      }
      if (nready <= 0) break;   
    }

  }


  return 1;
}
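
In the GETSENT state above, the server first peeks at the media socket with MSG_PEEK, then consumes exactly the bytes up to the blank line that terminates the HTTP response header, and finally reads Content-Length so it knows how many body bytes remain. A minimal standalone sketch of that header-splitting idea, using plain recv() instead of the project's recv_all() wrapper (the helper name read_http_header and its error handling are illustrative assumptions):

#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/socket.h>

/* Sketch only: peek at the socket, consume just the HTTP header, report Content-Length. */
static int read_http_header(int fd, char *buf, size_t bufsize, int *content_length)
{
  ssize_t peeked = recv(fd, buf, bufsize - 1, MSG_PEEK);   /* look without consuming */
  if (peeked <= 0) return -1;
  buf[peeked] = '\0';

  char *end = strstr(buf, "\r\n\r\n");                     /* end of the header block */
  if (end == NULL) return -1;                              /* header not fully received yet */

  size_t hdrlen = (size_t)(end + 4 - buf);
  if (recv(fd, buf, hdrlen, MSG_WAITALL) != (ssize_t)hdrlen) return -1;  /* consume header only */
  buf[hdrlen] = '\0';

  char *cl = strstr(buf, "Content-Length:");
  if (cl == NULL || sscanf(cl, "Content-Length: %d", content_length) != 1) return -1;
  return 0;                                                /* body bytes are still unread */
}

Because the header is consumed exactly, the subsequent RECVTCP reads see only video payload, which is what lets the videoleft countdown reach zero at the true end of the file.
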
Example #2
File: rtsp.c  Project: gwq5210/learn_curl
/* main app */
int main(int argc, char *const argv[])
{
#if 1
	const char *transport = "RTP/AVP;unicast;client_port=1234-1235";	/* UDP */
#else
	const char *transport = "RTP/AVP/TCP;unicast;client_port=1234-1235";	/* TCP */
#endif
	const char *range = "0.000-";
	int rc = EXIT_SUCCESS;
	char *base_name = NULL;

	printf("\nRTSP request %s\n", VERSION_STR);
	printf("    Project web site: http://code.google.com/p/rtsprequest/\n");
	printf("    Requires cURL V7.20 or greater\n\n");

	/* check command line */
	if ((argc != 2) && (argc != 3)) {
		base_name = strrchr(argv[0], '/');
		if (base_name == NULL) {
			base_name = strrchr(argv[0], '\\');
		}
		if (base_name == NULL) {
			base_name = argv[0];
		} else {
			base_name++;
		}
		printf("Usage:   %s url [transport]\n", base_name);
		printf("         url of video server\n");
		printf("         transport (optional) specifier for media stream protocol\n");
		printf("         default transport: %s\n", transport);
		printf("Example: %s rtsp://192.168.0.2/media/video1\n\n", base_name);
		rc = EXIT_FAILURE;
	} else {
		const char *url = argv[1];
		char *uri = malloc(strlen(url) + 32);
		char *sdp_filename = malloc(strlen(url) + 32);
		char *control = malloc(strlen(url) + 32);
		CURLcode res;
		get_sdp_filename(url, sdp_filename);
		if (argc == 3) {
			transport = argv[2];
		}

		/* initialize curl */
		res = curl_global_init(CURL_GLOBAL_ALL);
		if (res == CURLE_OK) {
			curl_version_info_data *data =
			    curl_version_info(CURLVERSION_NOW);
			CURL *curl;
			fprintf(stderr, "    cURL V%s loaded\n", data->version);

			/* initialize this curl session */
			curl = curl_easy_init();
			if (curl != NULL) {
				my_curl_easy_setopt(curl, CURLOPT_VERBOSE, 0L);
				my_curl_easy_setopt(curl, CURLOPT_NOPROGRESS,
						    1L);
				my_curl_easy_setopt(curl, CURLOPT_HEADERDATA,
						    stdout);
				my_curl_easy_setopt(curl, CURLOPT_URL, url);

				/* request server options */
				sprintf(uri, "%s", url);
				rtsp_options(curl, uri);

				/* request session description and write response to sdp file */
				rtsp_describe(curl, uri, sdp_filename);

				/* get media control attribute from sdp file */
				get_media_control_attribute(sdp_filename,
							    control);

				/* setup media stream */
				sprintf(uri, "%s/%s", url, control);
				rtsp_setup(curl, uri, transport);

				/* start playing media stream */
				sprintf(uri, "%s/", url);
				rtsp_play(curl, uri, range);
				printf("Playing video, press any key to stop ...");
				_getch();
				printf("\n");

				/* teardown session */
				rtsp_teardown(curl, uri);

				/* cleanup */
				curl_easy_cleanup(curl);
				curl = NULL;
			} else {
				fprintf(stderr, "curl_easy_init() failed\n");
			}
			curl_global_cleanup();
		} else {
			fprintf(stderr, "curl_global_init(%s) failed: %d\n",
				"CURL_GLOBAL_ALL", res);
		}
		free(control);
		free(sdp_filename);
		free(uri);
	}

	return rc;
}
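
The rtsp_options(), rtsp_describe(), rtsp_setup(), rtsp_play() and rtsp_teardown() helpers are not reproduced here. In libcurl 7.20+ an RTSP method is issued by setting CURLOPT_RTSP_STREAM_URI and CURLOPT_RTSP_REQUEST and then calling curl_easy_perform(); a hedged sketch of what the SETUP and PLAY helpers might look like under that assumption (the bodies below are illustrative, not this project's actual code):

#include <curl/curl.h>

/* Sketch: issue RTSP SETUP for the given stream URI with the chosen transport. */
static void rtsp_setup_sketch(CURL *curl, const char *uri, const char *transport)
{
  curl_easy_setopt(curl, CURLOPT_RTSP_STREAM_URI, uri);
  curl_easy_setopt(curl, CURLOPT_RTSP_TRANSPORT, transport);  /* e.g. "RTP/AVP;unicast;client_port=1234-1235" */
  curl_easy_setopt(curl, CURLOPT_RTSP_REQUEST, (long)CURL_RTSPREQ_SETUP);
  curl_easy_perform(curl);
}

/* Sketch: issue RTSP PLAY over the requested range. */
static void rtsp_play_sketch(CURL *curl, const char *uri, const char *range)
{
  curl_easy_setopt(curl, CURLOPT_RTSP_STREAM_URI, uri);
  curl_easy_setopt(curl, CURLOPT_RANGE, range);               /* e.g. "0.000-" */
  curl_easy_setopt(curl, CURLOPT_RTSP_REQUEST, (long)CURL_RTSPREQ_PLAY);
  curl_easy_perform(curl);
  curl_easy_setopt(curl, CURLOPT_RANGE, NULL);                /* clear so later requests don't inherit it */
}

DESCRIBE works the same way with CURL_RTSPREQ_DESCRIBE, typically pointing CURLOPT_WRITEDATA at a file so the SDP body lands in sdp_filename.
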
Example #3
File: server.c  Project: gitorup/HTTP2RTSP
int start_server(const char *url, const char *rtspport)
{
  int mediafd = -1, listenfd, tempfd, maxfd;
  int videofd;
  struct addrinfo *info;
  struct sockaddr_storage remoteaddr;
  socklen_t addrlen = sizeof remoteaddr;
  fd_set readfds, masterfds;
  struct timeval *timeout, *timeind = NULL, timenow;
  int nready, i;
  int videosize, videoleft;
  int recvd, sent;
  char urlhost[URLSIZE], urlpath[URLSIZE], tempstr[URLSIZE];
  unsigned char msgbuf[BUFSIZE], sendbuf[BUFSIZE];
  char *temp;
  unsigned char *sps = NULL, *pps = NULL;
  size_t spslen, ppslen;
  RTSPMsg rtspmsg;
  Client streamclient;
  pthread_t threadid;
  ThreadInfo *tinfo = NULL;

  uint16_t rtpseqno_video = (rand() % 1000000);
  uint16_t rtpseqno_audio = (rand() % 1000000);

  TimeoutEvent *event;

  /* The current state of the protocol */
  int mediastate = IDLE;
  int quit = 0;

  int media_downloaded = 0;

  timeout = (struct timeval *)malloc(sizeof(struct timeval));
  init_client(&streamclient);
  

  /* Open a file where the video is to be stored */
  if ((videofd = open("videotemp.mp4", O_RDWR | O_CREAT | O_TRUNC, S_IRWXU)) < 0) {
    fatal_error("Error opening the temporary videofile");
  }

  /* Create the RTSP listening socket */
  resolve_host(NULL, rtspport, SOCK_STREAM, AI_PASSIVE, &info);
  listenfd = server_socket(info);
  maxfd = listenfd;


  FD_ZERO(&readfds);
  FD_ZERO(&masterfds);
  FD_SET(listenfd, &masterfds);

  while (!quit) {

    readfds = masterfds;

    if ((nready = Select(maxfd + 1, &readfds, timeind)) == -1) {
      printf("Select interrupted by a signal\n");
    } 

    /* Timeout handling, used for packet pacing and other timeouts */
    else if (nready == 0) {
      timeind = NULL;
      lock_mutex(&queuelock);
      if ((event = pull_event(&queue)) != NULL) {

        switch (event->type) {

          case ENDOFSTREAM:
            printf("MEDIA FINISHED\n");
            break;

          case FRAME:
            /* Video frame */
            if (event->frame->frametype == VIDEO_FRAME) {
              rtpseqno_video += send_video_frame(sendbuf, event->frame, streamclient.videofds[0], rtpseqno_video);
            }

            /* Audio frame */
            else {
              rtpseqno_audio += send_audio_frame(sendbuf, event->frame, streamclient.audiofds[0], rtpseqno_audio);
            }

            free(event->frame->data);
            free(event->frame);
            break;

          case CHECKMEDIASTATE:
            oma_debug_print("Checking media ready for streaming...\n");
            if (mediastate != STREAM) {
              printf("Sending dummy RTP\n");
              send_dummy_rtp(sendbuf, streamclient.videofds[0], &rtpseqno_video);
              push_timeout(&queue, 1000, CHECKMEDIASTATE);
            }
            break;

          default:
            oma_debug_print("ERRONEOUS EVENT TYPE!\n");
            break;
        }

        /* If there are elements left in the queue, calculate next timeout */
        if (queue.size > 0) {
          *timeout = calculate_delta(&event->time, &queue.first->time);
          timeind = timeout;
          oma_debug_print("Timeout: %ld secs, %ld usecs\n", timeout->tv_sec, timeout->tv_usec);
        }
        else {
          oma_debug_print("The first entry of the queue is NULL!\n");
        }

        if (queue.size < QUEUESIZE / 2) {
          oma_debug_print("Signaling thread to start filling the queue");
          pthread_cond_signal(&queuecond);
        }

        free(event);
      }

      unlock_mutex(&queuelock);
      continue;
    } /* End of timeout handling */

    /* Start to loop through the file descriptors */
    for (i = 0; i <= maxfd; i++) {
      if (FD_ISSET(i, &readfds)) {

        nready--;

        /* New connection from a client */
        if (i == listenfd) {
          oma_debug_print("Recieved a new RTSP connection\n");
	  fflush(stdout);
          if ((tempfd = accept(i, (struct sockaddr *)&remoteaddr, &addrlen)) == -1) {
            if (errno != EWOULDBLOCK && errno != ECONNABORTED &&
                errno != EPROTO && errno != EINTR) {
              fatal_error("accept");
            }
          }

          /* If we are already serving a client, close the new connection. Otherwise, continue. */
          if (streamclient.state != NOCLIENT) {
	    printf("Another RTSP client tried to connect. Sorry, we can only serve one client at a time\n");
	    close (tempfd);
	  }
          else {
            streamclient.rtspfd = tempfd;
            streamclient.state = CLICONNECTED;
            maxfd = max(2, streamclient.rtspfd, maxfd);
            FD_SET(streamclient.rtspfd, &masterfds);
	    }
        }

        /* Data from the media source */
        else if (i == mediafd) {

          switch (mediastate) {

            case GETSENT:
              /* Read ONLY the HTTP message from the socket and store the video size */
              recvd = recv_all(i, msgbuf, BUFSIZE, MSG_PEEK);
              temp = strstr((char *)msgbuf, "\r\n\r\n");
              recvd = recv_all(i, msgbuf, (int)(temp + 4 - (char *)msgbuf), 0);
              printf("Received HTTP response\n%s\n", msgbuf);
              temp = strstr((char *)msgbuf, "Content-Length:");
              sscanf(temp, "Content-Length: %d", &videosize);
              videoleft = videosize;
              mediastate = RECVTCP;
              break;

            case RECVTCP:
              if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
                FD_CLR(i, &masterfds);
                close(i);
                oma_debug_print("Socket closed\n");
              }
              oma_debug_print("Received data from video source!\n");

              writestr(videofd, msgbuf, recvd);
              videoleft -= recvd;

              if (videoleft <= 0) {
                printf("Video download complete.\n");
                FD_CLR(mediafd, &masterfds);
                close(videofd);
                close(mediafd);
                media_downloaded = 1;
                printf("Media socket closed\n");

                /* Create the context and the queue filler thread parameter struct */
                tinfo = (ThreadInfo *)malloc(sizeof(ThreadInfo));
                initialize_context(&tinfo->ctx, "videotemp.mp4", &tinfo->videoIdx, &tinfo->audioIdx,
                    &tinfo->videoRate, &tinfo->audioRate, &sps, &spslen, &pps, &ppslen);

                /* Launch the queue filler thread */
                CHECK((pthread_create(&threadid, NULL, fill_queue, tinfo)) == 0);
                pthread_detach(threadid);

                /* Send the sprop-parameters before any other frames */
                send_video_frame(sendbuf, create_sprop_frame(sps, spslen, 0),
                    streamclient.videofds[0], rtpseqno_video++);
                send_video_frame(sendbuf, create_sprop_frame(pps, ppslen, 0),
                    streamclient.videofds[0], rtpseqno_video++);

                g_free(sps);
                g_free(pps);

                lock_mutex(&queuelock);
                push_timeout(&queue, 1000, CHECKMEDIASTATE);
                unlock_mutex(&queuelock);

                mediastate = STREAM;
              }
              break;

             case STREAM:
              /*
                 close(videofd);
                 close(mediafd);
                 close(listenfd);
                 quit = 1;
                 */
              break;

            default: 
              break;
          }
        }

        /* Data from a client ( i == streamclient.rtspfd) */
        else {

          oma_debug_print("Received data from rtspfd\n");
	  fflush(stdout);

          if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
            FD_CLR(i, &masterfds);
            close(i);
            oma_debug_print("RTSP client closed the connection\n");
            streamclient.state = NOCLIENT;
          }
          else {
            oma_debug_print("%s", msgbuf);
            parse_rtsp(&rtspmsg, msgbuf);
          }

          if (rtspmsg.type == TEARDOWN) {

            /* Kill thread and empty queue */
            lock_mutex(&queuelock);
            pthread_cancel(threadid);
            empty_queue(&queue);
            sleep(1);

            /* Reply with 200 OK */
            sent = rtsp_teardown(&rtspmsg, sendbuf);
            send_all(i, sendbuf, sent);
            FD_CLR(i, &masterfds);
            close(i);
            close(streamclient.videofds[0]);
            close(streamclient.videofds[1]);
            close(streamclient.audiofds[0]);
            close(streamclient.audiofds[1]);

            printf("Closing AVFormatContext\n");
            close_context(tinfo->ctx);
            free(tinfo);
            rtpseqno_video = (rand() % 1000000) + 7;
            rtpseqno_audio = rtpseqno_video + 9;
            init_client(&streamclient);

            printf("Closing RTSP client sockets (RTP&RTCP)\n");
            streamclient.state = NOCLIENT;

            unlock_mutex(&queuelock);
            pthread_cond_signal(&queuecond);
          }

          switch (streamclient.state) {

            case CLICONNECTED:
              if (rtspmsg.type == OPTIONS) {
                sent = rtsp_options(&rtspmsg, &streamclient, sendbuf);
                send_all(i, sendbuf, sent);
              }
              else if (rtspmsg.type == DESCRIBE) {
                if (media_downloaded == 0) {
                  /* Start fetching the file from the server */
                  parse_url(url, urlhost, urlpath);
                  resolve_host(urlhost, "80", SOCK_STREAM, 0, &info);
                  mediafd = client_socket(info, 0);
                  FD_SET(mediafd, &masterfds);
                  maxfd = max(2, maxfd, mediafd);

                  /* Send the GET message */
                  http_get(url, msgbuf);
                  send_all(mediafd, msgbuf, strlen((char *)msgbuf));
                  mediastate = GETSENT;
                }
                else {
                  mediastate = STREAM;
                }

                /* Send the SDP without sprop-parameter-sets, those are sent
                 * later in-band */
                streamclient.state = SDPSENT;
                sent = rtsp_describe(&streamclient, sendbuf);
                send_all(i, sendbuf, sent);
              }
              break;

            case SDPSENT:
              if (rtspmsg.type == SETUP) {
                streamclient.setupsreceived++;

                /* Open up the needed ports and bind them locally. The RTCP ports opened here
                 * are not really used by this application. */
                write_remote_ip(tempstr, streamclient.rtspfd);
                oma_debug_print("Remote IP: %s\n", tempstr);

                if (streamclient.setupsreceived < 2) {
                  resolve_host(tempstr, rtspmsg.clirtpport, SOCK_DGRAM, 0, &info); 
                  streamclient.audiofds[0] = client_socket(info, streamclient.server_rtp_audio_port);
                  resolve_host(tempstr, rtspmsg.clirtcpport, SOCK_DGRAM, 0, &info);
                  streamclient.audiofds[1] = client_socket(info, streamclient.server_rtcp_audio_port);

                  sent = rtsp_setup(&rtspmsg, &streamclient, sendbuf,
                      streamclient.server_rtp_audio_port, streamclient.server_rtcp_audio_port);

                }
                else {
                  resolve_host(tempstr, rtspmsg.clirtpport, SOCK_DGRAM, 0, &info);
                  streamclient.videofds[0] = client_socket(info, streamclient.server_rtp_video_port);
                  resolve_host(tempstr, rtspmsg.clirtcpport, SOCK_DGRAM, 0, &info);
                  streamclient.videofds[1] = client_socket(info, streamclient.server_rtcp_video_port);

                  sent = rtsp_setup(&rtspmsg, &streamclient, sendbuf,
                      streamclient.server_rtp_video_port, streamclient.server_rtcp_video_port);

                  streamclient.state = SETUPCOMPLETE;
                }

                oma_debug_print("Sending setup response...\n");
                send_all(i, sendbuf, sent);

              }
              break;

            case SETUPCOMPLETE:
              if (rtspmsg.type == PLAY) {

                /* Respond to the PLAY request, and start sending dummy RTP packets
                 * to disable the client timeout */
                sent = rtsp_play(&rtspmsg, sendbuf);
                send_all(i, sendbuf, sent);

                if (media_downloaded == 0) {
                  lock_mutex(&queuelock);
                  push_timeout(&queue, 100, CHECKMEDIASTATE);
                  unlock_mutex(&queuelock);
                }
                /* Media has already been downloaded once; initialize the context and thread */
                else {
                  tinfo = (ThreadInfo *)malloc(sizeof(ThreadInfo));
                  initialize_context(&tinfo->ctx, "videotemp.mp4", &tinfo->videoIdx, &tinfo->audioIdx,
                      &tinfo->videoRate, &tinfo->audioRate, &sps, &spslen, &pps, &ppslen);

                  /* Launch the queue filler thread */
                  CHECK((pthread_create(&threadid, NULL, fill_queue, tinfo)) == 0);
                  pthread_detach(threadid);

                  /* Send the sprop-parameters before any other frames */
                  send_video_frame(sendbuf, create_sprop_frame(sps, spslen, 0),
                      streamclient.videofds[0], rtpseqno_video++);
                  send_video_frame(sendbuf, create_sprop_frame(pps, ppslen, 0),
                      streamclient.videofds[0], rtpseqno_video++);

                  g_free(sps);
                  g_free(pps);

                  /* Dummy timeouts to start the queue/timeout mechanism */
                  push_timeout(&queue, 100, CHECKMEDIASTATE);
                  push_timeout(&queue, 2000, CHECKMEDIASTATE);
                }
              }
              
              break;

            default:
              break;
          }
        }
      }

      if (nready <= 0) break;   
    }

    /* Set the timeout value again, since select will mess it up */
    
    lock_mutex(&queuelock);
    if (queue.size > 0) {
      CHECK((gettimeofday(&timenow, NULL)) == 0);
      *timeout = calculate_delta(&timenow, &queue.first->time);
      /* oma_debug_print("Delta sec: %ld, Delta usec: %ld\n", timeout->tv_sec, timeout->tv_usec); */

      if (timeout->tv_sec < 0) {
        timeout->tv_sec = 0;
        timeout->tv_usec = 0;
      }

      timeind = timeout;
    }
    else timeind = NULL;
    unlock_mutex(&queuelock);

  }


  return 1;
}
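
Example #3 paces the RTP output by letting select() sleep exactly until the next queued frame is due: at the bottom of the loop the timeout is recomputed as the difference between the current time and the timestamp of the first queue entry, and a select() return value of 0 (timeout expired) is what drives the event-draining branch at the top. The project's calculate_delta() is not shown; a minimal sketch of such a timeval subtraction, clamped at zero, under the assumption that it computes "later minus earlier":

#include <sys/time.h>

/* Sketch: later - earlier as a struct timeval; returns {0,0} if the event is already due. */
static struct timeval timeval_delta_sketch(const struct timeval *earlier, const struct timeval *later)
{
  struct timeval d;
  d.tv_sec  = later->tv_sec  - earlier->tv_sec;
  d.tv_usec = later->tv_usec - earlier->tv_usec;
  if (d.tv_usec < 0) {          /* borrow a second */
    d.tv_sec  -= 1;
    d.tv_usec += 1000000;
  }
  if (d.tv_sec < 0) {           /* overdue: make select() return immediately */
    d.tv_sec  = 0;
    d.tv_usec = 0;
  }
  return d;
}

A zero delta makes select() return immediately, so overdue frames are sent on the very next loop iteration instead of waiting for new socket activity.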