Example #1
static void lcd_move_e()
{
    if (encoderPosition != 0)
    {
        current_position[E_AXIS] += float((int)encoderPosition) * move_menu_scale;
        encoderPosition = 0;
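        // On a delta machine the Cartesian target has to be converted to tower coordinates (calculate_delta) before the move is queued in the planner.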
        #ifdef DELTA
        calculate_delta(current_position);
        plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], manual_feedrate[E_AXIS]/60, active_extruder);
        #else
        plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], manual_feedrate[E_AXIS]/60, active_extruder);
        #endif
        lcdDrawUpdate = 1;
    }
    if (lcdDrawUpdate)
    {
        lcd_implementation_drawedit(PSTR("Extruder"), ftostr31(current_position[E_AXIS]));
    }
    if (LCD_CLICKED)
    {
        lcd_quick_feedback();
        currentMenu = lcd_move_menu_axis;
        encoderPosition = 0;
    }
}
Example #2
void delta_move_to_top_endstops(float feedrate) {
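  // Delta homing helper: zero the current step position, sync the tower coordinates, then move far enough upward that all three max endstops are guaranteed to trigger.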
  long up_steps = printer_state.zMaxSteps;
  for (byte i=0; i<3; i++)
    printer_state.currentPositionSteps[i] = 0;
  calculate_delta(printer_state.currentPositionSteps, printer_state.currentDeltaPositionSteps);
  move_steps(0,0,printer_state.zMaxSteps*ENDSTOP_Z_BACK_MOVE,0,feedrate, true, true);
}
Example #3
void FeatureStatistics::set_d1Features(){
	// memory allocation
	m_d1Feature = (SAMPLE**)malloc(m_nCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nCols; ++i){
		m_d1Feature[i] = (SAMPLE*)malloc(m_nRows*sizeof(SAMPLE));
	}
	m_d1Mean = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d1Mean[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}
	m_d1Var = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d1Var[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}
	m_d1Std = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d1Std[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}

	// calculate d1 feature variables
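	// Note: calculate_delta/calculate_mean/calculate_var/calculate_var2std appear to return newly allocated arrays, so the buffers malloc'd above are simply replaced (and leaked) by the assignments below.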
	SAMPLE** feature = this->get_featureObject()->get_feature();
	m_d1Feature = calculate_delta(feature);
	m_d1Mean = calculate_mean(m_d1Feature);
	m_d1Var = calculate_var(m_d1Feature, m_d1Mean);
	m_d1Std = calculate_var2std(m_d1Var);
}
Example #4
static void lcd_move_z()
{
    if (encoderPosition != 0)
    {
        refresh_cmd_timeout();
        current_position[Z_AXIS] += float((int)encoderPosition) * move_menu_scale;
        if (min_software_endstops && current_position[Z_AXIS] < Z_MIN_POS)
            current_position[Z_AXIS] = Z_MIN_POS;
        if (max_software_endstops && current_position[Z_AXIS] > Z_MAX_POS)
            current_position[Z_AXIS] = Z_MAX_POS;
        encoderPosition = 0;
        #ifdef DELTA
        calculate_delta(current_position);
        plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], manual_feedrate[Z_AXIS]/60, active_extruder);
        #else
        plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], manual_feedrate[Z_AXIS]/60, active_extruder);
        #endif
        lcdDrawUpdate = 1;
    }
    if (lcdDrawUpdate)
    {
        lcd_implementation_drawedit(PSTR("Z"), ftostr31(current_position[Z_AXIS]));
    }
    if (LCD_CLICKED)
    {
        lcd_quick_feedback();
        currentMenu = lcd_move_menu_axis;
        encoderPosition = 0;
    }
}
Example #5
void FeatureStatistics::set_d2Features(){

	int nCols = this->get_featureObject()->get_nCols();
	int nRows = this->get_featureObject()->get_nRows();

	// memory allocation
	m_d2Feature = (SAMPLE**)malloc(nCols*sizeof(SAMPLE*));
	for (int i = 0; i < nCols; ++i){
		m_d2Feature[i] = (SAMPLE*)malloc(nRows*sizeof(SAMPLE));
	}
	m_d2Mean = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d2Mean[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}
	m_d2Var = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d2Var[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}
	m_d2Std = (SAMPLE**)malloc(m_nStatCols*sizeof(SAMPLE*));
	for (int i = 0; i < m_nStatCols; ++i){
		m_d2Std[i] = (SAMPLE*)malloc(m_nStatRows*sizeof(SAMPLE));
	}
	
	// calculate
	SAMPLE** feature = this->get_d1Feature();
	m_d2Feature = calculate_delta(feature);
	m_d2Mean = calculate_mean(m_d2Feature);
	m_d2Var = calculate_var(m_d2Feature, m_d2Mean);
	m_d2Std = calculate_var2std(m_d2Var);
}
Example #6
void update_behavior(vector<float> &k, Creature* c,bool good=true)
{
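    // NF_COGSAMPSQ: the behavior sample is the squared displacement of the creature's center of mass from its starting position (assuming calculate_delta(a, b, out) stores the component-wise difference in out).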

    if (novelty_function==NF_COGSAMPSQ)
    {
        dVector3& o_com= ((Biped*)c)->orig_com;
        dVector3& c_com= ((Biped*)c)->curr_com;
        dVector3 com;
        dVector3 delta;
        c->CenterOfMass(com);
        calculate_delta(o_com,com,delta);
        calculate_power(delta,2);

        if (good)
        {
            k.push_back(delta[0]);
            k.push_back(delta[1]);
        }
        else
        {
            k.push_back(0.0);
            k.push_back(0.0);
        }
    }
}
Example #7
static void lcd_extrude(float length, float feedrate) {
	current_position[E_AXIS] += length;
  #ifdef DELTA
  calculate_delta(current_position);
  plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], feedrate, active_extruder);
  #else
  plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], feedrate, active_extruder);
  #endif
}
Example #8
static void _lcd_move(const char *name, int axis, int min, int max) {
  if (encoderPosition != 0) {
    refresh_cmd_timeout();
    current_position[axis] += float((int)encoderPosition) * move_menu_scale;
    if (min_software_endstops && current_position[axis] < min) current_position[axis] = min;
    if (max_software_endstops && current_position[axis] > max) current_position[axis] = max;
    encoderPosition = 0;
    #ifdef DELTA
      calculate_delta(current_position);
      plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], manual_feedrate[axis]/60, active_extruder);
    #else
      plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], manual_feedrate[axis]/60, active_extruder);
    #endif
    lcdDrawUpdate = 1;
  }
  if (lcdDrawUpdate) lcd_implementation_drawedit(name, ftostr31(current_position[axis]));
  if (LCD_CLICKED) lcd_goto_menu(lcd_move_menu_axis);
}
Example #9
void home_axis(bool xaxis,bool yaxis,bool zaxis) {
  long steps;
  bool homeallaxis = (xaxis && yaxis && zaxis) || (!xaxis && !yaxis && !zaxis);
  if (X_MAX_PIN > -1 && Y_MAX_PIN > -1 && Z_MAX_PIN > -1 && MAX_HARDWARE_ENDSTOP_X && MAX_HARDWARE_ENDSTOP_Y && MAX_HARDWARE_ENDSTOP_Z) {
    UI_STATUS_UPD(UI_TEXT_HOME_DELTA);
    // Homing Z axis means that you must home X and Y
    if (homeallaxis || zaxis) {
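      // Two-pass homing: run the towers up to the max endstops, back off, then re-approach at a reduced feedrate for a more repeatable trigger point.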
      delta_move_to_top_endstops(homing_feedrate[0]);	
      move_steps(0,0,axis_steps_per_unit[0]*-ENDSTOP_Z_BACK_MOVE,0,homing_feedrate[0]/ENDSTOP_X_RETEST_REDUCTION_FACTOR, true, false);
      delta_move_to_top_endstops(homing_feedrate[0]/ENDSTOP_X_RETEST_REDUCTION_FACTOR);	
      printer_state.currentPositionSteps[0] = 0;
      printer_state.currentPositionSteps[1] = 0;
      printer_state.currentPositionSteps[2] = printer_state.zMaxSteps;
      calculate_delta(printer_state.currentPositionSteps, printer_state.currentDeltaPositionSteps);
      printer_state.maxDeltaPositionSteps = printer_state.currentDeltaPositionSteps[0];
    } else {
      if (xaxis) printer_state.destinationSteps[0] = 0;
      if (yaxis) printer_state.destinationSteps[1] = 0;
      split_delta_move(true,false,false);
    }
    printer_state.countZSteps = 0;
    UI_CLEAR_STATUS 
  }
}
Example #10
static void lcd_fc()
{
    if (fil_temp == 200)
    {
        setTargetHotend0(plaPreheatHotendTemp);
    }
    if (fil_temp == 230)
    {
        setTargetHotend0(absPreheatHotendTemp);
    }
    setWatch(); // heater sanity check timer
    if (!isHeatingHotend(0) && (filament_seq == 0))
    {
        filament_seq = 1;
    }
    if ((filament_seq == 0) && (pausa_display == 0))
    {
        pausa_display = 1;
    }
    if (filament_seq == 1)
    {
        if (st_message == 0)
        {
            lcdDrawUpdate = 2;
            lcd_implementation_message0(PSTR("Introducir el nuevo"));
            lcd_implementation_message1(PSTR("filamento y girar el  "));
            lcd_implementation_message2(PSTR("boton hasta que"));
            lcd_implementation_message3(PSTR("salga el filamento"));
            if (encoder_zero == 0)
            {
                encoderPosition = 0;
                encoder_zero = 1;
            }
        }
        if (encoderPosition != 0)
        {
            move_menu_scale = 1.0;
            current_position[E_AXIS] += float((int)encoderPosition) * move_menu_scale;
            encoderPosition = 0;
            #ifdef DELTA
            calculate_delta(current_position);
            plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], manual_feedrate[E_AXIS]/60, active_extruder);
            #else
            plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], manual_feedrate[E_AXIS]/60, active_extruder);
            #endif
            lcdDrawUpdate = 2;
            st_message = 1;
        }
        if ((lcdDrawUpdate) && (st_message == 1))
        {
            lcd_implementation_drawedit(PSTR("Extruder"), ftostr31(current_position[E_AXIS]));
        }
    }
    else
    {
        lcd_implementation_message4(PSTR(" Calentando..."));
        _draw_heater_status(6, 0);
        lcdDrawUpdate = 1;
        u8g.drawBitmapP(9, 1, STATUS_SCREENBYTEWIDTH, STATUS_SCREENHEIGHT, (blink % 2) && fanSpeed ? status_screen0_bmp : status_screen1_bmp);
    }
    if ((LCD_CLICKED) || (degTargetHotend(0) == 0) || (currentMenu == lcd_status_screen))
    {
        filament_seq = 0;
        st_message = 0;
        pausa_display = 0;
        encoder_zero = 0;
        setTargetHotend0(0);
        lcd_return_to_status();
    }
}
Example #11
int start_server(const char *url, const char *rtspport)
{
  int mediafd = -1, listenfd, tempfd, maxfd;
  int videofd;
  struct addrinfo *info;
  struct sockaddr_storage remoteaddr;
  socklen_t addrlen = sizeof remoteaddr;
  fd_set readfds, masterfds;
  struct timeval *timeout, *timeind = NULL, timenow;
  int nready, i;
  int videosize, videoleft;
  int recvd, sent;
  char urlhost[URLSIZE], urlpath[URLSIZE], tempstr[URLSIZE];
  unsigned char msgbuf[BUFSIZE], sendbuf[BUFSIZE];
  char *temp;
  unsigned char *sps = NULL, *pps = NULL;
  size_t spslen, ppslen;
  RTSPMsg rtspmsg;
  Client streamclient;
  pthread_t threadid;
  ThreadInfo *tinfo = NULL;

  uint16_t rtpseqno_video = (rand() % 1000000);
  uint16_t rtpseqno_audio = (rand() % 1000000);

  TimeoutEvent *event;

  /* The current state of the protocol */
  int mediastate = IDLE;
  int quit = 0;

  int media_downloaded = 0;

  timeout = (struct timeval *)malloc(sizeof(struct timeval));
  init_client(&streamclient);
  

  /* Open the a file where the video is to be stored */
  if ((videofd = open("videotemp.mp4", O_RDWR | O_CREAT | O_TRUNC, S_IRWXU)) < 0) {
    fatal_error("Error opening the temporary videofile");
  }

  /* Create the RTSP listening socket */
  resolve_host(NULL, rtspport, SOCK_STREAM, AI_PASSIVE, &info);
  listenfd = server_socket(info);
  maxfd = listenfd;


  FD_ZERO(&readfds);
  FD_ZERO(&masterfds);
  FD_SET(listenfd, &masterfds);

  while (!quit) {

    readfds = masterfds;

    if ((nready = Select(maxfd + 1, &readfds, timeind)) == -1) {
      printf("Select interrupted by a signal\n");
    } 

    /* Timeout handling, used for packet pacing and other timeouts */
    else if (nready == 0) {
      timeind = NULL;
      lock_mutex(&queuelock);
      if ((event = pull_event(&queue)) != NULL) {

        switch (event->type) {

	case ENDOFSTREAM:
	  printf("MEDIA FINISHED\n");
	  break;

	case FRAME:
	  /* Video frame */
	  if (event->frame->frametype == VIDEO_FRAME) {
	    rtpseqno_video += send_video_frame(sendbuf, event->frame, streamclient.videofds[0], rtpseqno_video);
	  }

	  /* Audio frame */
	  else {
            rtpseqno_audio += send_audio_frame(sendbuf, event->frame, streamclient.audiofds[0], rtpseqno_audio);
	  }

          free(event->frame->data);
          free(event->frame);
          break;

	case CHECKMEDIASTATE:
	  oma_debug_print("Checking media ready for streaming...\n");
	  if (mediastate != STREAM) {
	    printf("Sending dummy RTP\n");
	    send_dummy_rtp(sendbuf, streamclient.videofds[0], &rtpseqno_video);
	    push_timeout(&queue, 1000, CHECKMEDIASTATE);
	  }
          break;

	default:
	  oma_debug_print("ERRORENOUS EVENT TYPE!\n");
          break;
        }

        /* If there are elements left in the queue, calculate next timeout */
        if (queue.size > 0) {
          *timeout = calculate_delta(&event->time, &queue.first->time);
          timeind = timeout;
          oma_debug_print("Timeout: %ld secs, %ld usecs\n", timeout->tv_sec, timeout->tv_usec);
        }
        else {
          oma_debug_print("The first entry of the queue is NULL!\n");
        }

        if (queue.size < QUEUESIZE / 2) {
          oma_debug_print("Signaling thread to start filling the queue");
          pthread_cond_signal(&queuecond);
        }

        free(event);
      }

      unlock_mutex(&queuelock);
      continue;
    } /* End of timeout handling */

    /* Start to loop through the file descriptors */
    for (i = 0; i <= maxfd; i++) {
      if (FD_ISSET(i, &readfds)) {

        nready--;

        /* New connection from a client */
        if (i == listenfd) {
          oma_debug_print("Recieved a new RTSP connection\n");
	  fflush(stdout);
          if ((tempfd = accept(i, (struct sockaddr *)&remoteaddr, &addrlen)) == -1) {
            if (errno != EWOULDBLOCK && errno != ECONNABORTED &&
                errno != EPROTO && errno != EINTR) {
              fatal_error("accept");
            }
          }

          /* If we are already serving a client, close the new connection. Otherwise, continue. */
          if (streamclient.state != NOCLIENT) {
	    printf("Another RTSP client tried to connect. Sorry, we can only serve one client at a time\n");
	    close (tempfd);
	  }
          else {
            streamclient.rtspfd = tempfd;
            streamclient.state = CLICONNECTED;
            maxfd = max(2, streamclient.rtspfd, maxfd);
            FD_SET(streamclient.rtspfd, &masterfds);
	    }
        }

        /* Data from the media source */
        else if (i == mediafd) {

          switch (mediastate) {

            case GETSENT:
              /* Read ONLY the HTTP message from the socket and store the video size */
              recvd = recv_all(i, msgbuf, BUFSIZE, MSG_PEEK);
              temp = strstr((char *)msgbuf, "\r\n\r\n");
              recvd = recv_all(i, msgbuf, (int)(temp + 4 - (char *)msgbuf), 0);
              printf("Received HTTP response\n%s\n", msgbuf);
              temp = strstr((char *)msgbuf, "Content-Length:");
              sscanf(temp, "Content-Length: %d", &videosize);
              videoleft = videosize;
              mediastate = RECVTCP;
              break;

            case RECVTCP:
              if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
                FD_CLR(i, &masterfds);
                close(i);
                oma_debug_print("Socket closed\n");
              }
              oma_debug_print("Received data from video source!\n");

              writestr(videofd, msgbuf, recvd);
              videoleft -= recvd;

              if (videoleft <= 0) {
                
		printf("Video download complete.\n");
                FD_CLR(mediafd, &masterfds);
                close(videofd);
                close(mediafd);
		media_downloaded = 1;
		printf("Media socket closed\n");

                /* Create the context and the queue filler thread parameter struct */
                tinfo = (ThreadInfo *)malloc(sizeof(ThreadInfo));
                initialize_context(&tinfo->ctx, "videotemp.mp4", &tinfo->videoIdx, &tinfo->audioIdx,
                    &tinfo->videoRate, &tinfo->audioRate, &sps, &spslen, &pps, &ppslen);

                /* Launch the queue filler thread */
                CHECK((pthread_create(&threadid, NULL, fill_queue, tinfo)) == 0);
                pthread_detach(threadid);

                /* Send the sprop-parameters before any other frames */
                send_video_frame(sendbuf, create_sprop_frame(sps, spslen, 0),
                    streamclient.videofds[0], rtpseqno_video++);
                send_video_frame(sendbuf, create_sprop_frame(pps, ppslen, 0),
                    streamclient.videofds[0], rtpseqno_video++);

                g_free(sps);
                g_free(pps);

                lock_mutex(&queuelock);
                push_timeout(&queue, 1000, CHECKMEDIASTATE);
                unlock_mutex(&queuelock);

                mediastate = STREAM;
              }
              break;

             case STREAM:
              /*
                 close(videofd);
                 close(mediafd);
                 close(listenfd);
                 quit = 1;
                 */
              break;

            default: 
              break;
          }
        }

        /* Data from a client ( i == streamclient.rtspfd) */
        else {

          oma_debug_print("Received data from rtspfd\n");
	  fflush(stdout);

          if ((recvd = recv_all(i, msgbuf, BUFSIZE, 0)) == 0) {
            FD_CLR(i, &masterfds);
            close(i);
            oma_debug_print("RTSP client closed the connection\n");
            streamclient.state = NOCLIENT;
          }
          else {
            oma_debug_print("%s", msgbuf);
            parse_rtsp(&rtspmsg, msgbuf);
          }

	  if (rtspmsg.type == TEARDOWN) {

            /* Kill thread and empty queue */         
 	    lock_mutex(&queuelock);
            pthread_cancel(threadid);
            empty_queue(&queue);
            sleep(1);
            

	    /* Reply with 200 OK */
            sent = rtsp_teardown(&rtspmsg, sendbuf);
            send_all(i, sendbuf, sent);
            FD_CLR(i, &masterfds);
            close(i);
            close(streamclient.videofds[0]);
            close(streamclient.videofds[1]);
            close(streamclient.audiofds[0]);
            close(streamclient.audiofds[1]);

            printf("Closing AVFormatContext\n");
            close_context(tinfo->ctx);
            free(tinfo);
            rtpseqno_video = (rand() % 1000000) + 7;
            rtpseqno_audio = rtpseqno_video + 9;
            init_client(&streamclient);

	    printf("Closing RTSP client sockets (RTP&RTCP)\n");
            streamclient.state = NOCLIENT;

	    unlock_mutex(&queuelock);
	    pthread_cond_signal(&queuecond);
          }

          switch (streamclient.state) {

            case CLICONNECTED:
              if (rtspmsg.type == OPTIONS) {
                sent = rtsp_options(&rtspmsg, &streamclient, sendbuf);
                send_all(i, sendbuf, sent);
              }
              else if (rtspmsg.type == DESCRIBE) {
		if (media_downloaded == 0) {
		  /* Start fetching the file from the server */
		  parse_url(url, urlhost, urlpath);
		  resolve_host(urlhost, "80", SOCK_STREAM, 0, &info);
		  mediafd = client_socket(info, 0);
		  FD_SET(mediafd, &masterfds);
		  maxfd = max(2, maxfd, mediafd);

		  /* Send the GET message */
		  http_get(url, msgbuf);
		  send_all(mediafd, msgbuf, strlen((char *)msgbuf));
		  mediastate = GETSENT;
		}
		else {
		  mediastate = STREAM;
		}

                /* Send the SDP without sprop-parameter-sets, those are sent
                 * later in-band */
                streamclient.state = SDPSENT;
                sent = rtsp_describe(&streamclient, sendbuf);
                send_all(i, sendbuf, sent);
              }
              break;

            case SDPSENT:
              if (rtspmsg.type == SETUP) {
                streamclient.setupsreceived++;

                /* Open up the needed ports and bind them locally. The RTCP ports opened here
                 * are not really used by this application. */
                write_remote_ip(tempstr, streamclient.rtspfd);
                oma_debug_print("Remote IP: %s\n", tempstr);

                if (streamclient.setupsreceived < 2) {
                  resolve_host(tempstr, rtspmsg.clirtpport, SOCK_DGRAM, 0, &info); 
                  streamclient.audiofds[0] = client_socket(info, streamclient.server_rtp_audio_port);
                  resolve_host(tempstr, rtspmsg.clirtcpport, SOCK_DGRAM, 0, &info);
                  streamclient.audiofds[1] = client_socket(info, streamclient.server_rtcp_audio_port);

                  sent = rtsp_setup(&rtspmsg, &streamclient, sendbuf,
                      streamclient.server_rtp_audio_port, streamclient.server_rtcp_audio_port);

                }
                else {
                  resolve_host(tempstr, rtspmsg.clirtpport, SOCK_DGRAM, 0, &info); 
                  streamclient.videofds[0] = client_socket(info, streamclient.server_rtp_video_port);
                  resolve_host(tempstr, rtspmsg.clirtcpport, SOCK_DGRAM, 0, &info);
                  streamclient.videofds[1] = client_socket(info, streamclient.server_rtcp_video_port);

                  sent = rtsp_setup(&rtspmsg, &streamclient, sendbuf,
                      streamclient.server_rtp_video_port, streamclient.server_rtcp_video_port);

                streamclient.state = SETUPCOMPLETE;
                }

                oma_debug_print("Sending setup response...\n");
                send_all(i, sendbuf, sent);

              }
              break;

            case SETUPCOMPLETE:
              if (rtspmsg.type == PLAY) {

                /* Respond to the PLAY request, and start sending dummy RTP packets
                 * to disable the client timeout */
                sent = rtsp_play(&rtspmsg, sendbuf);
                send_all(i, sendbuf, sent);

		if (media_downloaded == 0) {
		 
		  lock_mutex(&queuelock);
		  push_timeout(&queue, 100, CHECKMEDIASTATE);
		  unlock_mutex(&queuelock);
		}
		/* Media has already been once downloaded, initialize context and thread */
		else {
		  tinfo = (ThreadInfo *)malloc(sizeof(ThreadInfo));
		  initialize_context(&tinfo->ctx, "videotemp.mp4", &tinfo->videoIdx, &tinfo->audioIdx,
				     &tinfo->videoRate, &tinfo->audioRate, &sps, &spslen, &pps, &ppslen);
		  /* Launch the queue filler thread */
		  CHECK((pthread_create(&threadid, NULL, fill_queue, tinfo)) == 0);
		  pthread_detach(threadid);

		  /* Send the sprop-parameters before any other frames */
		  send_video_frame(sendbuf, create_sprop_frame(sps, spslen, 0),
				   streamclient.videofds[0], rtpseqno_video++);
		  send_video_frame(sendbuf, create_sprop_frame(pps, ppslen, 0),
				   streamclient.videofds[0], rtpseqno_video++);

		  g_free(sps);
		  g_free(pps);

		  /* Dummy timeouts to start queue/timeout mechanism */
		  push_timeout(&queue, 100, CHECKMEDIASTATE);
		  push_timeout(&queue, 2000, CHECKMEDIASTATE);
		}
              }
              
              break;

            default:
              break;
          }
        }
      }

      if (nready <= 0) break;   
    }

    /* Set the timeout value again, since select will mess it up */
    
    lock_mutex(&queuelock);
    if (queue.size > 0) {
      CHECK((gettimeofday(&timenow, NULL)) == 0);
      *timeout = calculate_delta(&timenow, &queue.first->time);
      /* oma_debug_print("Delta sec: %ld, Delta usec: %ld\n", timeout->tv_sec, timeout->tv_usec); */

      if (timeout->tv_sec < 0) {
        timeout->tv_sec = 0;
        timeout->tv_usec = 0;
      }

      timeind = timeout;
    }
    else timeind = NULL;
    unlock_mutex(&queuelock);

  }


  return 1;
}
Example #12
int timecmp(struct timeval first, struct timeval second)
{
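  // Comparator-style result: zero when both times are equal, otherwise the sign of the timeval difference; seconds dominate and microseconds break the tie.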
  struct timeval delta = calculate_delta(&first, &second);
  return (delta.tv_sec != 0)?delta.tv_sec:delta.tv_usec;
}