Example #1
File: avc.c Project: wmyrda/tvheadend
th_pkt_t *
avc_convert_pkt(th_pkt_t *src)
{
  th_pkt_t *pkt = malloc(sizeof(th_pkt_t));
  *pkt = *src;
  pkt->pkt_refcount = 1;
  pkt->pkt_header = NULL;
  pkt->pkt_payload = NULL;

  if (src->pkt_header) {
    sbuf_t headers;
    sbuf_init(&headers);

    isom_write_avcc(&headers, pktbuf_ptr(src->pkt_header),
		    pktbuf_len(src->pkt_header));
    pkt->pkt_header = pktbuf_make(headers.sb_data, headers.sb_ptr);
  }

  sbuf_t payload;
  sbuf_init(&payload);

  if(src->pkt_header)
    avc_parse_nal_units(&payload, pktbuf_ptr(src->pkt_header),
			pktbuf_len(src->pkt_header));

  avc_parse_nal_units(&payload, pktbuf_ptr(src->pkt_payload),
		      pktbuf_len(src->pkt_payload));

  pkt->pkt_payload = pktbuf_make(payload.sb_data, payload.sb_ptr);
  pkt_ref_dec(src);
  return pkt;
}
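
All of the examples on this page lean on the same small set of pktbuf accessor helpers. The sketch below is not copied from tvheadend's packet.h; it is a minimal reconstruction whose field names (pb_refcount, pb_data) and NULL handling are assumptions inferred from how the helpers are used in these examples (pb_size does appear literally in example #4). It only shows the behaviour the calls above rely on: pktbuf_len() reports the stored payload size and pktbuf_ptr() returns the raw data pointer.

#include <stddef.h>
#include <stdint.h>

/* Minimal reconstruction of the pktbuf interface assumed by these examples. */
typedef struct pktbuf {
  int      pb_refcount;  /* reference count (assumed field name)     */
  uint8_t *pb_data;      /* raw payload bytes (assumed field name)   */
  size_t   pb_size;      /* number of valid bytes in pb_data         */
} pktbuf_t;

static inline size_t pktbuf_len(pktbuf_t *pb)
{
  return pb ? pb->pb_size : 0;     /* NULL-safe length query */
}

static inline uint8_t *pktbuf_ptr(pktbuf_t *pb)
{
  return pb ? pb->pb_data : NULL;  /* NULL-safe data pointer */
}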
Example #2
static size_t
streaming_message_data_size(streaming_message_t *sm)
{
  if (sm->sm_type == SMT_PACKET) {
    th_pkt_t *pkt = sm->sm_data;
    if (pkt && pkt->pkt_payload)
      return pktbuf_len(pkt->pkt_payload);
  } else if (sm->sm_type == SMT_MPEGTS) {
    pktbuf_t *pkt_payload = sm->sm_data;
    if (pkt_payload)
      return pktbuf_len(pkt_payload);
  }
  return 0;
}
Example #3
File: packet.c Project: swegener/tvheadend
void
pkt_trace_(const char *file, int line, int subsys, th_pkt_t *pkt,
           const char *fmt, ...)
{
  char buf[512], _pcr[22], _dts[22], _pts[22], _type[2], _meta[20];
  va_list args;

  va_start(args, fmt);
  if (SCT_ISVIDEO(pkt->pkt_type) && pkt->v.pkt_frametype) {
    _type[0] = pkt_frametype_to_char(pkt->v.pkt_frametype);
    _type[1] = '\0';
  } else {
    _type[0] = '\0';
  }
  if (pkt->pkt_meta)
    snprintf(_meta, sizeof(_meta), " meta %zu", pktbuf_len(pkt->pkt_meta));
  else
    _meta[0] = '\0';
  snprintf(buf, sizeof(buf),
           "%s%spkt stream %d %s%s%s"
           " pcr %s dts %s pts %s"
           " dur %d len %zu err %i%s%s",
           fmt ? fmt : "",
           fmt ? " (" : "",
           pkt->pkt_componentindex,
           streaming_component_type2txt(pkt->pkt_type),
           _type[0] ? " type " : "", _type,
           pts_to_string(pkt->pkt_pcr, _pcr),
           pts_to_string(pkt->pkt_dts, _dts),
           pts_to_string(pkt->pkt_pts, _pts),
           pkt->pkt_duration,
           pktbuf_len(pkt->pkt_payload),
           pkt->pkt_err,
           _meta,
           fmt ? ")" : "");
  tvhlogv(file, line, LOG_TRACE, subsys, buf, &args);
  va_end(args);
}
Example #4
/*
 * Write packet buffer
 */
static int _write_pktbuf ( timeshift_file_t *tsf, pktbuf_t *pktbuf )
{
  ssize_t ret, err;
  if (pktbuf) {
    ret = err = _write(tsf, &pktbuf->pb_size, sizeof(pktbuf->pb_size));
    if (err < 0) return err;
    err = _write(tsf, pktbuf_ptr(pktbuf), pktbuf_len(pktbuf));
    if (err < 0) return err;
    ret += err;
  } else {
    size_t sz = 0;
    ret = _write(tsf, &sz, sizeof(sz));
  }
  return ret;
}
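
Example #4 serializes a pktbuf as its pb_size field followed by the raw bytes, writing a bare zero size when there is no buffer. Purely to illustrate that record layout, here is a hypothetical stand-alone reader (not tvheadend code; it assumes the record was written on the same machine, i.e. the same size_t width and byte order):

#include <stdlib.h>
#include <unistd.h>

/* Hypothetical reader for the <size><bytes> records produced by _write_pktbuf.
 * Returns a malloc'd copy of the payload (caller frees) and stores its length,
 * or NULL when the record carried a zero size or on a short read. */
static void *read_pktbuf_record(int fd, size_t *lenp)
{
  size_t sz = 0;
  if (read(fd, &sz, sizeof(sz)) != (ssize_t)sizeof(sz))
    return NULL;
  *lenp = sz;
  if (sz == 0)
    return NULL;                       /* writer had no payload buffer */
  void *data = malloc(sz);
  if (data && read(fd, data, sz) != (ssize_t)sz) {
    free(data);                        /* short read: discard partial payload */
    data = NULL;
  }
  return data;
}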
Example #5
/*
 * Packet log
 */
void
timeshift_packet_log0
  ( const char *source, timeshift_t *ts, streaming_message_t *sm )
{
  th_pkt_t *pkt = sm->sm_data;
  tvhtrace("timeshift",
           "ts %d pkt %s - stream %d type %c pts %10"PRId64
           " dts %10"PRId64" dur %10d len %6zu time %14"PRId64,
           ts->id, source,
           pkt->pkt_componentindex,
           pkt_frametype_to_char(pkt->pkt_frametype),
           ts_rescale(pkt->pkt_pts, 1000000),
           ts_rescale(pkt->pkt_dts, 1000000),
           pkt->pkt_duration,
           pktbuf_len(pkt->pkt_payload),
           sm->sm_time);
}
Example #6
static void
timeshift_packet_deliver ( timeshift_t *ts, streaming_message_t *sm )
{
  th_pkt_t *pkt = sm->sm_data;
  tvhtrace("timeshift",
           "ts %d pkt buf - stream %d type %c pts %10"PRId64
           " dts %10"PRId64" dur %10d len %6zu time %14"PRId64,
           ts->id,
           pkt->pkt_componentindex,
           pkt_frametype_to_char(pkt->pkt_frametype),
           ts_rescale(pkt->pkt_pts, 1000000),
           ts_rescale(pkt->pkt_dts, 1000000),
           pkt->pkt_duration,
           pktbuf_len(pkt->pkt_payload),
           sm->sm_time);
  streaming_target_deliver2(&ts->wr_queue.sq_st, sm);
}
Example #7
/*
 * Receive data
 */
static void timeshift_input
  ( void *opaque, streaming_message_t *sm )
{
  int exit = 0;
  timeshift_t *ts = opaque;
  th_pkt_t *pkt = sm->sm_data;

  pthread_mutex_lock(&ts->state_mutex);

  /* Control */
  if (sm->sm_type == SMT_SKIP) {
    if (ts->state >= TS_LIVE)
      timeshift_write_skip(ts->rd_pipe.wr, sm->sm_data);
  } else if (sm->sm_type == SMT_SPEED) {
    if (ts->state >= TS_LIVE)
      timeshift_write_speed(ts->rd_pipe.wr, sm->sm_code);
  }

  else {

    /* Start */
    if (sm->sm_type == SMT_START && ts->state == TS_INIT) {
      ts->state  = TS_LIVE;
    }

    if (sm->sm_type == SMT_PACKET) {
      tvhtrace("timeshift",
               "ts %d pkt in  - stream %d type %c pts %10"PRId64
               " dts %10"PRId64" dur %10d len %zu",
               ts->id,
               pkt->pkt_componentindex,
               pkt_frametype_to_char(pkt->pkt_frametype),
               ts_rescale(pkt->pkt_pts, 1000000),
               ts_rescale(pkt->pkt_dts, 1000000),
               pkt->pkt_duration,
               pktbuf_len(pkt->pkt_payload));
    }

    /* Pass-thru */
    if (ts->state <= TS_LIVE) {
      if (sm->sm_type == SMT_START) {
        if (ts->smt_start)
          streaming_start_unref(ts->smt_start);
        ts->smt_start = sm->sm_data;
        atomic_add(&ts->smt_start->ss_refcount, 1);
      }
      streaming_target_deliver2(ts->output, streaming_msg_clone(sm));
    }

    /* Check for exit */
    if (sm->sm_type == SMT_EXIT ||
        (sm->sm_type == SMT_STOP && sm->sm_code == 0))
      exit = 1;

    /* Record (one-off) PTS delta */
    if (sm->sm_type == SMT_PACKET && ts->pts_delta == PTS_UNSET)
      timeshift_set_pts_delta(ts, pkt->pkt_pts);

    /* Buffer to disk */
    if ((ts->state > TS_LIVE) || (!ts->ondemand && (ts->state == TS_LIVE))) {
      sm->sm_time = getmonoclock();
      if (sm->sm_type == SMT_PACKET) {
        tvhtrace("timeshift",
                 "ts %d pkt buf - stream %d type %c pts %10"PRId64
                 " dts %10"PRId64" dur %10d len %zu",
                 ts->id,
                 pkt->pkt_componentindex,
                 pkt_frametype_to_char(pkt->pkt_frametype),
                 ts_rescale(pkt->pkt_pts, 1000000),
                 ts_rescale(pkt->pkt_dts, 1000000),
                 pkt->pkt_duration,
                 pktbuf_len(pkt->pkt_payload));
      }
      streaming_target_deliver2(&ts->wr_queue.sq_st, sm);
    } else
      streaming_msg_free(sm);

    /* Exit/Stop */
    if (exit) {
      timeshift_write_exit(ts->rd_pipe.wr);
      ts->state = TS_EXIT;
    }
  }

  pthread_mutex_unlock(&ts->state_mutex);
}
Example #8
File: tsfix.c Project: linniksa/tvheadend
static void
normalize_ts(tsfix_t *tf, tfstream_t *tfs, th_pkt_t *pkt)
{
  int64_t dts, d;

  int checkts = SCT_ISAUDIO(tfs->tfs_type) || SCT_ISVIDEO(tfs->tfs_type);

  if(tf->tf_tsref == PTS_UNSET) {
    pkt_ref_dec(pkt);
    return;
  }

  pkt->pkt_dts &= PTS_MASK;
  pkt->pkt_pts &= PTS_MASK;

  /* Subtract the transport wide start offset */
  dts = pkt->pkt_dts - tf->tf_tsref;

  if(tfs->tfs_last_dts_norm == PTS_UNSET) {
    if(dts < 0) {
      /* Early packet with negative time stamp, drop those */
      pkt_ref_dec(pkt);
      return;
    }
  } else if(checkts) {
    d = dts + tfs->tfs_dts_epoch - tfs->tfs_last_dts_norm;

    if(d < 0 || d > 90000) {

      if(d < -PTS_MASK || d > -PTS_MASK + 180000) {

	tfs->tfs_bad_dts++;

	if(tfs->tfs_bad_dts < 5) {
	  tvhlog(LOG_ERR, "parser", 
		 "transport stream %s, DTS discontinuity. "
		 "DTS = %" PRId64 ", last = %" PRId64,
		 streaming_component_type2txt(tfs->tfs_type),
		 dts, tfs->tfs_last_dts_norm);
	}
      } else {
	/* DTS wrapped, increase upper bits */
	tfs->tfs_dts_epoch += PTS_MASK + 1;
	tfs->tfs_bad_dts = 0;
      }
    } else {
      tfs->tfs_bad_dts = 0;
    }
  }

  dts += tfs->tfs_dts_epoch;
  tfs->tfs_last_dts_norm = dts;

  if(pkt->pkt_pts != PTS_UNSET) {
    /* Compute delta between PTS and DTS (and watch out for 33 bit wrap) */
    int64_t ptsoff = (pkt->pkt_pts - pkt->pkt_dts) & PTS_MASK;
    
    pkt->pkt_pts = dts + ptsoff;
  }

  pkt->pkt_dts = dts;

  tsfixprintf("TSFIX: %-12s %d %10"PRId64" %10"PRId64" %10d %zd\n",
	      streaming_component_type2txt(tfs->tfs_type),
	      pkt->pkt_frametype,
	      pkt->pkt_dts,
	      pkt->pkt_pts,
	      pkt->pkt_duration,
	      pktbuf_len(pkt->pkt_payload));

  streaming_message_t *sm = streaming_msg_create_pkt(pkt);
  streaming_target_deliver2(tf->tf_output, sm);
  pkt_ref_dec(pkt);
}
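
The (pkt->pkt_pts - pkt->pkt_dts) & PTS_MASK line in example #8 recovers the PTS-to-DTS offset even when the 33-bit MPEG timestamp wraps between DTS and PTS. A tiny standalone check, assuming the conventional 33-bit mask value of 0x1ffffffff (the examples themselves only reference the PTS_MASK macro):

#include <assert.h>
#include <stdint.h>

#define PTS_MASK 0x1ffffffffLL   /* assumed value: 33-bit MPEG timestamp mask */

int main(void)
{
  /* DTS just below the 33-bit limit, PTS already wrapped back near zero. */
  int64_t dts = PTS_MASK - 10;
  int64_t pts = 20;

  /* Plain subtraction goes negative across the wrap... */
  assert(pts - dts < 0);

  /* ...but masking down to 33 bits restores the true forward offset. */
  int64_t ptsoff = (pts - dts) & PTS_MASK;
  assert(ptsoff == 31);
  return 0;
}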
Example #9
/*
 * Timeshift thread
 */
void *timeshift_reader ( void *p )
{
  timeshift_t *ts = p;
  int nfds, end, run = 1, wait = -1;
  timeshift_file_t *cur_file = NULL;
  int cur_speed = 100, keyframe_mode = 0;
  int64_t pause_time = 0, play_time = 0, last_time = 0;
  int64_t now, deliver, skip_time = 0;
  streaming_message_t *sm = NULL, *ctrl = NULL;
  timeshift_index_iframe_t *tsi = NULL;
  streaming_skip_t *skip = NULL;
  time_t last_status = 0;
  tvhpoll_t *pd;
  tvhpoll_event_t ev = { 0 };

  pd = tvhpoll_create(1);
  ev.fd     = ts->rd_pipe.rd;
  ev.events = TVHPOLL_IN;
  tvhpoll_add(pd, &ev, 1);

  /* Output */
  while (run) {

    // Note: Previously we allowed unlimited wait, but we now must wake periodically
    //       to output status message
    if (wait < 0 || wait > 1000)
      wait = 1000;

    /* Wait for data */
    if(wait)
      nfds = tvhpoll_wait(pd, &ev, 1, wait);
    else
      nfds = 0;
    wait      = -1;
    end       = 0;
    skip      = NULL;
    now       = getmonoclock();

    /* Control */
    pthread_mutex_lock(&ts->state_mutex);
    if (nfds == 1) {
      if (_read_msg(NULL, ts->rd_pipe.rd, &ctrl) > 0) {

        /* Exit */
        if (ctrl->sm_type == SMT_EXIT) {
          tvhtrace("timeshift", "ts %d read exit request", ts->id);
          run = 0;
          streaming_msg_free(ctrl);
          ctrl = NULL;

        /* Speed */
        } else if (ctrl->sm_type == SMT_SPEED) {
          int speed = ctrl->sm_code;
          int keyframe;

          /* Bound it */
          if (speed > 3200)  speed = 3200;
          if (speed < -3200) speed = -3200;

          /* Ignore negative */
          if (ts->ondemand && (speed < 0))
            speed = cur_file ? speed : 0;

          /* Process */
          if (cur_speed != speed) {

            /* Live playback */
            if (ts->state == TS_LIVE) {

              /* Reject */
              if (speed >= 100) {
                tvhlog(LOG_DEBUG, "timeshift", "ts %d reject 1x+ in live mode",
                       ts->id);
                speed = 100;

              /* Set position */
              } else {
                tvhlog(LOG_DEBUG, "timeshift", "ts %d enter timeshift mode",
                       ts->id);
                timeshift_writer_flush(ts);
                pthread_mutex_lock(&ts->rdwr_mutex);
                if ((cur_file    = timeshift_filemgr_get(ts, 1))) {
                  cur_file->roff = cur_file->size;
                  pause_time     = cur_file->last;
                  last_time      = pause_time;
                }
                pthread_mutex_unlock(&ts->rdwr_mutex);
              }

            /* Buffer playback */
            } else if (ts->state == TS_PLAY) {
              pause_time = last_time;

            /* Paused */
            } else {
            }

            /* Check keyframe mode */
            keyframe      = (speed < 0) || (speed > 400);
            if (keyframe != keyframe_mode) {
              tvhlog(LOG_DEBUG, "timeshift", "using keyframe mode? %s",
                     keyframe ? "yes" : "no");
              keyframe_mode = keyframe;
              if (keyframe) {
                tsi = NULL;
              }
            }

            /* Update */
            play_time  = getmonoclock();
            cur_speed  = speed;
            if (speed != 100 || ts->state != TS_LIVE)
              ts->state = speed == 0 ? TS_PAUSE : TS_PLAY;
            tvhlog(LOG_DEBUG, "timeshift", "ts %d change speed %d",
                   ts->id, speed);
          }

          /* Send on the message */
          ctrl->sm_code = speed;
          streaming_target_deliver2(ts->output, ctrl);
          ctrl = NULL;

        /* Skip/Seek */
        } else if (ctrl->sm_type == SMT_SKIP) {
          skip = ctrl->sm_data;
          switch (skip->type) {
            case SMT_SKIP_LIVE:
              if (ts->state != TS_LIVE) {

                /* Reset */
                if (ts->full) {
                  pthread_mutex_lock(&ts->rdwr_mutex);
                  timeshift_filemgr_flush(ts, NULL);
                  ts->full = 0;
                  pthread_mutex_unlock(&ts->rdwr_mutex);
                }

                /* Release */
                if (sm)
                  streaming_msg_free(sm);

                /* Find end */
                skip_time = 0x7fffffffffffffffLL;
                // TODO: change this sometime!
              }
              break;

            case SMT_SKIP_ABS_TIME:
              if (ts->pts_delta == 0) {
                tvhlog(LOG_ERR, "timeshift", "ts %d abs skip not possible no PTS delta", ts->id);
                skip = NULL;
                break;
              }
              /* -fallthrough */
            case SMT_SKIP_REL_TIME:

              /* Convert */
              skip_time = ts_rescale(skip->time, 1000000);
              tvhlog(LOG_DEBUG, "timeshift", "ts %d skip %"PRId64" requested %"PRId64, ts->id, skip_time, skip->time);

              /* Live playback (stage1) */
              if (ts->state == TS_LIVE) {
                pthread_mutex_lock(&ts->rdwr_mutex);
                if ((cur_file    = timeshift_filemgr_get(ts, !ts->ondemand))) {
                  cur_file->roff = cur_file->size;
                  last_time      = cur_file->last;
                } else {
                  tvhlog(LOG_ERR, "timeshift", "ts %d failed to get current file", ts->id);
                  skip = NULL;
                }
                pthread_mutex_unlock(&ts->rdwr_mutex);
              }

              /* May have failed */
              if (skip) {
                skip_time += (skip->type == SMT_SKIP_ABS_TIME) ? ts->pts_delta : last_time;
                tvhlog(LOG_DEBUG, "timeshift", "ts %d skip last_time %"PRId64" pts_delta %"PRId64,
                       ts->id, skip_time - ts->pts_delta, ts->pts_delta);

               /* Live (stage2) */
                if (ts->state == TS_LIVE) {
                  if (skip_time >= now) {
                    tvhlog(LOG_DEBUG, "timeshift", "ts %d skip ignored, already live", ts->id);
                    skip = NULL;
                  } else {
                    ts->state = TS_PLAY;
                  }
                }
              }

              /* OK */
              if (skip) {

                /* Adjust time */
                play_time  = now;
                pause_time = skip_time;
                tsi        = NULL;

                /* Clear existing packet */
                if (sm)
                  streaming_msg_free(sm);
                sm = NULL;
              }
              break;
            default:
              tvhlog(LOG_ERR, "timeshift", "ts %d invalid/unsupported skip type: %d", ts->id, skip->type);
              skip = NULL;
              break;
          }

          /* Error */
          if (!skip) {
            ((streaming_skip_t*)ctrl->sm_data)->type = SMT_SKIP_ERROR;
            streaming_target_deliver2(ts->output, ctrl);
            ctrl = NULL;
          }

        /* Ignore */
        } else {
          streaming_msg_free(ctrl);
          ctrl = NULL;
        }
      }
    }

    /* Status message */
    if (now >= (last_status + 1000000)) {
      streaming_message_t *tsm;
      timeshift_status_t *status;
      timeshift_index_iframe_t *fst, *lst;
      status = calloc(1, sizeof(timeshift_status_t));
      fst    = _timeshift_first_frame(ts);
      lst    = _timeshift_last_frame(ts);
      status->full  = ts->full;
      status->shift = ts->state <= TS_LIVE ? 0 : ts_rescale_i(now - last_time, 1000000);
      if (lst && fst && lst != fst && ts->pts_delta != PTS_UNSET) {
        status->pts_start = ts_rescale_i(fst->time - ts->pts_delta, 1000000);
        status->pts_end   = ts_rescale_i(lst->time - ts->pts_delta, 1000000);
      } else {
        status->pts_start = PTS_UNSET;
        status->pts_end   = PTS_UNSET;
      }
      tsm = streaming_msg_create_data(SMT_TIMESHIFT_STATUS, status);
      streaming_target_deliver2(ts->output, tsm);
      last_status = now;
    }

    /* Done */
    if (!run || !cur_file || ((ts->state != TS_PLAY && !skip))) {
      pthread_mutex_unlock(&ts->state_mutex);
      continue;
    }

    /* Calculate delivery time */
    deliver = (now - play_time) + TIMESHIFT_PLAY_BUF;
    deliver = (deliver * cur_speed) / 100;
    deliver = (deliver + pause_time);

    /* Determine next packet */
    if (!sm) {

      /* Rewind or Fast forward (i-frame only) */
      if (skip || keyframe_mode) {
        timeshift_file_t *tsf = NULL;
        int64_t req_time;

        /* Time */
        if (!skip)
          req_time = last_time + ((cur_speed < 0) ? -1 : 1);
        else
          req_time = skip_time;
        tvhlog(LOG_DEBUG, "timeshift", "ts %d skip to %"PRId64" from %"PRId64,
               ts->id, req_time - ts->pts_delta, last_time - ts->pts_delta);

        /* Find */
        pthread_mutex_lock(&ts->rdwr_mutex);
        end = _timeshift_skip(ts, req_time, last_time,
                              cur_file, &tsf, &tsi);
        pthread_mutex_unlock(&ts->rdwr_mutex);
        if (tsi)
          tvhlog(LOG_DEBUG, "timeshift", "ts %d skip found pkt @ %"PRId64,
                 ts->id, tsi->time - ts->pts_delta);

        /* File changed (close) */
        if ((tsf != cur_file) && cur_file && cur_file->rfd >= 0) {
          close(cur_file->rfd);
          cur_file->rfd = -1;
        }

        /* Position */
        if (cur_file)
          cur_file->refcount--;
        if ((cur_file = tsf) != NULL) {
          if (tsi)
            cur_file->roff = tsi->pos;
          else
            cur_file->roff = 0;
        }
      }

      /* Find packet */
      if (_timeshift_read(ts, &cur_file, &sm, &wait) == -1) {
        pthread_mutex_unlock(&ts->state_mutex);
        break;
      }
    }

    /* Send skip response */
    if (skip) {
      if (sm && sm->sm_type == SMT_PACKET) {
        th_pkt_t *pkt = sm->sm_data;
        skip->time = pkt->pkt_pts;
        skip->type = SMT_SKIP_ABS_TIME;
        tvhlog(LOG_DEBUG, "timeshift", "ts %d skip to pts %"PRId64" ok, time %"PRId64,
               ts->id, ts_rescale(skip->time, 1000000), sm->sm_time - ts->pts_delta);
      } else {
        /* Report error */
        skip->type = SMT_SKIP_ERROR;
        skip       = NULL;
        tvhlog(LOG_DEBUG, "timeshift", "ts %d skip failed (%d)", ts->id, sm ? sm->sm_type : -1);
      }
      streaming_target_deliver2(ts->output, ctrl);
      ctrl = NULL;
    }

    /* Deliver */
    if (sm && (skip ||
               (((cur_speed < 0) && (sm->sm_time >= deliver)) ||
               ((cur_speed > 0) && (sm->sm_time <= deliver))))) {

      if (sm->sm_type == SMT_PACKET && tvhtrace_enabled()) {
        th_pkt_t *pkt = sm->sm_data;
        tvhtrace("timeshift",
                 "ts %d pkt out - stream %d type %c pts %10"PRId64
                 " dts %10"PRId64 " dur %10d len %6zu time %14"PRItime_t,
                 ts->id,
                 pkt->pkt_componentindex,
                 pkt_frametype_to_char(pkt->pkt_frametype),
                 ts_rescale(pkt->pkt_pts, 1000000),
                 ts_rescale(pkt->pkt_dts, 1000000),
                 pkt->pkt_duration,
                 pktbuf_len(pkt->pkt_payload), sm->sm_time - ts->pts_delta);
      }
      last_time = sm->sm_time;
      streaming_target_deliver2(ts->output, sm);
      sm        = NULL;
      wait      = 0;
    } else if (sm) {
      if (cur_speed > 0)
        wait = (sm->sm_time - deliver) / 1000;
      else
        wait = (deliver - sm->sm_time) / 1000;
      if (wait == 0) wait = 1;
      tvhtrace("timeshift", "ts %d wait %d",
               ts->id, wait);
    }

    /* Terminate */
    if (!cur_file || end != 0) {
      if (!end)
        end = (cur_speed > 0) ? 1 : -1;

      /* Back to live (unless buffer is full) */
      if (end == 1 && !ts->full) {
        tvhlog(LOG_DEBUG, "timeshift", "ts %d eob revert to live mode", ts->id);
        ts->state = TS_LIVE;
        cur_speed = 100;
        ctrl      = streaming_msg_create_code(SMT_SPEED, cur_speed);
        streaming_target_deliver2(ts->output, ctrl);
        ctrl      = NULL;

        /* Flush timeshift buffer to live */
        if (_timeshift_flush_to_live(ts, &cur_file, &sm, &wait) == -1)
          break;

        /* Close file (if open) */
        if (cur_file && cur_file->rfd >= 0) {
          close(cur_file->rfd);
          cur_file->rfd = -1;
        }

        /* Flush ALL files */
        if (ts->ondemand)
          timeshift_filemgr_flush(ts, NULL);

      /* Pause */
      } else {
        if (cur_speed <= 0) {
          cur_speed = 0;
          ts->state = TS_PAUSE;
        } else {
          cur_speed = 100;
          ts->state = TS_PLAY;
          play_time = now;
        }
        tvhlog(LOG_DEBUG, "timeshift", "ts %d sob speed %d", ts->id, cur_speed);
        pause_time = last_time;
        ctrl       = streaming_msg_create_code(SMT_SPEED, cur_speed);
        streaming_target_deliver2(ts->output, ctrl);
        ctrl       = NULL;
      }

    /* Flush unwanted */
    } else if (ts->ondemand && cur_file) {
      pthread_mutex_lock(&ts->rdwr_mutex);
      timeshift_filemgr_flush(ts, cur_file);
      pthread_mutex_unlock(&ts->rdwr_mutex);
    }

    pthread_mutex_unlock(&ts->state_mutex);
  }

  /* Cleanup */
  tvhpoll_destroy(pd);
  if (cur_file && cur_file->rfd >= 0) {
    close(cur_file->rfd);
    cur_file->rfd = -1;
  }
  if (sm)       streaming_msg_free(sm);
  if (ctrl)     streaming_msg_free(ctrl);
  tvhtrace("timeshift", "ts %d exit reader thread", ts->id);

  return NULL;
}
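
The delivery deadline computed near the top of the reader loop in example #9 maps wall-clock time elapsed since playback (re)started onto a buffer position: the elapsed time is scaled by cur_speed/100 and added to pause_time (the small TIMESHIFT_PLAY_BUF lead-in term is omitted here for clarity). A short standalone check with made-up numbers:

#include <assert.h>
#include <stdint.h>

int main(void)
{
  /* Illustrative values only, in microseconds of monotonic clock time. */
  int64_t play_time  = 1000000;   /* when playback (re)started          */
  int64_t pause_time = 5000000;   /* buffer position at that moment     */
  int64_t now        = 3000000;   /* two seconds of wall clock later    */
  int     cur_speed  = 50;        /* half speed                         */

  int64_t deliver = ((now - play_time) * cur_speed) / 100 + pause_time;

  /* At 0.5x, two seconds of wall clock advance the buffer by one second. */
  assert(deliver == 6000000);
  return 0;
}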
Example #10
File: dvr_rec.c Project: JPP1/tvheadend
static void *
dvr_thread(void *aux)
{
  dvr_entry_t *de = aux;
  dvr_config_t *cfg = de->de_config;
  profile_chain_t *prch = de->de_chain;
  streaming_queue_t *sq = &prch->prch_sq;
  streaming_message_t *sm;
  th_pkt_t *pkt;
  int run = 1;
  int started = 0;
  int comm_skip = cfg->dvr_skip_commercials;
  int commercial = COMMERCIAL_UNKNOWN;

  pthread_mutex_lock(&sq->sq_mutex);

  while(run) {
    sm = TAILQ_FIRST(&sq->sq_queue);
    if(sm == NULL) {
      pthread_cond_wait(&sq->sq_cond, &sq->sq_mutex);
      continue;
    }

    if (de->de_s && started) {
      pktbuf_t *pb = NULL;
      if (sm->sm_type == SMT_PACKET)
        pb = ((th_pkt_t*)sm->sm_data)->pkt_payload;
      else if (sm->sm_type == SMT_MPEGTS)
        pb = sm->sm_data;
      if (pb)
        atomic_add(&de->de_s->ths_bytes_out, pktbuf_len(pb));
    }

    TAILQ_REMOVE(&sq->sq_queue, sm, sm_link);

    pthread_mutex_unlock(&sq->sq_mutex);

    switch(sm->sm_type) {

    case SMT_PACKET:
      pkt = sm->sm_data;
      if(pkt->pkt_commercial == COMMERCIAL_YES)
	dvr_rec_set_state(de, DVR_RS_COMMERCIAL, 0);
      else
	dvr_rec_set_state(de, DVR_RS_RUNNING, 0);

      if(pkt->pkt_commercial == COMMERCIAL_YES && comm_skip)
	break;

      if(commercial != pkt->pkt_commercial)
	muxer_add_marker(prch->prch_muxer);

      commercial = pkt->pkt_commercial;

      if(started) {
	muxer_write_pkt(prch->prch_muxer, sm->sm_type, sm->sm_data);
	sm->sm_data = NULL;
      }
      break;

    case SMT_MPEGTS:
      if(started) {
	dvr_rec_set_state(de, DVR_RS_RUNNING, 0);
	muxer_write_pkt(prch->prch_muxer, sm->sm_type, sm->sm_data);
	sm->sm_data = NULL;
      }
      break;

    case SMT_START:
      if(started &&
	 muxer_reconfigure(prch->prch_muxer, sm->sm_data) < 0) {
	tvhlog(LOG_WARNING,
	       "dvr", "Unable to reconfigure \"%s\"",
	       de->de_filename ?: lang_str_get(de->de_title, NULL));

	// Try to restart the recording if the muxer doesn't
	// support reconfiguration of the streams.
	dvr_thread_epilog(de);
	started = 0;
      }

      if(!started) {
        pthread_mutex_lock(&global_lock);
        dvr_rec_set_state(de, DVR_RS_WAIT_PROGRAM_START, 0);
        if(dvr_rec_start(de, sm->sm_data) == 0) {
          started = 1;
          idnode_changed(&de->de_id);
          htsp_dvr_entry_update(de);
        }
        pthread_mutex_unlock(&global_lock);
      } 
      break;

    case SMT_STOP:
       if(sm->sm_code == SM_CODE_SOURCE_RECONFIGURED) {
	 // Subscription is restarting, wait for SMT_START

       } else if(sm->sm_code == 0) {
	 // Recording is completed

	de->de_last_error = 0;
	tvhlog(LOG_INFO, 
	       "dvr", "Recording completed: \"%s\"",
	       de->de_filename ?: lang_str_get(de->de_title, NULL));

	dvr_thread_epilog(de);
	started = 0;

      }else if(de->de_last_error != sm->sm_code) {
Example #11
/**
 * Add a stream to the muxer
 */
static int
lav_muxer_add_stream(lav_muxer_t *lm, 
		     const streaming_start_component_t *ssc)
{
  AVStream *st;
  AVCodecContext *c;

  st = avformat_new_stream(lm->lm_oc, NULL);
  if (!st)
    return -1;

  st->id = ssc->ssc_index;
  c = st->codec;
  c->codec_id = streaming_component_type2codec_id(ssc->ssc_type);

  switch(lm->m_container) {
  case MC_MATROSKA:
    st->time_base.num = 1000000;
    st->time_base.den = 1;
    break;

  case MC_MPEGPS:
    c->rc_buffer_size = 224*1024*8;
    //Fall-through
  case MC_MPEGTS:
    st->time_base.num = 90000;
    st->time_base.den = 1;
    break;

  default:
    st->time_base = AV_TIME_BASE_Q;
    break;
  }



  if(ssc->ssc_gh) {
    c->extradata_size = pktbuf_len(ssc->ssc_gh);
    c->extradata = av_malloc(c->extradata_size);
    memcpy(c->extradata, pktbuf_ptr(ssc->ssc_gh), 
	   pktbuf_len(ssc->ssc_gh));
  }

  if(SCT_ISAUDIO(ssc->ssc_type)) {
    c->codec_type    = AVMEDIA_TYPE_AUDIO;
    c->sample_fmt    = AV_SAMPLE_FMT_S16;

    c->sample_rate   = sri_to_rate(ssc->ssc_sri);
    c->channels      = ssc->ssc_channels;

    c->time_base.num = 1;
    c->time_base.den = c->sample_rate;

    av_dict_set(&st->metadata, "language", ssc->ssc_lang, 0);

  } else if(SCT_ISVIDEO(ssc->ssc_type)) {
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    c->width      = ssc->ssc_width;
    c->height     = ssc->ssc_height;

    c->time_base.num  = 1;
    c->time_base.den = 25;

    c->sample_aspect_ratio.num = ssc->ssc_aspect_num;
    c->sample_aspect_ratio.den = ssc->ssc_aspect_den;

    st->sample_aspect_ratio.num = c->sample_aspect_ratio.num;
    st->sample_aspect_ratio.den = c->sample_aspect_ratio.den;

  } else if(SCT_ISSUBTITLE(ssc->ssc_type)) {
    c->codec_type = AVMEDIA_TYPE_SUBTITLE;
    av_dict_set(&st->metadata, "language", ssc->ssc_lang, 0);
  }

  if(lm->lm_oc->oformat->flags & AVFMT_GLOBALHEADER)
    c->flags |= CODEC_FLAG_GLOBAL_HEADER;

  return 0;
}
Example #12
/**
 * Write a packet to the muxer
 */
static int
lav_muxer_write_pkt(muxer_t *m, streaming_message_type_t smt, void *data)
{
  int i;
  AVFormatContext *oc;
  AVStream *st;
  AVPacket packet;
  th_pkt_t *pkt = (th_pkt_t*)data;
  lav_muxer_t *lm = (lav_muxer_t*)m;
  int rc = 0;

  assert(smt == SMT_PACKET);

  oc = lm->lm_oc;

  if(!oc->nb_streams) {
    tvhlog(LOG_ERR, "libav", "No streams to mux");
    rc = -1;
    goto ret;
  }

  if(!lm->lm_init) {
    tvhlog(LOG_ERR, "libav", "Muxer not initialized correctly");
    rc = -1;
    goto ret;
  }

  for(i=0; i<oc->nb_streams; i++) {
    st = oc->streams[i];

    if(st->id != pkt->pkt_componentindex)
      continue;

    av_init_packet(&packet);

    if(st->codec->codec_id == CODEC_ID_MPEG2VIDEO)
      pkt = pkt_merge_header(pkt);

    if(lm->lm_h264_filter && st->codec->codec_id == CODEC_ID_H264) {
      if(av_bitstream_filter_filter(lm->lm_h264_filter,
				    st->codec, 
				    NULL, 
				    &packet.data, 
				    &packet.size, 
				    pktbuf_ptr(pkt->pkt_payload), 
				    pktbuf_len(pkt->pkt_payload), 
				    pkt->pkt_frametype < PKT_P_FRAME) < 0) {
	tvhlog(LOG_WARNING, "libav",  "Failed to filter bitstream");
	break;
      }
    } else {
      packet.data = pktbuf_ptr(pkt->pkt_payload);
      packet.size = pktbuf_len(pkt->pkt_payload);
    }

    packet.stream_index = st->index;
 
    packet.pts      = av_rescale_q(pkt->pkt_pts     , mpeg_tc, st->time_base);
    packet.dts      = av_rescale_q(pkt->pkt_dts     , mpeg_tc, st->time_base);
    packet.duration = av_rescale_q(pkt->pkt_duration, mpeg_tc, st->time_base);

    if(pkt->pkt_frametype < PKT_P_FRAME)
      packet.flags |= AV_PKT_FLAG_KEY;

    if((rc = av_interleaved_write_frame(oc, &packet)))
      tvhlog(LOG_WARNING, "libav",  "Failed to write frame");

    // h264_mp4toannexb filter might allocate new data.
    if(packet.data != pktbuf_ptr(pkt->pkt_payload))
      av_free(packet.data);

    break;
  }

 ret:
  lm->m_errors += (rc != 0);
  pkt_ref_dec(pkt);

  return rc;
}
Example #13
/*
 * Receive data
 */
static void timeshift_input
  ( void *opaque, streaming_message_t *sm )
{
  int exit = 0, type = sm->sm_type;
  timeshift_t *ts = opaque;
  th_pkt_t *pkt = sm->sm_data, *pkt2;

  pthread_mutex_lock(&ts->state_mutex);

  /* Control */
  if (type == SMT_SKIP) {
    if (ts->state >= TS_LIVE)
      timeshift_write_skip(ts->rd_pipe.wr, sm->sm_data);
    streaming_msg_free(sm);
  } else if (type == SMT_SPEED) {
    if (ts->state >= TS_LIVE)
      timeshift_write_speed(ts->rd_pipe.wr, sm->sm_code);
    streaming_msg_free(sm);
  }

  else {

    /* Start */
    if (type == SMT_START && ts->state == TS_INIT)
      ts->state = TS_LIVE;

    /* Change PTS/DTS offsets */
    if (ts->packet_mode && ts->start_pts && type == SMT_PACKET) {
      pkt2 = pkt_copy_shallow(pkt);
      pkt_ref_dec(pkt);
      sm->sm_data = pkt = pkt2;
      pkt->pkt_pts += ts->start_pts;
      pkt->pkt_dts += ts->start_pts;
    }

    /* Pass-thru */
    if (ts->state <= TS_LIVE) {
      if (type == SMT_START) {
        if (ts->smt_start)
          streaming_start_unref(ts->smt_start);
        ts->smt_start = sm->sm_data;
        atomic_add(&ts->smt_start->ss_refcount, 1);
        if (ts->packet_mode) {
          timeshift_packet_flush(ts, ts->last_time + MAX_TIME_DELTA + 1000, ts->dobuf);
          if (ts->last_time)
            ts->start_pts = ts->last_time + 1000;
        }
      }
      streaming_target_deliver2(ts->output, streaming_msg_clone(sm));
    }

    /* Check for exit */
    if (type == SMT_EXIT ||
        (type == SMT_STOP && sm->sm_code != SM_CODE_SOURCE_RECONFIGURED))
      exit = 1;

    if (type == SMT_MPEGTS)
      ts->packet_mode = 0;

    /* Buffer to disk */
    if ((ts->state > TS_LIVE) || (ts->dobuf && (ts->state == TS_LIVE))) {
      if (ts->packet_mode) {
        sm->sm_time = ts->last_time;
        if (type == SMT_PACKET) {
          timeshift_packet(ts, pkt, 1);
          goto msg_free;
        }
      } else {
        if (ts->ref_time == 0) {
          ts->ref_time = getmonoclock();
          sm->sm_time = 0;
        } else {
          sm->sm_time = getmonoclock() - ts->ref_time;
        }
      }
      streaming_target_deliver2(&ts->wr_queue.sq_st, sm);
    } else {
      if (type == SMT_PACKET) {
        timeshift_packet(ts, pkt, 0);
        tvhtrace("timeshift",
                 "ts %d pkt in  - stream %d type %c pts %10"PRId64
                 " dts %10"PRId64" dur %10d len %6zu",
                 ts->id,
                 pkt->pkt_componentindex,
                 pkt_frametype_to_char(pkt->pkt_frametype),
                 ts_rescale(pkt->pkt_pts, 1000000),
                 ts_rescale(pkt->pkt_dts, 1000000),
                 pkt->pkt_duration,
                 pktbuf_len(pkt->pkt_payload));
      }
msg_free:
      streaming_msg_free(sm);
    }

    /* Exit/Stop */
    if (exit) {
      timeshift_write_exit(ts->rd_pipe.wr);
      ts->state = TS_EXIT;
    }
  }

  pthread_mutex_unlock(&ts->state_mutex);
}
Example #14
File: tsfix.c Project: JPP1/tvheadend
static void
normalize_ts(tsfix_t *tf, tfstream_t *tfs, th_pkt_t *pkt)
{
  int64_t ref, dts, d;

  if(tf->tf_tsref == PTS_UNSET) {
    pkt_ref_dec(pkt);
    return;
  }

  pkt->pkt_dts &= PTS_MASK;
  pkt->pkt_pts &= PTS_MASK;

  /* Subtract the transport wide start offset */
  ref = tfs->tfs_local_ref != PTS_UNSET ? tfs->tfs_local_ref : tf->tf_tsref;
  dts = pkt->pkt_dts - ref;

  if(tfs->tfs_last_dts_norm == PTS_UNSET) {
    if(dts < 0) {
      /* Early packet with negative time stamp, drop those */
      pkt_ref_dec(pkt);
      return;
    }
  } else {
    int64_t low   =  90000; /* one second */
    int64_t upper = 180000; /* two seconds */
    d = dts + tfs->tfs_dts_epoch - tfs->tfs_last_dts_norm;

    if (SCT_ISSUBTITLE(tfs->tfs_type)) {
      /*
       * special conditions for subtitles, because they may be broadcasted
       * with large time gaps
       */
      low   = PTS_MASK / 2; /* more than 13 hours */
      upper = low - 1;
    }

    if (d < 0 || d > low) {

      if(d < -PTS_MASK || d > -PTS_MASK + upper) {

	tfs->tfs_bad_dts++;

	if(tfs->tfs_bad_dts < 5) {
	  tvhlog(LOG_ERR, "parser",
		 "transport stream %s, DTS discontinuity. "
		 "DTS = %" PRId64 ", last = %" PRId64,
		 streaming_component_type2txt(tfs->tfs_type),
		 dts, tfs->tfs_last_dts_norm);
	}
      } else {
	/* DTS wrapped, increase upper bits */
	tfs->tfs_dts_epoch += PTS_MASK + 1;
	tfs->tfs_bad_dts = 0;
      }
    } else {
      tfs->tfs_bad_dts = 0;
    }
  }

  dts += tfs->tfs_dts_epoch;
  tfs->tfs_last_dts_norm = dts;

  if(pkt->pkt_pts != PTS_UNSET) {
    /* Compute delta between PTS and DTS (and watch out for 33 bit wrap) */
    d = (pkt->pkt_pts - pkt->pkt_dts) & PTS_MASK;
    pkt->pkt_pts = dts + d;
  }

  pkt->pkt_dts = dts;

  tsfixprintf("TSFIX: %-12s %d %10"PRId64" %10"PRId64" %10d %zd\n",
	      streaming_component_type2txt(tfs->tfs_type),
	      pkt->pkt_frametype,
	      pkt->pkt_dts,
	      pkt->pkt_pts,
	      pkt->pkt_duration,
	      pktbuf_len(pkt->pkt_payload));

  streaming_message_t *sm = streaming_msg_create_pkt(pkt);
  streaming_target_deliver2(tf->tf_output, sm);
  pkt_ref_dec(pkt);
}
Example #15
/**
 * Write a packet to the muxer
 */
static int
lav_muxer_write_pkt(muxer_t *m, streaming_message_type_t smt, void *data)
{
  int i;
  AVFormatContext *oc;
  AVStream *st;
  AVPacket packet;
  th_pkt_t *pkt = (th_pkt_t*)data, *opkt;
  lav_muxer_t *lm = (lav_muxer_t*)m;
  unsigned char *tofree;
  int rc = 0;

  assert(smt == SMT_PACKET);

  oc = lm->lm_oc;

  if(!oc->nb_streams) {
    tvhlog(LOG_ERR, "libav", "No streams to mux");
    rc = -1;
    goto ret;
  }

  if(!lm->lm_init) {
    tvhlog(LOG_ERR, "libav", "Muxer not initialized correctly");
    rc = -1;
    goto ret;
  }

  for(i=0; i<oc->nb_streams; i++) {
    st = oc->streams[i];

    if(st->id != pkt->pkt_componentindex)
      continue;
    if(pkt->pkt_payload == NULL)
      continue;

    tofree = NULL;
    av_init_packet(&packet);

    if((lm->lm_h264_filter && st->codec->codec_id == AV_CODEC_ID_H264) ||
       (lm->lm_hevc_filter && st->codec->codec_id == AV_CODEC_ID_HEVC)) {
      pkt = avc_convert_pkt(opkt = pkt);
      pkt_ref_dec(opkt);
      if(av_bitstream_filter_filter(st->codec->codec_id == AV_CODEC_ID_H264 ?
                                      lm->lm_h264_filter : lm->lm_hevc_filter,
				    st->codec, 
				    NULL, 
				    &packet.data, 
				    &packet.size, 
				    pktbuf_ptr(pkt->pkt_payload), 
				    pktbuf_len(pkt->pkt_payload), 
				    pkt->pkt_frametype < PKT_P_FRAME) < 0) {
	tvhlog(LOG_WARNING, "libav",  "Failed to filter bitstream");
	if (packet.data != pktbuf_ptr(pkt->pkt_payload))
	  av_free(packet.data);
	break;
      } else {
        tofree = packet.data;
      }
    } else if (st->codec->codec_id == AV_CODEC_ID_AAC) {
      /* remove ADTS header */
      packet.data = pktbuf_ptr(pkt->pkt_payload) + 7;
      packet.size = pktbuf_len(pkt->pkt_payload) - 7;
    } else {
      packet.data = pktbuf_ptr(pkt->pkt_payload);
      packet.size = pktbuf_len(pkt->pkt_payload);
    }

    packet.stream_index = st->index;
 
    packet.pts      = av_rescale_q(pkt->pkt_pts     , mpeg_tc, st->time_base);
    packet.dts      = av_rescale_q(pkt->pkt_dts     , mpeg_tc, st->time_base);
    packet.duration = av_rescale_q(pkt->pkt_duration, mpeg_tc, st->time_base);

    if(pkt->pkt_frametype < PKT_P_FRAME)
      packet.flags |= AV_PKT_FLAG_KEY;

    if((rc = av_interleaved_write_frame(oc, &packet)))
      tvhlog(LOG_WARNING, "libav",  "Failed to write frame");

    if(tofree && tofree != pktbuf_ptr(pkt->pkt_payload))
      av_free(tofree);

    break;
  }

 ret:
  lm->m_errors += (rc != 0);
  pkt_ref_dec(pkt);

  return rc;
}
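
Example #15 strips a fixed 7-byte ADTS header before handing the AAC payload to libav, which assumes the CRC-less ADTS form. For reference, a hypothetical helper (not part of tvheadend) that derives the header length from the protection_absent bit instead:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical helper: length of an ADTS header at the start of buf,
 * i.e. 7 bytes without CRC or 9 bytes when a CRC follows, 0 if no syncword. */
static size_t adts_header_len(const uint8_t *buf, size_t len)
{
  if (len < 7)
    return 0;
  if (buf[0] != 0xff || (buf[1] & 0xf6) != 0xf0)  /* 12-bit syncword, layer 00 */
    return 0;
  return (buf[1] & 0x01) ? 7 : 9;  /* protection_absent set -> no 16-bit CRC */
}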
Example #16
/**
 * Add a stream to the muxer
 */
static int
lav_muxer_add_stream(lav_muxer_t *lm, 
		     const streaming_start_component_t *ssc)
{
  AVStream *st;
  AVCodecContext *c;

  st = avformat_new_stream(lm->lm_oc, NULL);
  if (!st)
    return -1;

  st->id = ssc->ssc_index;
  c = st->codec;
  c->codec_id = streaming_component_type2codec_id(ssc->ssc_type);

  switch(lm->m_config.m_type) {
  case MC_MATROSKA:
  case MC_AVMATROSKA:
  case MC_AVMP4:
    st->time_base.num = 1000000;
    st->time_base.den = 1;
    break;

  case MC_MPEGPS:
    c->rc_buffer_size = 224*1024*8;
    //Fall-through
  case MC_MPEGTS:
    st->time_base.num = 90000;
    st->time_base.den = 1;
    break;

  default:
    st->time_base = AV_TIME_BASE_Q;
    break;
  }

  if(ssc->ssc_gh) {
    if (ssc->ssc_type == SCT_H264 || ssc->ssc_type == SCT_HEVC) {
      sbuf_t hdr;
      sbuf_init(&hdr);
      if (ssc->ssc_type == SCT_H264) {
          isom_write_avcc(&hdr, pktbuf_ptr(ssc->ssc_gh),
                          pktbuf_len(ssc->ssc_gh));
      } else {
          isom_write_hvcc(&hdr, pktbuf_ptr(ssc->ssc_gh),
                          pktbuf_len(ssc->ssc_gh));
      }
      c->extradata_size = hdr.sb_ptr;
      c->extradata = av_malloc(hdr.sb_ptr);
      memcpy(c->extradata, hdr.sb_data, hdr.sb_ptr);
      sbuf_free(&hdr);
    } else {
      c->extradata_size = pktbuf_len(ssc->ssc_gh);
      c->extradata = av_malloc(c->extradata_size);
      memcpy(c->extradata, pktbuf_ptr(ssc->ssc_gh),
             pktbuf_len(ssc->ssc_gh));
    }
  }

  if(SCT_ISAUDIO(ssc->ssc_type)) {
    c->codec_type    = AVMEDIA_TYPE_AUDIO;
    c->sample_fmt    = AV_SAMPLE_FMT_S16;

    c->sample_rate   = sri_to_rate(ssc->ssc_sri);
    c->channels      = ssc->ssc_channels;

#if 0
    c->time_base.num = 1;
    c->time_base.den = c->sample_rate;
#else
    c->time_base     = st->time_base;
#endif

    av_dict_set(&st->metadata, "language", ssc->ssc_lang, 0);

  } else if(SCT_ISVIDEO(ssc->ssc_type)) {
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    c->width      = ssc->ssc_width;
    c->height     = ssc->ssc_height;

    c->time_base.num = 1;
    c->time_base.den = 25;

    c->sample_aspect_ratio.num = ssc->ssc_aspect_num;
    c->sample_aspect_ratio.den = ssc->ssc_aspect_den;

    if (lm->m_config.m_type == MC_AVMP4) {
      /* this is a whole hell */
      AVRational ratio = { c->height, c->width };
      c->sample_aspect_ratio = av_mul_q(c->sample_aspect_ratio, ratio);
    }

    st->sample_aspect_ratio.num = c->sample_aspect_ratio.num;
    st->sample_aspect_ratio.den = c->sample_aspect_ratio.den;

  } else if(SCT_ISSUBTITLE(ssc->ssc_type)) {
    c->codec_type = AVMEDIA_TYPE_SUBTITLE;
    av_dict_set(&st->metadata, "language", ssc->ssc_lang, 0);
  }

  if(lm->lm_oc->oformat->flags & AVFMT_GLOBALHEADER)
    c->flags |= CODEC_FLAG_GLOBAL_HEADER;

  return 0;
}