Example #1
static void
tsfix_input_packet(tsfix_t *tf, streaming_message_t *sm)
{
  th_pkt_t *pkt = pkt_copy_shallow(sm->sm_data);
  tfstream_t *tfs = tfs_find(tf, pkt);
  streaming_msg_free(sm);
  
  if(tfs == NULL || dispatch_clock < tf->tf_start_time) {
    pkt_ref_dec(pkt);
    return;
  }

  if(tf->tf_tsref == PTS_UNSET &&
     (!tf->tf_hasvideo ||
      (SCT_ISVIDEO(tfs->tfs_type) && pkt->pkt_frametype == PKT_I_FRAME))) {
      tf->tf_tsref = pkt->pkt_dts & PTS_MASK;
      tsfixprintf("reference clock set to %"PRId64"\n", tf->tf_tsref);
  }

  if(pkt->pkt_dts == PTS_UNSET) {

    int pdur = pkt->pkt_duration >> pkt->pkt_field;

    if(tfs->tfs_last_dts_in == PTS_UNSET) {
      pkt_ref_dec(pkt);
      return;
    }

    pkt->pkt_dts = (tfs->tfs_last_dts_in + pdur) & PTS_MASK;

    tsfixprintf("TSFIX: %-12s DTS set to last %"PRId64" +%d == %"PRId64"\n",
		streaming_component_type2txt(tfs->tfs_type),
		tfs->tfs_last_dts_in, pdur, pkt->pkt_dts);
  }
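  /* ... remainder of function omitted ... */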
Example #2
File: avc.c Project: wmyrda/tvheadend
th_pkt_t *
avc_convert_pkt(th_pkt_t *src)
{
  /* Shallow-copy the source packet struct; the header and payload
   * buffers are rebuilt below, so clear the copied pointers and start
   * with a fresh reference count. */
  th_pkt_t *pkt = malloc(sizeof(th_pkt_t));
  *pkt = *src;
  pkt->pkt_refcount = 1;
  pkt->pkt_header = NULL;
  pkt->pkt_payload = NULL;

  if (src->pkt_header) {
    sbuf_t headers;
    sbuf_init(&headers);

    isom_write_avcc(&headers, pktbuf_ptr(src->pkt_header),
		    pktbuf_len(src->pkt_header));
    pkt->pkt_header = pktbuf_make(headers.sb_data, headers.sb_ptr);
  }

  sbuf_t payload;
  sbuf_init(&payload);

  if(src->pkt_header)
    avc_parse_nal_units(&payload, pktbuf_ptr(src->pkt_header),
			pktbuf_len(src->pkt_header));

  avc_parse_nal_units(&payload, pktbuf_ptr(src->pkt_payload),
		      pktbuf_len(src->pkt_payload));

  pkt->pkt_payload = pktbuf_make(payload.sb_data, payload.sb_ptr);
  pkt_ref_dec(src);
  return pkt;
}
Example #3
void
pktref_remove(struct th_pktref_queue *q, th_pktref_t *pr)
{
  TAILQ_REMOVE(q, pr, pr_link);
  pkt_ref_dec(pr->pr_pkt);
  free(pr);
}
Example #4
File: tsfix.c Project: JPP1/tvheadend
static void
tsfix_input_packet(tsfix_t *tf, streaming_message_t *sm)
{
  th_pkt_t *pkt = pkt_copy_shallow(sm->sm_data);
  tfstream_t *tfs = tfs_find(tf, pkt);
  streaming_msg_free(sm);
  
  if(tfs == NULL || dispatch_clock < tf->tf_start_time) {
    pkt_ref_dec(pkt);
    return;
  }

  if(tf->tf_tsref == PTS_UNSET &&
     (!tf->tf_hasvideo ||
      (SCT_ISVIDEO(tfs->tfs_type) && pkt->pkt_frametype == PKT_I_FRAME))) {
      tf->tf_tsref = pkt->pkt_dts & PTS_MASK;
      tsfixprintf("reference clock set to %"PRId64"\n", tf->tf_tsref);
  } else {
    /* For teletext, the encoders might use completely different timestamps */
    /* If the difference is greater than 2 seconds, use the actual dts value */
    if (tfs->tfs_type == SCT_TELETEXT && tfs->tfs_local_ref == PTS_UNSET &&
        tf->tf_tsref != PTS_UNSET && pkt->pkt_dts != PTS_UNSET) {
      int64_t diff = tsfix_ts_diff(tf->tf_tsref, pkt->pkt_dts);
      if (diff > 2 * 90000) {
        tfstream_t *tfs2;
        tvhwarn("parser", "The timediff for TELETEXT is big (%"PRId64"), using current dts", diff);
        tfs->tfs_local_ref = pkt->pkt_dts;
        /* Text subtitles extracted from teletext have same timebase */
        LIST_FOREACH(tfs2, &tf->tf_streams, tfs_link)
          if(tfs2->tfs_type == SCT_TEXTSUB)
            tfs2->tfs_local_ref = pkt->pkt_dts;
      } else {
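        /* ... remainder of function omitted ... */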
Example #5
/*
 * Receive data
 */
static void timeshift_input
  ( void *opaque, streaming_message_t *sm )
{
  int type = sm->sm_type;
  timeshift_t *ts = opaque;
  th_pkt_t *pkt, *pkt2;

  if (ts->exit)
    return;

  /* Control */
  if (type == SMT_SKIP) {
    timeshift_write_skip(ts->rd_pipe.wr, sm->sm_data);
    streaming_msg_free(sm);
  } else if (type == SMT_SPEED) {
    timeshift_write_speed(ts->rd_pipe.wr, sm->sm_code);
    streaming_msg_free(sm);
  } else {

    /* Change PTS/DTS offsets */
    if (ts->packet_mode && ts->start_pts && type == SMT_PACKET) {
      pkt = sm->sm_data;
      pkt2 = pkt_copy_shallow(pkt);
      pkt_ref_dec(pkt);
      sm->sm_data = pkt2;
      pkt2->pkt_pts += ts->start_pts;
      pkt2->pkt_dts += ts->start_pts;
    }

    /* Check for exit */
    else if (type == SMT_EXIT ||
        (type == SMT_STOP && sm->sm_code != SM_CODE_SOURCE_RECONFIGURED))
      ts->exit = 1;

    else if (type == SMT_MPEGTS)
      ts->packet_mode = 0;

    /* Send to the writer thread */
    if (ts->packet_mode) {
      sm->sm_time = ts->last_wr_time;
      if ((type == SMT_PACKET) && !timeshift_packet(ts, sm))
        goto _exit;
    } else {
      if (ts->ref_time == 0) {
        ts->ref_time = getfastmonoclock();
        sm->sm_time = 0;
      } else {
        sm->sm_time = getfastmonoclock() - ts->ref_time;
      }
    }
    streaming_target_deliver2(&ts->wr_queue.sq_st, sm);

    /* Exit/Stop */
_exit:
    if (ts->exit)
      timeshift_write_exit(ts->rd_pipe.wr);
  }
}
Example #6
void
pktref_remove(struct th_pktref_queue *q, th_pktref_t *pr)
{
  if (pr) {
    if (q)
      TAILQ_REMOVE(q, pr, pr_link);
    pkt_ref_dec(pr->pr_pkt);
    free(pr);
    memoryinfo_free(&pktref_memoryinfo, sizeof(*pr));
  }
}
Example #7
void
pktref_clear_queue(struct th_pktref_queue *q)
{
  th_pktref_t *pr;

  while((pr = TAILQ_FIRST(q)) != NULL) {
    TAILQ_REMOVE(q, pr, pr_link);
    pkt_ref_dec(pr->pr_pkt);
    free(pr);
  }
}
Example #8
void
pktref_clear_queue(struct th_pktref_queue *q)
{
  th_pktref_t *pr;

  if (q) {
    while((pr = TAILQ_FIRST(q)) != NULL) {
      TAILQ_REMOVE(q, pr, pr_link);
      pkt_ref_dec(pr->pr_pkt);
      free(pr);
      memoryinfo_free(&pktref_memoryinfo, sizeof(*pr));
    }
  }
}
Example #9
void
streaming_msg_free(streaming_message_t *sm)
{
  if (!sm)
    return;

  switch(sm->sm_type) {
  case SMT_PACKET:
    if(sm->sm_data)
      pkt_ref_dec(sm->sm_data);
    break;

  case SMT_START:
    if(sm->sm_data)
      streaming_start_unref(sm->sm_data);
    break;

  case SMT_GRACE:
  case SMT_STOP:
  case SMT_EXIT:
  case SMT_SERVICE_STATUS:
  case SMT_NOSTART:
  case SMT_NOSTART_WARN:
  case SMT_SPEED:
    break;

  case SMT_SKIP:
  case SMT_SIGNAL_STATUS:
  case SMT_DESCRAMBLE_INFO:
#if ENABLE_TIMESHIFT
  case SMT_TIMESHIFT_STATUS:
#endif
    free(sm->sm_data);
    break;

  case SMT_MPEGTS:
    if(sm->sm_data)
      pktbuf_ref_dec(sm->sm_data);
    break;

  default:
    abort();
  }
  memoryinfo_free(&streaming_msg_memoryinfo, sizeof(*sm));
  free(sm);
}
Example #10
void
streaming_msg_free(streaming_message_t *sm)
{
  switch(sm->sm_type) {
  case SMT_PACKET:
    if(sm->sm_data)
      pkt_ref_dec(sm->sm_data);
    break;

  case SMT_START:
    if(sm->sm_data)
      streaming_start_unref(sm->sm_data);
    break;

  case SMT_STOP:
  case SMT_EXIT:
  case SMT_SERVICE_STATUS:
  case SMT_NOSTART:
  case SMT_SPEED:
    break;

  case SMT_SKIP:
  case SMT_SIGNAL_STATUS:
#if ENABLE_TIMESHIFT
  case SMT_TIMESHIFT_STATUS:
#endif
    free(sm->sm_data);
    break;

  case SMT_MPEGTS:
    if(sm->sm_data)
      pktbuf_ref_dec(sm->sm_data);
    break;

  default:
    abort();
  }
  free(sm);
}
Example #11
static void
normalize_ts(tsfix_t *tf, tfstream_t *tfs, th_pkt_t *pkt)
{
  int64_t dts, d;

  int checkts = SCT_ISAUDIO(tfs->tfs_type) || SCT_ISVIDEO(tfs->tfs_type);

  if(tf->tf_tsref == PTS_UNSET) {
    pkt_ref_dec(pkt);
    return;
  }

  pkt->pkt_dts &= PTS_MASK;
  pkt->pkt_pts &= PTS_MASK;

  /* Subtract the transport wide start offset */
  dts = pkt->pkt_dts - tf->tf_tsref;

  if(tfs->tfs_last_dts_norm == PTS_UNSET) {
    if(dts < 0) {
      /* Early packet with negative time stamp, drop those */
      pkt_ref_dec(pkt);
      return;
    }
  } else if(checkts) {
    d = dts + tfs->tfs_dts_epoch - tfs->tfs_last_dts_norm;

    if(d < 0 || d > 90000) {

      if(d < -PTS_MASK || d > -PTS_MASK + 180000) {

	tfs->tfs_bad_dts++;

	if(tfs->tfs_bad_dts < 5) {
	  tvhlog(LOG_ERR, "parser", 
		 "transport stream %s, DTS discontinuity. "
		 "DTS = %" PRId64 ", last = %" PRId64,
		 streaming_component_type2txt(tfs->tfs_type),
		 dts, tfs->tfs_last_dts_norm);
	}
      } else {
	/* DTS wrapped, increase upper bits */
	tfs->tfs_dts_epoch += PTS_MASK + 1;
	tfs->tfs_bad_dts = 0;
      }
    } else {
      tfs->tfs_bad_dts = 0;
    }
  }

  dts += tfs->tfs_dts_epoch;
  tfs->tfs_last_dts_norm = dts;

  if(pkt->pkt_pts != PTS_UNSET) {
    /* Compute delta between PTS and DTS (and watch out for 33 bit wrap) */
    int64_t ptsoff = (pkt->pkt_pts - pkt->pkt_dts) & PTS_MASK;
    
    pkt->pkt_pts = dts + ptsoff;
  }

  pkt->pkt_dts = dts;

  tsfixprintf("TSFIX: %-12s %d %10"PRId64" %10"PRId64" %10d %zd\n",
	      streaming_component_type2txt(tfs->tfs_type),
	      pkt->pkt_frametype,
	      pkt->pkt_dts,
	      pkt->pkt_pts,
	      pkt->pkt_duration,
	      pktbuf_len(pkt->pkt_payload));

  streaming_message_t *sm = streaming_msg_create_pkt(pkt);
  streaming_target_deliver2(tf->tf_output, sm);
  pkt_ref_dec(pkt);
}
Example #12
/**
 * Write a packet to the muxer
 */
static int
lav_muxer_write_pkt(muxer_t *m, streaming_message_type_t smt, void *data)
{
  int i;
  AVFormatContext *oc;
  AVStream *st;
  AVPacket packet;
  th_pkt_t *pkt = (th_pkt_t*)data;
  lav_muxer_t *lm = (lav_muxer_t*)m;
  int rc = 0;

  assert(smt == SMT_PACKET);

  oc = lm->lm_oc;

  if(!oc->nb_streams) {
    tvhlog(LOG_ERR, "libav", "No streams to mux");
    rc = -1;
    goto ret;
  }

  if(!lm->lm_init) {
    tvhlog(LOG_ERR, "libav", "Muxer not initialized correctly");
    rc = -1;
    goto ret;
  }

  for(i=0; i<oc->nb_streams; i++) {
    st = oc->streams[i];

    if(st->id != pkt->pkt_componentindex)
      continue;

    av_init_packet(&packet);

    if(st->codec->codec_id == CODEC_ID_MPEG2VIDEO)
      pkt = pkt_merge_header(pkt);

    if(lm->lm_h264_filter && st->codec->codec_id == CODEC_ID_H264) {
      if(av_bitstream_filter_filter(lm->lm_h264_filter,
				    st->codec, 
				    NULL, 
				    &packet.data, 
				    &packet.size, 
				    pktbuf_ptr(pkt->pkt_payload), 
				    pktbuf_len(pkt->pkt_payload), 
				    pkt->pkt_frametype < PKT_P_FRAME) < 0) {
	tvhlog(LOG_WARNING, "libav",  "Failed to filter bitstream");
	break;
      }
    } else {
      packet.data = pktbuf_ptr(pkt->pkt_payload);
      packet.size = pktbuf_len(pkt->pkt_payload);
    }

    packet.stream_index = st->index;
 
    packet.pts      = av_rescale_q(pkt->pkt_pts     , mpeg_tc, st->time_base);
    packet.dts      = av_rescale_q(pkt->pkt_dts     , mpeg_tc, st->time_base);
    packet.duration = av_rescale_q(pkt->pkt_duration, mpeg_tc, st->time_base);

    if(pkt->pkt_frametype < PKT_P_FRAME)
      packet.flags |= AV_PKT_FLAG_KEY;

    if((rc = av_interleaved_write_frame(oc, &packet)))
      tvhlog(LOG_WARNING, "libav",  "Failed to write frame");

    // h264_mp4toannexb filter might allocate new data.
    if(packet.data != pktbuf_ptr(pkt->pkt_payload))
      av_free(packet.data);

    break;
  }

 ret:
  lm->m_errors += (rc != 0);
  pkt_ref_dec(pkt);

  return rc;
}
Example #13
/*
 * Receive data
 */
static void timeshift_input
  ( void *opaque, streaming_message_t *sm )
{
  int exit = 0, type = sm->sm_type;
  timeshift_t *ts = opaque;
  th_pkt_t *pkt = sm->sm_data, *pkt2;

  pthread_mutex_lock(&ts->state_mutex);

  /* Control */
  if (type == SMT_SKIP) {
    if (ts->state >= TS_LIVE)
      timeshift_write_skip(ts->rd_pipe.wr, sm->sm_data);
    streaming_msg_free(sm);
  } else if (type == SMT_SPEED) {
    if (ts->state >= TS_LIVE)
      timeshift_write_speed(ts->rd_pipe.wr, sm->sm_code);
    streaming_msg_free(sm);
  }

  else {

    /* Start */
    if (type == SMT_START && ts->state == TS_INIT)
      ts->state = TS_LIVE;

    /* Change PTS/DTS offsets */
    if (ts->packet_mode && ts->start_pts && type == SMT_PACKET) {
      pkt2 = pkt_copy_shallow(pkt);
      pkt_ref_dec(pkt);
      sm->sm_data = pkt = pkt2;
      pkt->pkt_pts += ts->start_pts;
      pkt->pkt_dts += ts->start_pts;
    }

    /* Pass-thru */
    if (ts->state <= TS_LIVE) {
      if (type == SMT_START) {
        if (ts->smt_start)
          streaming_start_unref(ts->smt_start);
        ts->smt_start = sm->sm_data;
        atomic_add(&ts->smt_start->ss_refcount, 1);
        if (ts->packet_mode) {
          timeshift_packet_flush(ts, ts->last_time + MAX_TIME_DELTA + 1000, ts->dobuf);
          if (ts->last_time)
            ts->start_pts = ts->last_time + 1000;
        }
      }
      streaming_target_deliver2(ts->output, streaming_msg_clone(sm));
    }

    /* Check for exit */
    if (type == SMT_EXIT ||
        (type == SMT_STOP && sm->sm_code != SM_CODE_SOURCE_RECONFIGURED))
      exit = 1;

    if (type == SMT_MPEGTS)
      ts->packet_mode = 0;

    /* Buffer to disk */
    if ((ts->state > TS_LIVE) || (ts->dobuf && (ts->state == TS_LIVE))) {
      if (ts->packet_mode) {
        sm->sm_time = ts->last_time;
        if (type == SMT_PACKET) {
          timeshift_packet(ts, pkt, 1);
          goto msg_free;
        }
      } else {
        if (ts->ref_time == 0) {
          ts->ref_time = getmonoclock();
          sm->sm_time = 0;
        } else {
          sm->sm_time = getmonoclock() - ts->ref_time;
        }
      }
      streaming_target_deliver2(&ts->wr_queue.sq_st, sm);
    } else {
      if (type == SMT_PACKET) {
        timeshift_packet(ts, pkt, 0);
        tvhtrace("timeshift",
                 "ts %d pkt in  - stream %d type %c pts %10"PRId64
                 " dts %10"PRId64" dur %10d len %6zu",
                 ts->id,
                 pkt->pkt_componentindex,
                 pkt_frametype_to_char(pkt->pkt_frametype),
                 ts_rescale(pkt->pkt_pts, 1000000),
                 ts_rescale(pkt->pkt_dts, 1000000),
                 pkt->pkt_duration,
                 pktbuf_len(pkt->pkt_payload));
      }
msg_free:
      streaming_msg_free(sm);
    }

    /* Exit/Stop */
    if (exit) {
      timeshift_write_exit(ts->rd_pipe.wr);
      ts->state = TS_EXIT;
    }
  }

  pthread_mutex_unlock(&ts->state_mutex);
}
Example #14
File: tsfix.c Project: JPP1/tvheadend
static void
normalize_ts(tsfix_t *tf, tfstream_t *tfs, th_pkt_t *pkt)
{
  int64_t ref, dts, d;

  if(tf->tf_tsref == PTS_UNSET) {
    pkt_ref_dec(pkt);
    return;
  }

  pkt->pkt_dts &= PTS_MASK;
  pkt->pkt_pts &= PTS_MASK;

  /* Subtract the transport wide start offset */
  ref = tfs->tfs_local_ref != PTS_UNSET ? tfs->tfs_local_ref : tf->tf_tsref;
  dts = pkt->pkt_dts - ref;

  if(tfs->tfs_last_dts_norm == PTS_UNSET) {
    if(dts < 0) {
      /* Early packet with negative time stamp, drop those */
      pkt_ref_dec(pkt);
      return;
    }
  } else {
    int64_t low   =  90000; /* one second */
    int64_t upper = 180000; /* two seconds */
    d = dts + tfs->tfs_dts_epoch - tfs->tfs_last_dts_norm;

    if (SCT_ISSUBTITLE(tfs->tfs_type)) {
      /*
       * special conditions for subtitles, because they may be broadcasted
       * with large time gaps
       */
      low   = PTS_MASK / 2; /* more than 13 hours */
      upper = low - 1;
    }

    if (d < 0 || d > low) {

      if(d < -PTS_MASK || d > -PTS_MASK + upper) {

	tfs->tfs_bad_dts++;

	if(tfs->tfs_bad_dts < 5) {
	  tvhlog(LOG_ERR, "parser",
		 "transport stream %s, DTS discontinuity. "
		 "DTS = %" PRId64 ", last = %" PRId64,
		 streaming_component_type2txt(tfs->tfs_type),
		 dts, tfs->tfs_last_dts_norm);
	}
      } else {
	/* DTS wrapped, increase upper bits */
	tfs->tfs_dts_epoch += PTS_MASK + 1;
	tfs->tfs_bad_dts = 0;
      }
    } else {
      tfs->tfs_bad_dts = 0;
    }
  }

  dts += tfs->tfs_dts_epoch;
  tfs->tfs_last_dts_norm = dts;

  if(pkt->pkt_pts != PTS_UNSET) {
    /* Compute delta between PTS and DTS (and watch out for 33 bit wrap) */
    d = (pkt->pkt_pts - pkt->pkt_dts) & PTS_MASK;
    pkt->pkt_pts = dts + d;
  }

  pkt->pkt_dts = dts;

  tsfixprintf("TSFIX: %-12s %d %10"PRId64" %10"PRId64" %10d %zd\n",
	      streaming_component_type2txt(tfs->tfs_type),
	      pkt->pkt_frametype,
	      pkt->pkt_dts,
	      pkt->pkt_pts,
	      pkt->pkt_duration,
	      pktbuf_len(pkt->pkt_payload));

  streaming_message_t *sm = streaming_msg_create_pkt(pkt);
  streaming_target_deliver2(tf->tf_output, sm);
  pkt_ref_dec(pkt);
}
Example #15
/**
 * Write a packet to the muxer
 */
static int
lav_muxer_write_pkt(muxer_t *m, streaming_message_type_t smt, void *data)
{
  int i;
  AVFormatContext *oc;
  AVStream *st;
  AVPacket packet;
  th_pkt_t *pkt = (th_pkt_t*)data, *opkt;
  lav_muxer_t *lm = (lav_muxer_t*)m;
  unsigned char *tofree;
  int rc = 0;

  assert(smt == SMT_PACKET);

  oc = lm->lm_oc;

  if(!oc->nb_streams) {
    tvhlog(LOG_ERR, "libav", "No streams to mux");
    rc = -1;
    goto ret;
  }

  if(!lm->lm_init) {
    tvhlog(LOG_ERR, "libav", "Muxer not initialized correctly");
    rc = -1;
    goto ret;
  }

  for(i=0; i<oc->nb_streams; i++) {
    st = oc->streams[i];

    if(st->id != pkt->pkt_componentindex)
      continue;
    if(pkt->pkt_payload == NULL)
      continue;

    tofree = NULL;
    av_init_packet(&packet);

    if((lm->lm_h264_filter && st->codec->codec_id == AV_CODEC_ID_H264) ||
       (lm->lm_hevc_filter && st->codec->codec_id == AV_CODEC_ID_HEVC)) {
      pkt = avc_convert_pkt(opkt = pkt);
      pkt_ref_dec(opkt);
      if(av_bitstream_filter_filter(st->codec->codec_id == AV_CODEC_ID_H264 ?
                                      lm->lm_h264_filter : lm->lm_hevc_filter,
				    st->codec, 
				    NULL, 
				    &packet.data, 
				    &packet.size, 
				    pktbuf_ptr(pkt->pkt_payload), 
				    pktbuf_len(pkt->pkt_payload), 
				    pkt->pkt_frametype < PKT_P_FRAME) < 0) {
	tvhlog(LOG_WARNING, "libav",  "Failed to filter bitstream");
	if (packet.data != pktbuf_ptr(pkt->pkt_payload))
	  av_free(packet.data);
	break;
      } else {
        tofree = packet.data;
      }
    } else if (st->codec->codec_id == AV_CODEC_ID_AAC) {
      /* remove ADTS header */
      packet.data = pktbuf_ptr(pkt->pkt_payload) + 7;
      packet.size = pktbuf_len(pkt->pkt_payload) - 7;
    } else {
      packet.data = pktbuf_ptr(pkt->pkt_payload);
      packet.size = pktbuf_len(pkt->pkt_payload);
    }

    packet.stream_index = st->index;
 
    packet.pts      = av_rescale_q(pkt->pkt_pts     , mpeg_tc, st->time_base);
    packet.dts      = av_rescale_q(pkt->pkt_dts     , mpeg_tc, st->time_base);
    packet.duration = av_rescale_q(pkt->pkt_duration, mpeg_tc, st->time_base);

    if(pkt->pkt_frametype < PKT_P_FRAME)
      packet.flags |= AV_PKT_FLAG_KEY;

    if((rc = av_interleaved_write_frame(oc, &packet)))
      tvhlog(LOG_WARNING, "libav",  "Failed to write frame");

    if(tofree && tofree != pktbuf_ptr(pkt->pkt_payload))
      av_free(tofree);

    break;
  }

 ret:
  lm->m_errors += (rc != 0);
  pkt_ref_dec(pkt);

  return rc;
}
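All of the snippets above follow the same ownership rule: every code path that obtains a th_pkt_t reference (from pkt_copy_shallow(), from sm->sm_data, or from avc_convert_pkt()) must release it exactly once with pkt_ref_dec(), including early-return error paths; streaming_msg_free() does the same for the packet owned by an SMT_PACKET message. The sketch below is not tvheadend code; it is a minimal, self-contained illustration of the atomic reference-counting pattern such a pkt_ref_dec() implies, and every name in it (my_pkt_t, my_pkt_create, my_pkt_ref_inc, my_pkt_ref_dec) is hypothetical.

/* Minimal sketch (not tvheadend code) of an atomic reference-counted
 * packet.  All names are hypothetical. */
#include <stdatomic.h>
#include <stdlib.h>

typedef struct my_pkt {
  atomic_int refcount;   /* number of live references */
  void      *payload;    /* owned buffer, freed together with the packet */
} my_pkt_t;

static my_pkt_t *
my_pkt_create(size_t len)
{
  my_pkt_t *pkt = calloc(1, sizeof(*pkt));
  if (pkt == NULL)
    return NULL;
  pkt->payload = malloc(len);
  atomic_init(&pkt->refcount, 1);   /* the creator holds the first reference */
  return pkt;
}

static void
my_pkt_ref_inc(my_pkt_t *pkt)
{
  atomic_fetch_add(&pkt->refcount, 1);
}

static void
my_pkt_ref_dec(my_pkt_t *pkt)
{
  /* Whoever drops the last reference frees the packet and its payload. */
  if (atomic_fetch_sub(&pkt->refcount, 1) == 1) {
    free(pkt->payload);
    free(pkt);
  }
}

int
main(void)
{
  my_pkt_t *pkt = my_pkt_create(188);
  if (pkt == NULL)
    return 1;

  my_pkt_ref_inc(pkt);   /* e.g. a queue entry takes its own reference */
  my_pkt_ref_dec(pkt);   /* queue entry released */
  my_pkt_ref_dec(pkt);   /* creator's reference released, packet freed here */
  return 0;
}

Because only the final decrement frees the buffers, several consumers (a muxer, a packet queue, the timeshift writer) can hold the same packet at once; the discipline visible in the examples above is simply that every path, including each early return, still performs its own pkt_ref_dec().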