// Destination and source images have their dimensions inverted. static mblk_t *copy_frame_to_true_yuv_portrait(jbyte* initial_frame, int rotation, int w, int h) { // ms_message("copy_frame_to_true_yuv_inverted : Orientation %i; width %i; height %i", orientation, w, h); MSPicture pict; mblk_t *yuv_block = ms_yuv_buf_alloc(&pict, w, h); bool clockwise = rotation == 90 ? true : false; // Copying Y uint8_t* dsty = pict.planes[0]; uint8_t* srcy = (uint8_t*) initial_frame; rotate_plane(w,h,srcy,dsty,1, clockwise); int uv_w = w/2; int uv_h = h/2; // int uorvsize = uv_w * uv_h; // Copying U uint8_t* srcu = (uint8_t*) initial_frame + (w * h); uint8_t* dstu = pict.planes[2]; rotate_plane(uv_w,uv_h,srcu,dstu, 2, clockwise); // memset(dstu, 128, uorvsize); // Copying V uint8_t* srcv = srcu + 1; uint8_t* dstv = pict.planes[1]; rotate_plane(uv_w,uv_h,srcv,dstv, 2, clockwise); // memset(dstv, 128, uorvsize); return yuv_block; }
/* Convert the decoded AVFrame into a YUV420P mblk_t.  The destination buffer
 * and swscale context are (re)allocated whenever the decoded picture size
 * changes, and MS_FILTER_OUTPUT_FMT_CHANGED is emitted so downstream filters
 * can reconfigure.  The caller receives its own reference (dupmsg). */
static mblk_t *get_as_yuvmsg(MSFilter *f, DecData *s, AVFrame *orig){
	AVCodecContext *ctx=&s->av_context;
	if (s->outbuf.w!=ctx->width || s->outbuf.h!=ctx->height){
		if (s->sws_ctx!=NULL){
			/* size changed: drop the stale scaler and buffer */
			sws_freeContext(s->sws_ctx);
			s->sws_ctx=NULL;
			freemsg(s->yuv_msg);
			s->yuv_msg=NULL;
		}
		ms_message("Getting yuv picture of %ix%i",ctx->width,ctx->height);
		s->yuv_msg=ms_yuv_buf_alloc(&s->outbuf,ctx->width,ctx->height);
		s->outbuf.w=ctx->width;
		s->outbuf.h=ctx->height;
		s->sws_ctx=sws_getContext(ctx->width,ctx->height,ctx->pix_fmt,
			ctx->width,ctx->height,PIX_FMT_YUV420P,SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
		ms_filter_notify_no_arg(f,MS_FILTER_OUTPUT_FMT_CHANGED);
	}
	/* sws_scale() source planes became const-qualified in libswscale 0.9 */
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
	if (sws_scale(s->sws_ctx,(const uint8_t * const *)orig->data,orig->linesize, 0,
		ctx->height, s->outbuf.planes, s->outbuf.strides)<0){
#else
	if (sws_scale(s->sws_ctx,(uint8_t **)orig->data,orig->linesize, 0,
		ctx->height, s->outbuf.planes, s->outbuf.strides)<0){
#endif
		ms_error("%s: error in sws_scale().",f->desc->name);
	}
	/* keep s->yuv_msg for reuse; hand out a duplicate */
	return dupmsg(s->yuv_msg);
}

/* Replace the stored SPS NAL with a copy of the given one. */
static void update_sps(DecData *d, mblk_t *sps){
	if (d->sps)
		freemsg(d->sps);
	d->sps=dupb(sps);
}

/* Replace the stored PPS NAL; pps may be NULL to just invalidate it. */
static void update_pps(DecData *d, mblk_t *pps){
	if (d->pps)
		freemsg(d->pps);
	if (pps)
		d->pps=dupb(pps);
	else
		d->pps=NULL;
}

/* Return TRUE when the incoming SPS differs from the stored one (by size or
 * content).  On change, store the new SPS and invalidate the PPS, since a
 * PPS refers to a specific SPS. */
static bool_t check_sps_change(DecData *d, mblk_t *sps){
	bool_t ret=FALSE;
	if (d->sps){
		ret=(msgdsize(sps)!=msgdsize(d->sps)) || (memcmp(d->sps->b_rptr,sps->b_rptr,msgdsize(sps))!=0);
		if (ret) {
			ms_message("SPS changed ! %i,%i",(int)msgdsize(sps),(int)msgdsize(d->sps));
			update_sps(d,sps);
			update_pps(d,NULL);
		}
	} else {
		ms_message("Receiving first SPS");
		update_sps(d,sps);
	}
	return ret;
}
/* Return a fresh reference to the output YUV buffer, reallocating it when a
 * downstream consumer still holds a reference to the previous message. */
static mblk_t *size_conv_alloc_mblk(SizeConvState *s){
	if (s->om != NULL) {
		int refcount = s->om->b_datap->db_ref;
		if (refcount == 1) {
			/* sole owner: safe to reuse the existing buffer */
			return dupmsg(s->om);
		}
		/* the last msg is still referenced by somebody else: drop ours */
		ms_message("size_conv_alloc_mblk: Somebody still retaining yuv buffer (ref=%i)", refcount);
		freemsg(s->om);
		s->om = NULL;
	}
	s->om = ms_yuv_buf_alloc(&s->outbuf, s->target_vsize.width, s->target_vsize.height);
	return dupmsg(s->om);
}
/* Convert the decoded AVFrame to a YUV420P mblk_t, (re)creating the
 * destination buffer and scaler context whenever the picture size changes.
 * The caller receives its own reference (dupmsg). */
static mblk_t *get_as_yuvmsg(MSFilter *f, DecData *s, AVFrame *orig){
	AVCodecContext *avctx = &s->av_context;
	int size_changed = (s->outbuf.w != avctx->width) || (s->outbuf.h != avctx->height);

	if (size_changed){
		/* drop the stale scaler and buffer before re-creating them */
		if (s->sws_ctx != NULL){
			sws_freeContext(s->sws_ctx);
			s->sws_ctx = NULL;
			freemsg(s->yuv_msg);
			s->yuv_msg = NULL;
		}
		ms_message("Getting yuv picture of %ix%i", avctx->width, avctx->height);
		s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, avctx->width, avctx->height);
		s->outbuf.w = avctx->width;
		s->outbuf.h = avctx->height;
		s->sws_ctx = sws_getContext(avctx->width, avctx->height, avctx->pix_fmt,
			avctx->width, avctx->height, PIX_FMT_YUV420P, SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
	}
	if (sws_scale(s->sws_ctx, (const uint8_t * const *)orig->data, orig->linesize, 0,
			avctx->height, s->outbuf.planes, s->outbuf.strides) < 0){
		ms_error("%s: error in sws_scale().", f->desc->name);
	}
	return dupmsg(s->yuv_msg);
}
/* VP8 decoder process: unpacketize incoming RTP, decode each reassembled
 * frame, and push decoded pictures as YUV420P mblk_t's on the output. */
static void dec_process(MSFilter *f) {
	mblk_t *im;
	DecState *s=(DecState*)f->data;

	while( (im=ms_queue_get(f->inputs[0]))!=0) {
		mblk_t *m;

		dec_unpacketize(f, s, im, &s->q);

		while((m=ms_queue_get(&s->q))!=NULL){
			vpx_codec_err_t err;
			vpx_codec_iter_t iter = NULL;
			vpx_image_t *img;

			err = vpx_codec_decode(&s->codec, m->b_rptr, m->b_wptr - m->b_rptr, NULL, 0);
			if (err) {
				ms_warning("vpx_codec_decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));

				/* rate-limit the error notification to one every 5 seconds */
				if ((f->ticker->time - s->last_error_reported_time)>5000 || s->last_error_reported_time==0) {
					s->last_error_reported_time=f->ticker->time;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
				}
				if (s->first_image_decoded == FALSE) {
					/* if no frames have been decoded yet, do not try to browse decoded frames */
					freemsg(m);
					continue;
				}
			}

			/* browse decoded frames */
			while((img = vpx_codec_get_frame(&s->codec, &iter))) {
				int i,j;

				if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) {
					/* picture size changed: reallocate the output buffer */
					if (s->yuv_msg)
						freemsg(s->yuv_msg);
					s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h);
					s->yuv_width = img->d_w;
					s->yuv_height = img->d_h;
				}

				/* scale/copy frame to destination mblk_t */
				for(i=0; i<3; i++) {
					uint8_t* dest = s->outbuf.planes[i];
					uint8_t* src = img->planes[i];
					/* chroma planes (i>0) are half-height */
					int h = img->d_h >> ((i>0)?1:0);

					for(j=0; j<h; j++) {
						memcpy(dest, src, s->outbuf.strides[i]);
						dest += s->outbuf.strides[i];
						src += img->stride[i];
					}
				}
				ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg));

				if (ms_video_update_average_fps(&s->fps, f->ticker->time)) {
					ms_message("VP8 decoder: Frame size: %dx%d", s->yuv_width, s->yuv_height);
				}
				if (!s->first_image_decoded) {
					s->first_image_decoded = TRUE;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
			}
			freemsg(m);
		}
	}
}
/* Destination and source images may have their dimensions inverted.*/ mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(uint8_t* y, uint8_t * cbcr, int rotation, int w, int h, int y_byte_per_row,int cbcr_byte_per_row, bool_t uFirstvSecond, bool_t down_scale) { MSPicture pict; int uv_w; int uv_h; uint8_t* ysrc; uint8_t* ydst; uint8_t* uvsrc; uint8_t* srcu; uint8_t* dstu; uint8_t* srcv; uint8_t* dstv; mblk_t *yuv_block = ms_yuv_buf_alloc(&pict, w, h); #ifdef ANDROID if (hasNeon == -1) { hasNeon = (android_getCpuFamily() == ANDROID_CPU_FAMILY_ARM && (android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_NEON) != 0); } #endif #ifdef __arm__ if (down_scale && !hasNeon) { ms_error("down scaling by two requires NEON, returning empty block"); return yuv_block; } #endif if (!uFirstvSecond) { unsigned char* tmp = pict.planes[1]; pict.planes[1] = pict.planes[2]; pict.planes[2] = tmp; } uv_w = w/2; uv_h = h/2; if (rotation % 180 == 0) { int i,j; uint8_t* u_dest=pict.planes[1], *v_dest=pict.planes[2]; if (rotation == 0) { #ifdef __arm__ if (hasNeon) { deinterlace_down_scale_neon(y, cbcr, pict.planes[0], u_dest, v_dest, w, h, y_byte_per_row, cbcr_byte_per_row,down_scale); } else #endif { // plain copy for(i=0; i<h; i++) { memcpy(&pict.planes[0][i*w], &y[i*y_byte_per_row], w); } // de-interlace u/v for (i=0; i<uv_h; i++) { for(j=0; j<uv_w; j++) { *u_dest++ = cbcr[cbcr_byte_per_row*i + 2*j]; *v_dest++ = cbcr[cbcr_byte_per_row*i + 2*j + 1]; } } } } else { #ifdef __arm__ if (hasNeon) { deinterlace_down_scale_and_rotate_180_neon(y, cbcr, pict.planes[0], u_dest, v_dest, w, h, y_byte_per_row, cbcr_byte_per_row,down_scale); } else #endif { // 180° y rotation ysrc=y; ydst=&pict.planes[0][h*w-1]; for(i=0; i<h*w; i++) { *ydst-- = *ysrc++; } // 180° rotation + de-interlace u/v uvsrc=&cbcr[uv_h*uv_w*2-2]; for (i=0; i<uv_h*uv_w*2; i++) { *u_dest++ = *uvsrc--; *v_dest++ = *uvsrc--; } } } } else { bool_t clockwise = rotation == 90 ? 
TRUE : FALSE; // Rotate Y #ifdef __arm__ if (hasNeon) { if (clockwise) { rotate_down_scale_plane_neon_clockwise(w,h,y_byte_per_row,(uint8_t*)y,pict.planes[0],down_scale); } else { rotate_down_scale_plane_neon_anticlockwise(w,h,y_byte_per_row,(uint8_t*)y,pict.planes[0], down_scale); } } else #endif { uint8_t* dsty = pict.planes[0]; uint8_t* srcy = (uint8_t*) y; rotate_plane(w,h,y_byte_per_row,srcy,dsty,1, clockwise); } #ifdef __arm__ if (hasNeon) { rotate_down_scale_cbcr_to_cr_cb(uv_w,uv_h, cbcr_byte_per_row/2, (uint8_t*)cbcr, pict.planes[2], pict.planes[1],clockwise,down_scale); } else #endif { // Copying U srcu = cbcr; dstu = pict.planes[1]; rotate_plane(uv_w,uv_h,cbcr_byte_per_row/2,srcu,dstu, 2, clockwise); // Copying V srcv = srcu + 1; dstv = pict.planes[2]; rotate_plane(uv_w,uv_h,cbcr_byte_per_row/2,srcv,dstv, 2, clockwise); } } return yuv_block; }
/* Take a snapshot of the last input frame and encode it to the opened JPEG
 * file using the ffmpeg MJPEG encoder (avcodec_encode_video2 API). */
static void jpg_process(MSFilter *f){
	JpegWriter *s=(JpegWriter*)f->data;
	ms_filter_lock(f);
	if (s->file!=NULL && s->codec!=NULL){
		MSPicture yuvbuf, yuvjpeg;
		mblk_t *m=ms_queue_peek_last(f->inputs[0]);
		if (ms_yuv_buf_init_from_mblk(&yuvbuf,m)==0){
			int error,got_pict;
			/* assume the compressed picture fits in the raw frame size */
			int comp_buf_sz=msgdsize(m);
			uint8_t *comp_buf=(uint8_t*)ms_malloc0(comp_buf_sz);
			mblk_t *jpegm;
			struct SwsContext *sws_ctx;
			struct AVPacket packet;
			AVCodecContext *avctx=avcodec_alloc_context3(s->codec);

			memset(&packet, 0, sizeof(packet));

			avctx->width=yuvbuf.w;
			avctx->height=yuvbuf.h;
			avctx->time_base.num = 1;
			avctx->time_base.den =1;
			/* YUVJ420P = full-range YUV expected by the MJPEG encoder */
			avctx->pix_fmt=AV_PIX_FMT_YUVJ420P;

			error=avcodec_open2(avctx,s->codec,NULL);
			if (error!=0) {
				ms_error("avcodec_open() failed: %i",error);
				cleanup(s,NULL, FALSE);
				av_free(avctx);
				goto end;
			}
			/* convert the studio-range input to the encoder's pixel format */
			sws_ctx=sws_getContext(avctx->width,avctx->height,AV_PIX_FMT_YUV420P,
				avctx->width,avctx->height,avctx->pix_fmt,SWS_FAST_BILINEAR,NULL, NULL, NULL);
			if (sws_ctx==NULL) {
				ms_error(" sws_getContext() failed.");
				cleanup(s,avctx, FALSE);
				goto end;
			}
			jpegm=ms_yuv_buf_alloc (&yuvjpeg,avctx->width, avctx->height);
			/* sws_scale() source planes became const in libswscale 0.9 */
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
			if (sws_scale(sws_ctx,(const uint8_t *const*)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){
#else
			if (sws_scale(sws_ctx,(uint8_t **)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){
#endif
				ms_error("sws_scale() failed.");
				sws_freeContext(sws_ctx);
				cleanup(s,avctx, FALSE);
				freemsg(jpegm);
				goto end;
			}
			sws_freeContext(sws_ctx);

			/* wrap the converted buffer in the reusable AVFrame */
			av_frame_unref(s->pict);
			avpicture_fill((AVPicture*)s->pict,(uint8_t*)jpegm->b_rptr,avctx->pix_fmt,avctx->width,avctx->height);
			packet.data=comp_buf;
			packet.size=comp_buf_sz;
			error=avcodec_encode_video2(avctx, &packet, s->pict, &got_pict);
			if (error<0){
				ms_error("Could not encode jpeg picture.");
			}else{
				if (fwrite(comp_buf,packet.size,1,s->file)>0){
					ms_message("Snapshot done");
				}else{
					ms_error("Error writing snapshot.");
				}
			}
			ms_free(comp_buf);
			cleanup(s,avctx, TRUE);
			freemsg(jpegm);
		}
		goto end;
	}
	end:
	ms_filter_unlock(f);
	ms_queue_flush(f->inputs[0]);
}

/* Filter method table: only the snapshot trigger is exposed. */
static MSFilterMethod jpg_methods[]={
	{	MS_JPEG_WRITER_TAKE_SNAPSHOT, take_snapshot },
	{	0,NULL}
};

#ifndef _MSC_VER

MSFilterDesc ms_jpeg_writer_desc={
	.id=MS_JPEG_WRITER_ID,
	.name="MSJpegWriter",
	.text="Take a video snapshot as jpg file",
	.category=MS_FILTER_OTHER,
	.ninputs=1,
	.noutputs=0,
	.init=jpg_init,
	.process=jpg_process,
	.uninit=jpg_uninit,
	.methods=jpg_methods
};

#else
/* MSVC: positional initialization (no designated initializers) */
MSFilterDesc ms_jpeg_writer_desc={
	MS_JPEG_WRITER_ID,
	"MSJpegWriter",
	"Take a video snapshot as jpg file",
	MS_FILTER_OTHER,
	NULL,
	1,
	0,
	jpg_init,
	NULL,
	jpg_process,
	NULL,
	jpg_uninit,
	jpg_methods
};

#endif

MS_FILTER_DESC_EXPORT(ms_jpeg_writer_desc)
/* VP8 decoder process (RFC 7741 unpacker variant): feed incoming RTP to the
 * unpacker, decode each reassembled frame, send RPSI feedback when golden or
 * altref reference frames are updated, and push decoded pictures downstream. */
static void dec_process(MSFilter *f) {
	DecState *s = (DecState *)f->data;
	mblk_t *im;
	vpx_codec_err_t err;
	vpx_image_t *img;
	vpx_codec_iter_t iter = NULL;
	MSQueue frame;
	MSQueue mtofree_queue;
	Vp8RtpFmtFrameInfo frame_info;

	if (!s->ready){
		ms_queue_flush(f->inputs[0]);
		return;
	}

	ms_filter_lock(f);

	ms_queue_init(&frame);
	ms_queue_init(&mtofree_queue);

	/* Unpack RTP payload format for VP8. */
	vp8rtpfmt_unpacker_feed(&s->unpacker, f->inputs[0]);

	/* Decode unpacked VP8 frames. */
	while (vp8rtpfmt_unpacker_get_frame(&s->unpacker, &frame, &frame_info) == 0) {
		while ((im = ms_queue_get(&frame)) != NULL) {
			err = vpx_codec_decode(&s->codec, im->b_rptr, (unsigned int)(im->b_wptr - im->b_rptr), NULL, 0);
			if ((s->flags & VPX_CODEC_USE_INPUT_FRAGMENTS) && mblk_get_marker_info(im)) {
				/* in fragment mode an empty decode call flushes the frame */
				err = vpx_codec_decode(&s->codec, NULL, 0, NULL, 0);
			}
			if (err) {
				ms_warning("vp8 decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err),
					vpx_codec_error_detail(&s->codec)?vpx_codec_error_detail(&s->codec):"no details");
			}
			ms_queue_put(&mtofree_queue, im);
		}

		/* Get decoded frame */
		if ((img = vpx_codec_get_frame(&s->codec, &iter))) {
			int i, j;
			int reference_updates = 0;

			if (vpx_codec_control(&s->codec, VP8D_GET_LAST_REF_UPDATES, &reference_updates) == 0) {
				/* acknowledge reference-frame updates via RPSI when the
				 * stream carries picture ids */
				if (frame_info.pictureid_present && ((reference_updates & VP8_GOLD_FRAME) || (reference_updates & VP8_ALTR_FRAME))) {
					vp8rtpfmt_send_rpsi(&s->unpacker, frame_info.pictureid);
				}
			}

			if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) {
				/* picture size changed: reallocate the output buffer and
				 * notify downstream filters */
				if (s->yuv_msg) freemsg(s->yuv_msg);
				s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h);
				ms_message("MSVp8Dec: video is %ix%i", img->d_w, img->d_h);
				s->yuv_width = img->d_w;
				s->yuv_height = img->d_h;
				ms_filter_notify_no_arg(f, MS_FILTER_OUTPUT_FMT_CHANGED);
			}

			/* scale/copy frame to destination mblk_t */
			for (i = 0; i < 3; i++) {
				uint8_t *dest = s->outbuf.planes[i];
				uint8_t *src = img->planes[i];
				/* chroma planes (i>0) are half-height */
				int h = img->d_h >> ((i > 0) ? 1 : 0);

				for (j = 0; j < h; j++) {
					memcpy(dest, src, s->outbuf.strides[i]);
					dest += s->outbuf.strides[i];
					src += img->stride[i];
				}
			}
			ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg));
			ms_average_fps_update(&s->fps, (uint32_t)f->ticker->time);
			if (!s->first_image_decoded) {
				s->first_image_decoded = TRUE;
				ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
			}
		}

		while ((im = ms_queue_get(&mtofree_queue)) != NULL) {
			freemsg(im);
		}
	}
/* Decode a JPEG buffer and return it as a YUV420P mblk_t scaled to reqsize.
 * Three implementations: ffmpeg MJPEG decoder, CoreGraphics (iOS, which
 * overwrites reqsize with the decoded image size), or a stub returning NULL. */
static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
#ifndef NO_FFMPEG
	AVCodecContext av_context;
	int got_picture=0;
	/* NOTE(review): stack AVFrame passed to avcodec_decode_video2 without
	 * being initialized, and got_picture is not checked after a successful
	 * return — verify against the ffmpeg version in use. */
	AVFrame orig;
	mblk_t *ret;
	struct SwsContext *sws_ctx;
	AVPacket pkt;
	MSPicture dest;
	AVCodec *codec=avcodec_find_decoder(CODEC_ID_MJPEG);

	if (codec==NULL){
		ms_error("Could not find MJPEG decoder in ffmpeg.");
		return NULL;
	}

	avcodec_get_context_defaults(&av_context);
	if (avcodec_open(&av_context,codec)<0){
		ms_error("jpeg2yuv: avcodec_open failed");
		return NULL;
	}
	av_init_packet(&pkt);
	pkt.data=jpgbuf;
	pkt.size=bufsize;

	if (avcodec_decode_video2(&av_context,&orig,&got_picture,&pkt) < 0) {
		ms_error("jpeg2yuv: avcodec_decode_video failed");
		avcodec_close(&av_context);
		return NULL;
	}
	ret=ms_yuv_buf_alloc(&dest, reqsize->width,reqsize->height);
	/* not using SWS_FAST_BILINEAR because it doesn't play well with
	 * av_context.pix_fmt set to PIX_FMT_YUVJ420P by jpeg decoder */
	sws_ctx=sws_getContext(av_context.width,av_context.height,av_context.pix_fmt,
		reqsize->width,reqsize->height,PIX_FMT_YUV420P,SWS_BILINEAR,
		NULL, NULL, NULL);
	if (sws_ctx==NULL) {
		ms_error("jpeg2yuv: ms_sws_getContext() failed.");
		avcodec_close(&av_context);
		freemsg(ret);
		return NULL;
	}
	/* sws_scale() source planes became const in libswscale 0.9 */
#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
	if (sws_scale(sws_ctx,(const uint8_t* const *)orig.data,orig.linesize,0,av_context.height,dest.planes,dest.strides)<0){
#else
	if (sws_scale(sws_ctx,(uint8_t**)orig.data,orig.linesize,0,av_context.height,dest.planes,dest.strides)<0){
#endif
		ms_error("jpeg2yuv: ms_sws_scale() failed.");
		sws_freeContext(sws_ctx);
		avcodec_close(&av_context);
		freemsg(ret);
		return NULL;
	}
	sws_freeContext(sws_ctx);
	avcodec_close(&av_context);
	return ret;
#elif TARGET_OS_IPHONE
	MSPicture dest;
	CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, jpgbuf, bufsize, NULL);
	// use the data provider to get a CGImage; release the data provider
	CGImageRef image = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, FALSE,
		kCGRenderingIntentDefault);
	CGDataProviderRelease(dataProvider);
	/* output size follows the decoded image, not the requested size */
	reqsize->width = CGImageGetWidth(image);
	reqsize->height = CGImageGetHeight(image);

	uint8_t* tmp = (uint8_t*) malloc(reqsize->width * reqsize->height * 4);
	mblk_t* ret=ms_yuv_buf_alloc(&dest, reqsize->width, reqsize->height);

	/* render the JPEG into an RGBA scratch buffer */
	CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
	CGContextRef imageContext =
		CGBitmapContextCreate(tmp, reqsize->width, reqsize->height, 8, reqsize->width*4, colourSpace, kCGImageAlphaNoneSkipLast);
	CGColorSpaceRelease(colourSpace);

	// draw the image to the context, release it
	CGContextDrawImage(imageContext, CGRectMake(0, 0, reqsize->width, reqsize->height), image);
	CGImageRelease(image);

	/* convert tmp/RGB -> ret/YUV */
	for(int y=0; y<reqsize->height; y++) {
		for(int x=0; x<reqsize->width; x++) {
			uint8_t r = tmp[y * reqsize->width * 4 + x * 4 + 0];
			uint8_t g = tmp[y * reqsize->width * 4 + x * 4 + 1];
			uint8_t b = tmp[y * reqsize->width * 4 + x * 4 + 2];

			// Y
			*dest.planes[0]++ = (uint8_t)((0.257 * r) + (0.504 * g) + (0.098 * b) + 16);

			// U/V subsampling: average each 2x2 RGB block
			if ((y % 2==0) && (x%2==0)) {
				uint32_t r32=0, g32=0, b32=0;
				for(int i=0; i<2; i++) {
					for(int j=0; j<2; j++) {
						r32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 0];
						g32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 1];
						b32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 2];
					}
				}
				r32 = (uint32_t)(r32 * 0.25f); g32 = (uint32_t)(g32 * 0.25f); b32 = (uint32_t) (b32 * 0.25f);

				// U
				*dest.planes[1]++ = (uint8_t)(-(0.148 * r32) - (0.291 * g32) + (0.439 * b32) + 128);
				// V
				*dest.planes[2]++ = (uint8_t)((0.439 * r32) - (0.368 * g32) - (0.071 * b32) + 128);
			}
		}
	}
	free(tmp);
	return ret;
#else
	return NULL;
#endif
}

/* Load a JPEG file from disk and return it as a YUV420P mblk_t (see
 * jpeg2yuv()).  reqsize is the requested output size; on iOS it is
 * overwritten with the decoded image size.  Returns NULL on any error. */
mblk_t *ms_load_jpeg_as_yuv(const char *jpgpath, MSVideoSize *reqsize){
#if defined(WIN32)
	mblk_t *m=NULL;
	DWORD st_sizel;
	DWORD st_sizeh;
	uint8_t *jpgbuf;
	DWORD err;
	HANDLE fd;

#ifdef UNICODE
	WCHAR wUnicode[1024];
	MultiByteToWideChar(CP_UTF8, 0, jpgpath, -1, wUnicode, 1024);
	fd = CreateFile(wUnicode, GENERIC_READ, FILE_SHARE_READ, NULL,
		OPEN_EXISTING, 0, NULL);
#else
	fd = CreateFile(jpgpath, GENERIC_READ, FILE_SHARE_READ, NULL,
		OPEN_EXISTING, 0, NULL);
#endif
	if (fd==INVALID_HANDLE_VALUE){
		ms_error("Failed to open %s",jpgpath);
		return NULL;
	}
	st_sizel=0;
	st_sizeh=0;
	/* reject empty files and files >= 4GB (high dword non-zero) */
	st_sizel = GetFileSize(fd, &st_sizeh);
	if (st_sizeh>0 || st_sizel<=0)
	{
		CloseHandle(fd);
		ms_error("Can't load file %s",jpgpath);
		return NULL;
	}
	jpgbuf=(uint8_t*)ms_malloc0(st_sizel);
	if (jpgbuf==NULL)
	{
		CloseHandle(fd);
		ms_error("Cannot allocate buffer for %s",jpgpath);
		return NULL;
	}
	err=0;
	ReadFile(fd, jpgbuf, st_sizel, &err, NULL) ;
	if (err!=st_sizel){
		  ms_error("Could not read as much as wanted !");
	}
	m=jpeg2yuv(jpgbuf,st_sizel,reqsize);
	ms_free(jpgbuf);
	if (m==NULL)
	{
		CloseHandle(fd);
		ms_error("Cannot load image from buffer for %s",jpgpath);
		return NULL;
	}
	CloseHandle(fd);
	return m;
#else
	mblk_t *m=NULL;
	struct stat statbuf;
	uint8_t *jpgbuf;
	int err;
	int fd=open(jpgpath,O_RDONLY);
	if (fd!=-1){
		fstat(fd,&statbuf);
		if (statbuf.st_size<=0) {
			close(fd);
			ms_error("Cannot load %s",jpgpath);
			return NULL;
		}
		/* extra padding required by the ffmpeg bitstream reader */
		jpgbuf=(uint8_t*)ms_malloc0(statbuf.st_size + FF_INPUT_BUFFER_PADDING_SIZE);
		if (jpgbuf==NULL)
		{
			close(fd);
			ms_error("Cannot allocate buffer for %s",jpgpath);
			return NULL;
		}
		err=read(fd,jpgbuf,statbuf.st_size);
		if (err!=statbuf.st_size){
			ms_error("Could not read as much as wanted: %i<>%li !",err,(long)statbuf.st_size);
		}
		m=jpeg2yuv(jpgbuf,statbuf.st_size,reqsize);
		ms_free(jpgbuf);
		if (m==NULL) {
			close(fd);
			ms_error("Cannot load image from buffer for %s",jpgpath);
			return NULL;
		}
	}else{
		ms_error("Cannot load %s",jpgpath);
		return NULL;
	}
	close(fd);
	return m;
#endif
}
/* Render the main video stream (input 0) and the optional local preview
 * (input 1) into the X11/Xv window, handling user window resize, autofit
 * and mirroring.  Frames are skipped when the ticker reports we are late. */
static void x11video_process(MSFilter *f){
	X11Video *obj=(X11Video*)f->data;
	mblk_t *inm;
	int update=0;
	MSPicture lsrc={0};
	MSPicture src={0};
	MSRect mainrect,localrect;
	bool_t precious=FALSE;
	bool_t local_precious=FALSE;
	XWindowAttributes wa;
	MSTickerLateEvent late_info;

	ms_filter_lock(f);

	if ((obj->window_id == 0) || (x11_error == TRUE)) goto end;

	XGetWindowAttributes(obj->display,obj->window_id,&wa);
	if (x11_error == TRUE) {
		ms_error("Could not get window attributes for window %lu", obj->window_id);
		goto end;
	}
	if (wa.width!=obj->wsize.width || wa.height!=obj->wsize.height){
		/* the user resized the window: remember it and repaint background */
		ms_warning("Resized to %ix%i", wa.width,wa.height);
		obj->wsize.width=wa.width;
		obj->wsize.height=wa.height;
		XClearWindow(obj->display,obj->window_id);
	}

	/* skip rendering entirely when the graph is running late */
	ms_ticker_get_last_late_tick(f->ticker, &late_info);
	if(late_info.current_late_ms > 100) {
		ms_warning("Dropping frames because we're late");
		goto end;
	}

	if (!obj->show) {
		goto end;
	}
	if (!obj->ready){
		goto end;
	}

	if (f->inputs[0]!=NULL && (inm=ms_queue_peek_last(f->inputs[0]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&src,inm)==0){
			MSVideoSize newsize;
			newsize.width=src.w;
			newsize.height=src.h;
			precious=mblk_get_precious_flag(inm);
			if (!ms_video_size_equal(newsize,obj->vsize) ) {
				/* stream size changed: re-prepare the Xv resources */
				ms_message("received size is %ix%i",newsize.width,newsize.height);
				obj->vsize=newsize;
				if (obj->autofit){
					MSVideoSize new_window_size;
					static const MSVideoSize min_size=MS_VIDEO_SIZE_QVGA;
					/*don't resize less than QVGA, it is too small*/
					if (min_size.width*min_size.height>newsize.width*newsize.height){
						new_window_size.width=newsize.width*2;
						new_window_size.height=newsize.height*2;
					}else new_window_size=newsize;
					obj->wsize=new_window_size;
					ms_message("autofit: new window size should be %ix%i",new_window_size.width,new_window_size.height);
					XResizeWindow(obj->display,obj->window_id,new_window_size.width,new_window_size.height);
					XSync(obj->display,FALSE);
				}
				x11video_unprepare(f);
				x11video_prepare(f);
				if (!obj->ready) goto end;
			}
		}
		update=1;
	}
	/*process last video message for local preview*/
	if (obj->corner!=-1 && f->inputs[1]!=NULL && (inm=ms_queue_peek_last(f->inputs[1]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&lsrc,inm)==0){
			obj->lsize.width=lsrc.w;
			obj->lsize.height=lsrc.h;
			local_precious=mblk_get_precious_flag(inm);
			update=1;
		}
	}

	ms_layout_compute(obj->vsize, obj->vsize,obj->lsize,obj->corner,obj->scale_factor,&mainrect,&localrect);

	if (lsrc.w!=0 && obj->corner!=-1){
		/* first reduce the local preview image into a temporary image*/
		if (obj->local_msg==NULL){
			obj->local_msg=ms_yuv_buf_alloc(&obj->local_pic,localrect.w,localrect.h);
		}
		if (obj->sws2==NULL){
			obj->sws2=ms_scaler_create_context(lsrc.w,lsrc.h,MS_YUV420P,localrect.w,localrect.h,MS_YUV420P,
				MS_SCALER_METHOD_BILINEAR);
		}
		ms_scaler_process(obj->sws2,lsrc.planes,lsrc.strides,obj->local_pic.planes,obj->local_pic.strides);
		if (!local_precious) ms_yuv_buf_mirror(&obj->local_pic);
	}

	if (update && src.w!=0){
		ms_yuv_buf_copy(src.planes,src.strides,obj->fbuf.planes,obj->fbuf.strides,obj->vsize);
		if (obj->mirror && !precious) ms_yuv_buf_mirror(&obj->fbuf);
	}

	/*copy resized local view into a corner:*/
	if (update && obj->local_msg!=NULL && obj->corner!=-1){
		MSPicture corner=obj->fbuf;
		MSVideoSize roi;
		roi.width=obj->local_pic.w;
		roi.height=obj->local_pic.h;
		corner.w=obj->local_pic.w;
		corner.h=obj->local_pic.h;
		/* offset each plane to the corner rectangle (chroma at half res) */
		corner.planes[0]+=localrect.x+(localrect.y*corner.strides[0]);
		corner.planes[1]+=(localrect.x/2)+((localrect.y/2)*corner.strides[1]);
		corner.planes[2]+=(localrect.x/2)+((localrect.y/2)*corner.strides[2]);
		corner.planes[3]=0;
		ms_yuv_buf_copy(obj->local_pic.planes,obj->local_pic.strides,
				corner.planes,corner.strides,roi);
	}
	if (update){
		MSRect rect;
		/* letterbox the video inside the current window size */
		ms_layout_center_rectangle(obj->wsize,obj->vsize,&rect);
		//ms_message("XvShmPutImage() %ix%i --> %ix%i",obj->fbuf.w,obj->fbuf.h,obj->wsize.width,obj->wsize.height);
		XvShmPutImage(obj->display,obj->port,obj->window_id,obj->gc, obj->xv_image,
			0,0,obj->fbuf.w,obj->fbuf.h,
			rect.x,rect.y,rect.w,rect.h,TRUE);
		XSync(obj->display,FALSE);
	}

end:
	ms_filter_unlock(f);
	if (f->inputs[0]!=NULL)
		ms_queue_flush(f->inputs[0]);
	if (f->inputs[1]!=NULL)
		ms_queue_flush(f->inputs[1]);
}
/*
static void rotate_plane_with_stripes(int w, int h, uint8_t* src, int src_stride, uint8_t* dst, int dst_stride, int step) {
	int alpha = (w-h) / 2; // the stripe
	dst += alpha + h;
	src += step * alpha;

	int xmax = h*step;
	for (int y=0; y<h; y++) {
		uint8_t* dst2 = dst;
		for (int x=0; x<xmax; x+=step) {
			*dst2 = src[x];
			dst2 += dst_stride;
		}
		dst--;
		src += src_stride;
	}
}
*/

/* Copy a camera frame (planar Y followed by interleaved chroma) into a
 * freshly allocated planar YUV buffer.  Only the 0° and 180° cases are
 * handled (the 90° stripe code above is disabled).  Note that U is written
 * to planes[2] and V to planes[1] — presumably to match the source chroma
 * ordering; confirm against callers. */
static mblk_t *copy_frame_to_true_yuv(jbyte* initial_frame, int rotation, int w, int h) {
	//ms_message("Orientation %i; width %i; heigth %i", orientation, w, h);
	MSPicture pict;
	mblk_t *yuv_block = ms_yuv_buf_alloc(&pict, w, h);

	int ysize = w * h;

	// Copying Y
	uint8_t* srcy = (uint8_t*) initial_frame;
	uint8_t* dsty = pict.planes[0];
	switch (rotation) {
		case 180: // -->
			/* reverse the whole luma plane */
			for (int i=0; i < ysize; i++) {
				*(dsty+i) = *(srcy + ysize - i - 1);
			}
			break;
/*		case 90: // <--
			memset(dsty, 16, ysize); // background for stripes
			rotate_plane_with_stripes(w,h,srcy,w,dsty,w, 1);
			break;
*/
		case 0: // ^^^
			memcpy(pict.planes[0],srcy,ysize);
			break;
		default:
			ms_error("msandroidvideo.cpp: bad rotation %i", rotation);
			break;
	}

	uint8_t* dstu = pict.planes[2];
	uint8_t* dstv = pict.planes[1];
	int uorvsize = ysize / 4;
	uint8_t* srcuv = (uint8_t*) initial_frame + ysize;
	switch (rotation) {
/*		case 1: {
			memset(dstu, 128, uorvsize);
			memset(dstv, 128, uorvsize);
			int uvw = w/2;
			int uvh = h/2;
			rotate_plane_with_stripes(uvw,uvh,srcuv,w,dstu,uvw, 2);
			rotate_plane_with_stripes(uvw,uvh,srcuv +1,w,dstv,uvw, 2);
			break;
		}*/
		case 0:
			/* de-interlace the chroma pairs */
			for (int i = 0; i < uorvsize; i++) {
				*(dstu++) = *(srcuv++); // Copying U
				*(dstv++) = *(srcuv--); // Copying V
			}
			break;
		case 180:
			/* NOTE(review): srcuv is advanced to one past the end of the
			 * chroma buffer, so the first read below appears to be one byte
			 * out of bounds, and the reversed walk swaps which interleaved
			 * byte lands in U vs V — verify the intended start offset. */
			srcuv += 2 * uorvsize;
			for (int i = 0; i < uorvsize; i++) {
				*(dstu++) = *(srcuv--); // Copying U
				*(dstv++) = *(srcuv--); // Copying V
			}
			break;
		default:
			ms_error("msandroidvideo.cpp: bad rotation %i", rotation);
			break;
	}
	return yuv_block;
}
/* Pull H.264 NAL units from the input queue, decode them with OpenH264 and
 * push decoded pictures as YUV420P mblk_t's on the output.  Out-of-band
 * SPS/PPS (from sprop-parameter-sets) are injected before the first IDR. */
void MSOpenH264Decoder::feed()
{
	if (!isInitialized()) {
		ms_error("MSOpenH264Decoder::feed(): not initialized");
		ms_queue_flush(mFilter->inputs[0]);
		return;
	}

	MSQueue nalus;
	ms_queue_init(&nalus);
	mblk_t *im;
	while ((im = ms_queue_get(mFilter->inputs[0])) != NULL) {
		if ((getIDRPicId() == 0) && (mSPS != 0) && (mPPS != 0)) {
			// Push the sps/pps given in sprop-parameter-sets if any
			mblk_set_timestamp_info(mSPS, mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(mPPS, mblk_get_timestamp_info(im));
			rfc3984_unpack(mUnpacker, mSPS, &nalus);
			rfc3984_unpack(mUnpacker, mPPS, &nalus);
			mSPS = 0;
			mPPS = 0;
		}
		rfc3984_unpack(mUnpacker, im, &nalus);
		if (!ms_queue_empty(&nalus)) {
			void * pData[3] = { 0 };
			SBufferInfo sDstBufInfo = { 0 };
			int len = nalusToFrame(&nalus);

			DECODING_STATE state = mDecoder->DecodeFrame2(mBitstream, len, (uint8_t**)pData, &sDstBufInfo);
			if (state != dsErrorFree) {
				ms_error("OpenH264 decoder: DecodeFrame2 failed: 0x%x", state);
				/* rate-limit the error notification to one every 5 seconds */
				if (((mFilter->ticker->time - mLastErrorReportTime) > 5000) || (mLastErrorReportTime == 0)) {
					mLastErrorReportTime = mFilter->ticker->time;
					ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_DECODING_ERRORS);
				}
			}
			/* iBufferStatus == 1 means a decoded picture is available */
			if (sDstBufInfo.iBufferStatus == 1) {
				uint8_t * pDst[3] = { 0 };
				pDst[0] = (uint8_t *)pData[0];
				pDst[1] = (uint8_t *)pData[1];
				pDst[2] = (uint8_t *)pData[2];

				// Update video size and (re)allocate YUV buffer if needed
				if ((mWidth != sDstBufInfo.UsrData.sSystemBuffer.iWidth)
					|| (mHeight != sDstBufInfo.UsrData.sSystemBuffer.iHeight)) {
					if (mYUVMsg) {
						freemsg(mYUVMsg);
					}
					mWidth = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
					mHeight = sDstBufInfo.UsrData.sSystemBuffer.iHeight;
					mYUVMsg = ms_yuv_buf_alloc(&mOutbuf, mWidth, mHeight);
					ms_filter_notify_no_arg(mFilter,MS_FILTER_OUTPUT_FMT_CHANGED);
				}

				// Scale/copy frame to destination mblk_t
				for (int i = 0; i < 3; i++) {
					uint8_t *dst = mOutbuf.planes[i];
					uint8_t *src = pDst[i];
					/* chroma planes (i>0) are half-height; OpenH264 exposes
					 * one luma stride (index 0) and one chroma stride (index 1) */
					int h = mHeight >> (( i > 0) ? 1 : 0);

					for(int j = 0; j < h; j++) {
						memcpy(dst, src, mOutbuf.strides[i]);
						dst += mOutbuf.strides[i];
						src += sDstBufInfo.UsrData.sSystemBuffer.iStride[(i == 0) ? 0 : 1];
					}
				}
				ms_queue_put(mFilter->outputs[0], dupmsg(mYUVMsg));

				// Update average FPS
				if (ms_average_fps_update(&mFPS, mFilter->ticker->time)) {
					ms_message("OpenH264 decoder: Frame size: %dx%d", mWidth, mHeight);
				}

				// Notify first decoded image
				if (!mFirstImageDecoded) {
					mFirstImageDecoded = true;
					ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}

#if MSOPENH264_DEBUG
				ms_message("OpenH264 decoder: IDR pic id: %d, Frame num: %d, Temporal id: %d, VCL NAL: %d", getIDRPicId(), getFrameNum(), getTemporalId(), getVCLNal());
#endif
			}
		}
/* Allocate the test-pattern frame buffer, clear it and record the start time. */
static void mire_preprocess(MSFilter *f){
	MireData *data = (MireData*)f->data;
	data->pic = ms_yuv_buf_alloc(&data->pict, data->vsize.width, data->vsize.height);
	/* blank the whole allocated frame (b_wptr - b_rptr is its size) */
	memset(data->pic->b_rptr, 0, data->pic->b_wptr - data->pic->b_rptr);
	data->starttime = f->ticker->time;
}
/* H.264 decoder process using Android MediaCodec: unpack RTP (RFC 3984),
 * feed whole frames to the codec, and convert decoded output buffers into
 * YUV420P mblk_t's.  Requests a PLI when unpacking reports packet loss. */
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	MSPicture pic = {0};
	mblk_t *im,*om = NULL;
	bool_t need_reinit=FALSE;
	bool_t request_pli=FALSE;
	MSQueue nalus;
	ms_queue_init(&nalus);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (d->packet_num==0 && d->sps && d->pps){
			/* inject out-of-band SPS/PPS before the first packet */
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker, d->sps, &nalus);
			rfc3984_unpack(&d->unpacker, d->pps, &nalus);
			d->sps=NULL;
			d->pps=NULL;
		}
		if(rfc3984_unpack(&d->unpacker,im,&nalus) <0){
			request_pli=TRUE;
		}
		if (!ms_queue_empty(&nalus)){
			AMediaCodecBufferInfo info;
			int size;
			int width = 0, height = 0, color = 0;
			uint8_t *buf=NULL;
			size_t bufsize;
			ssize_t iBufidx, oBufidx;

			size=nalusToFrame(d,&nalus,&need_reinit);

			if (need_reinit) {
				//In case of rotation, the decoder needs to flushed in order to restart with the new video size
				AMediaCodec_flush(d->codec);
			}

			/* submit the reassembled frame to the codec */
			iBufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
			if (iBufidx >= 0) {
				buf = AMediaCodec_getInputBuffer(d->codec, iBufidx, &bufsize);
				if(buf == NULL) {
					break;
				}
				if((size_t)size > bufsize) {
					ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i",size,(int) bufsize);
				} else {
					memcpy(buf,d->bitstream,(size_t)size);
					AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, TIMEOUT_US, 0);
				}
			}

			/* retrieve a decoded picture, if any */
			oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US);
			if(oBufidx >= 0){
				AMediaFormat *format;
				buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);
				if(buf == NULL){
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
					break;
				}
				format = AMediaCodec_getOutputFormat(d->codec);
				if(format != NULL){
					AMediaFormat_getInt32(format, "width", &width);
					AMediaFormat_getInt32(format, "height", &height);
					AMediaFormat_getInt32(format, "color-format", &color);

					d->vsize.width=width;
					d->vsize.height=height;
					AMediaFormat_delete(format);
				}
			}

			if(buf != NULL){
				//YUV
				if(width != 0 && height != 0 ){
					if(color == 19) {
						/* COLOR_FormatYUV420Planar: straight plane copy */
						int ysize = width*height;
						int usize = ysize/4;
						om=ms_yuv_buf_alloc(&pic,width,height);
						memcpy(pic.planes[0],buf,ysize);
						memcpy(pic.planes[1],buf+ysize,usize);
						memcpy(pic.planes[2],buf+ysize+usize,usize);
					} else {
						/* assume semi-planar: de-interleave the CbCr plane */
						uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
						om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
					}

					if (!d->first_image_decoded) {
						ms_message("First frame decoded %ix%i",width,height);
						d->first_image_decoded = true;
						ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
					}
					ms_queue_put(f->outputs[0], om);
				}
				/* FIX: was `oBufidx > 0`, which never released output buffer
				 * index 0 (AMediaCodec_dequeueOutputBuffer returns valid
				 * indices >= 0), leaking that codec buffer. */
				if(oBufidx >= 0) {
					AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
				}
			}
		}
		d->packet_num++;
	}

	if (d->avpf_enabled && request_pli) {
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
}
/* Take a snapshot of the last input frame and encode it to the opened JPEG
 * file using the legacy ffmpeg API (avcodec_encode_video). */
static void jpg_process(MSFilter *f){
	JpegWriter *s=(JpegWriter*)f->data;
	if (s->file!=NULL && s->codec!=NULL){
		MSPicture yuvbuf, yuvjpeg;
		mblk_t *m=ms_queue_peek_last(f->inputs[0]);
		if (ms_yuv_buf_init_from_mblk(&yuvbuf,m)==0){
			int error;
			/* assume the compressed picture fits in the raw frame size */
			int comp_buf_sz=msgdsize(m);
			/* FIX: was alloca(comp_buf_sz) — a frame-sized stack allocation
			 * risks stack overflow; use heap allocation like the newer
			 * jpg_process variant, freed on every exit path below. */
			uint8_t *comp_buf=(uint8_t*)ms_malloc0(comp_buf_sz);
			AVFrame pict;
			mblk_t *jpegm;
			struct SwsContext *sws_ctx;
			AVCodecContext *avctx=avcodec_alloc_context();

			avctx->width=yuvbuf.w;
			avctx->height=yuvbuf.h;
			avctx->time_base.num = 1;
			avctx->time_base.den =1;
			/* YUVJ420P = full-range YUV expected by the MJPEG encoder */
			avctx->pix_fmt=PIX_FMT_YUVJ420P;

			error=avcodec_open(avctx,s->codec);
			if (error!=0) {
				ms_error("avcodec_open() failed: %i",error);
				cleanup(s,NULL);
				av_free(avctx);
				ms_free(comp_buf);
				/* FIX: was `return`, which skipped flushing the input queue */
				goto end;
			}
			/* convert the studio-range input to the encoder's pixel format */
			sws_ctx=sws_getContext(avctx->width,avctx->height,PIX_FMT_YUV420P,
				avctx->width,avctx->height,avctx->pix_fmt,SWS_FAST_BILINEAR,NULL, NULL, NULL);
			if (sws_ctx==NULL) {
				ms_error(" sws_getContext() failed.");
				cleanup(s,avctx);
				ms_free(comp_buf);
				goto end;
			}
			jpegm=ms_yuv_buf_alloc (&yuvjpeg,avctx->width, avctx->height);
			if (sws_scale(sws_ctx,(const uint8_t *const*)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){
				ms_error("sws_scale() failed.");
				sws_freeContext(sws_ctx);
				cleanup(s,avctx);
				freemsg(jpegm);
				ms_free(comp_buf);
				goto end;
			}
			sws_freeContext(sws_ctx);

			/* wrap the converted buffer in an AVFrame and encode */
			avcodec_get_frame_defaults(&pict);
			avpicture_fill((AVPicture*)&pict,(uint8_t*)jpegm->b_rptr,avctx->pix_fmt,avctx->width,avctx->height);
			error=avcodec_encode_video(avctx, (uint8_t*)comp_buf,comp_buf_sz, &pict);
			if (error<0){
				ms_error("Could not encode jpeg picture.");
			}else{
				fwrite(comp_buf,error,1,s->file);
				ms_message("Snapshot done");
			}
			cleanup(s,avctx);
			freemsg(jpegm);
			ms_free(comp_buf);
		}
		goto end;
	}
	end:
	ms_queue_flush(f->inputs[0]);
}