Example 1
GF_Err MCDec_InitMpeg4Decoder(MCDec *ctx) 
{
    char *dsi_data = NULL;
    u32 dsi_data_size = 0;

    if (!ctx->esd->decoderConfig->decoderSpecificInfo) {
        ctx->esd->decoderConfig->decoderSpecificInfo = (GF_DefaultDescriptor *) gf_odf_desc_new(GF_ODF_DSI_TAG);
    }
    
    if (ctx->esd->decoderConfig->decoderSpecificInfo->data) {
        GF_M4VDecSpecInfo vcfg;
        GF_BitStream *bs;

        /* parse the MPEG-4 Visual decoder config to retrieve the coded size */
        gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
        ctx->width = vcfg.width;
        ctx->height = vcfg.height;

        if (ctx->esd->slConfig) {
            ctx->esd->slConfig->predefined = 2;
        }

        /* serialize the ESD behind a 4-byte placeholder, patched below */
        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        gf_bs_write_u32(bs, 0);
        gf_odf_desc_write_bs((GF_Descriptor *) ctx->esd, bs);
        gf_bs_get_content(bs, &dsi_data, &dsi_data_size);
        gf_bs_del(bs);

        ctx->mime = "video/mp4v-es";

        char *esds = (char *)malloc(dsi_data_size);
        if (!esds) {
            gf_free(dsi_data);
            return GF_OUT_OF_MEM;
        }
        memcpy(esds, dsi_data, dsi_data_size);

        /* overwrite the placeholder with a 0x00000001 start code before
           handing the buffer to MediaCodec as csd-0 */
        esds[0] = 0x00;
        esds[1] = 0x00;
        esds[2] = 0x00;
        esds[3] = 0x01;

        AMediaFormat_setBuffer(ctx->format, "csd-0", esds, dsi_data_size);

        gf_free(dsi_data);
        return GF_OK;
    } 
    return GF_NOT_SUPPORTED;
}
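
The serialization pattern above (a placeholder word, the encoded ESD, then the 0x00000001 prefix patched in afterwards) can be folded into one helper. The following is a hypothetical sketch, not part of the original source; it reuses the GPAC bitstream and ODF calls shown above and writes the start code directly instead of overwriting a placeholder.

/* Hypothetical helper: build an MPEG-4 csd-0 style buffer as a 0x00000001
 * prefix followed by the serialized ESD. The caller owns *out_csd and must
 * release it with gf_free(). */
static GF_Err build_mp4v_csd(GF_ESD *esd, char **out_csd, u32 *out_size)
{
    GF_Err e;
    GF_BitStream *bs;
    if (!esd || !out_csd || !out_size) return GF_BAD_PARAM;

    bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
    if (!bs) return GF_OUT_OF_MEM;

    gf_bs_write_u32(bs, 1);                 /* bytes 0x00 0x00 0x00 0x01 */
    e = gf_odf_desc_write_bs((GF_Descriptor *) esd, bs);
    if (!e) gf_bs_get_content(bs, out_csd, out_size);
    gf_bs_del(bs);
    return e;
}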
Example 2
GF_EXPORT
GF_Err gf_odf_desc_write(GF_Descriptor *desc, char **outEncDesc, u32 *outSize)
{
	GF_Err e;
	GF_BitStream *bs;
	if (!desc || !outEncDesc || !outSize) return GF_BAD_PARAM;
	*outEncDesc = NULL;
	*outSize = 0;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	if (!bs) return GF_OUT_OF_MEM;

	e = gf_odf_desc_write_bs(desc, bs);

	//then get the content from our bitstream
	gf_bs_get_content(bs, outEncDesc, outSize);
	gf_bs_del(bs);
	return e;
}
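
A minimal usage sketch for the function above (hypothetical caller, not part of the original source); it assumes the GPAC convention that the buffer returned through outEncDesc is owned by the caller and released with gf_free(), and that the public ODF API is available via <gpac/mpeg4_odf.h>.

#include <stdio.h>
#include <gpac/mpeg4_odf.h>

/* Hypothetical example: serialize an ESD, report its encoded size, release the buffer. */
static void dump_esd_size(GF_ESD *esd)
{
	char *buf = NULL;
	u32 size = 0;
	GF_Err e = gf_odf_desc_write((GF_Descriptor *) esd, &buf, &size);
	if (e != GF_OK) return;
	fprintf(stderr, "encoded ESD: %u bytes\n", size);
	gf_free(buf);	/* allocated by gf_bs_get_content inside gf_odf_desc_write */
}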
Example 3
static GF_Err VTBDec_InitDecoder(VTBDec *ctx, Bool force_dsi_rewrite)
{
	CFMutableDictionaryRef dec_dsi, dec_type;
	CFMutableDictionaryRef dsi;
	VTDecompressionOutputCallbackRecord cbacks;
    CFDictionaryRef buffer_attribs;
    OSStatus status;
	OSType kColorSpace;
	
	CFDataRef data = NULL;
	char *dsi_data=NULL;
	u32 dsi_data_size=0;
	
    dec_dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
	
	kColorSpace = kCVPixelFormatType_420YpCbCr8Planar;
	ctx->pix_fmt = GF_PIXEL_YV12;
	
	switch (ctx->esd->decoderConfig->objectTypeIndication) {
    case GPAC_OTI_VIDEO_AVC :
		if (ctx->sps && ctx->pps) {
			AVCState avc;
			s32 idx;
			memset(&avc, 0, sizeof(AVCState));
			avc.sps_active_idx = -1;

			idx = gf_media_avc_read_sps(ctx->sps, ctx->sps_size, &avc, 0, NULL);
			if (idx < 0) {
				//SPS could not be parsed, avoid indexing avc.sps with a negative value
				CFRelease(dec_dsi);
				return GF_NON_COMPLIANT_BITSTREAM;
			}

			ctx->vtb_type = kCMVideoCodecType_H264;
			assert(ctx->sps);
			ctx->width = avc.sps[idx].width;
			ctx->height = avc.sps[idx].height;
			if (avc.sps[idx].vui.par_num && avc.sps[idx].vui.par_den) {
				ctx->pixel_ar = avc.sps[idx].vui.par_num;
				ctx->pixel_ar <<= 16;
				ctx->pixel_ar |= avc.sps[idx].vui.par_den;
			}
			ctx->chroma_format = avc.sps[idx].chroma_format;
			ctx->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
			ctx->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
		
			switch (ctx->chroma_format) {
			case 2:
				//422 decoding doesn't seem supported ...
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_422YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV422_10;
				} else {
					kColorSpace = kCVPixelFormatType_422YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV422;
				}
				break;
			case 3:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_444YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV444_10;
				} else {
					kColorSpace = kCVPixelFormatType_444YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV444;
				}
				break;
			default:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
					ctx->pix_fmt = GF_PIXEL_YV12_10;
				}
				break;
			}
		
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || force_dsi_rewrite || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				GF_AVCConfigSlot *slc_s, *slc_p;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_new();
				cfg->configurationVersion = 1;
				cfg->profile_compatibility = avc.sps[idx].prof_compat;
				cfg->AVCProfileIndication = avc.sps[idx].profile_idc;
				cfg->AVCLevelIndication = avc.sps[idx].level_idc;
				cfg->chroma_format = avc.sps[idx].chroma_format;
				cfg->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
				cfg->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				cfg->nal_unit_size = 4;
				
				GF_SAFEALLOC(slc_s, GF_AVCConfigSlot);
				slc_s->data = ctx->sps;
				slc_s->size = ctx->sps_size;
				gf_list_add(cfg->sequenceParameterSets, slc_s);

				GF_SAFEALLOC(slc_p, GF_AVCConfigSlot);
				slc_p->data = ctx->pps;
				slc_p->size = ctx->pps_size;
				gf_list_add(cfg->pictureParameterSets, slc_p);

				gf_odf_avc_cfg_write(cfg, &dsi_data, &dsi_data_size);
				//detach the SPS/PPS buffers so that deleting the config does not free them
				slc_s->data = slc_p->data = NULL;
				gf_odf_avc_cfg_del(cfg);
			} else {
				dsi_data = ctx->esd->decoderConfig->decoderSpecificInfo->data;
				dsi_data_size = ctx->esd->decoderConfig->decoderSpecificInfo->dataLength;
			}
			
			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, (const UInt8 *) dsi_data, dsi_data_size);
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("avcC"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);
		
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				gf_free(ctx->sps);
				ctx->sps = NULL;
				gf_free(ctx->pps);
				ctx->pps = NULL;
				gf_free(dsi_data);
			}
		}
        break;
	case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
	case GPAC_OTI_VIDEO_MPEG2_MAIN:
	case GPAC_OTI_VIDEO_MPEG2_SNR:
	case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
	case GPAC_OTI_VIDEO_MPEG2_HIGH:
	case GPAC_OTI_VIDEO_MPEG2_422:
        ctx->vtb_type = kCMVideoCodecType_MPEG2Video;
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
        break;
		
	case GPAC_OTI_VIDEO_MPEG1:
		ctx->vtb_type = kCMVideoCodecType_MPEG1Video;
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
		break;
    case GPAC_OTI_VIDEO_MPEG4_PART2 :
	{
		Bool reset_dsi = GF_FALSE;
		ctx->vtb_type = kCMVideoCodecType_MPEG4Video;
		if (!ctx->esd->decoderConfig->decoderSpecificInfo) {
			ctx->esd->decoderConfig->decoderSpecificInfo = (GF_DefaultDescriptor *) gf_odf_desc_new(GF_ODF_DSI_TAG);
		}

		if (!ctx->esd->decoderConfig->decoderSpecificInfo->data) {
			reset_dsi = GF_TRUE;
			ctx->esd->decoderConfig->decoderSpecificInfo->data = ctx->vosh;
			ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = ctx->vosh_size;
		}
		
		if (ctx->esd->decoderConfig->decoderSpecificInfo->data) {
			GF_M4VDecSpecInfo vcfg;
			GF_BitStream *bs;
			
			gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
			ctx->width = vcfg.width;
			ctx->height = vcfg.height;
			if (ctx->esd->slConfig) {
				ctx->esd->slConfig->predefined = 2;
			}
			/* serialize the ESD behind a 4-byte zero word (the version/flags
			   field expected at the start of an esds payload) */
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, 0);
			gf_odf_desc_write_bs((GF_Descriptor *) ctx->esd, bs);
			gf_bs_get_content(bs, &dsi_data, &dsi_data_size);
			gf_bs_del(bs);
			
			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, (const UInt8 *) dsi_data, dsi_data_size);
			gf_free(dsi_data);
			
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("esds"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);
			
			if (reset_dsi) {
				ctx->esd->decoderConfig->decoderSpecificInfo->data = NULL;
				ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
			}
			ctx->skip_mpeg4_vosh = GF_FALSE;
		} else {
			ctx->skip_mpeg4_vosh = GF_TRUE;
			return GF_OK;
		}
        break;
    }
	case GPAC_OTI_MEDIA_GENERIC:
		if (ctx->esd->decoderConfig->decoderSpecificInfo && ctx->esd->decoderConfig->decoderSpecificInfo->dataLength) {
			char *dsi = ctx->esd->decoderConfig->decoderSpecificInfo->data;
			if (ctx->esd->decoderConfig->decoderSpecificInfo->dataLength<8) return GF_NON_COMPLIANT_BITSTREAM;
			if (strnicmp(dsi, "s263", 4)) return GF_NOT_SUPPORTED;
			
			ctx->width = ((u8) dsi[4]); ctx->width<<=8; ctx->width |= ((u8) dsi[5]);
			ctx->height = ((u8) dsi[6]); ctx->height<<=8; ctx->height |= ((u8) dsi[7]);
			ctx->vtb_type = kCMVideoCodecType_H263;
		}
		break;
		
	default :
		return GF_NOT_SUPPORTED;
    }

	if (! ctx->width || !ctx->height) return GF_NOT_SUPPORTED;

    status = CMVideoFormatDescriptionCreate(kCFAllocatorDefault, ctx->vtb_type, ctx->width, ctx->height, dec_dsi, &ctx->fmt_desc);

    if (!ctx->fmt_desc) {
		if (dec_dsi) CFRelease(dec_dsi);
        return GF_NON_COMPLIANT_BITSTREAM;
    }
	buffer_attribs = VTBDec_CreateBufferAttributes(ctx->width, ctx->height, kColorSpace);
	
	cbacks.decompressionOutputCallback = VTBDec_on_frame;
    cbacks.decompressionOutputRefCon   = ctx;

    dec_type = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(dec_type, kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder, kCFBooleanTrue);
	ctx->is_hardware = GF_TRUE;

    status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, dec_type, NULL, &cbacks, &ctx->vtb_session);
	//if HW decoder not available, try soft one
	if (status) {
		status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, NULL, buffer_attribs, &cbacks, &ctx->vtb_session);
		ctx->is_hardware = GF_FALSE;
	}
	
	if (dec_dsi)
		CFRelease(dec_dsi);
	if (dec_type)
		CFRelease(dec_type);
    if (buffer_attribs)
        CFRelease(buffer_attribs);

    switch (status) {
    case kVTVideoDecoderNotAvailableNowErr:
    case kVTVideoDecoderUnsupportedDataFormatErr:
        return GF_NOT_SUPPORTED;
    case kVTVideoDecoderMalfunctionErr:
        return GF_IO_ERR;
    case kVTVideoDecoderBadDataErr :
        return GF_BAD_PARAM;

	case kVTPixelTransferNotSupportedErr:
	case kVTCouldNotFindVideoDecoderErr:
		return GF_NOT_SUPPORTED;
    case 0:
        break;
    default:
        return GF_SERVICE_ERROR;
    }
	
	//good to go !
	if (ctx->pix_fmt == GF_PIXEL_YUV422) {
		ctx->out_size = ctx->width*ctx->height*2;
	} else if (ctx->pix_fmt == GF_PIXEL_YUV444) {
		ctx->out_size = ctx->width*ctx->height*3;
	} else {
		// (ctx->pix_fmt == GF_PIXEL_YV12)
		ctx->out_size = ctx->width*ctx->height*3/2;
	}
	if (ctx->luma_bit_depth>8) {
		ctx->out_size *= 2;
	}
	
	return GF_OK;
}
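
The decompression session created above delivers frames through the VTBDec_on_frame callback registered in cbacks; that function is outside this excerpt. The stand-in below is a hypothetical sketch that only illustrates the VTDecompressionOutputCallback signature VideoToolbox expects and how decompressionOutputRefCon maps back to the VTBDec context.

/* Hypothetical sketch, not the real VTBDec_on_frame: matches the
 * VTDecompressionOutputCallback prototype used by VTDecompressionSessionCreate. */
static void VTBDec_on_frame_sketch(void *decompressionOutputRefCon, void *sourceFrameRefCon,
	OSStatus status, VTDecodeInfoFlags infoFlags,
	CVImageBufferRef imageBuffer, CMTime pts, CMTime duration)
{
	VTBDec *ctx = (VTBDec *) decompressionOutputRefCon;	/* set via cbacks.decompressionOutputRefCon */
	if (status != noErr || !imageBuffer) return;
	/* a real implementation would CVPixelBufferRetain(imageBuffer) here and
	   queue it for output; the buffer is not guaranteed valid after the callback returns */
	(void) ctx; (void) sourceFrameRefCon; (void) infoFlags; (void) pts; (void) duration;
}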