// Count the NALUs in an AVCC-framed sample buffer, where each NALU is
// preceded by a big-endian length field of aiLengthCodeSize bytes.
//
// @param sampleBuffer      encoded output sample (AVCC layout)
// @param aiLengthCodeSize  size in bytes of each NALU length prefix, must be 1..4
// @param aiNaluCnt         [out] receives the number of NALUs found
// @return 0 on success, -1 on invalid arguments or CoreMedia failure
int CVideoEncodeVt::GetNaluCnt(CMSampleBufferRef sampleBuffer, int aiLengthCodeSize, int* aiNaluCnt)
{
    // FIX: also reject aiLengthCodeSize <= 0 (the original only checked the
    // upper bound). A non-positive size would copy nothing below, decode
    // nLength == 0 and never advance nOffset -> infinite loop. This also
    // matches the validation in CopyNalus().
    if(NULL == sampleBuffer || NULL == aiNaluCnt || aiLengthCodeSize <= 0 || aiLengthCodeSize > 4)
    {
        assert(false);
        return -1;
    }

    size_t nSampleSize = CMSampleBufferGetTotalSampleSize(sampleBuffer);
    if((int)nSampleSize < 0)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMSampleBufferGetTotalSampleSize failed!");
        assert(false);
        return -1;
    }

    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if(NULL == blockBuffer)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMSampleBufferGetDataBuffer failed!");
        assert(false);
        return -1;
    }

    int iNaluCnt = 0;
    size_t nOffset = 0;
    uint8_t pBuf[4] = {0};

    // Walk the whole payload, reading each NALU's length prefix and
    // skipping over its body.
    while(nOffset < nSampleSize)
    {
        // Copy the length prefix out of the (possibly non-contiguous) block
        // buffer; CMBlockBufferCopyDataBytes fails if the range is invalid.
        int ret = CMBlockBufferCopyDataBytes(blockBuffer, nOffset, aiLengthCodeSize, pBuf);
        if(0 != ret)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMBlockBufferCopyDataBytes failed!");
            assert(false);
            return -1;
        }

        // Big-endian decode of the NALU size.
        size_t nLength = 0;
        for(int i = 0; i < aiLengthCodeSize; i++)
        {
            nLength <<= 8;
            nLength |= pBuf[i];
        }

        nOffset += (nLength + aiLengthCodeSize);
        iNaluCnt++;
    }

    *aiNaluCnt = iNaluCnt;
    return 0;
}
/* Build a GstBuffer backed by a CoreMedia sample buffer.
 *
 * The sample's image buffer (when it is a CVPixelBuffer and a texture cache
 * is available) or its block buffer is wrapped into the returned buffer.
 * When the wrapped pixel buffer carries padding and downstream cannot use
 * the video meta API, the data is flattened into a plain copy instead.
 * Returns NULL on failure.
 */
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf, gboolean use_video_meta,
    GstVideoTextureCache * cache)
{
  CVImageBufferRef image;
  CMBlockBufferRef block;
  GstBuffer *result;

  image = CMSampleBufferGetImageBuffer (sample_buf);
  block = CMSampleBufferGetDataBuffer (sample_buf);

  result = gst_buffer_new ();
  gst_core_media_meta_add (result, sample_buf, image, block);

  if (image != NULL && CFGetTypeID (image) == CVPixelBufferGetTypeID ()
      && cache) {
    GstVideoInfo vinfo;
    gboolean padded = FALSE;
    CVPixelBufferRef pixbuf = (CVPixelBufferRef) image;

    if (!gst_video_info_init_from_pixel_buffer (&vinfo, pixbuf)) {
      goto error;
    }

    gst_core_video_wrap_pixel_buffer (result, &vinfo, pixbuf, cache, &padded);

    /* Without the video meta API downstream cannot interpret padded
     * planes, so flatten into a freshly copied system-memory buffer. */
    if (padded && !use_video_meta) {
      GstBuffer *flat;

      flat = gst_core_media_buffer_new_from_buffer (result, &vinfo);
      if (flat == NULL) {
        goto error;
      }
      gst_buffer_unref (result);
      result = flat;
    }
  } else if (block != NULL) {
    if (!gst_core_media_buffer_wrap_block_buffer (result, block)) {
      goto error;
    }
  } else {
    goto error;
  }

  return result;

error:
  if (result) {
    gst_buffer_unref (result);
  }
  return NULL;
}
void vtCallback(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer ) { CMBlockBufferRef block = CMSampleBufferGetDataBuffer(sampleBuffer); CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, false); CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); bool isKeyframe = false; if(attachments != NULL) { CFDictionaryRef attachment; CFBooleanRef dependsOnOthers; attachment = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); dependsOnOthers = (CFBooleanRef)CFDictionaryGetValue(attachment, kCMSampleAttachmentKey_DependsOnOthers); isKeyframe = (dependsOnOthers == kCFBooleanFalse); } if(isKeyframe) { // Send the SPS and PPS. CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); size_t spsSize, ppsSize; size_t parmCount; const uint8_t* sps, *pps; CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sps, &spsSize, &parmCount, nullptr ); CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pps, &ppsSize, &parmCount, nullptr ); std::unique_ptr<uint8_t[]> sps_buf (new uint8_t[spsSize + 4]) ; std::unique_ptr<uint8_t[]> pps_buf (new uint8_t[ppsSize + 4]) ; memcpy(&sps_buf[4], sps, spsSize); spsSize+=4 ; memcpy(&sps_buf[0], &spsSize, 4); memcpy(&pps_buf[4], pps, ppsSize); ppsSize += 4; memcpy(&pps_buf[0], &ppsSize, 4); ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)sps_buf.get(),spsSize, pts.value); ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)pps_buf.get(),ppsSize, pts.value); } char* bufferData; size_t size; CMBlockBufferGetDataPointer(block, 0, NULL, &size, &bufferData); ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)bufferData,size, pts.value); }
/* VTCompressionSession output callback for the H.264 encoder filter.
 *
 * Splits the encoded frame into NALUs, prepends the SPS/PPS parameter sets
 * when the frame is a keyframe, then packs everything into ctx->queue as
 * RFC 3984 payloads. Runs on a VideoToolbox thread, hence the mutex around
 * all context state.
 *
 * FIX: the parameter-set call previously contained mojibake ("&para"
 * rendered as the pilcrow character), i.e. "¶meter_set..." instead of
 * "&parameter_set...". Reconstructed from the declared local variables.
 */
static void h264_enc_output_cb(VTH264EncCtx *ctx, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
	MSQueue nalu_queue;
	CMBlockBufferRef block_buffer;
	size_t read_size, frame_size;
	bool_t is_keyframe = FALSE;
	mblk_t *nalu;
	int i;

	/* Encoder error or dropped frame: nothing to pack. */
	if(sampleBuffer == NULL || status != noErr) {
		ms_error("VideoToolbox: could not encode frame: error %d", status);
		return;
	}

	ms_mutex_lock(&ctx->mutex);
	if(ctx->is_configured) {
		ms_queue_init(&nalu_queue);
		block_buffer = CMSampleBufferGetDataBuffer(sampleBuffer);
		frame_size = CMBlockBufferGetDataLength(block_buffer);

		/* Convert the AVCC stream into individual NALUs, chunk by chunk.
		 * NOTE(review): the loop index i is passed as the byte offset to
		 * CMBlockBufferGetDataPointer while read_size accumulates chunk
		 * lengths; for a non-contiguous block buffer these are different
		 * quantities — verify against the CMBlockBuffer documentation. */
		for(i=0, read_size=0; read_size < frame_size; i++) {
			char *chunk;
			size_t chunk_size;
			int idr_count;
			CMBlockBufferGetDataPointer(block_buffer, i, &chunk_size, NULL, &chunk);
			ms_h264_stream_to_nalus((uint8_t *)chunk, chunk_size, &nalu_queue, &idr_count);
			if(idr_count) is_keyframe = TRUE;
			read_size += chunk_size;
		}

		if(is_keyframe) {
			/* Insert SPS/PPS (and any further parameter sets) in front of the
			 * frame's NALUs. The do/while is required because
			 * parameter_set_count is only known after the first call. */
			mblk_t *insertion_point = ms_queue_peek_first(&nalu_queue);
			const uint8_t *parameter_set;
			size_t parameter_set_size;
			size_t parameter_set_count;
			CMFormatDescriptionRef format_desc = CMSampleBufferGetFormatDescription(sampleBuffer);
			i=0;
			do {
				CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format_desc, i, &parameter_set, &parameter_set_size, &parameter_set_count, NULL);
				nalu = allocb(parameter_set_size, 0);
				memcpy(nalu->b_wptr, parameter_set, parameter_set_size);
				nalu->b_wptr += parameter_set_size;
				ms_queue_insert(&nalu_queue, insertion_point, nalu);
				i++;
			} while(i < parameter_set_count);
		}

		/* 90 kHz RTP clock for H.264. */
		rfc3984_pack(&ctx->packer_ctx, &nalu_queue, &ctx->queue, (uint32_t)(ctx->f->ticker->time * 90));
	}
	ms_mutex_unlock(&ctx->mutex);
}
/* Create a GstBuffer that wraps a CoreMedia sample buffer.
 *
 * A GstCoreMediaMeta holding references to the sample, image and block
 * buffers is attached to the result. When @map is TRUE the underlying
 * pixels (or block-buffer bytes) are wrapped as buffer memory; padded
 * pixel data is flattened into a copy if the video meta API cannot be
 * used downstream. Returns NULL on failure.
 */
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf, gboolean use_video_meta,
    gboolean map)
{
  CVImageBufferRef image;
  CMBlockBufferRef block;
  GstCoreMediaMeta *meta;
  GstBuffer *result;

  image = CMSampleBufferGetImageBuffer (sample_buf);
  block = CMSampleBufferGetDataBuffer (sample_buf);

  result = gst_buffer_new ();
  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (result,
      gst_core_media_meta_get_info (), NULL);

  /* The meta owns one reference to each CoreMedia object it records. */
  CFRetain (sample_buf);
  if (image)
    CVBufferRetain (image);
  if (block)
    CFRetain (block);
  meta->sample_buf = sample_buf;
  meta->image_buf = image;
  meta->pixel_buf = NULL;
  meta->block_buf = block;

  if (image != NULL && CFGetTypeID (image) == CVPixelBufferGetTypeID ()) {
    GstVideoInfo vinfo;
    gboolean padded = FALSE;

    meta->pixel_buf = (CVPixelBufferRef) image;
    if (!gst_video_info_init_from_pixel_buffer (&vinfo, meta->pixel_buf)) {
      goto error;
    }

    if (!gst_core_media_buffer_wrap_pixel_buffer (result, &vinfo,
            meta->pixel_buf, &padded, map)) {
      goto error;
    }

    /* Without the video meta API downstream cannot handle padded planes,
     * so copy into a plain system-memory buffer instead. */
    if (map && padded && !use_video_meta) {
      GstBuffer *flat;

      flat = gst_core_media_buffer_new_from_buffer (result, &vinfo);
      if (flat == NULL) {
        goto error;
      }
      gst_buffer_unref (result);
      result = flat;
    }
  } else if (block != NULL) {
    if (map && !gst_core_media_buffer_wrap_block_buffer (result, block)) {
      goto error;
    }
  } else {
    goto error;
  }

  return result;

error:
  if (result) {
    gst_buffer_unref (result);
  }
  return NULL;
}
/* Wrap a CoreMedia sample buffer in a GstBuffer (legacy, always-mapped
 * variant): the pixel data (or block-buffer bytes) is exposed directly as
 * non-shareable wrapped memory and a GstCoreMediaMeta keeps the sample
 * alive. Returns NULL on failure. */
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf)
{
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  CMBlockBufferRef block_buf;
  gchar *data = NULL;
  UInt32 size;
  OSStatus status;
  GstBuffer *buf;
  GstCoreMediaMeta *meta;

  image_buf = CMSampleBufferGetImageBuffer (sample_buf);
  pixel_buf = NULL;
  block_buf = CMSampleBufferGetDataBuffer (sample_buf);

  if (image_buf != NULL &&
      CFGetTypeID (image_buf) == CVPixelBufferGetTypeID ()) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    /* NOTE(review): the base address is locked here but never unlocked in
     * this function — presumably the meta's free function unlocks it when
     * the buffer is destroyed; confirm in the meta implementation. */
    if (CVPixelBufferLockBaseAddress (pixel_buf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }

    if (CVPixelBufferIsPlanar (pixel_buf)) {
      gint plane_count, plane_idx;

      /* Total size = sum of (stride * height) over all planes, exposed
       * from plane 0's base address — assumes the planes are laid out
       * contiguously after plane 0; TODO(review): confirm for all pixel
       * formats this element negotiates. */
      data = CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);
      size = 0;
      plane_count = CVPixelBufferGetPlaneCount (pixel_buf);
      for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
        size += CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
            CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
      }
    } else {
      data = CVPixelBufferGetBaseAddress (pixel_buf);
      size = CVPixelBufferGetBytesPerRow (pixel_buf) *
          CVPixelBufferGetHeight (pixel_buf);
    }
  } else if (block_buf != NULL) {
    /* No image buffer: expose the raw block-buffer bytes instead. */
    status = CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
    if (status != noErr)
      goto error;
    size = CMBlockBufferGetDataLength (block_buf);
  } else {
    goto error;
  }

  buf = gst_buffer_new ();
  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
      gst_core_media_meta_get_info (), NULL);
  /* NOTE(review): only sample_buf is retained here (via CVBufferRetain on a
   * CMSampleBufferRef rather than CFRetain); image_buf/block_buf are stored
   * unretained and rely on sample_buf keeping them alive — verify against
   * the meta's release path. */
  CVBufferRetain ((CVBufferRef)sample_buf);
  meta->sample_buf = sample_buf;
  meta->image_buf = image_buf;
  meta->pixel_buf = pixel_buf;
  meta->block_buf = block_buf;

  /* Zero-copy: wrap the CoreMedia-owned bytes directly. */
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data, size, 0, size,
          NULL, NULL));

  return buf;

error:
  return NULL;
}
int CVideoEncodeVt::CopyNalus(CMSampleBufferRef sampleBuffer, int aiLengthCodeSize, uint8_t* apData, int aiDataSize) { if(NULL == sampleBuffer || aiLengthCodeSize <= 0 || aiLengthCodeSize > 4 || NULL == apData || aiDataSize < 0) { return -1; } CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); if(NULL == blockBuffer) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMSampleBufferGetDataBuffer failed!"); assert(false); return -1; } int ret = 0; size_t nSrcOffset = 0; size_t nSrcSize = CMSampleBufferGetTotalSampleSize(sampleBuffer); size_t nSrcRemainSize = nSrcSize; size_t nDstRemainSize = aiDataSize; uint8_t pSizeBuf[4] = {0}; uint8_t* pDst = apData; while (nSrcRemainSize > 0) { ret = CMBlockBufferCopyDataBytes(blockBuffer, nSrcOffset, aiLengthCodeSize, pSizeBuf); if(0 != ret) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMBlockBufferCopyDataBytes failed!"); assert(false); return -1; } size_t nLength = 0; for(int i = 0; i < aiLengthCodeSize; i++) { nLength <<= 8; nLength |= pSizeBuf[i]; } size_t nCurrSrcLen = nLength + aiLengthCodeSize; size_t nCurrDstLen = nLength + sizeof(StartCode); if(nCurrSrcLen > nSrcRemainSize) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "src data bad!"); assert(false); return -1; } if(nCurrDstLen > nDstRemainSize) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "dst data too small!"); assert(false); return -1; } memcpy(pDst, StartCode, sizeof(StartCode)); ret = CMBlockBufferCopyDataBytes(blockBuffer, nSrcOffset + aiLengthCodeSize, nLength, pDst + sizeof(StartCode)); if(0 != ret) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMBlockBufferCopyDataBytes failed!"); assert(false); return -1; } nSrcOffset += nCurrSrcLen; pDst += nCurrDstLen; nSrcRemainSize -= nCurrSrcLen; nDstRemainSize -= nCurrDstLen; } return 0; }