/*
 * Demo entry point: exercises call_fun with two different operations.
 * call_fun(op, out, a, b) ultimately invokes op(out, a, b).
 */
int main(void)
{
    int result;

    /* Dispatches to add(&result, 3, 2). */
    call_fun(add, &result, 3, 2);
    printf("result = %d\n", result);

    /* Dispatches to sub(&result, 3, 2). */
    call_fun(sub, &result, 3, 2);
    printf("result = %d\n", result);

    return 0;
}
/// Dispatch the Lua call: a static call when forced, otherwise the
/// regular overload-resolution path. Returns whatever the chosen
/// call_fun* helper returns.
inline int overload_rep::call(lua_State* L, bool force_static_call) const
{
    return force_static_call ? call_fun_static(L) : call_fun(L);
}
/***************************************************
 * int decode_main_frame(uint8_t *data, int size)
 * Main frame decode handler
 ***************************************************
 */
int QDecodeStream::decode_main_frame(uint8_t *data, int size)
/*
 * data: buffer holding the encoded (H.264) data to decode
 * size: size of that buffer in bytes
 *
 * Returns: got_picture flag from avcodec_decode_video2 (non-zero when a
 * full frame was decoded), or -1 if the scaler context could not be created.
 *
 * NOTE(review): iRet (the avcodec_decode_video2 return value) is assigned
 * but never checked — a decode error (iRet < 0) falls through to the
 * "lost frame" branch. Confirm whether callers expect an explicit error.
 * NOTE(review): avcodec_decode_video2/avpicture_fill/AVPicture are
 * deprecated in newer FFmpeg; this code targets the legacy API.
 */
{
    int got_picture=0;
    int iRet;
    AVPicture pFrameRGB;

    /* Wrap the raw input buffer in an AVPacket (no copy is made). */
    av_init_packet(&decode_h264.avp);
    decode_h264.avp.data=data;
    decode_h264.avp.size=size;

    iRet = avcodec_decode_video2(decode_h264.codecCtx, \
                decode_h264.frame, \
                &got_picture, \
                &decode_h264.avp);
#if 1
    /* One-time lazy init of the sws scaler once the first complete frame
     * arrives (codec width/height/pix_fmt are only reliable then).
     * `first` is presumably a member/global init flag — TODO confirm. */
    if((first == 0) &&(got_picture))
    {
        /* Convert decoded frames to RGB24 at the playback window size. */
        decode_h264.img_convert_ctx = sws_getContext(decode_h264.codecCtx->width, \
            decode_h264.codecCtx->height, \
            decode_h264.codecCtx->pix_fmt, \
            play_win->image.width, \
            play_win->image.height, \
            AV_PIX_FMT_RGB24, \
            SWS_BICUBIC, /* AV_PIX_FMT_RGB24 SWS_BICUBIC AV_PIX_FMT_YUV420P */ \
            NULL, NULL, NULL);
        if(decode_h264.img_convert_ctx == NULL)
            return -1;
        first = 1;
    }

    if((got_picture) && (first == 1))
    {
        /* Point pFrameRGB's planes/linesizes at the shared output buffer;
         * no pixel data is copied here. */
        avpicture_fill(&pFrameRGB, one_frame_buffer, // *play_win->image.buffer, //decode_h264.frame_buffer,
            AV_PIX_FMT_RGB24, play_win->image.width, play_win->image.height);

        /* Color-convert + scale the decoded frame into one_frame_buffer. */
        sws_scale(decode_h264.img_convert_ctx,
            decode_h264.frame->data,
            decode_h264.frame->linesize, 0 ,
            decode_h264.codecCtx->height,
            pFrameRGB.data, pFrameRGB.linesize);

        /* Hand the finished RGB frame to the registered consumer callback. */
        call_fun((char *)one_frame_buffer);
    }
    else
        printf("lost frame\n");
#endif
    return got_picture;
}
/// Invoke the wrapped callable: builds the compile-time index sequence
/// for the target's parameter count and perfectly forwards the caller's
/// arguments through call_fun.
result_type operator()(CArgs && ... cargs)
{
    using index_seq = typename gen<f_traits::parameter_count>::type;
    return call_fun(call_type(), index_seq(), std::forward<CArgs>(cargs)...);
}