bool ImageConverterFF::convert(const quint8 *const src[], const int srcStride[], quint8 *const dst[], const int dstStride[]) { DPTR_D(ImageConverterFF); //Check out dimension. equals to in dimension if not setted. TODO: move to another common func if (d.w_out == 0 || d.h_out == 0) { if (d.w_in == 0 || d.h_in == 0) return false; setOutSize(d.w_in, d.h_in); } //TODO: move those code to prepare() d.sws_ctx = sws_getCachedContext(d.sws_ctx , d.w_in, d.h_in, (AVPixelFormat)d.fmt_in , d.w_out, d.h_out, (AVPixelFormat)d.fmt_out , (d.w_in == d.w_out && d.h_in == d.h_out) ? SWS_POINT : SWS_FAST_BILINEAR //SWS_BICUBIC , NULL, NULL, NULL ); //int64_t flags = SWS_CPU_CAPS_SSE2 | SWS_CPU_CAPS_MMX | SWS_CPU_CAPS_MMX2; //av_opt_set_int(d.sws_ctx, "sws_flags", flags, 0); if (!d.sws_ctx) return false; d.setupColorspaceDetails(false); int result_h = sws_scale(d.sws_ctx, src, srcStride, 0, d.h_in, dst, dstStride); if (result_h != d.h_out) { qDebug("convert failed: %d, %d", result_h, d.h_out); return false; } Q_UNUSED(result_h); for (int i = 0; i < d.pitchs.size(); ++i) { d.bits[i] = dst[i]; d.pitchs[i] = dstStride[i]; } return true; }
/*!
 * Convert the source image planes into the internally managed picture
 * (d.picture) rather than caller-provided buffers (contrast with the
 * 4-argument overload).
 *
 * \param srcSlice  per-plane source data pointers (layout per d.fmt_in)
 * \param srcStride per-plane source line sizes in bytes
 * \return true on success; false if sizes are unknown, the swscale context
 *         cannot be created, or sws_scale produced fewer rows than expected.
 */
bool ImageConverterFF::convert(const quint8 *const srcSlice[], const int srcStride[])
{
    DPTR_D(ImageConverterFF);
    // Output dimension defaults to the input dimension if not set.
    // TODO: move to another common func
    if (d.w_out == 0 || d.h_out == 0) {
        if (d.w_in == 0 || d.h_in == 0)
            return false;
        setOutSize(d.w_in, d.h_in);
    }
    // TODO: move those code to prepare()
    // sws_getCachedContext reuses the existing context when parameters are
    // unchanged; SWS_POINT is cheapest and lossless when no scaling occurs.
    d.sws_ctx = sws_getCachedContext(d.sws_ctx
            , d.w_in, d.h_in, (AVPixelFormat)d.fmt_in
            , d.w_out, d.h_out, (AVPixelFormat)d.fmt_out
            , (d.w_in == d.w_out && d.h_in == d.h_out) ? SWS_POINT : SWS_FAST_BILINEAR //SWS_BICUBIC
            , NULL, NULL, NULL
            );
    //int64_t flags = SWS_CPU_CAPS_SSE2 | SWS_CPU_CAPS_MMX | SWS_CPU_CAPS_MMX2;
    //av_opt_set_int(d.sws_ctx, "sws_flags", flags, 0);
    if (!d.sws_ctx)
        return false;
    // NOTE(review): the sibling overload calls d.setupColorspaceDetails(false)
    // on the private class; this calls an argumentless member on the public
    // class — confirm whether both should use the same call/force flag.
    setupColorspaceDetails();
#if PREPAREDATA_NO_PICTURE
    // Dead/legacy branch: builds AVPicture-style plane layouts by hand for
    // YUV420 <=> RGB conversions instead of using d.picture.
    // NOTE(review): this branch references `in`, `out`, and bare
    // fmt_in/w_in/h_in/w_out (without the d. prefix), which are not visible
    // in this scope — it likely no longer compiles if the macro is enabled.
    //for YUV420 <=> RGB
#if 0
    struct {
        uint8_t *data[4]; //AV_NUM_DATA_POINTERS
        int linesize[4]; //AV_NUM_DATA_POINTERS
    }
#else
    AVPicture
#endif
    pic_in, pic_out;
    if ((AVPixelFormat)fmt_in == PIX_FMT_YUV420P) {
        // Planar YUV420: Y plane followed by V then U quarter-size planes.
        pic_in.data[0] = (uint8_t*)in;
        pic_in.data[2] = (uint8_t*)pic_in.data[0] + (w_in * h_in);
        pic_in.data[1] = (uint8_t*)pic_in.data[2] + (w_in * h_in) / 4;
        pic_in.linesize[0] = w_in;
        pic_in.linesize[1] = w_in / 2;
        pic_in.linesize[2] = w_in / 2;
        //pic_in.linesize[3] = 0; //not used
    } else {
        // Packed format assumed 4 bytes per pixel (e.g. RGB32).
        pic_in.data[0] = (uint8_t*)in;
        pic_in.linesize[0] = w_in * 4; //TODO: not 0
    }
    if ((AVPixelFormat)fmt_out == PIX_FMT_YUV420P) {
        pic_out.data[0] = (uint8_t*)out;
        pic_out.data[2] = (uint8_t*)pic_out.data[0] + (w_out * h_in);
        pic_out.data[1] = (uint8_t*)pic_out.data[2] + (w_out * h_in) / 4;
        //pic_out.data[3] = (uint8_t*)pic_out.data[0] - 1;
        pic_out.linesize[0] = w_out;
        pic_out.linesize[1] = w_out / 2;
        pic_out.linesize[2] = w_out / 2;
        //3 not used
    } else {
        pic_out.data[0] = (uint8_t*)out;
        pic_out.linesize[0] = w_out * 4;
    }
#endif //PREPAREDATA_NO_PICTURE
    // Scale/convert into the internally owned picture; sws_scale returns the
    // height of the written output slice.
    int result_h = sws_scale(d.sws_ctx, srcSlice, srcStride, 0, d.h_in, d.picture.data, d.picture.linesize);
    if (result_h != d.h_out) {
        qDebug("convert failed: %d, %d", result_h, d.h_out);
        return false;
    }
#if 0
    if (isInterlaced()) {
        //deprecated
        avpicture_deinterlace(&d.picture, &d.picture, (AVPixelFormat)d.fmt_out, d.w_out, d.h_out);
    }
#endif //0
    // NOTE(review): Q_UNUSED is redundant here — result_h is used above.
    Q_UNUSED(result_h);
    return true;
}