/* Scale/convert the incoming frame into handle->video.conv_frame,
 * via either libswscale or the built-in scaler. */
static void ffmpeg_scale_input(ffmpeg_t *handle, const struct ffemu_video_data *data)
{
   /* Use a smoothing filter when downscaling so detail is averaged
    * instead of dropped; nearest-neighbour otherwise. */
   bool downscale = handle->params.out_width  < data->width ||
                    handle->params.out_height < data->height;

   if (handle->video.use_sws)
   {
      int in_stride = data->pitch;

      handle->video.sws = sws_getCachedContext(handle->video.sws,
            data->width, data->height, handle->video.in_pix_fmt,
            handle->params.out_width, handle->params.out_height,
            handle->video.pix_fmt,
            downscale ? SWS_BILINEAR : SWS_POINT, NULL, NULL, NULL);

      sws_scale(handle->video.sws,
            (const uint8_t* const*)&data->data, &in_stride,
            0, data->height,
            handle->video.conv_frame->data,
            handle->video.conv_frame->linesize);
   }
   else
   {
      struct scaler_ctx *ctx = &handle->video.scaler;

      /* Regenerate the filter only when the input geometry changes. */
      if ((int)data->width  != ctx->in_width ||
          (int)data->height != ctx->in_height)
      {
         ctx->in_width    = data->width;
         ctx->in_height   = data->height;
         ctx->in_stride   = data->pitch;
         ctx->scaler_type = downscale ? SCALER_TYPE_BILINEAR : SCALER_TYPE_POINT;
         ctx->out_width   = handle->params.out_width;
         ctx->out_height  = handle->params.out_height;
         ctx->out_stride  = handle->video.conv_frame->linesize[0];
         scaler_ctx_gen_filter(ctx);
      }

      scaler_ctx_scale(ctx, handle->video.conv_frame->data[0], data->data);
   }
}
/* Uploads a new menu texture. Reallocates the pixel buffer and regenerates
 * the menu scaler whenever the texture dimensions (or first use) change,
 * then copies the frame data in. `alpha` is currently unused. */
static void switch_set_texture_frame(
      void *data, const void *frame, bool rgb32, unsigned width,
      unsigned height, float alpha)
{
   switch_video_t *sw = data;

   /* Guard against a degenerate texture: the integer scale-factor
    * computation below would otherwise divide by zero. */
   if (width == 0 || height == 0)
      return;

   if (!sw->menu_texture.pixels ||
       sw->menu_texture.width  != width ||
       sw->menu_texture.height != height)
   {
      free(sw->menu_texture.pixels);

      sw->menu_texture.pixels = malloc(width * height * (rgb32 ? 4 : 2));
      if (!sw->menu_texture.pixels)
      {
         RARCH_ERR("failed to allocate buffer for menu texture\n");
         return;
      }

      /* Largest integer scale that still fits inside the 1280x720 image.
       * NOTE(review): integer division — a texture wider than 1280 or
       * taller than 720 yields sf == 0 and the filter below will fail. */
      int xsf = 1280 / width;
      int ysf = 720 / height;
      int sf = xsf;

      if (ysf < sf)
         sf = ysf;

      sw->menu_texture.width  = width;
      sw->menu_texture.height = height;
      sw->menu_texture.tgtw   = width * sf;
      sw->menu_texture.tgth   = height * sf;

      struct scaler_ctx *sctx = &sw->menu_texture.scaler;
      scaler_ctx_gen_reset(sctx);

      sctx->in_width    = width;
      sctx->in_height   = height;
      sctx->in_stride   = width * (rgb32 ? 4 : 2);
      sctx->in_fmt      = rgb32 ? SCALER_FMT_ARGB8888 : SCALER_FMT_RGB565;
      sctx->out_width   = sw->menu_texture.tgtw;
      sctx->out_height  = sw->menu_texture.tgth;
      sctx->out_stride  = 1280 * 4;
      sctx->out_fmt     = SCALER_FMT_ABGR8888;
      sctx->scaler_type = SCALER_TYPE_POINT;

      if (!scaler_ctx_gen_filter(sctx))
      {
         RARCH_ERR("failed to generate scaler for menu texture\n");
         /* Drop the buffer so the frame path does not blit with an
          * invalid scaler; the next call will retry from scratch. */
         free(sw->menu_texture.pixels);
         sw->menu_texture.pixels = NULL;
         return;
      }
   }

   memcpy(sw->menu_texture.pixels, frame, width * height * (rgb32 ? 4 : 2));
}
bool init_video_pixel_converter(unsigned size) { /* This function can be called multiple times * without deiniting first on consoles. */ deinit_pixel_converter(); /* If pixel format is not 0RGB1555, we don't need to do * any internal pixel conversion. */ if (video_driver_get_pixel_format() != RETRO_PIXEL_FORMAT_0RGB1555) return true; RARCH_WARN("0RGB1555 pixel format is deprecated, and will be slower. For 15/16-bit, RGB565 format is preferred.\n"); scaler_ptr = (video_pixel_scaler_t*)calloc(1, sizeof(*scaler_ptr)); if (!scaler_ptr) goto error; scaler_ptr->scaler = (struct scaler_ctx*)calloc(1, sizeof(*scaler_ptr->scaler)); if (!scaler_ptr->scaler) goto error; scaler_ptr->scaler->scaler_type = SCALER_TYPE_POINT; scaler_ptr->scaler->in_fmt = SCALER_FMT_0RGB1555; /* TODO: Pick either ARGB8888 or RGB565 depending on driver. */ scaler_ptr->scaler->out_fmt = SCALER_FMT_RGB565; if (!scaler_ctx_gen_filter(scaler_ptr->scaler)) goto error; scaler_ptr->scaler_out = calloc(sizeof(uint16_t), size * size); if (!scaler_ptr->scaler_out) goto error; return true; error: if (scaler_ptr->scaler_out) free(scaler_ptr->scaler_out); if (scaler_ptr->scaler) free(scaler_ptr->scaler); if (scaler_ptr) free(scaler_ptr); scaler_ptr = NULL; return false; }
// Consumes one video frame on the recording worker thread: rescales it into
// the conversion frame (unless it is a duplicate of the previous frame),
// encodes it, and muxes the resulting packet. Returns false on encode or
// mux failure.
// NOTE(review): the packet produced by encode_video() is not explicitly
// unreferenced here — presumably encode_video()/the muxer own its lifetime;
// verify against their implementations.
static bool ffemu_push_video_thread(ffemu_t *handle, const struct ffemu_video_data *data)
{
   if (!data->is_dupe)
   {
      // Regenerate the scaler filter only when input geometry changes.
      if (data->width != handle->video.scaler.in_width || data->height != handle->video.scaler.in_height)
      {
         handle->video.scaler.in_width = data->width;
         handle->video.scaler.in_height = data->height;
         handle->video.scaler.in_stride = data->pitch;

         // Attempt to preserve more information if we scale down.
         bool shrunk = handle->params.out_width < data->width || handle->params.out_height < data->height;
         handle->video.scaler.scaler_type = shrunk ? SCALER_TYPE_BILINEAR : SCALER_TYPE_POINT;

         handle->video.scaler.out_width = handle->params.out_width;
         handle->video.scaler.out_height = handle->params.out_height;
         handle->video.scaler.out_stride = handle->video.conv_frame->linesize[0];

         scaler_ctx_gen_filter(&handle->video.scaler);
      }

      scaler_ctx_scale(&handle->video.scaler, handle->video.conv_frame->data[0], data->data);
   }

   // Duplicate frames are re-encoded from the untouched conv_frame, so the
   // pts still advances by one per call.
   handle->video.conv_frame->pts = handle->video.frame_cnt;

   AVPacket pkt;
   if (!encode_video(handle, &pkt, handle->video.conv_frame))
      return false;

   // A zero-size packet means the encoder buffered the frame; nothing to mux.
   if (pkt.size)
   {
      if (av_interleaved_write_frame(handle->muxer.ctx, &pkt) < 0)
         return false;
   }

   handle->video.frame_cnt++;
   return true;
}
static bool init_video_pixel_converter(unsigned size) { // This function can be called multiple times without deiniting first on consoles. deinit_pixel_converter(); if (g_extern.system.pix_fmt == RETRO_PIXEL_FORMAT_0RGB1555) { RARCH_WARN("0RGB1555 pixel format is deprecated, and will be slower. For 15/16-bit, RGB565 format is preferred.\n"); driver.scaler.scaler_type = SCALER_TYPE_POINT; driver.scaler.in_fmt = SCALER_FMT_0RGB1555; // TODO: Pick either ARGB8888 or RGB565 depending on driver ... driver.scaler.out_fmt = SCALER_FMT_RGB565; if (!scaler_ctx_gen_filter(&driver.scaler)) return false; driver.scaler_out = calloc(sizeof(uint16_t), size * size); } return true; }
// Renders one core frame (plus optional menu overlay or statistics OSD) into
// the 1280x720 linear staging image, then swizzles and submits it to the
// display surface. Returns false only on a hard surface failure.
static bool switch_frame(void *data, const void *frame,
      unsigned width, unsigned height,
      uint64_t frame_count, unsigned pitch,
      const char *msg, video_frame_info_t *video_info)
{
   static uint64_t last_frame = 0;

   unsigned x, y;
   result_t r;
   int tgtw, tgth, centerx, centery;
   uint32_t *out_buffer = NULL;
   switch_video_t *sw = data;

   // Largest integer scale factor that keeps the frame inside 1280x720.
   // NOTE(review): integer division — assumes width <= 1280 and
   // height <= 720; a larger frame would yield sf == 0. TODO confirm.
   int xsf = 1280 / width;
   int ysf = 720 / height;
   int sf = xsf;

   if (ysf < sf)
      sf = ysf;

   tgtw = width * sf;
   tgth = height * sf;
   // Offsets that center the scaled frame in the 1280x720 image.
   centerx = (1280-tgtw)/2;
   centery = (720-tgth)/2;

   // clear image to black
   for(y = 0; y < 720; y++)
   {
      for(x = 0; x < 1280; x++)
      {
         sw->image[y*1280+x] = 0xFF000000;
      }
   }

   if(width > 0 && height > 0)
   {
      // Regenerate the scaler only when the core's output geometry changes.
      if(sw->last_width != width || sw->last_height != height)
      {
         scaler_ctx_gen_reset(&sw->scaler);

         sw->scaler.in_width  = width;
         sw->scaler.in_height = height;
         sw->scaler.in_stride = pitch;
         sw->scaler.in_fmt = sw->rgb32 ? SCALER_FMT_ARGB8888 : SCALER_FMT_RGB565;

         sw->scaler.out_width  = tgtw;
         sw->scaler.out_height = tgth;
         sw->scaler.out_stride = 1280 * sizeof(uint32_t);
         sw->scaler.out_fmt = SCALER_FMT_ABGR8888;

         sw->scaler.scaler_type = SCALER_TYPE_POINT;

         if(!scaler_ctx_gen_filter(&sw->scaler))
         {
            RARCH_ERR("failed to generate scaler for main image\n");
            return false;
         }

         sw->last_width = width;
         sw->last_height = height;
      }

      // Scale directly into the centered region of the staging image.
      scaler_ctx_scale(&sw->scaler, sw->image + (centery * 1280) + centerx, frame);
   }

#if defined(HAVE_MENU)
   if (sw->menu_texture.enable)
   {
      menu_driver_frame(video_info);

      if (sw->menu_texture.pixels)
      {
#if 0
         if (sw->menu_texture.fullscreen)
         {
#endif
            // Blit the menu texture centered over the frame.
            scaler_ctx_scale(&sw->menu_texture.scaler, sw->image +
                  ((720-sw->menu_texture.tgth)/2)*1280 +
                  ((1280-sw->menu_texture.tgtw)/2), sw->menu_texture.pixels);
#if 0
         }
         else
         {
         }
#endif
      }
   }
   else if (video_info->statistics_show)
   {
      struct font_params *osd_params = (struct font_params*)
         &video_info->osd_stat_params;

      if (osd_params)
      {
         font_driver_render_msg(video_info, NULL, video_info->stat_text,
               (const struct font_params*)&video_info->osd_stat_params);
      }
   }
#endif

#if 0
   // Dead debug code: tear down and exit after ~6000 frames.
   if (frame_count > 6000)
   {
      display_finalize();
      exit(0);
   }
#endif

   if (msg && strlen(msg) > 0)
      RARCH_LOG("message: %s\n", msg);

   r = surface_dequeue_buffer(&sw->surface, &out_buffer);

   if (sw->vsync)
      switch_wait_vsync(sw);

   svcSleepThread(10000);

   // A failed dequeue is treated as success so the core keeps running.
   if(r != RESULT_OK)
   {
      return true; // just skip the frame
   }

   // Swizzle the linear staging image into the GPU buffer layout.
   gfx_slow_swizzling_blit(out_buffer, sw->image, 1280, 720, 0, 0);

   r = surface_queue_buffer(&sw->surface);

   if (r != RESULT_OK)
      return false;

   last_frame = svcGetSystemTick();

   return true;
}
static void *v4l_init(const char *device, uint64_t caps, unsigned width, unsigned height) { struct stat st; if ((caps & (1ULL << RETRO_CAMERA_BUFFER_RAW_FRAMEBUFFER)) == 0) { RARCH_ERR("video4linux2 returns raw framebuffers.\n"); return NULL; } video4linux_t *v4l = (video4linux_t*)calloc(1, sizeof(video4linux_t)); if (!v4l) return NULL; strlcpy(v4l->dev_name, device ? device : "/dev/video0", sizeof(v4l->dev_name)); v4l->width = width; v4l->height = height; v4l->ready = false; if (stat(v4l->dev_name, &st) == -1) { RARCH_ERR("Cannot identify '%s' : %d, %s\n", v4l->dev_name, errno, strerror(errno)); goto error; } if (!S_ISCHR(st.st_mode)) { RARCH_ERR("%s is no device.\n", v4l->dev_name); goto error; } v4l->fd = open(v4l->dev_name, O_RDWR | O_NONBLOCK, 0); if (v4l->fd == -1) { RARCH_ERR("Cannot open '%s': %d, %s\n", v4l->dev_name, errno, strerror(errno)); goto error; } if (!init_device(v4l)) goto error; v4l->buffer_output = (uint32_t*) malloc(v4l->width * v4l->height * sizeof(uint32_t)); if (!v4l->buffer_output) { RARCH_ERR("Failed to allocate output buffer.\n"); goto error; } v4l->scaler.in_width = v4l->scaler.out_width = v4l->width; v4l->scaler.in_height = v4l->scaler.out_height = v4l->height; v4l->scaler.in_fmt = SCALER_FMT_YUYV; v4l->scaler.out_fmt = SCALER_FMT_ARGB8888; v4l->scaler.in_stride = v4l->pitch; v4l->scaler.out_stride = v4l->width * 4; if (!scaler_ctx_gen_filter(&v4l->scaler)) { RARCH_ERR("Failed to create scaler.\n"); goto error; } return v4l; error: RARCH_ERR("V4L2: Failed to initialize camera.\n"); v4l_free(v4l); return NULL; }