/* Create an audio encoder instance bound to audio mixer track mixer_idx.
 * Returns NULL when either the encoder id or the name is missing;
 * otherwise delegates to the shared create_encoder() helper. */
obs_encoder_t *obs_audio_encoder_create(const char *id, const char *name,
		obs_data_t *settings, size_t mixer_idx, obs_data_t *hotkey_data)
{
	if (!id || !name)
		return NULL;

	return create_encoder(id, OBS_ENCODER_AUDIO, name, settings,
			mixer_idx, hotkey_data);
}
/* Create a video encoder instance.  Video encoders have no mixer track,
 * so 0 is passed for the mixer index.  Returns NULL when either the
 * encoder id or the name is missing. */
obs_encoder_t *obs_video_encoder_create(const char *id, const char *name,
		obs_data_t *settings, obs_data_t *hotkey_data)
{
	if (!id || !name)
		return NULL;

	return create_encoder(id, OBS_ENCODER_VIDEO, name, settings, 0,
			hotkey_data);
}
int main(int argc, char **argv) { FILE *dest = NULL; image frame_image; evx1_encoder *encoder; bit_stream cairo_stream; int32 frame_index = 0; int32 encoded_size = 0; int32 content_width = 0; int32 content_height = 0; int32 content_format = 0; EVX_MEDIA_FILE_HEADER header; evx_msg("Copyright (c) 2010-2014 Joe Bertolami. All Right Reserved."); if (4 != argc) { // No need to get fancy. evx_msg("Required syntax: convert <input_filename> quality <output_filename>"); return 0; } dest = fopen(argv[3], "wb"); if (!dest) { evx_msg("Error opening dest file %s", argv[3]); return 0; } ffmpeg_initialize(); if (0 != ffmpeg_play_file(argv[1], (int*) &content_format, (int*) &content_width, (int*) &content_height)) { evx_msg("Failed to open content file %s", argv[1]); ffmpeg_deinitialize(); fclose(dest); return 0; } create_encoder(&encoder); encoder->set_quality(atoi(argv[2])); create_image(EVX_IMAGE_FORMAT_R8G8B8, content_width, content_height, &frame_image); cairo_stream.resize_capacity((4*EVX_MB) << 3); _prepare_evx_header(&header, content_width, content_height); fwrite(&header, sizeof(header), 1, dest); while (ffmpeg_refresh(&encoded_size) >= 0) { ffmpeg_copy_current_frame(frame_image.query_data(), frame_image.query_row_pitch()); // encode using cairo and then flush the frame to disk. 
encoder->encode(frame_image.query_data(), frame_image.query_width(), frame_image.query_height(), &cairo_stream); EVX_MEDIA_FRAME_HEADER frame_header; frame_header.magic[0] = 'E'; frame_header.magic[0] = 'V'; frame_header.magic[0] = 'F'; frame_header.magic[0] = 'H'; frame_header.frame_index = frame_index++; frame_header.frame_size = cairo_stream.query_byte_occupancy(); frame_header.header_size = sizeof(frame_header); fwrite(&frame_header, sizeof(frame_header), 1, dest); fwrite(cairo_stream.query_data(), cairo_stream.query_byte_occupancy(), 1, dest); cairo_stream.empty(); if (0 == (frame_index % 10)) { evx_msg("Processing frame %i", frame_index); } } destroy_encoder(encoder); ffmpeg_deinitialize(); fclose(dest); return 0; }
/*
 * Set up the DRM mode-setting objects (encoders, connectors, CRTCs and
 * planes) for the OMAP display subsystem.  Two discovery paths: when the
 * board file supplies KMS platform data, it dictates exactly which DSS
 * managers/devices/overlays we own; otherwise everything DSS exposes is
 * claimed heuristically.  Always returns 0 -- failures inside the
 * create_* helpers are not propagated here.
 */
static int omap_modeset_init(struct drm_device *dev)
{
	const struct omap_drm_platform_data *pdata = dev->dev->platform_data;
	struct omap_kms_platform_data *kms_pdata = NULL;
	struct omap_drm_private *priv = dev->dev_private;
	struct omap_dss_device *dssdev = NULL;
	int i, j;
	/* presumably a bitmask with bit i set when connector i reports a
	 * detected connection -- see detect_connectors() for the contract */
	unsigned int connected_connectors = 0;

	drm_mode_config_init(dev);

	if (pdata && pdata->kms_pdata) {
		kms_pdata = pdata->kms_pdata;

		/* if platform data is provided by the board file, use it to
		 * control which overlays, managers, and devices we own.
		 */
		for (i = 0; i < kms_pdata->mgr_cnt; i++) {
			struct omap_overlay_manager *mgr =
					omap_dss_get_overlay_manager(
						kms_pdata->mgr_ids[i]);
			create_encoder(dev, mgr);
		}

		for (i = 0; i < kms_pdata->dev_cnt; i++) {
			struct omap_dss_device *dssdev =
					omap_dss_find_device(
						(void *)kms_pdata->dev_names[i],
						match_dev_name);
			if (!dssdev) {
				/* board file named a device DSS doesn't know;
				 * skip it rather than failing the whole init */
				dev_warn(dev->dev, "no such dssdev: %s\n",
						kms_pdata->dev_names[i]);
				continue;
			}
			create_connector(dev, dssdev);
		}

		connected_connectors = detect_connectors(dev);

		/* j tracks the next connector each new CRTC pairs with */
		j = 0;
		for (i = 0; i < kms_pdata->ovl_cnt; i++) {
			struct omap_overlay *ovl =
				omap_dss_get_overlay(kms_pdata->ovl_ids[i]);
			create_crtc(dev, ovl, &j, connected_connectors);
		}

		/* remaining board-designated overlays become drm planes,
		 * attachable to any CRTC created above */
		for (i = 0; i < kms_pdata->pln_cnt; i++) {
			struct omap_overlay *ovl =
				omap_dss_get_overlay(kms_pdata->pln_ids[i]);
			create_plane(dev, ovl, (1 << priv->num_crtcs) - 1);
		}
	} else {
		/* otherwise just grab up to CONFIG_DRM_OMAP_NUM_CRTCS and try
		 * to make educated guesses about everything else
		 */
		int max_overlays = min(omap_dss_get_num_overlays(), num_crtc);

		for (i = 0; i < omap_dss_get_num_overlay_managers(); i++) {
			create_encoder(dev, omap_dss_get_overlay_manager(i));
		}

		for_each_dss_dev(dssdev) {
			create_connector(dev, dssdev);
		}

		connected_connectors = detect_connectors(dev);

		j = 0;
		for (i = 0; i < max_overlays; i++) {
			create_crtc(dev, omap_dss_get_overlay(i), &j,
					connected_connectors);
		}

		/* use any remaining overlays as drm planes */
		for (; i < omap_dss_get_num_overlays(); i++) {
			struct omap_overlay *ovl = omap_dss_get_overlay(i);
			create_plane(dev, ovl, (1 << priv->num_crtcs) - 1);
		}
	}

	/* for now keep the mapping of CRTCs and encoders static.. */
	for (i = 0; i < priv->num_encoders; i++) {
		struct drm_encoder *encoder = priv->encoders[i];
		struct omap_overlay_manager *mgr =
				omap_encoder_get_manager(encoder);
		/* every encoder may drive any CRTC */
		encoder->possible_crtcs = (1 << priv->num_crtcs) - 1;
		DBG("%s: possible_crtcs=%08x", mgr->name,
				encoder->possible_crtcs);
	}

	dump_video_chains();

	dev->mode_config.min_width = 32;
	dev->mode_config.min_height = 32;

	/* note: eventually will need some cpu_is_omapXYZ() type stuff here
	 * to fill in these limits properly on different OMAP generations..
	 */
	dev->mode_config.max_width = 2048;
	dev->mode_config.max_height = 2048;

	dev->mode_config.funcs = &omap_mode_config_funcs;

	return 0;
}
/*
 * Build the audio half of the RTSP media pipeline:
 *   audiosrc -> capsfilter -> audioconvert -> audiorate -> encoder -> pay
 * All elements are added to @bin; the payloader element is returned so the
 * caller can wire it up, or NULL on failure.
 */
static GstElement *
create_audio_payloader (GstRTSPCamMediaFactory *factory, GstElement *bin,
    gint payloader_number)
{
  GstElement *encoder;
  GstElement *pay;
  GstElement *audiosrc;
  GstElement *audioconvert;
  GstElement *audiorate;
  GstElement *capsfilter;
  gchar *audio_formats[] = {"audio/x-raw-float", "audio/x-raw-int", NULL};
  GstCaps *audio_caps;
  gchar *capss;
  gboolean linked;
  int i;

  /* encoder is chosen from the factory's configured audio codec */
  encoder = create_encoder (factory, factory->audio_codec);
  if (encoder == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create encoder ");
    return NULL;
  }

  pay = create_payloader (factory, factory->audio_codec, payloader_number,
      AUDIO_PAYLOAD_TYPE);
  if (pay == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create payloader ");
    gst_object_unref (encoder);
    return NULL;
  }

  audiosrc = gst_element_factory_make(factory->audio_source, NULL);
  if (audiosrc == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create audio source");
    gst_object_unref (encoder);
    gst_object_unref (pay);
    return NULL;
  } else if (!g_strcmp0 (factory->audio_source, "jackaudiosrc"))
    /* NOTE(review): "connect"=2 on jackaudiosrc -- presumably the
     * auto-force-connect enum value; confirm against GstJackConnect */
    g_object_set(audiosrc, "connect", 2, NULL);

  /* NOTE(review): the three factory_make calls below are not NULL-checked;
   * a missing plugin would feed NULL into gst_bin_add_many/link_many */
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  audiorate = gst_element_factory_make ("audiorate", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);

  /* constrain the source to raw audio, optionally fixing the channel count;
   * audio_channels == -1 means "leave channels unconstrained" */
  audio_caps = gst_caps_new_empty ();
  for (i = 0; audio_formats[i] != NULL; i++) {
    GstStructure *structure = gst_structure_new (audio_formats[i], NULL);
    if (factory->audio_channels != -1)
      gst_structure_set (structure, "channels",
          G_TYPE_INT, factory->audio_channels, NULL);
    gst_caps_append_structure (audio_caps, structure);
  }

  /* log the final caps for debugging */
  capss = gst_caps_to_string (audio_caps);
  GST_INFO_OBJECT (factory, "setting audio caps %s", capss);
  g_free (capss);

  g_object_set (capsfilter, "caps", audio_caps, NULL);
  gst_caps_unref (audio_caps);

  /* ownership of all elements transfers to the bin here */
  gst_bin_add_many (GST_BIN (bin), audiosrc, capsfilter, audioconvert,
      audiorate, encoder, pay, NULL);
  linked = gst_element_link_many (audiosrc, capsfilter, audioconvert,
      audiorate, encoder, pay, NULL);
  if (!linked) {
    /* NOTE(review): this drops a reference on the caller-supplied bin on
     * link failure; verify the caller expects to lose its ref here */
    gst_object_unref (bin);
    return NULL;
  }

  return pay;
}
/*
 * Build the video half of the RTSP media pipeline:
 *   videosrc -> queue -> videorate -> ffmpegcolorspace -> videoscale
 *            -> capsfilter -> encoder -> pay
 * All elements are added to @bin; the payloader element is returned so the
 * caller can wire it up, or NULL on failure.
 */
static GstElement *
create_video_payloader (GstRTSPCamMediaFactory *factory, GstElement *bin,
    gint payloader_number)
{
  GstElement *encoder;
  GstElement *pay;
  GstElement *videosrc;
  GstElement *queue, *ffmpegcolorspace, *videoscale, *videorate;
  GstElement *capsfilter;
  gchar *image_formats[] = {"video/x-raw-rgb", "video/x-raw-yuv",
      "video/x-raw-gray", NULL};
  GstCaps *video_caps;
  gchar *capss;
  gboolean linked;
  int i;

  encoder = create_encoder (factory, factory->video_codec);
  if (encoder == NULL)
    return NULL;

  /* video_bitrate == -1 means "use the encoder's default" */
  if (factory->video_bitrate != -1)
    g_object_set (encoder, "bitrate", factory->video_bitrate, NULL);

  pay = create_payloader (factory, factory->video_codec, payloader_number,
      VIDEO_PAYLOAD_TYPE);
  /* NOTE(review): encoder leaks on this path -- the audio variant unrefs it */
  if (pay == NULL)
    return NULL;

  /* NOTE(review): videosrc is not NULL-checked before g_object_set, unlike
   * the audio path which warns and bails out */
  videosrc = gst_element_factory_make (factory->video_source, NULL);
  if (!g_strcmp0 (factory->video_source, "videotestsrc"))
    g_object_set (videosrc, "is-live", TRUE, NULL);
  else if (factory->video_device)
    /* don't set device for testsrc */
    g_object_set (videosrc, "device", factory->video_device, NULL);

  queue = gst_element_factory_make ("queue", NULL);
  ffmpegcolorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  videorate = gst_element_factory_make ("videorate", NULL);
  videoscale = gst_element_factory_make ("videoscale", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);

  /* build caps for each supported raw format; -1 / 0 sentinel values mean
   * the corresponding dimension or framerate is left unconstrained */
  video_caps = gst_caps_new_empty ();
  for (i = 0; image_formats[i] != NULL; i++) {
    GstStructure *structure = gst_structure_new (image_formats[i], NULL);
    if (factory->video_width != -1)
      gst_structure_set (structure, "width",
          G_TYPE_INT, factory->video_width, NULL);
    if (factory->video_height != -1)
      gst_structure_set (structure, "height",
          G_TYPE_INT, factory->video_height, NULL);
    if (factory->fps_n != 0 && factory->fps_d != 0)
      gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
          factory->fps_n, factory->fps_d, NULL);
    gst_caps_append_structure (video_caps, structure);
  }

  /* log the final caps for debugging */
  capss = gst_caps_to_string (video_caps);
  GST_INFO_OBJECT (factory, "setting video caps %s", capss);
  g_free (capss);

  g_object_set (capsfilter, "caps", video_caps, NULL);
  gst_caps_unref (video_caps);

  /* ownership of all elements transfers to the bin; note the link order
   * below intentionally differs from the add order (rate before colorspace) */
  gst_bin_add_many (GST_BIN (bin), videosrc, queue, ffmpegcolorspace,
      videoscale, videorate, capsfilter, encoder, pay, NULL);
  linked = gst_element_link_many (videosrc, queue, videorate,
      ffmpegcolorspace, videoscale, capsfilter, encoder, pay, NULL);
  /* NOTE(review): aborts the process on link failure instead of returning
   * NULL like the audio path does */
  g_assert(linked);

  return pay;
}
/* Create an audio encoder.  obs_encoder_t / obs_data_t are used by value
 * and NULL is returned, so they are presumably pointer typedefs in this
 * API revision -- confirm against the obs headers.  Returns NULL unless
 * both an encoder id and a name are supplied. */
obs_encoder_t obs_audio_encoder_create(const char *id, const char *name,
		obs_data_t settings)
{
	if (!id || !name)
		return NULL;

	return create_encoder(id, OBS_ENCODER_AUDIO, name, settings);
}
static int omap_modeset_init(struct drm_device *dev) { const struct omap_gpu_platform_data *pdata = dev->dev->platform_data; struct omap_gpu_private *priv = dev->dev_private; struct omap_dss_device *dssdev = NULL; int i, j; unsigned int connected_connectors = 0; /* create encoders for each manager */ int create_encoder(int i) { struct omap_overlay_manager *mgr = omap_dss_get_overlay_manager(i); struct drm_encoder *encoder = omap_encoder_init(dev, mgr); if (!encoder) { dev_err(dev->dev, "could not create encoder\n"); return -ENOMEM; } priv->encoders[priv->num_encoders++] = encoder; return 0; } /* create connectors for each display device */ int create_connector(struct omap_dss_device *dssdev) { static struct notifier_block *notifier; struct drm_connector *connector; if (!dssdev->driver) { dev_warn(dev->dev, "%s has no driver.. skipping it\n", dssdev->name); return 0; } if (!(dssdev->driver->get_timings || dssdev->driver->get_edid)) { dev_warn(dev->dev, "%s driver does not support " "get_timings or get_edid.. skipping it!\n", dssdev->name); return 0; } connector = omap_connector_init(dev, get_connector_type(dssdev->type), dssdev); if (!connector) { dev_err(dev->dev, "could not create connector\n"); return -ENOMEM; } /* track what is already connected.. 
rather than looping thru all * connectors twice later, first for connected then for remainder * (which could be a race condition if connected status changes) */ if (omap_connector_detect (connector) == connector_status_connected) { connected_connectors |= (1 << priv->num_connectors); } priv->connectors[priv->num_connectors++] = connector; notifier = kzalloc(sizeof(struct notifier_block), GFP_KERNEL); notifier->notifier_call = omap_gpu_notifier; omap_dss_add_notify(dssdev, notifier); for (j = 0; j < priv->num_encoders; j++) { struct omap_overlay_manager *mgr = omap_encoder_get_manager(priv->encoders[j]); if (mgr->device == dssdev) { drm_mode_connector_attach_encoder(connector, priv->encoders[j]); } } return 0; } /* create up to max_overlays CRTCs mapping to overlays.. by default, * connect the overlays to different managers/encoders, giving priority * to encoders connected to connectors with a detected connection */ int create_crtc(int i) { struct omap_overlay *ovl = omap_dss_get_overlay(i); struct omap_overlay_manager *mgr = NULL; struct drm_crtc *crtc; if (ovl->manager) { DBG("disconnecting %s from %s", ovl->name, ovl->manager->name); ovl->unset_manager(ovl); } /* find next best connector... 
ones with detected connection first */ while (j < priv->num_connectors && !mgr) { if (connected_connectors & (1 << j)) { struct drm_encoder * encoder = omap_connector_attached_encoder (priv->connectors[j]); if (encoder) { mgr = omap_encoder_get_manager (encoder); } } j++; } /* if we couldn't find another connected connector, lets start looking * at the unconnected connectors: */ while (j < 2 * priv->num_connectors && !mgr) { int idx = j - priv->num_connectors; if (!(connected_connectors & (1 << idx))) { struct drm_encoder * encoder = omap_connector_attached_encoder (priv->connectors[idx]); if (encoder) { mgr = omap_encoder_get_manager (encoder); } } j++; } if (mgr) { DBG("connecting %s to %s", ovl->name, mgr->name); ovl->set_manager(ovl, mgr); } crtc = omap_crtc_init(dev, ovl); if (!crtc) { dev_err(dev->dev, "could not create CRTC\n"); return -ENOMEM; } priv->crtcs[priv->num_crtcs++] = crtc; return 0; } drm_mode_config_init(dev); if (pdata) { /* if platform data is provided by the board file, use it to control * which overlays, managers, and devices we own. */ for (i = 0; i < pdata->mgr_cnt; i++) { if (create_encoder(pdata->mgr_ids[i])) { goto fail; } } for (i = 0; i < pdata->dev_cnt; i++) { int m(struct omap_dss_device *dssdev, void *data) { return ! strcmp(dssdev->name, data); } struct omap_dss_device *dssdev = omap_dss_find_device((void *)pdata->dev_names[i], m); if (!dssdev) { dev_warn(dev->dev, "no such dssdev: %s\n", pdata->dev_names[i]); continue; } if (create_connector(dssdev)) { goto fail; } } j = 0; for (i = 0; i < pdata->ovl_cnt; i++) { if (create_crtc(pdata->ovl_ids[i])) { goto fail; } } } else {