static obs_properties_t *scale_filter_properties(void *data) { obs_properties_t *props = obs_properties_create(); struct obs_video_info ovi; obs_property_t *p; uint32_t cx; uint32_t cy; struct { int cx; int cy; } downscales[NUM_DOWNSCALES]; /* ----------------- */ obs_get_video_info(&ovi); cx = ovi.base_width; cy = ovi.base_height; for (size_t i = 0; i < NUM_DOWNSCALES; i++) { downscales[i].cx = (int)((double)cx / downscale_vals[i]); downscales[i].cy = (int)((double)cy / downscale_vals[i]); } p = obs_properties_add_list(props, S_SAMPLING, T_SAMPLING, OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_set_modified_callback(p, sampling_modified); obs_property_list_add_string(p, T_SAMPLING_POINT, S_SAMPLING_POINT); obs_property_list_add_string(p, T_SAMPLING_BILINEAR, S_SAMPLING_BILINEAR); obs_property_list_add_string(p, T_SAMPLING_BICUBIC, S_SAMPLING_BICUBIC); obs_property_list_add_string(p, T_SAMPLING_LANCZOS, S_SAMPLING_LANCZOS); /* ----------------- */ p = obs_properties_add_list(props, S_RESOLUTION, T_RESOLUTION, OBS_COMBO_TYPE_EDITABLE, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, T_NONE, T_NONE); obs_property_list_add_string(p, T_BASE, T_BASE); for (size_t i = 0; i < NUM_ASPECTS; i++) obs_property_list_add_string(p, aspects[i], aspects[i]); for (size_t i = 0; i < NUM_DOWNSCALES; i++) { char str[32]; snprintf(str, 32, "%dx%d", downscales[i].cx, downscales[i].cy); obs_property_list_add_string(p, str, str); } obs_properties_add_bool(props, S_UNDISTORT, T_UNDISTORT); /* ----------------- */ UNUSED_PARAMETER(data); return props; }
/* Display-render callback for a projector window.  Runs on the graphics
 * thread; draws the projector's assigned source (or the main output
 * texture when no source is set), scaled and centered in the cx x cy
 * window area. */
void OBSProjector::OBSRender(void *data, uint32_t cx, uint32_t cy)
{
	OBSProjector *window = reinterpret_cast<OBSProjector*>(data);

	/* Skip rendering until the window finished initializing */
	if (!window->ready)
		return;

	OBSBasic *main = reinterpret_cast<OBSBasic*>(App()->GetMainWindow());
	OBSSource source = window->source;

	uint32_t targetCX;
	uint32_t targetCY;
	int x, y;
	int newCX, newCY;
	float scale;

	if (source) {
		/* Clamp to 1 so GetScaleAndCenterPos never divides by zero
		 * while the source is still sizing up */
		targetCX = std::max(obs_source_get_width(source), 1u);
		targetCY = std::max(obs_source_get_height(source), 1u);
	} else {
		/* No source assigned: project the main canvas at its base
		 * resolution */
		struct obs_video_info ovi;
		obs_get_video_info(&ovi);
		targetCX = ovi.base_width;
		targetCY = ovi.base_height;
	}

	GetScaleAndCenterPos(targetCX, targetCY, cx, cy, x, y, scale);

	newCX = int(scale * float(targetCX));
	newCY = int(scale * float(targetCY));

	startRegion(x, y, newCX, newCY, 0.0f, float(targetCX), 0.0f,
			float(targetCY));

	/* In studio mode a preview projector must track the currently
	 * previewed scene: swap the "showing" refcounts and retarget.
	 * NOTE(review): window->source is mutated here on the render
	 * thread -- presumably other accesses are coordinated elsewhere;
	 * verify against the rest of the class. */
	if (window->type == ProjectorType::Preview &&
	    main->IsPreviewProgramMode()) {
		OBSSource curSource = main->GetCurrentSceneSource();

		if (source != curSource) {
			obs_source_dec_showing(source);
			obs_source_inc_showing(curSource);
			source = curSource;
			window->source = source;
		}
	}

	if (source)
		obs_source_video_render(source);
	else
		obs_render_main_texture();

	endRegion();
}
/* Returns the current OBS base (canvas) resolution as a vec2.
 * Yields (0, 0) when no video info is available. */
static inline vec2 GetOBSScreenSize()
{
	vec2 result;
	obs_video_info info;

	vec2_zero(&result);

	if (!obs_get_video_info(&info))
		return result;

	result.x = float(info.base_width);
	result.y = float(info.base_height);
	return result;
}
/*
 * Parse the filter settings: the target resolution (exact "WxH", aspect
 * ratio "W:H", or the base canvas resolution) and the sampling method.
 * Marks the filter invalid when the resolution string cannot be parsed
 * or specifies non-positive dimensions.
 */
static void scale_filter_update(void *data, obs_data_t *settings)
{
	struct scale_filter_data *filter = data;
	int ret;

	const char *res_str = obs_data_get_string(settings, S_RESOLUTION);
	const char *sampling = obs_data_get_string(settings, S_SAMPLING);

	filter->valid = true;
	filter->base_canvas_resolution = false;

	if (strcmp(res_str, T_BASE) == 0) {
		/* Track the canvas size; it is refreshed every tick so
		 * runtime canvas changes are picked up */
		struct obs_video_info ovi;
		obs_get_video_info(&ovi);
		filter->aspect_ratio_only = false;
		filter->base_canvas_resolution = true;
		filter->cx_in = ovi.base_width;
		filter->cy_in = ovi.base_height;
	} else {
		ret = sscanf(res_str, "%dx%d", &filter->cx_in, &filter->cy_in);
		if (ret == 2) {
			filter->aspect_ratio_only = false;
		} else {
			ret = sscanf(res_str, "%d:%d",
					&filter->cx_in, &filter->cy_in);
			if (ret != 2) {
				filter->valid = false;
				return;
			}

			filter->aspect_ratio_only = true;
		}

		/* Reject zero/negative dimensions; they would otherwise
		 * cause a divide-by-zero when the aspect ratio is computed
		 * in the tick callback */
		if (filter->cx_in <= 0 || filter->cy_in <= 0) {
			filter->valid = false;
			return;
		}
	}

	if (astrcmpi(sampling, S_SAMPLING_POINT) == 0) {
		filter->sampling = OBS_SCALE_POINT;

	} else if (astrcmpi(sampling, S_SAMPLING_BILINEAR) == 0) {
		filter->sampling = OBS_SCALE_BILINEAR;

	} else if (astrcmpi(sampling, S_SAMPLING_LANCZOS) == 0) {
		filter->sampling = OBS_SCALE_LANCZOS;

	} else { /* S_SAMPLING_BICUBIC */
		filter->sampling = OBS_SCALE_BICUBIC;
	}

	filter->undistort = obs_data_get_bool(settings, S_UNDISTORT);
}
/* Commits the video page's selections (canvas resolution and FPS choice)
 * into the wizard state.  Always accepts the page. */
bool AutoConfigVideoPage::validatePage()
{
	int packedRes = ui->canvasRes->currentData().toInt();

	/* Resolution combo data is packed as (width << 16) | height */
	wiz->baseResolutionCX = packedRes >> 16;
	wiz->baseResolutionCY = packedRes & 0xFFFF;
	wiz->fpsType = (AutoConfig::FPSType)ui->fps->currentData().toInt();

	obs_video_info ovi;
	obs_get_video_info(&ovi);

	/* 0/0 means "let the wizard decide the frame rate" */
	int num = 0;
	int den = 0;
	bool highFPS = false;

	switch (wiz->fpsType) {
	case AutoConfig::FPSType::PreferHighFPS:
		highFPS = true;
		break;
	case AutoConfig::FPSType::PreferHighRes:
		break;
	case AutoConfig::FPSType::UseCurrent:
		num = ovi.fps_num;
		den = ovi.fps_den;
		break;
	case AutoConfig::FPSType::fps30:
		num = 30;
		den = 1;
		break;
	case AutoConfig::FPSType::fps60:
		num = 60;
		den = 1;
		break;
	}

	wiz->specificFPSNum = num;
	wiz->specificFPSDen = den;
	wiz->preferHighFPS = highFPS;

	return true;
}
/* Display-render callback for a projector window (graphics thread).
 * Draws the projector's source -- or the main output view when no source
 * is set -- scaled and centered inside the cx x cy window area. */
void OBSProjector::OBSRender(void *data, uint32_t cx, uint32_t cy)
{
	OBSProjector *window = reinterpret_cast<OBSProjector*>(data);

	uint32_t targetCX;
	uint32_t targetCY;
	int x, y;
	int newCX, newCY;
	float scale;

	if (window->source) {
		/* Clamp to 1 so GetScaleAndCenterPos never divides by zero
		 * while the source is still sizing up */
		targetCX = std::max(obs_source_get_width(window->source), 1u);
		targetCY = std::max(obs_source_get_height(window->source), 1u);
	} else {
		/* No source assigned: project the main canvas at its base
		 * resolution */
		struct obs_video_info ovi;
		obs_get_video_info(&ovi);
		targetCX = ovi.base_width;
		targetCY = ovi.base_height;
	}

	GetScaleAndCenterPos(targetCX, targetCY, cx, cy, x, y, scale);

	newCX = int(scale * float(targetCX));
	newCY = int(scale * float(targetCY));

	/* Save and restore the GPU viewport/projection around our custom
	 * ortho projection; push/pop order must mirror exactly */
	gs_viewport_push();
	gs_projection_push();
	gs_ortho(0.0f, float(targetCX), 0.0f, float(targetCY),
			-100.0f, 100.0f);
	gs_set_viewport(x, y, newCX, newCY);

	if (window->source)
		obs_source_video_render(window->source);
	else
		obs_render_main_view();

	gs_projection_pop();
	gs_viewport_pop();
}
/*
 * Per-frame tick: recomputes the filter's output size from the current
 * target source size and the configured resolution/aspect settings, then
 * selects the scaling effect (and its parameters) used for rendering.
 */
static void scale_filter_tick(void *data, float seconds)
{
	struct scale_filter_data *filter = data;
	enum obs_base_effect type;
	obs_source_t *target;
	bool lower_than_2x;
	double cx_f;
	double cy_f;
	int cx;
	int cy;

	/* "Base" mode re-reads the canvas resolution every frame so that
	 * runtime canvas changes are picked up */
	if (filter->base_canvas_resolution) {
		struct obs_video_info ovi;
		obs_get_video_info(&ovi);
		filter->cx_in = ovi.base_width;
		filter->cy_in = ovi.base_height;
	}

	target = obs_filter_get_target(filter->context);
	filter->cx_out = 0;
	filter->cy_out = 0;

	filter->target_valid = !!target;
	if (!filter->target_valid)
		return;

	cx = obs_source_get_base_width(target);
	cy = obs_source_get_base_height(target);

	/* A target with no size yet cannot be scaled */
	if (!cx || !cy) {
		filter->target_valid = false;
		return;
	}

	filter->cx_out = cx;
	filter->cy_out = cy;

	/* Invalid settings: pass through at the target's own size */
	if (!filter->valid)
		return;

	/* ------------------------- */

	cx_f = (double)cx;
	cy_f = (double)cy;

	double old_aspect = cx_f / cy_f;
	double new_aspect = (double)filter->cx_in / (double)filter->cy_in;

	if (filter->aspect_ratio_only) {
		/* Aspect-only mode: if the target already has the requested
		 * aspect there is nothing to do (filter disabled); otherwise
		 * grow one dimension to reach the requested aspect ratio */
		if (fabs(old_aspect - new_aspect) <= EPSILON) {
			filter->target_valid = false;
			return;
		} else {
			if (new_aspect > old_aspect) {
				filter->cx_out = (int)(cy_f * new_aspect);
				filter->cy_out = cy;
			} else {
				filter->cx_out = cx;
				filter->cy_out = (int)(cx_f / new_aspect);
			}
		}
	} else {
		/* Exact-resolution mode: output is the configured size */
		filter->cx_out = filter->cx_in;
		filter->cy_out = filter->cy_in;
	}

	/* Reciprocal of the SOURCE dimensions, used by the shaders */
	vec2_set(&filter->dimension_i, 1.0f / (float)cx, 1.0f / (float)cy);

	if (filter->undistort) {
		filter->undistort_factor = new_aspect / old_aspect;
	} else {
		filter->undistort_factor = 1.0;
	}

	/* ------------------------- */

	/* Downscaling below half size with a filtering sampler switches to
	 * the dedicated low-resolution bilinear effect to reduce aliasing */
	lower_than_2x = filter->cx_out < cx / 2 || filter->cy_out < cy / 2;

	if (lower_than_2x && filter->sampling != OBS_SCALE_POINT) {
		type = OBS_EFFECT_BILINEAR_LOWRES;
	} else {
		switch (filter->sampling) {
		default:
		case OBS_SCALE_POINT:
		case OBS_SCALE_BILINEAR:
			type = OBS_EFFECT_DEFAULT;
			break;
		case OBS_SCALE_BICUBIC:
			type = OBS_EFFECT_BICUBIC;
			break;
		case OBS_SCALE_LANCZOS:
			type = OBS_EFFECT_LANCZOS;
			break;
		}
	}

	filter->effect = obs_get_base_effect(type);
	filter->image_param = gs_effect_get_param_by_name(filter->effect,
			"image");

	/* Only the non-default effects take extra shader parameters */
	if (type != OBS_EFFECT_DEFAULT) {
		filter->dimension_param = gs_effect_get_param_by_name(
				filter->effect, "base_dimension_i");
	} else {
		filter->dimension_param = NULL;
	}

	if (type == OBS_EFFECT_BICUBIC || type == OBS_EFFECT_LANCZOS) {
		filter->undistort_factor_param = gs_effect_get_param_by_name(
				filter->effect, "undistort_factor");
	} else {
		filter->undistort_factor_param = NULL;
	}

	UNUSED_PARAMETER(seconds);
}
/* Rebuilds the multiview projector's scene list, labels, and layout
 * metrics from the current canvas size and the user's configuration. */
void OBSProjector::UpdateMultiview()
{
	multiviewScenes.clear();
	multiviewLabels.clear();

	struct obs_video_info ovi;
	obs_get_video_info(&ovi);

	uint32_t w = ovi.base_width;
	uint32_t h = ovi.base_height;
	fw = float(w);
	fh = float(h);
	ratio = fw / fh;

	struct obs_frontend_source_list scenes = {};
	obs_frontend_get_scenes(&scenes);

	/* The first two labels are always Preview and Program */
	multiviewLabels.emplace_back(CreateLabel(Str("StudioMode.Preview"),
			h / 2));
	multiviewLabels.emplace_back(CreateLabel(Str("StudioMode.Program"),
			h / 2));

	multiviewLayout = static_cast<MultiviewLayout>(config_get_int(
			GetGlobalConfig(), "BasicWindow", "MultiviewLayout"));

	drawLabel = config_get_bool(GetGlobalConfig(), "BasicWindow",
			"MultiviewDrawNames");

	drawSafeArea = config_get_bool(GetGlobalConfig(), "BasicWindow",
			"MultiviewDrawAreas");

	mouseSwitching = config_get_bool(GetGlobalConfig(), "BasicWindow",
			"MultiviewMouseSwitch");

	transitionOnDoubleClick = config_get_bool(GetGlobalConfig(),
			"BasicWindow", "TransitionOnDoubleClick");

	/* Layout-dependent metrics: size of the preview/program pane and
	 * the maximum number of scene cells */
	switch(multiviewLayout) {
	case MultiviewLayout::HORIZONTAL_TOP_24_SCENES:
		pvwprgCX = fw / 3;
		pvwprgCY = fh / 3;
		maxSrcs = 24;
		break;
	default:
		pvwprgCX = fw / 2;
		pvwprgCY = fh / 2;
		maxSrcs = 8;
	}

	/* Inner sizes subtract the border thickness on both sides */
	ppiCX = pvwprgCX - thicknessx2;
	ppiCY = pvwprgCY - thicknessx2;
	ppiScaleX = (pvwprgCX - thicknessx2) / fw;
	ppiScaleY = (pvwprgCY - thicknessx2) / fh;

	scenesCX = pvwprgCX / 2;
	scenesCY = pvwprgCY / 2;
	siCX = scenesCX - thicknessx2;
	siCY = scenesCY - thicknessx2;
	siScaleX = (scenesCX - thicknessx2) / fw;
	siScaleY = (scenesCY - thicknessx2) / fh;

	numSrcs = 0;
	size_t i = 0;
	while (i < scenes.sources.num && numSrcs < maxSrcs) {
		obs_source_t *src = scenes.sources.array[i++];
		OBSData data = obs_source_get_private_settings(src);

		/* OBSData holds its own reference, so immediately release
		 * the one returned by obs_source_get_private_settings */
		obs_data_release(data);

		obs_data_set_default_bool(data, "show_in_multiview", true);
		if (!obs_data_get_bool(data, "show_in_multiview"))
			continue;

		// We have a displayable source.
		numSrcs++;
		multiviewScenes.emplace_back(OBSGetWeakRef(src));
		obs_source_inc_showing(src);

		std::string name = std::to_string(numSrcs) + " - " +
			obs_source_get_name(src);
		multiviewLabels.emplace_back(CreateLabel(name.c_str(), h / 3));
	}

	obs_frontend_source_list_free(&scenes);
}
/*
 * Auto-configuration wizard constructor.  Disables outputs while the
 * wizard runs, builds the wizard pages, detects whether Twitch's "auto"
 * ingest is available, and pre-populates the stream page from the user's
 * existing service configuration.
 */
AutoConfig::AutoConfig(QWidget *parent) : QWizard(parent)
{
	EnableThreadedMessageBoxes(true);

	/* Kick off a background refresh of the Twitch ingest list */
	calldata_t cd = {0};
	calldata_set_int(&cd, "seconds", 5);

	proc_handler_t *ph = obs_get_proc_handler();
	proc_handler_call(ph, "twitch_ingests_refresh", &cd);
	calldata_free(&cd);

	OBSBasic *main = reinterpret_cast<OBSBasic*>(parent);
	main->EnableOutputs(false);

	installEventFilter(CreateShortcutFilter());

	std::string serviceType;
	GetServiceInfo(serviceType, serviceName, server, key);
#ifdef _WIN32
	setWizardStyle(QWizard::ModernStyle);
#endif
	streamPage = new AutoConfigStreamPage();

	setPage(StartPage, new AutoConfigStartPage());
	setPage(VideoPage, new AutoConfigVideoPage());
	setPage(StreamPage, streamPage);
	setPage(TestPage, new AutoConfigTestPage());
	setWindowTitle(QTStr("Basic.AutoConfig"));
	setWindowFlags(windowFlags() & ~Qt::WindowContextHelpButtonHint);

	obs_video_info ovi;
	obs_get_video_info(&ovi);

	baseResolutionCX = ovi.base_width;
	baseResolutionCY = ovi.base_height;

	/* ----------------------------------------- */
	/* check to see if Twitch's "auto" available */

	OBSData twitchSettings = obs_data_create();
	obs_data_release(twitchSettings);

	obs_data_set_string(twitchSettings, "service", "Twitch");

	obs_properties_t *props = obs_get_service_properties("rtmp_common");
	obs_properties_apply_settings(props, twitchSettings);

	obs_property_t *p = obs_properties_get(props, "server");
	const char *first = obs_property_list_item_string(p, 0);
	/* Guard against an empty or missing server list --
	 * strcmp(NULL, ...) would crash */
	twitchAuto = first && strcmp(first, "auto") == 0;

	obs_properties_destroy(props);

	/* ----------------------------------------- */
	/* load service/servers                      */

	customServer = serviceType == "rtmp_custom";

	QComboBox *serviceList = streamPage->ui->service;

	if (!serviceName.empty()) {
		serviceList->blockSignals(true);

		int count = serviceList->count();
		bool found = false;

		for (int i = 0; i < count; i++) {
			QString name = serviceList->itemText(i);

			if (name == serviceName.c_str()) {
				serviceList->setCurrentIndex(i);
				found = true;
				break;
			}
		}

		/* Unknown service: add it at the top and select it */
		if (!found) {
			serviceList->insertItem(0, serviceName.c_str());
			serviceList->setCurrentIndex(0);
		}

		serviceList->blockSignals(false);
	}

	streamPage->UpdateServerList();
	streamPage->UpdateKeyLink();
	streamPage->lastService.clear();

	if (!customServer) {
		QComboBox *serverList = streamPage->ui->server;
		int idx = serverList->findData(QString(server.c_str()));
		if (idx == -1)
			idx = 0;

		serverList->setCurrentIndex(idx);
	} else {
		streamPage->ui->customServer->setText(server.c_str());
		int idx = streamPage->ui->service->findData(
				QVariant((int)ListOpt::Custom));
		streamPage->ui->service->setCurrentIndex(idx);
	}

	if (!key.empty())
		streamPage->ui->key->setText(key.c_str());

	int bitrate = config_get_int(main->Config(), "SimpleOutput",
			"VBitrate");
	streamPage->ui->bitrate->setValue(bitrate);
	streamPage->ServiceChanged();

	TestHardwareEncoding();
	if (!hardwareEncodingAvailable) {
		delete streamPage->ui->preferHardware;
		streamPage->ui->preferHardware = nullptr;
	} else {
		/* Newer generations of NVENC have a high enough quality to
		 * bitrate ratio that if NVENC is available, it makes sense to
		 * just always prefer hardware encoding by default */
		bool preferHardware = nvencAvailable ||
				os_get_physical_cores() <= 4;
		streamPage->ui->preferHardware->setChecked(preferHardware);
	}

	setOptions(0);
	setButtonText(QWizard::FinishButton,
			QTStr("Basic.AutoConfig.ApplySettings"));
	setButtonText(QWizard::BackButton, QTStr("Back"));
	setButtonText(QWizard::NextButton, QTStr("Next"));
	setButtonText(QWizard::CancelButton, QTStr("Cancel"));
}
/* Builds the auto-config wizard's video page: FPS choices and canvas
 * resolution choices (current canvas, each attached display, and two
 * common presets). */
AutoConfigVideoPage::AutoConfigVideoPage(QWidget *parent)
	: QWizardPage (parent),
	  ui          (new Ui_AutoConfigVideoPage)
{
	ui->setupUi(this);

	setTitle(QTStr("Basic.AutoConfig.VideoPage"));
	setSubTitle(QTStr("Basic.AutoConfig.VideoPage.SubTitle"));

	obs_video_info ovi;
	obs_get_video_info(&ovi);

	/* Format the current FPS; fractional rates get two decimals */
	long double curFPS = (long double)ovi.fps_num /
		(long double)ovi.fps_den;
	QString curFPSStr = (ovi.fps_den > 1)
		? QString::number(curFPS, 'f', 2)
		: QString::number(curFPS, 'g', 2);

	ui->fps->addItem(QTStr(FPS_PREFER_HIGH_FPS),
			(int)AutoConfig::FPSType::PreferHighFPS);
	ui->fps->addItem(QTStr(FPS_PREFER_HIGH_RES),
			(int)AutoConfig::FPSType::PreferHighRes);
	ui->fps->addItem(QTStr(FPS_USE_CURRENT).arg(curFPSStr),
			(int)AutoConfig::FPSType::UseCurrent);
	ui->fps->addItem(QStringLiteral("30"),
			(int)AutoConfig::FPSType::fps30);
	ui->fps->addItem(QStringLiteral("60"),
			(int)AutoConfig::FPSType::fps60);
	ui->fps->setCurrentIndex(0);

	/* Resolutions are packed into the item data as (cx << 16) | cy */
	int encRes = int(ovi.base_width << 16) | int(ovi.base_height);
	ui->canvasRes->addItem(QTStr(RES_USE_CURRENT).arg(
				QString::number(ovi.base_width),
				QString::number(ovi.base_height)),
			(int)encRes);

	QList<QScreen*> screens = QGuiApplication::screens();
	for (int i = 0; i < screens.size(); i++) {
		QSize screenSize = screens[i]->size();

		encRes = int(screenSize.width() << 16) |
			int(screenSize.height());

		QString str = QTStr(RES_USE_DISPLAY)
			.arg(QString::number(i + 1),
			     QString::number(screenSize.width()),
			     QString::number(screenSize.height()));

		ui->canvasRes->addItem(str, encRes);
	}

	auto addRes = [&] (int cx, int cy)
	{
		encRes = (cx << 16) | cy;
		ui->canvasRes->addItem(QString("%1x%2").arg(
					QString::number(cx),
					QString::number(cy)),
				encRes);
	};

	addRes(1920, 1080);
	addRes(1280, 720);

	ui->canvasRes->setCurrentIndex(0);
}