Example #1
RtlException* CreateRtlException(const char* hint)
{
  std::string msg;
  strerror_x(errno, &msg);
  if (!IsEmpty(hint))
    return new RtlException(errno, FormattedString("%s: %s", hint, msg.c_str()));
  else
    return new RtlException(errno, FormattedString("%s", msg.c_str()));
}
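The snippet above wraps errno in the codebase's own RtlException, using its strerror_x and FormattedString helpers. As a point of comparison, here is a minimal sketch of the same pattern using only the standard library; std::strerror, std::snprintf and std::runtime_error are stand-ins for those helpers, not the project's actual types:

#include <cerrno>
#include <cstdio>
#include <cstring>
#include <stdexcept>
#include <string>

// Build an errno-based error the same way CreateRtlException does,
// optionally prefixing the caller-supplied hint.
std::runtime_error MakeErrnoError(const char* hint)
{
    const char* msg = std::strerror(errno);   // stand-in for strerror_x
    char buf[512];
    if (hint && *hint)
        std::snprintf(buf, sizeof(buf), "%s: %s", hint, msg);
    else
        std::snprintf(buf, sizeof(buf), "%s", msg);
    return std::runtime_error(buf);           // stand-in for RtlException
}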
Example #2
BOOL CALLBACK RecordAllLoadedModules (PCTSTR ModuleName, DWORD64 ModuleBase, ULONG ModuleSize, PVOID UserContext)
{
    String &str = *(String*)UserContext;

#ifdef _WIN64
    str << FormattedString(TEXT("%016I64X-%016I64X %s\r\n"), ModuleBase, ModuleBase+ModuleSize, ModuleName);
#else
    str << FormattedString(TEXT("%08.8I64X-%08.8I64X %s\r\n"), ModuleBase, ModuleBase+ModuleSize, ModuleName);
#endif
    return TRUE;
}
Example #3
BOOL GraphicsCaptureSource::CheckFileIntegrity(LPCTSTR strDLL)
{
    HANDLE hFileTest = CreateFile(strDLL, GENERIC_READ | GENERIC_EXECUTE, FILE_SHARE_READ, NULL, OPEN_EXISTING, 0, NULL);
    if (hFileTest == INVALID_HANDLE_VALUE)
    {
        String strWarning;

        DWORD err = GetLastError();
        if (err == ERROR_FILE_NOT_FOUND)
            strWarning = TEXT("Important game capture files have been deleted. This is likely due to anti-virus software. Please make sure the OBS folder is excluded or ignored from any anti-virus / security software and re-install OBS.");
        else if (err == ERROR_ACCESS_DENIED)
            strWarning = TEXT("Important game capture files can not be loaded. This is likely due to anti-virus or security software. Please make sure the OBS folder is excluded / ignored from any anti-virus / security software.");
        else
            strWarning = FormattedString(TEXT("Important game capture files can not be loaded (error %d). This is likely due to anti-virus or security software. Please make sure the OBS folder is excluded / ignored from any anti-virus / security software."), err);

        Log(TEXT("GraphicsCaptureSource::CheckFileIntegrity: Error %d while accessing %s"), err, strDLL);

        //not sure if we should be using messagebox here, but probably better than "help why do i have black screen"
        OBSMessageBox(API->GetMainWindow(), strWarning.Array(), NULL, MB_ICONERROR | MB_OK);

        return FALSE;
    }
    else
    {
        CloseHandle(hFileTest);
        return TRUE;
    }
}
Example #4
File: API.cpp Project: Aslai/OBS
bool OBS::SetSceneCollection(CTSTR lpCollection) {
    if (bRunning)
        return false;

    App->scenesConfig.Save();
    CTSTR collection = GetCurrentSceneCollection();
    String strSceneCollectionPath;
    strSceneCollectionPath = FormattedString(L"%s\\sceneCollection\\%s.xconfig", lpAppDataPath, collection);

    if (!App->scenesConfig.Open(strSceneCollectionPath))
    {
        return false;
    }

    GlobalConfig->SetString(TEXT("General"), TEXT("SceneCollection"), lpCollection);
    App->scenesConfig.Close();
    App->ReloadSceneCollection();
    ResetSceneCollectionMenu();
    ResetApplicationName();
    App->UpdateNotificationAreaIcon();
    App->scenesConfig.SaveTo(String() << lpAppDataPath << "\\scenes.xconfig");

    if (API != NULL)
        ReportSwitchSceneCollections(lpCollection);

    return true;
}
Example #5
File: XT.cpp Project: robessog/OBS
void STDCALL TraceCrashEnd()
{
    String strStackTrace = TEXT("\r\nException Fault - Stack Trace:");

    for(unsigned int i=0; i<TraceFuncList.Num(); i++)
    {
        if(i) strStackTrace << TEXT(" -> ");
        if(!(i%10)) strStackTrace << TEXT("\r\n    ");
        strStackTrace << TraceFuncList[i];
    }

    if(TraceFuncList.Num() == MAX_STACK_TRACE)
        strStackTrace << TEXT(" -> ...");

    String strOut = FormattedString(TEXT("%s\r\n"), strStackTrace.Array());

    OpenLogFile();
    LogFile.WriteAsUTF8(strOut, strOut.Length());
    LogFile.WriteAsUTF8(TEXT("\r\n"));
    CloseLogFile();

    OSMessageBox(TEXT("Error: Exception fault - More info in the log file.\r\n\r\nMake sure you're using the latest version, otherwise send your log to [email protected]"));

    TraceFuncList.Clear();
    CriticalExit();
}
Example #6
bool UploadLog(String filename, LogUploadResult &result)
{
    String path = FormattedString(L"%s\\logs\\%s", OBSGetAppDataPath(), filename.Array());
    XFile f(path.Array(), XFILE_READ, XFILE_OPENEXISTING);
    if (!f.IsOpen()) {
        result.errors << FormattedString(Str("LogUpload.CannotOpenFile"), path.Array());
        return false;
    }

    String data;
    f.ReadFileToString(data);
    if (data.IsEmpty()) {
        result.errors << Str("LogUpload.EmptyLog");
        return false;
    }

    AppendGameCaptureLog(data);

    return UploadLogGitHub(filename.Array(), data, result);
}
Example #7
// Game Capture log is always appended, as requested by Jim (yes, this can result in two game capture logs in one upload)
static void AppendGameCaptureLog(String &data)
{
    String path = FormattedString(L"%s\\captureHookLog.txt", OBSGetPluginDataPath().Array());
    XFile f(path.Array(), XFILE_READ | XFILE_SHARED, XFILE_OPENEXISTING);
    if (!f.IsOpen())
        return;

    String append;
    f.ReadFileToString(append);
    data << L"\r\n\r\nLast Game Capture Log:\r\n" << append;
}
Example #8
    ~DelayedPublisher()
    {
        if(!bStopping)
        {
            App->EnableSceneSwitching(FALSE);
            EnableWindow (hwndMain, FALSE);

            bStreamEnding = true;
            HWND hwndProgressDialog = OBSCreateDialog(hinstMain, MAKEINTRESOURCE(IDD_ENDINGDELAY), hwndMain, (DLGPROC)EndDelayProc, (LPARAM)this);
            ProcessEvents();

            ShowWindow(hwndProgressDialog, TRUE);

            DWORD totalTimeLeft = delayTime;

            String strTimeLeftVal = Str("EndingDelay.TimeLeft");

            DWORD lastTimeLeft = -1;

            DWORD firstTime = OSGetTime();
            while(delayedPackets.Num() && !bCancelEnd)
            {
                ProcessEvents();

                DWORD timeElapsed = (OSGetTime()-firstTime);

                DWORD timeLeft = (totalTimeLeft-timeElapsed)/1000;
                DWORD timeLeftMinutes = timeLeft/60;
                DWORD timeLeftSeconds = timeLeft%60;

                if((timeLeft != lastTimeLeft) && (totalTimeLeft >= timeElapsed))
                {
                    String strTimeLeft = strTimeLeftVal;
                    strTimeLeft.FindReplace(TEXT("$1"), FormattedString(TEXT("%u:%02u"), timeLeftMinutes, timeLeftSeconds));
                    SetWindowText(GetDlgItem(hwndProgressDialog, IDC_TIMELEFT), strTimeLeft);
                    lastTimeLeft = timeLeft;
                }

                ProcessDelayedPackets(lastTimestamp+timeElapsed);
                if(bStopping)
                    bCancelEnd = true;

                Sleep(10);
            }

            EnableWindow (hwndMain, TRUE);
            App->EnableSceneSwitching(TRUE);
            DestroyWindow(hwndProgressDialog);
        }

        for(UINT i=0; i<delayedPackets.Num(); i++)
            delayedPackets[i].data.Clear();
    }
Example #9
void ThrowWin32FileException(const char* filePath)
{
  DWORD err = GetLastError();
  if (ERROR_SUCCESS != err)
  {
    switch (err)
    {
      case ERROR_FILE_NOT_FOUND:
      case ERROR_PATH_NOT_FOUND:
        throw new IOException(FormattedString(ERRMSG_FILE_NOT_FOUND, filePath));
      case ERROR_ACCESS_DENIED:
        throw new IOException(FormattedString(ERRMSG_CANNOT_ACCESS_FILE, filePath));
      case ERROR_DISK_FULL:
        throw new IOException(FormattedString(ERRMSG_NOT_ENOUGH_DISK_SPACE, filePath));
      case ERROR_BAD_PATHNAME:
        throw new IOException(FormattedString(ERRMSG_INVALID_FILE_NAME, filePath));
      default:
        throw new IOException(FormattedString(ERRMSG_CANNOT_ACCESS_FILE, filePath));
    }
  }
}
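ThrowWin32FileException above translates a handful of Win32 error codes into the codebase's IOException with preformatted ERRMSG_* strings. A minimal standard-library sketch of the same mapping (the function name, message texts and std::runtime_error are illustrative stand-ins, not the project's API):

#include <cstdio>
#include <stdexcept>

// Map a Win32-style error code to a formatted message and throw,
// mirroring the switch in ThrowWin32FileException.
[[noreturn]] void ThrowFileError(unsigned long err, const char* filePath)
{
    const char* fmt = "cannot access file '%s' (error %lu)";   // default case
    if (err == 2UL || err == 3UL)        // ERROR_FILE_NOT_FOUND / ERROR_PATH_NOT_FOUND
        fmt = "file '%s' not found (error %lu)";
    else if (err == 112UL)               // ERROR_DISK_FULL
        fmt = "not enough disk space for '%s' (error %lu)";
    char buf[512];
    std::snprintf(buf, sizeof(buf), fmt, filePath, err);
    throw std::runtime_error(buf);
}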
Example #10
int SetSliderText(HWND hwndParent, int controlSlider, int controlText)
{
    HWND hwndSlider = GetDlgItem(hwndParent, controlSlider);
    HWND hwndText   = GetDlgItem(hwndParent, controlText);

    int sliderVal = (int)SendMessage(hwndSlider, TBM_GETPOS, 0, 0);
    float floatVal = float(sliderVal)*0.01f;

    SetWindowText(hwndText, FormattedString(TEXT("%.02f"), floatVal));

    return sliderVal;
}
Example #11
    virtual bool SetBitRate(DWORD maxBitrate, DWORD bufferSize)
    {
        DWORD old_bitrate = paramData.rc.i_vbv_max_bitrate;
        DWORD old_buffer  = paramData.rc.i_vbv_buffer_size;

        SetBitRateParams(maxBitrate, bufferSize);

        int retVal = x264_encoder_reconfig(x264, &paramData);
        if (retVal < 0)
            Log(TEXT("Could not set new encoder bitrate, error value %u"), retVal);
        else
        {
            String changes;
            if (old_bitrate != maxBitrate)
                changes << FormattedString(L"bitrate %d->%d", old_bitrate, maxBitrate);
            if (old_buffer != bufferSize)
                changes << FormattedString(L"%sbuffer size %d->%d", changes.Length() ? L", " : L"", old_buffer, bufferSize);
            if (changes)
                Log(L"x264: %s", changes.Array());
        }

        return retVal == 0;
    }
Example #12
std::string NormalizePath(const char* path)
{
  std::string result;
  const char* t = path;
  // Handle path prefixes: C:, C:/, /, //server/
  if (ExtractPathPrefix(path, &result))
  {
    t += result.size();
#ifdef CFG_OS_WINDOWS
    ReplaceChar(result, '/', '\\');
#endif
  }
  // Split path into elements
  std::string token;
  TValueArray<std::string> elements;
  MultiString ms(t, FILE_PATH_DELIMITERS, NULL);
  for (int i = 0; i < ms.word_count(); ++i)
  {
    token = ms.word(i);
    if (!token.empty() && token != ".")
    {
      if (token == "..")
      {
        if (elements.Count() > 0 && elements[elements.Count() - 1] != "..")
          elements.SetCount(elements.Count() - 1);
        else if (result.empty())
          elements.Add(token);
        else
          throw new ArgumentException(FormattedString(
            "path '%s' cannot be normalized", path));
      }
      else
        elements.Add(token);
    }
  }
  // Join remaining elements
  if (elements.Count() > 0)
  {
    result.append(elements.Item(0));
    for (int i = 1; i < elements.Count(); ++i)
    {
      result.append(1, FILE_PATH_DELIMITERS[0]);
      result.append(elements.Item(i));
    }
  }
  return result;
}
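NormalizePath above collapses "." and ".." elements by hand after splitting the path into tokens. For comparison, a minimal sketch of the same lexical normalization using std::filesystem (C++17); this is not what the codebase uses, it just illustrates the idea:

#include <filesystem>
#include <iostream>
#include <string>

// Purely textual normalization: drops "." elements and folds "name/.." pairs,
// without touching the file system.
std::string NormalizeWithStdFs(const std::string& path)
{
    return std::filesystem::path(path).lexically_normal().string();
}

int main()
{
    // Prints "C:/work/logs" (with the platform's preferred separators).
    std::cout << NormalizeWithStdFs("C:/work/./tmp/../logs") << "\n";
}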
Example #13
void SettingsVideo::RefreshDownscales(HWND hwnd, int cx, int cy)
{
    int lastID = (int)SendMessage(hwnd, CB_GETCURSEL, 0, 0);

    SendMessage(hwnd, CB_RESETCONTENT, 0, 0);

    float downscale = AppConfig->GetFloat(TEXT("Video"), TEXT("Downscale"));
    bool bFoundVal = false;

    for(int i=0; i<multiplierCount; i++)
    {
        float multiplier = downscaleMultipliers[i];

        int scaleCX = int(float(cx)/multiplier) & 0xFFFFFFFE;
        int scaleCY = int(float(cy)/multiplier) & 0xFFFFFFFE;

        String strText;
        if(i == 0)
            strText << Str("None") << TEXT("  (") << IntString(scaleCX) << TEXT("x") << IntString(scaleCY) << TEXT(")");
        else
            strText << FormattedString(TEXT("%0.2f"), multiplier) << TEXT("  (") << IntString(scaleCX) << TEXT("x") << IntString(scaleCY) << TEXT(")");

        int id = (int)SendMessage(hwnd, CB_ADDSTRING, 0, (LPARAM)strText.Array());
        SendMessage(hwnd, CB_SETITEMDATA, id, (LPARAM)*(DWORD*)&multiplier);

        if(CloseFloat(downscale, multiplier))
        {
            if(lastID == CB_ERR)
                SendMessage(hwnd, CB_SETCURSEL, id, 0);
            downscale = multiplier;
            bFoundVal = true;
        }
    }

    if(!bFoundVal)
    {
        AppConfig->SetFloat(TEXT("Video"), TEXT("Downscale"), 1.0f);
        if(lastID == CB_ERR)
            SendMessage(hwnd, CB_SETCURSEL, 0, 0);

        SetChangedSettings(true);
    }

    if(lastID != CB_ERR)
        SendMessage(hwnd, CB_SETCURSEL, lastID, 0);
}
Example #14
// On Windows platform converts exception to external error info. Exception
// object passed as an argument is deleted before return. Returns
// error code, which should be returned by a calling procedure.
// On Unix platform the function simply re-throws exception.
HRESULT ConvertExceptionToComError(Exception* e, const char* source)
{
#ifdef CFG_WIN32
  //int errCode = e->ErrCode();
  int errCode = EXTERNAL_ERROR;
  std::string msg = ExceptionString(e);
#ifdef _DEBUG
  if (!IsEmpty(source))
    msg += FormattedString("\nSource: %s", source);
#endif // DEBUG
  CreateComErrorInfo(msg.c_str(), source, NULL, 0);
  delete e;
  return errCode;
#endif // CFG_WIN32
#ifdef API_POSIX
  throw e;
#endif // API_POSIX
}
Example #15
void LogInterfaceType (RTMP *rtmp)
{
    MIB_IPFORWARDROW    route;
    DWORD               destAddr;
    CHAR                hostname[256];

    if (rtmp->Link.hostname.av_len >= sizeof(hostname)-1)
        return;

    strncpy (hostname, rtmp->Link.hostname.av_val, sizeof(hostname)-1);
    hostname[rtmp->Link.hostname.av_len] = 0;

    HOSTENT *h = gethostbyname(hostname);
    if (!h)
        return;

    destAddr = *(DWORD *)h->h_addr_list[0];

    if (!GetBestRoute (destAddr, rtmp->m_bindIP.addr.sin_addr.S_un.S_addr, &route))
    {
        MIB_IFROW row;
        zero (&row, sizeof(row));
        row.dwIndex = route.dwForwardIfIndex;

        if (!GetIfEntry (&row))
        {
            DWORD speed = row.dwSpeed / 1000000;
            TCHAR *type;
            String otherType;

            if (row.dwType == IF_TYPE_ETHERNET_CSMACD)
                type = TEXT("ethernet");
            else if (row.dwType == IF_TYPE_IEEE80211)
                type = TEXT("802.11");
            else
            {
                otherType = FormattedString (TEXT("type %d"), row.dwType);
                type = otherType.Array();
            }

            Log (TEXT("  Interface: %S (%s, %d mbps)"), row.bDescr, type, speed);
        }
    }
}
Example #16
void SettingsEncoding::ApplySettings()
{
    int quality = (int)SendMessage(GetDlgItem(hwnd, IDC_QUALITY), CB_GETCURSEL, 0, 0);
    if(quality != CB_ERR)
        AppConfig->SetInt(TEXT("Video Encoding"), TEXT("Quality"), quality);

    UINT bitrate = GetEditText(GetDlgItem(hwnd, IDC_MAXBITRATE)).ToInt();
    if(bitrate < 100) bitrate = 100;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), bitrate);

    UINT bufSize = GetEditText(GetDlgItem(hwnd, IDC_BUFFERSIZE)).ToInt();
    //if(bufSize < 100) bufSize = bitrate;  //R1CH: Allow users to enter 0 buffer size to disable VBV, its protected by checkbox anyway
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("BufferSize"), bufSize);

    if(App->GetVideoEncoder() != NULL) {
        if(App->GetVideoEncoder()->DynamicBitrateSupported())
        {
            int oldBitrate = App->GetVideoEncoder()->GetBitRate();
            App->GetVideoEncoder()->SetBitRate(bitrate, bufSize);
            if(oldBitrate != bitrate)
                Log(FormattedString(TEXT("Settings::Encoding: Changing bitrate from %dkb/s to %dkb/s"), oldBitrate, bitrate));
        }
    }

    String strTemp = GetCBText(GetDlgItem(hwnd, IDC_AUDIOCODEC));
    AppConfig->SetString(TEXT("Audio Encoding"), TEXT("Codec"), strTemp);

    strTemp = GetCBText(GetDlgItem(hwnd, IDC_AUDIOBITRATE));
    AppConfig->SetString(TEXT("Audio Encoding"), TEXT("Bitrate"), strTemp);

    int curSel = (int)SendMessage(GetDlgItem(hwnd, IDC_AUDIOFORMAT), CB_GETCURSEL, 0, 0);
    if(curSel != CB_ERR)
        AppConfig->SetInt(TEXT("Audio Encoding"), TEXT("Format"), curSel);

    bool bUseCBR = SendMessage(GetDlgItem(hwnd, IDC_USECBR), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("UseCBR"), bUseCBR);

    bool bPadCBR = SendMessage(GetDlgItem(hwnd, IDC_PADCBR), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("PadCBR"), bPadCBR);

    bool bCustomBuffer = SendMessage(GetDlgItem(hwnd, IDC_CUSTOMBUFFER), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("UseBufferSize"), bCustomBuffer);
}
Example #17
String ExpandRecordingFilename(String filename)
{
    SYSTEMTIME st;
    GetLocalTime(&st);
    filename.FindReplace(L"$Y", UIntString(st.wYear).Array());
    filename.FindReplace(L"$M", UIntString(st.wMonth).Array());
    filename.FindReplace(L"$0M", FormattedString(L"%02u", st.wMonth).Array());
    filename.FindReplace(L"$D", UIntString(st.wDay).Array());
    filename.FindReplace(L"$0D", FormattedString(L"%02u", st.wDay).Array());
    filename.FindReplace(L"$h", UIntString(st.wHour).Array());
    filename.FindReplace(L"$0h", FormattedString(L"%02u", st.wHour).Array());
    filename.FindReplace(L"$m", UIntString(st.wMinute).Array());
    filename.FindReplace(L"$0m", FormattedString(L"%02u", st.wMinute).Array());
    filename.FindReplace(L"$s", UIntString(st.wSecond).Array());
    filename.FindReplace(L"$0s", FormattedString(L"%02u", st.wSecond).Array());

    filename.FindReplace(L"$T", FormattedString(L"%u-%02u-%02u-%02u%02u-%02u", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond).Array());
    return filename;
}
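ExpandRecordingFilename above fills date/time tokens ("$Y", "$0M", "$T", ...) from SYSTEMTIME using zero-padded "%02u" formats. A minimal sketch of just the "$T" timestamp with the standard library (the function name is illustrative, not part of the codebase):

#include <cstdio>
#include <ctime>
#include <string>

// Produce the same YYYY-MM-DD-hhmm-ss layout the "$T" token expands to.
std::string TimestampToken()
{
    std::time_t now = std::time(nullptr);
    std::tm t{};
#ifdef _WIN32
    localtime_s(&t, &now);
#else
    localtime_r(&now, &t);
#endif
    char buf[32];
    std::snprintf(buf, sizeof(buf), "%d-%02d-%02d-%02d%02d-%02d",
                  t.tm_year + 1900, t.tm_mon + 1, t.tm_mday,
                  t.tm_hour, t.tm_min, t.tm_sec);
    return buf;
}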
Example #18
        }
        else if(operatorToken == TEXT(">>"))
        {
            int val1 = tstoi(curVal);
            int val2 = tstoi(nextVal);

            val1 >>= val2;
            curVal = IntString(val1);
        }
        else if(operatorToken == TEXT("*"))
        {
            float val1 = (float)tstof(curVal);
            float val2 = (float)tstof(nextVal);

            val1 *= val2;
            curVal = FormattedString(TEXT("%g"), val1);
        }
        else if(operatorToken == TEXT("/"))
        {
            float val1 = (float)tstof(curVal);
            float val2 = (float)tstof(nextVal);

            val1 /= val2;
            curVal = FormattedString(TEXT("%g"), val1);
        }
        else if(operatorToken == TEXT("+"))
        {
            float val1 = (float)tstof(curVal);
            float val2 = (float)tstof(nextVal);

            val1 += val2;
Example #19
void SettingsEncoding::ApplySettings()
{
    bool useQSV   = SendMessage(GetDlgItem(hwnd, IDC_ENCODERQSV), BM_GETCHECK, 0, 0) == BST_CHECKED;
    bool useNVENC = SendMessage(GetDlgItem(hwnd, IDC_ENCODERNVENC), BM_GETCHECK, 0, 0) == BST_CHECKED;
    bool usex264 = !useQSV && !useNVENC;

    String vcodec = AppConfig->GetString(L"Video Encoding", L"Encoder");

    bool useQSV_prev   = !!(vcodec == L"QSV");
    bool useNVENC_prev = !!(vcodec == L"NVENC");

    if (!hasQSV && !useQSV && useQSV_prev &&
            OBSMessageBox(hwnd, Str("Settings.Encoding.Video.EncoderQSVDisabledAfterApply"), Str("MessageBoxWarningCaption"), MB_ICONEXCLAMATION | MB_OKCANCEL) != IDOK)
    {
        SetAbortApplySettings(true);
        return;
    }

    if (!hasNVENC && !useNVENC && useNVENC_prev &&
            OBSMessageBox(hwnd, Str("Settings.Encoding.Video.EncoderNVENCDisabledAfterApply"), Str("MessageBoxWarningCaption"), MB_ICONEXCLAMATION | MB_OKCANCEL) != IDOK)
    {
        SetAbortApplySettings(true);
        return;
    }

    EnableWindow(GetDlgItem(hwnd, IDC_ENCODERQSV), hasQSV || useQSV);
    EnableWindow(GetDlgItem(hwnd, IDC_ENCODERNVENC), hasNVENC || useNVENC);

    AppConfig->SetString(L"Video Encoding", L"Encoder", useQSV ? L"QSV" : useNVENC ? L"NVENC" : L"x264");

    int quality = (int)SendMessage(GetDlgItem(hwnd, IDC_QUALITY), CB_GETCURSEL, 0, 0);
    if(quality != CB_ERR)
        AppConfig->SetInt(TEXT("Video Encoding"), TEXT("Quality"), quality);

    static const int minBitRate = 64;

    UINT bitrate = GetEditText(GetDlgItem(hwnd, IDC_MAXBITRATE)).ToInt();
    if (bitrate < minBitRate) bitrate = minBitRate;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), bitrate);

    UINT bufSize = GetEditText(GetDlgItem(hwnd, IDC_BUFFERSIZE)).ToInt();
    //if(bufSize < minBitRate) bufSize = bitrate;  //R1CH: Allow users to enter 0 buffer size to disable VBV, its protected by checkbox anyway
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("BufferSize"), bufSize);

    if(App->GetVideoEncoder() != NULL) {
        if(App->GetVideoEncoder()->DynamicBitrateSupported())
        {
            int oldBitrate = App->GetVideoEncoder()->GetBitRate();
            App->GetVideoEncoder()->SetBitRate(bitrate, bufSize);
            if(oldBitrate != bitrate)
                Log(FormattedString(TEXT("Settings::Encoding: Changing bitrate from %dkb/s to %dkb/s"), oldBitrate, bitrate));
        }
    }

    String strTemp = GetCBText(GetDlgItem(hwnd, IDC_AUDIOCODEC));
    AppConfig->SetString(TEXT("Audio Encoding"), TEXT("Codec"), strTemp);

    strTemp = GetCBText(GetDlgItem(hwnd, IDC_AUDIOBITRATE));
    AppConfig->SetString(TEXT("Audio Encoding"), TEXT("Bitrate"), strTemp);

    int curSel = (int)SendMessage(GetDlgItem(hwnd, IDC_AUDIOFORMAT), CB_GETCURSEL, 0, 0);
    if(curSel != CB_ERR)
        AppConfig->SetInt(TEXT("Audio Encoding"), TEXT("Format"), curSel);

    int curSelCh = (int)SendMessage(GetDlgItem(hwnd, IDC_AUDIOCHANNEL), CB_GETCURSEL, 0, 0);
    if(curSelCh != CB_ERR)
        AppConfig->SetInt(TEXT("Audio Encoding"), TEXT("isStereo"), curSelCh);

    bool bUseCBR = SendMessage(GetDlgItem(hwnd, IDC_USECBR), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("UseCBR"), bUseCBR);

    bool bPadCBR = SendMessage(GetDlgItem(hwnd, IDC_PADCBR), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("PadCBR"), bPadCBR);

    bool bCustomBuffer = SendMessage(GetDlgItem(hwnd, IDC_CUSTOMBUFFER), BM_GETCHECK, 0, 0) == BST_CHECKED;
    AppConfig->SetInt(TEXT("Video Encoding"), TEXT("UseBufferSize"), bCustomBuffer);
}
Example #20
void OBS::StartRecording()
{
    if (bRecording) return;
    int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2);

    bWriteToFile = networkMode == 1 || AppConfig->GetInt(TEXT("Publish"), TEXT("SaveToFile")) != 0;
    String strOutputFile = AppConfig->GetString(TEXT("Publish"), TEXT("SavePath"));

    strOutputFile.FindReplace(TEXT("\\"), TEXT("/"));

    // Don't request a keyframe while everything is starting up for the first time
    if(!bStartingUp) videoEncoder->RequestKeyframe();

    if (bWriteToFile)
    {
        OSFindData ofd;
        HANDLE hFind = NULL;
        bool bUseDateTimeName = true;
        bool bOverwrite = GlobalConfig->GetInt(L"General", L"OverwriteRecordings", false) != 0;

        if(!bOverwrite && (hFind = OSFindFirstFile(strOutputFile, ofd)))
        {
            String strFileExtension = GetPathExtension(strOutputFile);
            String strFileWithoutExtension = GetPathWithoutExtension(strOutputFile);

            if(strFileExtension.IsValid() && !ofd.bDirectory)
            {
                String strNewFilePath;
                UINT curFile = 0;

                do 
                {
                    strNewFilePath.Clear() << strFileWithoutExtension << TEXT(" (") << FormattedString(TEXT("%02u"), ++curFile) << TEXT(").") << strFileExtension;
                } while(OSFileExists(strNewFilePath));

                strOutputFile = strNewFilePath;

                bUseDateTimeName = false;
            }

            if(ofd.bDirectory)
                strOutputFile.AppendChar('/');

            OSFindClose(hFind);
        }

        if(bUseDateTimeName)
        {
            String strFileName = GetPathFileName(strOutputFile);

            if(!strFileName.IsValid() || !IsSafeFilename(strFileName))
            {
                SYSTEMTIME st;
                GetLocalTime(&st);

                String strDirectory = GetPathDirectory(strOutputFile);
                String file = strOutputFile.Right(strOutputFile.Length() - strDirectory.Length());
                String extension;

                if (!file.IsEmpty())
                    extension = GetPathExtension(file.Array());

                if(extension.IsEmpty())
                    extension = TEXT("mp4");
                strOutputFile = FormattedString(TEXT("%s/%u-%02u-%02u-%02u%02u-%02u.%s"), strDirectory.Array(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond, extension.Array());
            }
        }
    }

    if(!bTestStream && bWriteToFile && strOutputFile.IsValid())
    {
        String strFileExtension = GetPathExtension(strOutputFile);
        if(strFileExtension.CompareI(TEXT("flv")))
            fileStream = CreateFLVFileStream(strOutputFile);
        else if(strFileExtension.CompareI(TEXT("mp4")))
            fileStream = CreateMP4FileStream(strOutputFile);

        if(!fileStream)
        {
            Log(TEXT("Warning - OBSCapture::Start: Unable to create the file stream. Check the file path in Broadcast Settings."));
            MessageBox(hwndMain, Str("Capture.Start.FileStream.Warning"), Str("Capture.Start.FileStream.WarningCaption"), MB_OK | MB_ICONWARNING);        
            bRecording = false;
        }
        else {
            bRecording = true;
            ReportStartRecordingTrigger();
        }
        ConfigureStreamButtons();
    }
}
Example #21
String GetOutputFilename(bool replayBuffer=false)
{
    String path = OSGetDefaultVideoSavePath(replayBuffer ? L"\\Replay-$T.flv" : L"\\.flv");
    String strOutputFile = AppConfig->GetString(TEXT("Publish"), replayBuffer ? L"ReplayBufferSavePath" : L"SavePath", path.IsValid() ? path.Array() : nullptr);
    strOutputFile.FindReplace(TEXT("\\"), TEXT("/"));

    OSFindData ofd;
    HANDLE hFind = NULL;
    bool bUseDateTimeName = true;
    bool bOverwrite = GlobalConfig->GetInt(L"General", L"OverwriteRecordings", false) != 0;
    
    strOutputFile = ExpandRecordingFilename(strOutputFile);

    CreatePath(GetPathDirectory(strOutputFile));

    if (!bOverwrite && (hFind = OSFindFirstFile(strOutputFile, ofd)))
    {
        String strFileExtension = GetPathExtension(strOutputFile);
        String strFileWithoutExtension = GetPathWithoutExtension(strOutputFile);

        if (strFileExtension.IsValid() && !ofd.bDirectory)
        {
            String strNewFilePath;
            UINT curFile = 0;

            do
            {
                strNewFilePath.Clear() << strFileWithoutExtension << TEXT(" (") << FormattedString(TEXT("%02u"), ++curFile) << TEXT(").") << strFileExtension;
            } while (OSFileExists(strNewFilePath));

            strOutputFile = strNewFilePath;

            bUseDateTimeName = false;
        }

        if (ofd.bDirectory)
            strOutputFile.AppendChar('/');

        OSFindClose(hFind);
    }

    if (bUseDateTimeName)
    {
        String strFileName = GetPathFileName(strOutputFile);

        if (!strFileName.IsValid() || !IsSafeFilename(strFileName))
        {
            SYSTEMTIME st;
            GetLocalTime(&st);

            String strDirectory = GetPathDirectory(strOutputFile);
            String file = strOutputFile.Right(strOutputFile.Length() - strDirectory.Length());
            String extension;

            if (!file.IsEmpty())
                extension = GetPathExtension(file.Array());

            if (extension.IsEmpty())
                extension = TEXT("mp4");
            strOutputFile = FormattedString(TEXT("%s/%u-%02u-%02u-%02u%02u-%02u.%s"), strDirectory.Array(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond, extension.Array());
        }
    }

    return strOutputFile;
}
Example #22
void SettingsPublish::OptimizeSettings()
{
    auto refresh_on_exit = GuardScope([&] { SetWarningInfo(); });
    XConfig serverData;
    if (!serverData.Open(L"services.xconfig"))
        return;

    XElement *services = serverData.GetElement(L"services");
    if (!services)
        return;

    UINT numServices = services->NumElements();

    int serviceID = (int)SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETITEMDATA, SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETCURSEL, 0, 0), 0);
    XElement *r = nullptr;
    for (UINT i = 0; i < numServices; i++)
    {
        XElement *service = services->GetElementByID(i);
        if (service->GetInt(L"id") != serviceID)
            continue;

        //check to see if the service we're using has recommendations
        if (!service->HasItem(L"recommended"))
            return;

        r = service->GetElement(L"recommended");
        break;
    }

    if (!r)
        return;

    using optimizers_t = std::vector<std::function<void()>>;
    optimizers_t optimizers;

    String changes = Str("Settings.Publish.Optimize.Optimizations");

    String currentAudioCodec = AppConfig->GetString(L"Audio Encoding", L"Codec", L"AAC");
    int audioBitrate = AppConfig->GetInt(L"Audio Encoding", L"Bitrate", 96);

    if (r->HasItem(L"ratecontrol"))
    {
        bool useCBR = AppConfig->GetInt(L"Video Encoding", L"UseCBR", 1) != 0;
        CTSTR rc = r->GetString(L"ratecontrol");
        if (!scmp(rc, L"cbr") && !useCBR)
        {
            optimizers.push_back([] { AppConfig->SetInt(L"Video Encoding", L"UseCBR", 1); });
            changes << Str("Settings.Publish.Optimize.UseCBR");
        }
    }

    if (r->HasItem(L"max bitrate"))
    {
        int maxBitrate = AppConfig->GetInt(L"Video Encoding", L"MaxBitrate", 1000);
        int max_bitrate = r->GetInt(L"max bitrate");
        if (maxBitrate > max_bitrate)
        {
            optimizers.push_back([max_bitrate] { AppConfig->SetInt(L"Video Encoding", L"MaxBitrate", max_bitrate); });
            changes << FormattedString(Str("Settings.Publish.Optimize.Maxbitrate"), max_bitrate);
        }
    }

    if (r->HasItem(L"supported audio codec"))
    {
        StringList codecs;
        r->GetStringList(L"supported audio codec", codecs);
        if (codecs.FindValueIndex(currentAudioCodec) == INVALID)
        {
            String codec = codecs[0];
            optimizers.push_back([codec]
            {
                AppConfig->SetString(L"Audio Encoding", L"Codec", codec.Array());
                AppConfig->SetInt(L"Audio Encoding", L"Format", codec.CompareI(L"AAC") ? 1 : 0); //set to 44.1 kHz in case of MP3, see SettingsEncoding.cpp
            });
            changes << FormattedString(Str("Settings.Publish.Optimize.UnsupportedAudioCodec"), codec.Array());
        }
    }

    if (r->HasItem(L"max audio bitrate aac") && (!scmp(currentAudioCodec, L"AAC")))
    {
        int maxaudioaac = r->GetInt(L"max audio bitrate aac");
        if (audioBitrate > maxaudioaac)
        {
            optimizers.push_back([maxaudioaac] { AppConfig->SetInt(L"Audio Encoding", L"Bitrate", maxaudioaac); });
            changes << FormattedString(Str("Settings.Publish.Optimize.MaxAudiobitrate"), maxaudioaac);
        }
    }

    if (r->HasItem(L"max audio bitrate mp3") && (!scmp(currentAudioCodec, L"MP3")))
    {
        int maxaudiomp3 = r->GetInt(L"max audio bitrate mp3");
        if (audioBitrate > maxaudiomp3)
        {
            optimizers.push_back([maxaudiomp3] { AppConfig->SetInt(L"Audio Encoding", L"Bitrate", maxaudiomp3); });
            changes << FormattedString(Str("Settings.Publish.Optimize.MaxAudiobitrate"), maxaudiomp3);
        }
    }

    if (r->HasItem(L"profile"))
    {
        String currentx264Profile = AppConfig->GetString(L"Video Encoding", L"X264Profile", L"high");
        String expectedProfile = r->GetString(L"profile");
        if (!expectedProfile.CompareI(currentx264Profile))
        {
            optimizers.push_back([expectedProfile] { AppConfig->SetString(L"Video Encoding", L"X264Profile", expectedProfile); });
            changes << FormattedString(Str("Settings.Publish.Optimize.RecommendMainProfile"), expectedProfile.Array());
        }
    }

    if (r->HasItem(L"keyint"))
    {
        int keyframeInt = AppConfig->GetInt(L"Video Encoding", L"KeyframeInterval", 0);
        int keyint = r->GetInt(L"keyint");
        if (!keyframeInt || keyframeInt * 1000 > keyint)
        {
            optimizers.push_back([keyint] { AppConfig->SetInt(L"Video Encoding", L"KeyframeInterval", keyint / 1000); });
            changes << FormattedString(Str("Settings.Publish.Optimize.Keyint"), keyint / 1000);
        }
    }

    if (OBSMessageBox(hwnd, changes.Array(), Str("Optimize"), MB_OKCANCEL | MB_ICONINFORMATION) != IDOK)
        return;

    for (optimizers_t::const_reference i : optimizers)
        i();
}
Example #23
void SettingsPublish::SetWarningInfo()
{
    int serviceID = (int)SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETITEMDATA, SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETCURSEL, 0, 0), 0);

    bool bUseCBR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCBR"), 1) != 0;
    int maxBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int keyframeInt = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("KeyframeInterval"), 0);
    int audioBitRate = AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96);
    String currentx264Profile = AppConfig->GetString(TEXT("Video Encoding"), TEXT("X264Profile"), L"high");
    String currentAudioCodec = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC"));
    float currentAspect = AppConfig->GetInt(L"Video", L"BaseWidth") / (float)max(1, AppConfig->GetInt(L"Video", L"BaseHeight"));

    //ignore for non-livestreams
    if (data.mode != 0)
    {
        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));
        return;
    }

    bool hasErrors = false;
    bool canOptimize = false;
    String strWarnings;

    XConfig serverData;
    if(serverData.Open(TEXT("services.xconfig")))
    {
        XElement *services = serverData.GetElement(TEXT("services"));
        if(services)
        {
            UINT numServices = services->NumElements();

            for(UINT i=0; i<numServices; i++)
            {
                XElement *service = services->GetElementByID(i);
                if (service->GetInt(TEXT("id")) == serviceID)
                {
                    strWarnings = FormattedString(Str("Settings.Publish.Warning.BadSettings"), service->GetName());

                    //check to see if the service we're using has recommendations
                    if (!service->HasItem(TEXT("recommended")))
                    {
                        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));
                        return;
                    }

                    XElement *r = service->GetElement(TEXT("recommended"));

                    if (r->HasItem(TEXT("ratecontrol")))
                    {
                        CTSTR rc = r->GetString(TEXT("ratecontrol"));
                        if (!scmp (rc, TEXT("cbr")) && !bUseCBR)
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << Str("Settings.Publish.Warning.UseCBR");
                        }
                    }

                    if (r->HasItem(TEXT("max bitrate")))
                    {
                        int max_bitrate = r->GetInt(TEXT("max bitrate"));
                        if (maxBitRate > max_bitrate)
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.Maxbitrate"), max_bitrate);
                        }
                    }

                    if (r->HasItem(L"supported audio codec"))
                    {
                        StringList codecs;
                        r->GetStringList(L"supported audio codec", codecs);
                        if (codecs.FindValueIndex(currentAudioCodec) == INVALID)
                        {
                            String msg = Str("Settings.Publish.Warning.UnsupportedAudioCodec"); //good thing OBS only supports MP3 (and AAC), otherwise I'd have to come up with a better translation solution
                            msg.FindReplace(L"$1", codecs[0].Array());
                            msg.FindReplace(L"$2", currentAudioCodec.Array());
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << msg;
                        }
                    }

                    if (r->HasItem(TEXT("max audio bitrate aac")) && (!scmp(currentAudioCodec, TEXT("AAC"))))
                    {
                        int maxaudioaac = r->GetInt(TEXT("max audio bitrate aac"));
                        if (audioBitRate > maxaudioaac)
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.MaxAudiobitrate"), maxaudioaac);
                        }
                    }

                    if (r->HasItem(TEXT("max audio bitrate mp3")) && (!scmp(currentAudioCodec, TEXT("MP3"))))
                    {
                        int maxaudiomp3 = r->GetInt(TEXT("max audio bitrate mp3"));
                        if (audioBitRate > maxaudiomp3)
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.MaxAudiobitrate"), maxaudiomp3);
                        }
                    }

                    if (r->HasItem(L"video aspect ratio"))
                    {
                        String aspectRatio = r->GetString(L"video aspect ratio");
                        StringList numbers;
                        aspectRatio.GetTokenList(numbers, ':');
                        if (numbers.Num() == 2)
                        {
                            float aspect = numbers[0].ToInt() / max(1.f, numbers[1].ToFloat());
                            if (!CloseFloat(aspect, currentAspect))
                            {
                                String aspectLocalized = Str("Settings.Video.AspectRatioFormat");
                                aspectLocalized.FindReplace(L"$1", UIntString(numbers[0].ToInt()));
                                aspectLocalized.FindReplace(L"$2", UIntString(numbers[1].ToInt()));

                                String msg = Str("Settings.Publish.Warning.VideoAspectRatio");
                                msg.FindReplace(L"$1", aspectLocalized);
                                strWarnings << msg;
                                hasErrors = true;
                            }
                        }
                    }

                    if (r->HasItem(TEXT("profile")))
                    {
                        String expectedProfile = r->GetString(TEXT("profile"));

                        if (!expectedProfile.CompareI(currentx264Profile))
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << Str("Settings.Publish.Warning.RecommendMainProfile");
                        }
                    }

                    if (r->HasItem(TEXT("keyint")))
                    {
                        int keyint = r->GetInt(TEXT("keyint"));
                        if (!keyframeInt || keyframeInt * 1000 > keyint)
                        {
                            hasErrors = true;
                            canOptimize = true;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.Keyint"), keyint / 1000);
                        }
                    }

                    break;
                }
            }
        }
    }

    if (hasErrors)
    {
        if (canOptimize)
            strWarnings << Str("Settings.Publish.Warning.CanOptimize");
        SetDlgItemText(hwnd, IDC_WARNINGS, strWarnings.Array());
    }
    else
        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));
    SetCanOptimizeSettings(canOptimize);
}
Example #24
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bCheckForceAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));
    strAudioGUID = data->GetString(TEXT("audioDeviceCLSID"));

    if(strAudioGUID.Compare(TEXT("CLSID_AudioInputDeviceCategory"))) matchGUID = CLSID_AudioInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_VideoInputDeviceCategory"))) matchGUID = CLSID_VideoInputDeviceCategory;
    if(strAudioGUID.Compare(TEXT("CLSID_AudioRendererCategory"))) {
        //Log(TEXT("Dese are spekers.\n"));
        matchGUID = CLSID_AudioRendererCategory;
    }

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    //------------------------------------------------
    // chroma key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strAudioDevice.IsValid())
    {
        audioDeviceFilter = GetDeviceByValue(matchGUID, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
        if(!audioDeviceFilter) {
            AppWarning(TEXT("DShowAudioPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
        }
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType"));

    if(soundOutputType != 0)
    {
        if(matchGUID == CLSID_AudioRendererCategory) {
            err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
        }
        else {
            err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
        }
        if(FAILED(err))
        {
            Log(TEXT("DShowAudioPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    //GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data


    //------------------------------------------------
    // log audio info
    {
    String strTest;

        strTest = FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
                //Log(TEXT("Fixed size samples: %s\r\n"), (audioMediaType->bFixedSizeSamples) ? "Yes" : "No");
                //Log(TEXT("Temporal Compression: %s\r\n"), (audioMediaType->bTemporalCompression) ? "Yes" : "No");
                //Log(TEXT("cbFormat: %.16X\r\n"), audioMediaType->cbFormat);
                //Log(TEXT("Sample size: %u\r\n"), audioMediaType->lSampleSize);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowAudioPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowAudioPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);
            goto cleanFinish;
        }

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    bool bConnected;

    if(soundOutputType != 0)
    {
        bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        if(!bConnected)
        {
            AppWarning(TEXT("DShowAudioPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowAudioPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);
        if(bAddedDevice) {
            graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
Example #25
LONG CALLBACK OBSExceptionHandler (PEXCEPTION_POINTERS exceptionInfo)
{
    HANDLE  hProcess;

    HMODULE hDbgHelp;

    MINIDUMP_EXCEPTION_INFORMATION miniInfo;

    STACKFRAME64        frame = {0};
    CONTEXT             context = *exceptionInfo->ContextRecord;
    SYMBOL_INFO         *symInfo;
    DWORD64             fnOffset;
    TCHAR               logPath[MAX_PATH];

    OSVERSIONINFOEX     osInfo;
    SYSTEMTIME          timeInfo;

    ENUMERATELOADEDMODULES64    fnEnumerateLoadedModules64;
    SYMSETOPTIONS               fnSymSetOptions;
    SYMINITIALIZE               fnSymInitialize;
    STACKWALK64                 fnStackWalk64;
    SYMFUNCTIONTABLEACCESS64    fnSymFunctionTableAccess64;
    SYMGETMODULEBASE64          fnSymGetModuleBase64;
    SYMFROMADDR                 fnSymFromAddr;
    SYMCLEANUP                  fnSymCleanup;
    MINIDUMPWRITEDUMP           fnMiniDumpWriteDump;
    SYMGETMODULEINFO64          fnSymGetModuleInfo64;

    DWORD                       i;
    DWORD64                     InstructionPtr;
    DWORD                       imageType;

    TCHAR                       searchPath[MAX_PATH], *p;

    static BOOL                 inExceptionHandler = FALSE;

    moduleinfo_t                moduleInfo;

    //always break into a debugger if one is present
    if (IsDebuggerPresent ())
        return EXCEPTION_CONTINUE_SEARCH;

    //exception codes < 0x80000000 are typically informative only and not crash worthy
    //0xe06d7363 indicates a c++ exception was thrown, let's just hope it was caught.
    //this is no longer needed since we're an unhandled handler vs a vectored handler
    
    /*if (exceptionInfo->ExceptionRecord->ExceptionCode < 0x80000000 || exceptionInfo->ExceptionRecord->ExceptionCode == 0xe06d7363 ||
        exceptionInfo->ExceptionRecord->ExceptionCode == 0x800706b5)
        return EXCEPTION_CONTINUE_SEARCH;*/

    //uh oh, we're crashing inside ourselves... this is really bad!
    if (inExceptionHandler)
        return EXCEPTION_CONTINUE_SEARCH;

    inExceptionHandler = TRUE;

    //load dbghelp dynamically
    hDbgHelp = LoadLibrary (TEXT("DBGHELP"));

    if (!hDbgHelp)
        return EXCEPTION_CONTINUE_SEARCH;

    fnEnumerateLoadedModules64 = (ENUMERATELOADEDMODULES64)GetProcAddress (hDbgHelp, "EnumerateLoadedModulesW64");
    fnSymSetOptions = (SYMSETOPTIONS)GetProcAddress (hDbgHelp, "SymSetOptions");
    fnSymInitialize = (SYMINITIALIZE)GetProcAddress (hDbgHelp, "SymInitialize");
    fnSymFunctionTableAccess64 = (SYMFUNCTIONTABLEACCESS64)GetProcAddress (hDbgHelp, "SymFunctionTableAccess64");
    fnSymGetModuleBase64 = (SYMGETMODULEBASE64)GetProcAddress (hDbgHelp, "SymGetModuleBase64");
    fnStackWalk64 = (STACKWALK64)GetProcAddress (hDbgHelp, "StackWalk64");
    fnSymFromAddr = (SYMFROMADDR)GetProcAddress (hDbgHelp, "SymFromAddrW");
    fnSymCleanup = (SYMCLEANUP)GetProcAddress (hDbgHelp, "SymCleanup");
    fnSymGetModuleInfo64 = (SYMGETMODULEINFO64)GetProcAddress (hDbgHelp, "SymGetModuleInfo64");
    fnMiniDumpWriteDump = (MINIDUMPWRITEDUMP)GetProcAddress (hDbgHelp, "MiniDumpWriteDump");

    if (!fnEnumerateLoadedModules64 || !fnSymSetOptions || !fnSymInitialize || !fnSymFunctionTableAccess64 ||
        !fnSymGetModuleBase64 || !fnStackWalk64 || !fnSymFromAddr || !fnSymCleanup || !fnSymGetModuleInfo64)
    {
        FreeLibrary (hDbgHelp);
        return EXCEPTION_CONTINUE_SEARCH;
    }

    hProcess = GetCurrentProcess();

    fnSymSetOptions (SYMOPT_UNDNAME | SYMOPT_FAIL_CRITICAL_ERRORS | SYMOPT_LOAD_ANYTHING);

    GetModuleFileName (NULL, searchPath, _countof(searchPath)-1);
    p = srchr (searchPath, '\\');
    if (p)
        *p = 0;

    //create a log file
    GetSystemTime (&timeInfo);
    for (i = 1;;)
    {
        tsprintf_s (logPath, _countof(logPath)-1, TEXT("%s\\crashDumps\\OBSCrashLog%.4d-%.2d-%.2d_%d.txt"), lpAppDataPath, timeInfo.wYear, timeInfo.wMonth, timeInfo.wDay, i);
        if (GetFileAttributes(logPath) == INVALID_FILE_ATTRIBUTES)
            break;
        i++;
    }

    XFile   crashDumpLog;

    if (!crashDumpLog.Open(logPath, XFILE_WRITE, XFILE_CREATENEW))
    {
        FreeLibrary (hDbgHelp);
        return EXCEPTION_CONTINUE_SEARCH;
    }

    //initialize debug symbols
    fnSymInitialize (hProcess, NULL, TRUE);

#ifdef _WIN64
    InstructionPtr = context.Rip;
    frame.AddrPC.Offset = InstructionPtr;
    frame.AddrFrame.Offset = context.Rbp;
    frame.AddrStack.Offset = context.Rsp;
    imageType = IMAGE_FILE_MACHINE_AMD64;
#else
    InstructionPtr = context.Eip;
    frame.AddrPC.Offset = InstructionPtr;
    frame.AddrFrame.Offset = context.Ebp;
    frame.AddrStack.Offset = context.Esp;
    imageType = IMAGE_FILE_MACHINE_I386;
#endif

    frame.AddrFrame.Mode = AddrModeFlat;
    frame.AddrPC.Mode = AddrModeFlat;
    frame.AddrStack.Mode = AddrModeFlat;

    symInfo = (SYMBOL_INFO *)LocalAlloc (LPTR, sizeof(*symInfo) + 256);
    symInfo->SizeOfStruct = sizeof(SYMBOL_INFO);
    symInfo->MaxNameLen = 256;
    fnOffset = 0;

    //get os info
    memset (&osInfo, 0, sizeof(osInfo));
    osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);

    if (!GetVersionEx ((OSVERSIONINFO *)&osInfo))
    {
        osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
        GetVersionEx ((OSVERSIONINFO *)&osInfo);
    }

    String cpuInfo;
    HKEY key;

    // get cpu info
    if(RegOpenKey(HKEY_LOCAL_MACHINE, TEXT("HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0"), &key) == ERROR_SUCCESS)
    {
        DWORD dwSize = 1024;
        cpuInfo.SetLength(dwSize);
        if (RegQueryValueEx(key, TEXT("ProcessorNameString"), NULL, NULL, (LPBYTE)cpuInfo.Array(), &dwSize) != ERROR_SUCCESS)
            cpuInfo = TEXT("<unable to query>");
        RegCloseKey(key);
    }
    else
        cpuInfo = TEXT("<unable to query>");

    //determine which module the crash occurred in
    scpy (moduleInfo.moduleName, TEXT("<unknown>"));
    moduleInfo.faultAddress = InstructionPtr;
    fnEnumerateLoadedModules64 (hProcess, (PENUMLOADED_MODULES_CALLBACK64)EnumerateLoadedModulesProcInfo, (VOID *)&moduleInfo);
    slwr (moduleInfo.moduleName);

    BOOL isPlugin = FALSE;

    if (sstr (moduleInfo.moduleName, TEXT("plugins\\")))
        isPlugin = TRUE;

    String strModuleInfo;
    String crashMessage;

    fnEnumerateLoadedModules64(hProcess, (PENUMLOADED_MODULES_CALLBACK64)RecordAllLoadedModules, (VOID *)&strModuleInfo);

    crashMessage << 
        TEXT("OBS has encountered an unhandled exception and has terminated. If you are able to\r\n")
        TEXT("reproduce this crash, please submit this crash report on the forums at\r\n")
        TEXT("http://www.obsproject.com/ - include the contents of this crash log and the\r\n")
        TEXT("minidump .dmp file (if available) as well as your regular OBS log files and\r\n")
        TEXT("a description of what you were doing at the time of the crash.\r\n")
        TEXT("\r\n")
        TEXT("This crash appears to have occured in the '") << moduleInfo.moduleName << TEXT("' module.\r\n\r\n");

    crashDumpLog.WriteStr(crashMessage.Array());

    crashDumpLog.WriteStr(FormattedString(TEXT("**** UNHANDLED EXCEPTION: %x\r\nFault address: %I64p (%s)\r\n"), exceptionInfo->ExceptionRecord->ExceptionCode, InstructionPtr, moduleInfo.moduleName));

    crashDumpLog.WriteStr(TEXT("OBS version: ") OBS_VERSION_STRING TEXT("\r\n"));
    crashDumpLog.WriteStr(FormattedString(TEXT("Windows version: %d.%d (Build %d) %s\r\nCPU: %s\r\n\r\n"), osInfo.dwMajorVersion, osInfo.dwMinorVersion, osInfo.dwBuildNumber, osInfo.szCSDVersion, cpuInfo.Array()));

    crashDumpLog.WriteStr(TEXT("Crashing thread stack trace:\r\n"));
#ifdef _WIN64
    crashDumpLog.WriteStr(TEXT("Stack            EIP              Arg0             Arg1             Arg2             Arg3             Address\r\n"));
#else
    crashDumpLog.WriteStr(TEXT("Stack    EIP      Arg0     Arg1     Arg2     Arg3     Address\r\n"));
#endif
    crashDumpLog.FlushFileBuffers();

    while (fnStackWalk64 (imageType, hProcess, GetCurrentThread(), &frame, &context, NULL, (PFUNCTION_TABLE_ACCESS_ROUTINE64)fnSymFunctionTableAccess64, (PGET_MODULE_BASE_ROUTINE64)fnSymGetModuleBase64, NULL))
    {
        scpy (moduleInfo.moduleName, TEXT("<unknown>"));
        moduleInfo.faultAddress = frame.AddrPC.Offset;
        fnEnumerateLoadedModules64 (hProcess, (PENUMLOADED_MODULES_CALLBACK64)EnumerateLoadedModulesProcInfo, (VOID *)&moduleInfo);
        slwr (moduleInfo.moduleName);

        p = srchr (moduleInfo.moduleName, '\\');
        if (p)
            p++;
        else
            p = moduleInfo.moduleName;

#ifdef _WIN64
        if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
        {
            crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!%s+0x%I64x\r\n"),
                frame.AddrStack.Offset,
                frame.AddrPC.Offset,
                frame.Params[0],
                frame.Params[1],
                frame.Params[2],
                frame.Params[3],
                p,
                symInfo->Name,
                fnOffset));
        }
        else
        {
            crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!0x%I64x\r\n"),
                frame.AddrStack.Offset,
                frame.AddrPC.Offset,
                frame.Params[0],
                frame.Params[1],
                frame.Params[2],
                frame.Params[3],
                p,
                frame.AddrPC.Offset));
        }
#else
        if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
        {
            crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!%s+0x%I64x\r\n"),
                frame.AddrStack.Offset,
                frame.AddrPC.Offset,
                (DWORD)frame.Params[0],
                (DWORD)frame.Params[1],
                (DWORD)frame.Params[2],
                (DWORD)frame.Params[3],
                p,
                symInfo->Name,
                fnOffset));
        }
        else
        {
            crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!0x%I64x\r\n"),
                frame.AddrStack.Offset,
                frame.AddrPC.Offset,
                (DWORD)frame.Params[0],
                (DWORD)frame.Params[1],
                (DWORD)frame.Params[2],
                (DWORD)frame.Params[3],
                p,
                frame.AddrPC.Offset
                ));
        }
#endif

        crashDumpLog.FlushFileBuffers();
    }

    //if we manually crashed due to a deadlocked thread, record some extra info
    if (exceptionInfo->ExceptionRecord->ExceptionCode == EXCEPTION_BREAKPOINT)
    {
        HANDLE hVideoThread = NULL, hEncodeThread = NULL;
        App->GetThreadHandles (&hVideoThread, &hEncodeThread);

        if (hVideoThread)
        {
            crashDumpLog.WriteStr(TEXT("\r\nVideo thread stack trace:\r\n"));
#ifdef _WIN64
            crashDumpLog.WriteStr(TEXT("Stack            EIP              Arg0             Arg1             Arg2             Arg3             Address\r\n"));
#else
            crashDumpLog.WriteStr(TEXT("Stack    EIP      Arg0     Arg1     Arg2     Arg3     Address\r\n"));
#endif
            crashDumpLog.FlushFileBuffers();

            context.ContextFlags = CONTEXT_ALL;
            GetThreadContext (hVideoThread, &context);
            ZeroMemory (&frame, sizeof(frame));
#ifdef _WIN64
            InstructionPtr = context.Rip;
            frame.AddrPC.Offset = InstructionPtr;
            frame.AddrFrame.Offset = context.Rbp;
            frame.AddrStack.Offset = context.Rsp;
            imageType = IMAGE_FILE_MACHINE_AMD64;
#else
            InstructionPtr = context.Eip;
            frame.AddrPC.Offset = InstructionPtr;
            frame.AddrFrame.Offset = context.Ebp;
            frame.AddrStack.Offset = context.Esp;
            imageType = IMAGE_FILE_MACHINE_I386;
#endif

            frame.AddrFrame.Mode = AddrModeFlat;
            frame.AddrPC.Mode = AddrModeFlat;
            frame.AddrStack.Mode = AddrModeFlat;
            while (fnStackWalk64 (imageType, hProcess, hVideoThread, &frame, &context, NULL, (PFUNCTION_TABLE_ACCESS_ROUTINE64)fnSymFunctionTableAccess64, (PGET_MODULE_BASE_ROUTINE64)fnSymGetModuleBase64, NULL))
            {
                scpy (moduleInfo.moduleName, TEXT("<unknown>"));
                moduleInfo.faultAddress = frame.AddrPC.Offset;
                fnEnumerateLoadedModules64 (hProcess, (PENUMLOADED_MODULES_CALLBACK64)EnumerateLoadedModulesProcInfo, (VOID *)&moduleInfo);
                slwr (moduleInfo.moduleName);

                p = srchr (moduleInfo.moduleName, '\\');
                if (p)
                    p++;
                else
                    p = moduleInfo.moduleName;

#ifdef _WIN64
                if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!%s+0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        frame.Params[0],
                        frame.Params[1],
                        frame.Params[2],
                        frame.Params[3],
                        p,
                        symInfo->Name,
                        fnOffset));
                }
                else
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        frame.Params[0],
                        frame.Params[1],
                        frame.Params[2],
                        frame.Params[3],
                        p,
                        frame.AddrPC.Offset));
                }
#else
                if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!%s+0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        (DWORD)frame.Params[0],
                        (DWORD)frame.Params[1],
                        (DWORD)frame.Params[2],
                        (DWORD)frame.Params[3],
                        p,
                        symInfo->Name,
                        fnOffset));
                }
                else
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        (DWORD)frame.Params[0],
                        (DWORD)frame.Params[1],
                        (DWORD)frame.Params[2],
                        (DWORD)frame.Params[3],
                        p,
                        frame.AddrPC.Offset
                        ));
                }
#endif

                crashDumpLog.FlushFileBuffers();
            }
        }

        if (hEncodeThread)
        {
            crashDumpLog.WriteStr(TEXT("\r\nEncode thread stack trace:\r\n"));
#ifdef _WIN64
            crashDumpLog.WriteStr(TEXT("Stack            EIP              Arg0             Arg1             Arg2             Arg3             Address\r\n"));
#else
            crashDumpLog.WriteStr(TEXT("Stack    EIP      Arg0     Arg1     Arg2     Arg3     Address\r\n"));
#endif
            crashDumpLog.FlushFileBuffers();

            context.ContextFlags = CONTEXT_ALL;
            GetThreadContext (hEncodeThread, &context);
            ZeroMemory (&frame, sizeof(frame));
#ifdef _WIN64
            InstructionPtr = context.Rip;
            frame.AddrPC.Offset = InstructionPtr;
            frame.AddrFrame.Offset = context.Rbp;
            frame.AddrStack.Offset = context.Rsp;
            imageType = IMAGE_FILE_MACHINE_AMD64;
#else
            InstructionPtr = context.Eip;
            frame.AddrPC.Offset = InstructionPtr;
            frame.AddrFrame.Offset = context.Ebp;
            frame.AddrStack.Offset = context.Esp;
            imageType = IMAGE_FILE_MACHINE_I386;
#endif

            frame.AddrFrame.Mode = AddrModeFlat;
            frame.AddrPC.Mode = AddrModeFlat;
            frame.AddrStack.Mode = AddrModeFlat;
            while (fnStackWalk64 (imageType, hProcess, hEncodeThread, &frame, &context, NULL, (PFUNCTION_TABLE_ACCESS_ROUTINE64)fnSymFunctionTableAccess64, (PGET_MODULE_BASE_ROUTINE64)fnSymGetModuleBase64, NULL))
            {
                scpy (moduleInfo.moduleName, TEXT("<unknown>"));
                moduleInfo.faultAddress = frame.AddrPC.Offset;
                fnEnumerateLoadedModules64 (hProcess, (PENUMLOADED_MODULES_CALLBACK64)EnumerateLoadedModulesProcInfo, (VOID *)&moduleInfo);
                slwr (moduleInfo.moduleName);

                p = srchr (moduleInfo.moduleName, '\\');
                if (p)
                    p++;
                else
                    p = moduleInfo.moduleName;

#ifdef _WIN64
                if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!%s+0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        frame.Params[0],
                        frame.Params[1],
                        frame.Params[2],
                        frame.Params[3],
                        p,
                        symInfo->Name,
                        fnOffset));
                }
                else
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%016I64X %016I64X %016I64X %016I64X %016I64X %016I64X %s!0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        frame.Params[0],
                        frame.Params[1],
                        frame.Params[2],
                        frame.Params[3],
                        p,
                        frame.AddrPC.Offset));
                }
#else
                if (fnSymFromAddr (hProcess, frame.AddrPC.Offset, &fnOffset, symInfo) && !(symInfo->Flags & SYMFLAG_EXPORT))
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!%s+0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        (DWORD)frame.Params[0],
                        (DWORD)frame.Params[1],
                        (DWORD)frame.Params[2],
                        (DWORD)frame.Params[3],
                        p,
                        symInfo->Name,
                        fnOffset));
                }
                else
                {
                    crashDumpLog.WriteStr(FormattedString(TEXT("%08.8I64X %08.8I64X %08.8X %08.8X %08.8X %08.8X %s!0x%I64x\r\n"),
                        frame.AddrStack.Offset,
                        frame.AddrPC.Offset,
                        (DWORD)frame.Params[0],
                        (DWORD)frame.Params[1],
                        (DWORD)frame.Params[2],
                        (DWORD)frame.Params[3],
                        p,
                        frame.AddrPC.Offset
                        ));
                }
#endif

                crashDumpLog.FlushFileBuffers();
            }
        }

    }

    //generate a minidump if possible
    if (fnMiniDumpWriteDump)
    {
        TCHAR     dumpPath[MAX_PATH];
        HANDLE    hFile;

        tsprintf_s (dumpPath, _countof(dumpPath)-1, TEXT("%s\\crashDumps\\OBSCrashDump%.4d-%.2d-%.2d_%d.dmp"), lpAppDataPath, timeInfo.wYear, timeInfo.wMonth, timeInfo.wDay, i);

        hFile = CreateFile (dumpPath, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
        if (hFile != INVALID_HANDLE_VALUE)
        {
            MINIDUMP_TYPE dumpFlags = (MINIDUMP_TYPE)(MiniDumpWithIndirectlyReferencedMemory | MiniDumpWithUnloadedModules | MiniDumpWithProcessThreadData);

            miniInfo.ClientPointers = TRUE;
            miniInfo.ExceptionPointers = exceptionInfo;
            miniInfo.ThreadId = GetCurrentThreadId ();

            if (fnMiniDumpWriteDump (hProcess, GetCurrentProcessId(), hFile, dumpFlags, &miniInfo, NULL, NULL))
            {
                crashDumpLog.WriteStr(FormattedString(TEXT("\r\nA minidump was saved to %s.\r\nPlease include this file when posting a crash report.\r\n"), dumpPath));
            }
            else
            {
                CloseHandle (hFile);
                DeleteFile (dumpPath);
            }
        }
    }
    else
    {
        crashDumpLog.WriteStr(TEXT("\r\nA minidump could not be created. Please check dbghelp.dll is present.\r\n"));
    }

    crashDumpLog.WriteStr("\r\nList of loaded modules:\r\n");
#ifdef _WIN64
    crashDumpLog.WriteStr("Base Address                      Module\r\n");
#else
    crashDumpLog.WriteStr("Base Address      Module\r\n");
#endif
    crashDumpLog.WriteStr(strModuleInfo);

    crashDumpLog.Close();

    LocalFree (symInfo);

    fnSymCleanup (hProcess);

    if (OBSMessageBox(hwndMain, TEXT("Whoops! OBS has crashed. Would you like to view a crash report?"), NULL, MB_ICONERROR | MB_YESNO) == IDYES)
        ShellExecute(NULL, NULL, logPath, NULL, searchPath, SW_SHOWDEFAULT);

    FreeLibrary (hDbgHelp);

    //we really shouldn't be returning here; if we're at the bottom of the VEH chain this is a pretty legitimate crash,
    //and if we return we could end up invoking a second crash handler or other weird / annoying things
    //ExitProcess(exceptionInfo->ExceptionRecord->ExceptionCode);
    return EXCEPTION_CONTINUE_SEARCH;
}
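
The handler above has the LONG WINAPI (EXCEPTION_POINTERS*) filter signature and returns EXCEPTION_CONTINUE_SEARCH so the default handling can still terminate the process. Below is a minimal sketch of how such a filter is typically wired up, assuming registration through SetUnhandledExceptionFilter (the comment above mentions the VEH chain, so AddVectoredExceptionHandler is the other common option); the function names here are illustrative, not the project's actual entry points:

#include <windows.h>

static LONG WINAPI MyCrashFilter(EXCEPTION_POINTERS *exceptionInfo)
{
    //write a crash log / minidump here, as the example above does,
    //then let Windows run the default handling
    return EXCEPTION_CONTINUE_SEARCH;
}

int main()
{
    SetUnhandledExceptionFilter(MyCrashFilter);     //or AddVectoredExceptionHandler(1, ...)
    //...run the application...
    return 0;
}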
Ejemplo n.º 26
0
BOOL CodeTokenizer::GetNextToken(String &token, BOOL bPeek)
{
    TSTR lpStart = lpTemp;

    TSTR lpTokenStart = NULL;
    BOOL bAlphaNumeric = FALSE;

    while(*lpTemp)
    {
        if(mcmp(lpTemp, TEXT("//"), 2*sizeof(TCHAR)))
        {
            lpTemp = schr(lpTemp, '\n');

            if(!lpTemp)
                return FALSE;
        }
        else if(mcmp(lpTemp, TEXT("/*"), 2*sizeof(TCHAR)))
        {
            lpTemp = sstr(lpTemp+2, TEXT("*/"));

            if(!lpTemp)
                return FALSE;

            lpTemp += 2;
        }

        if((*lpTemp == '_') || iswalnum(*lpTemp))
        {
            if(lpTokenStart)
            {
                if(!bAlphaNumeric)
                    break;
            }
            else
            {
                lpTokenStart = lpTemp;
                bAlphaNumeric = TRUE;
            }
        }
        else
        {
            if(lpTokenStart)
            {
                if(bAlphaNumeric)
                    break;

                if(*lpTokenStart == '>' || *lpTokenStart == '<')
                {
                    if((*lpTemp != '=') && (*lpTemp != '>') && (*lpTemp != '<'))
                        break;
                }

                if( ((*lpTokenStart == '=') && (*lpTemp != '=')) ||
                    (*lpTokenStart == ';') ||
                    (*lpTemp == ' ')   ||
                    (*lpTemp == L'\u3000') || //ideographic (full-width) space
                    (*lpTemp == '\'')  ||
                    (*lpTemp == '"')   ||
                    (*lpTemp == ';')   ||
                    (*lpTemp == '(')   ||
                    (*lpTemp == ')')   ||
                    (*lpTemp == '[')   ||
                    (*lpTemp == ']')   ||
                    (*lpTemp == '{')   ||
                    (*lpTemp == '}')   ||
                    (*lpTemp == '\r')  ||
                    (*lpTemp == '\t')  ||
                    (*lpTemp == '\n')  )
                {
                    break;
                }
            }
            else
            {
                if(*lpTemp == '"')
                {
                    lpTokenStart = lpTemp;

                    BOOL bFoundEnd = TRUE;
                    while(*++lpTemp != '"')
                    {
                        if(!*lpTemp)
                        {
                            bFoundEnd = FALSE;
                            break;
                        }
                    }

                    if(!bFoundEnd)
                        return FALSE;

                    ++lpTemp;
                    break;
                }
                if(*lpTemp == ';')
                {
                    lpTokenStart = lpTemp;
                    ++lpTemp;
                    break;
                }

                if(*lpTemp == '\'')
                {
                    lpTokenStart = lpTemp;

                    BOOL bFoundEnd = TRUE;
                    while(*++lpTemp != '\'')
                    {
                        if(!*lpTemp)
                        {
                            bFoundEnd = FALSE;
                            break;
                        }
                    }

                    if(!bFoundEnd)
                        return FALSE;

                    ++lpTemp;
                    break;
                }
                else if((*lpTemp == '(') ||
                        (*lpTemp == ')') ||
                        (*lpTemp == '[') ||
                        (*lpTemp == ']') ||
                        (*lpTemp == '{') ||
                        (*lpTemp == '}'))
                {
                    lpTokenStart = lpTemp++;
                    break;
                }

                if( (*lpTemp != ' ')   &&
                    (*lpTemp != L'\u3000') && //ideographic (full-width) space
                    (*lpTemp != '\r')  &&
                    (*lpTemp != '\t')  &&
                    (*lpTemp != '\n')  )
                {
                    lpTokenStart = lpTemp;
                    bAlphaNumeric = FALSE;
                }
            }
        }

        ++lpTemp;
    }

    if(!lpTokenStart)
        return FALSE;

    TCHAR oldCH = *lpTemp;
    *lpTemp = 0;

    token = lpTokenStart;

    *lpTemp = oldCH;

    if(bAlphaNumeric && iswdigit(*lpTokenStart)) //handle floating points
    {
        if( (token.Length() > 2) && 
            (lpTokenStart[0] == '0') &&
            (lpTokenStart[1] == 'x')) //convert hex
        {
            unsigned int val = tstring_base_to_uint(lpTokenStart, NULL, 0);
            token = FormattedString(TEXT("%d"), val);
        }
        else
        {
            String nextToken;

            TSTR lpPos = lpTemp;
            if(!GetNextToken(nextToken)) return FALSE;
            if(nextToken[0] == '.')
            {
                lpPos = lpTemp;

                token << nextToken;
                if(!GetNextToken(nextToken)) return FALSE;
                if(iswdigit(nextToken[0]) || nextToken == TEXT("f"))
                    token << nextToken;
                else
                    lpTemp = lpPos;
            }
            else
                lpTemp = lpPos;

            if(token[token.Length()-1] == 'e')
            {
                if(*lpTemp == '-')
                {
                    TSTR lpPos = lpTemp++;

                    if(!GetNextToken(nextToken)) return FALSE;
                    if(!iswdigit(nextToken[0]))
                        lpTemp = lpPos;
                    else
                        token << TEXT("-") << nextToken;
                }
            }

            lpPos = lpTemp;
            if(!GetNextToken(nextToken)) return FALSE;
            if(nextToken[0] == '.')
            {
                lpPos = lpTemp;

                token << nextToken;
                if(!GetNextToken(nextToken)) return FALSE;
                if(iswdigit(nextToken[0]) || nextToken == TEXT("f"))
                    token << nextToken;
                else
                    lpTemp = lpPos;
            }
            else
                lpTemp = lpPos;
        }
    }

    if(bPeek)
        lpTemp = lpStart;

    return TRUE;
}
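
GetNextToken consumes one token per call and, when bPeek is TRUE, restores lpTemp so the same token comes back on the next call. A hypothetical usage sketch (the constructor and Log call are assumptions; the class may be initialized differently):

CodeTokenizer tokenizer(TEXT("float4 c = tex * 2.0f;"));   //hypothetical constructor

String token;
while (tokenizer.GetNextToken(token))                       //consume tokens until the end
    Log(TEXT("token: %s"), token.Array());

String lookahead;
if (tokenizer.GetNextToken(lookahead, TRUE))                //bPeek = TRUE, read position unchanged
    Log(TEXT("next token: %s"), lookahead.Array());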
Ejemplo n.º 27
0
void SettingsPublish::SetWarningInfo()
{
    int serviceID = (int)SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETITEMDATA, SendMessage(GetDlgItem(hwnd, IDC_SERVICE), CB_GETCURSEL, 0, 0), 0);

    bool bUseCBR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCBR"), 1) != 0;
    int maxBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int keyframeInt = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("KeyframeInterval"), 0);
    int audioBitRate = AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96);
    String currentAudioCodec = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC"));

    //ignore for non-livestreams
    if (data->mode != 0)
    {
        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));
        return;
    }

    int errors = 0;
    String strWarnings;

    XConfig serverData;
    if(serverData.Open(TEXT("services.xconfig")))
    {
        XElement *services = serverData.GetElement(TEXT("services"));
        if(services)
        {
            UINT numServices = services->NumElements();

            for(UINT i=0; i<numServices; i++)
            {
                XElement *service = services->GetElementByID(i);
                if (service->GetInt(TEXT("id")) == serviceID)
                {
                    strWarnings = FormattedString(Str("Settings.Publish.Warning.BadSettings"), service->GetName());

                    //check to see if the service we're using has recommendations
                    if (!service->HasItem(TEXT("recommended")))
                    {
                        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));
                        return;
                    }

                    XElement *r = service->GetElement(TEXT("recommended"));

                    if (r->HasItem(TEXT("ratecontrol")))
                    {
                        CTSTR rc = r->GetString(TEXT("ratecontrol"));
                        if (!scmp (rc, TEXT("cbr")) && !bUseCBR)
                        {
                            errors++;
                            strWarnings << Str("Settings.Publish.Warning.UseCBR");
                        }
                    }

                    if (r->HasItem(TEXT("max bitrate")))
                    {
                        int max_bitrate = r->GetInt(TEXT("max bitrate"));
                        if (maxBitRate > max_bitrate)
                        {
                            errors++;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.Maxbitrate"), max_bitrate);
                        }
                    }

                    if (r->HasItem(TEXT("max audio bitrate aac")) && (!scmp(currentAudioCodec, TEXT("AAC"))))
                    {
                        int maxaudioaac = r->GetInt(TEXT("max audio bitrate aac"));
                        if (audioBitRate > maxaudioaac)
                        {
                            errors++;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.MaxAudiobitrate"), maxaudioaac);
                        }
                    }
                    
                    if (r->HasItem(TEXT("max audio bitrate mp3")) && (!scmp(currentAudioCodec, TEXT("MP3"))))
                    {
                        int maxaudiomp3 = r->GetInt(TEXT("max audio bitrate mp3"));
                        if (audioBitRate > maxaudiomp3)
                        {
                            errors++;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.MaxAudiobitrate"), maxaudiomp3);
                        }
                    }

                    if (r->HasItem(TEXT("keyint")))
                    {
                        int keyint = r->GetInt(TEXT("keyint"));
                        if (!keyframeInt || keyframeInt * 1000 > keyint)
                        {
                            errors++;
                            strWarnings << FormattedString(Str("Settings.Publish.Warning.Keyint"), keyint / 1000);
                        }
                    }

                    break;
                }
            }
        }
    }

    if (errors)
        SetDlgItemText(hwnd, IDC_WARNINGS, strWarnings.Array());
    else
        SetDlgItemText(hwnd, IDC_WARNINGS, TEXT(""));

}
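
One detail worth calling out: the keyframe check above mixes units. The user-facing KeyframeInterval appears to be stored in seconds, while the service's recommended keyint is in milliseconds, which is why one side is multiplied by 1000 and the warning divides by 1000. A standalone sketch of that comparison, with hypothetical values:

int keyframeIntervalSec = 4;        //AppConfig "KeyframeInterval", in seconds (0 = auto)
int recommendedKeyintMs = 2000;     //services.xconfig "keyint", in milliseconds

if (!keyframeIntervalSec || keyframeIntervalSec * 1000 > recommendedKeyintMs)
    Log(TEXT("Warning: set a keyframe interval of %d seconds or less"), recommendedKeyintMs / 1000);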
Ejemplo n.º 28
0
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hMatrix   = yuvScalePixelShader->GetParameterByName(TEXT("yuvMat"));
    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //std::unique_ptr<ProfilerNode> encodeThreadProfiler;

    //----------------------------------------
    // time/timestamp stuff

    bool bWasLaggedFrame = false;

    totalStreamTime = 0;
    lastAudioTimestamp = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
    double bpsTime = 0.0;

    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;
        convertInfo[i].numThreads = numThreads;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }
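
    //worked example (hypothetical numbers): outputCY = 720, numThreads = 3
    //  thread 0: startY = 0,   endY = ((720/3)*1) & 0xFFFFFFFE = 240
    //  thread 1: startY = 240, endY = ((720/3)*2) & 0xFFFFFFFE = 480
    //  thread 2: startY = 480, endY = outputCY               = 720
    //the & 0xFFFFFFFE keeps every split on an even row, so a 2x2 chroma block of the
    //4:2:0 output is never shared between two threads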

    bool bEncode;
    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirstEncode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD streamTimeStart  = GetQPCTimeNS();
    QWORD lastStreamTime   = 0;
    QWORD firstFrameTimeMS = streamTimeStart/1000000;
    QWORD frameLengthNS    = 1000000000/fps;

    while(WaitForSingleObject(hVideoEvent, INFINITE) == WAIT_OBJECT_0)
    {
        if (bShutdownVideoThread)
            break;

        QWORD renderStartTime = GetQPCTimeNS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/1000000);

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        QWORD renderStartTimeMS = renderStartTime/1000000;

        QWORD curStreamTime = latestVideoTimeNS;
        if (!lastStreamTime)
            lastStreamTime = curStreamTime-frameLengthNS;
        QWORD frameDelta = curStreamTime-lastStreamTime;
        //if (!lastStreamTime)
        //    lastStreamTime = renderStartTime-frameLengthNS;
        //QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.000000001;
        //lastStreamTime = renderStartTime;

        bool bUpdateBPS = false;

        profileIn("video thread frame");

        //Log(TEXT("Stream Time: %llu"), curStreamTime);
        //Log(TEXT("frameDelta: %lf"), fSeconds);

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!pushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if (bPleaseEnableProjector)
            ActuallyEnableProjector();
        else if(bPleaseDisableProjector)
            DisableProjector();

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = 0;
        
        if (network) {
            curBytesSent = network->GetCurrentSentBytes();
            curFramesDropped = network->NumDroppedFrames();
        } else if (numSecondsWaited) {
            //reset stats if the network disappears
            bytesPerSec = 0;
            bpsTime = 0;
            numSecondsWaited = 0;
            curBytesSent = 0;
            zero(lastBytesSent, sizeof(lastBytesSent));
        }

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }
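
        //once numSecondsWaited reaches 3, lastBytesSent[0] always holds the byte counter
        //from roughly three seconds ago, so bytesPerSec above is a ~3 second moving
        //average rather than a noisier instantaneous per-second delta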

        fpsCounter++;

        if(network) curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if (bProjector) {
            SetRenderTarget(projectorTexture);

            Vect2 renderFrameSize, renderFrameOffset;
            Vect2 projectorSize = Vect2(float(projectorWidth), float(projectorHeight));

            float projectorAspect = (projectorSize.x / projectorSize.y);
            float baseAspect = (baseSize.x / baseSize.y);

            if (projectorAspect < baseAspect) {
                float fProjectorWidth = float(projectorWidth);

                renderFrameSize   = Vect2(fProjectorWidth, fProjectorWidth / baseAspect);
                renderFrameOffset = Vect2(0.0f, (projectorSize.y-renderFrameSize.y) * 0.5f);
            } else {
                float fProjectorHeight = float(projectorHeight);

                renderFrameSize   = Vect2(fProjectorHeight * baseAspect, fProjectorHeight);
                renderFrameOffset = Vect2((projectorSize.x-renderFrameSize.x) * 0.5f, 0.0f);
            }

            DrawPreview(renderFrameSize, renderFrameOffset, projectorSize, curRenderTarget, Preview_Projector);

            SetRenderTarget(NULL);
        }

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);
            DrawPreview(renderFrameSize, renderFrameOffset, renderFrameCtrlSize, curRenderTarget,
                    bFullscreenMode ? Preview_Fullscreen : Preview_Standard);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        switch(colorDesc.matrix)
        {
        case ColorMatrix_GBR:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[0] : (float*)yuvMat[0]);
            break;
        case ColorMatrix_YCgCo:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[1] : (float*)yuvMat[1]);
            break;
        case ColorMatrix_BT2020NCL:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[2] : (float*)yuvMat[2]);
            break;
        case ColorMatrix_BT709:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[3] : (float*)yuvMat[3]);
            break;
        case ColorMatrix_SMPTE240M:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[4] : (float*)yuvMat[4]);
            break;
        default:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[5] : (float*)yuvMat[5]);
        }

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
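        //worked example (hypothetical numbers): baseCX = 1366, downscale = 1.5
        //  scaleCX  = UINT(1366 / 1.5)  = 910
        //  outputCX = 910 & 0xFFFFFFFC  = 908   (trimmed by 2 pixels for 128-bit alignment)
        //so a scale computed from outputSize could be off by those trimmed pixels,
        //while scaleSize matches the untrimmed scaled dimensions exactly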
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if (bProjector && !copyWait)
            projectorSwap->Present(0, 0);

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("GPU download and conversion");

        bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (renderStartTimeMS-firstFrameTimeMS > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTimestamp = lastStreamTime/1000000;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        lastStreamTime = curStreamTime;

        if(bEncode)
        {
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirstEncode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirstEncode)
                                bFirstEncode = bEncode = false;
                        }
                        else
                        {
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }

                    if(bEncode)
                    {
                        //encodeThreadProfiler.reset(::new ProfilerNode(TEXT("EncodeThread"), true));
                        //encodeThreadProfiler->MonitorThread(hEncodeThread);
                        curFramePic = &picOut;
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        message << TEXT(" This error can also occur if you have enabled opencl in x264 custom settings.");

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream && totalStreamTime > 15000)
            {
                if (curStrain > 25)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        App->RemoveStreamInfo(adjustmentStreamId);
                        adjustmentStreamId = 0;

                        bUpdateBPS = true;

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;
        profileOut;
        profileOut; //frame

        //------------------------------------
        // frame sync

        //QWORD renderStopTime = GetQPCTimeNS();

        if(bWasLaggedFrame = (frameDelta > frameLengthNS))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    DisableProjector();

    //encodeThreadProfiler.reset();

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirstEncode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of late frames: %d (%0.2f%%) (it's okay for some frames to be late)"), numTotalFrames, numLongFrames, (numTotalFrames > 0) ? (double(numLongFrames)/double(numTotalFrames))*100.0 : 0.0f);
}
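
The congestion-control branch in the loop backs the bitrate off in proportion to packet strain and recovers it in 5% steps of the configured maximum. A standalone sketch of that arithmetic, with the thresholds copied from the loop and every other value hypothetical:

int defaultBitRate = 2500;              //kbps, AppConfig "MaxBitrate"
int currentBitRate = defaultBitRate;
double curStrain   = 60.0;              //network->GetPacketStrain()

if (curStrain > 25 && currentBitRate > 100)
{
    //strain of 60 drops the bitrate by 15% (2500 -> 2125 kbps)
    currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
}
else if (currentBitRate < defaultBitRate && curStrain < 5)
{
    //recover in 5% steps of the default (here +125 kbps), capped at the default
    currentBitRate += (int)(defaultBitRate * 0.05);
    if (currentBitRate > defaultBitRate)
        currentBitRate = defaultBitRate;
}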
Ejemplo n.º 29
0
void OBS::Start()
{
    if(bRunning) return;

    OSEnterMutex (hStartupShutdownMutex);

    scenesConfig.Save();

    //-------------------------------------------------------------

    fps = AppConfig->GetInt(TEXT("Video"), TEXT("FPS"), 30);
    frameTime = 1000/fps;

    //-------------------------------------------------------------

    if(!bLoggedSystemStats)
    {
        LogSystemStats();
        bLoggedSystemStats = TRUE;
    }

    OSCheckForBuggyDLLs();

    //-------------------------------------------------------------
retryHookTest:
    bool alreadyWarnedAboutModules = false;
    if (OSIncompatibleModulesLoaded())
    {
        Log(TEXT("Incompatible modules (pre-D3D) detected."));
        int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
        if (ret == IDABORT)
        {
            OSLeaveMutex (hStartupShutdownMutex);
            return;
        }
        else if (ret == IDRETRY)
        {
            goto retryHookTest;
        }

        alreadyWarnedAboutModules = true;
    }

    String strPatchesError;
    if (OSIncompatiblePatchesLoaded(strPatchesError))
    {
        OSLeaveMutex (hStartupShutdownMutex);
        MessageBox(hwndMain, strPatchesError.Array(), NULL, MB_ICONERROR);
        Log(TEXT("Incompatible patches detected."));
        return;
    }

    //-------------------------------------------------------------

    String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
    if (!scmp(processPriority, TEXT("Idle")))
        SetPriorityClass(GetCurrentProcess(), IDLE_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("Above Normal")))
        SetPriorityClass(GetCurrentProcess(), ABOVE_NORMAL_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("High")))
        SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS);

    int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2);
    DWORD delayTime = (DWORD)AppConfig->GetInt(TEXT("Publish"), TEXT("Delay"));

    String strError;

    bFirstConnect = !bReconnecting;

    if(bTestStream)
        network = CreateNullNetwork();
    else
    {
        switch(networkMode)
        {
        case 0: network = (delayTime > 0) ? CreateDelayedPublisher(delayTime) : CreateRTMPPublisher(); break;
        case 1: network = CreateNullNetwork(); break;
        }
    }

    if(!network)
    {
        OSLeaveMutex (hStartupShutdownMutex);

        if(!bReconnecting)
            MessageBox(hwndMain, strError, NULL, MB_ICONERROR);
        else
            DialogBox(hinstMain, MAKEINTRESOURCE(IDD_RECONNECTING), hwndMain, OBS::ReconnectDialogProc);
        return;
    }

    bReconnecting = false;

    //-------------------------------------------------------------

    Log(TEXT("=====Stream Start: %s==============================================="), CurrentDateTimeString().Array());

    //-------------------------------------------------------------

    bEnableProjectorCursor = GlobalConfig->GetInt(L"General", L"EnableProjectorCursor", 1) != 0;
    bPleaseEnableProjector = bPleaseDisableProjector = false;

    int monitorID = AppConfig->GetInt(TEXT("Video"), TEXT("Monitor"));
    if(monitorID >= (int)monitors.Num())
        monitorID = 0;

    RECT &screenRect = monitors[monitorID].rect;
    int defCX = screenRect.right  - screenRect.left;
    int defCY = screenRect.bottom - screenRect.top;

    downscaleType = AppConfig->GetInt(TEXT("Video"), TEXT("Filter"), 0);
    downscale = AppConfig->GetFloat(TEXT("Video"), TEXT("Downscale"), 1.0f);
    baseCX = AppConfig->GetInt(TEXT("Video"), TEXT("BaseWidth"),  defCX);
    baseCY = AppConfig->GetInt(TEXT("Video"), TEXT("BaseHeight"), defCY);

    baseCX = MIN(MAX(baseCX, 128), 4096);
    baseCY = MIN(MAX(baseCY, 128), 4096);

    scaleCX = UINT(double(baseCX) / double(downscale));
    scaleCY = UINT(double(baseCY) / double(downscale));

    //align width to 128bit for fast SSE YUV4:2:0 conversion
    outputCX = scaleCX & 0xFFFFFFFC;
    outputCY = scaleCY & 0xFFFFFFFE;
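    //worked example of the masks above: a 1366x768 scaled size becomes 1364x768.
    //0xFFFFFFFC clears the two low bits of the width (a multiple of 4 BGRA pixels = 128 bits),
    //and 0xFFFFFFFE clears the low bit of the height so it stays even for 4:2:0 chroma.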

    bUseMultithreadedOptimizations = AppConfig->GetInt(TEXT("General"), TEXT("UseMultithreadedOptimizations"), TRUE) != 0;
    Log(TEXT("  Multithreaded optimizations: %s"), (CTSTR)(bUseMultithreadedOptimizations ? TEXT("On") : TEXT("Off")));

    encoderSkipThreshold = GlobalConfig->GetInt(TEXT("Video"), TEXT("EncoderSkipThreshold"), fps/4);

    //------------------------------------------------------------------

    Log(TEXT("  Base resolution: %ux%u"), baseCX, baseCY);
    Log(TEXT("  Output resolution: %ux%u"), outputCX, outputCY);
    Log(TEXT("------------------------------------------"));

    //------------------------------------------------------------------

    GS = new D3D10System;
    GS->Init();

    //Some overlay software (notably ASUS OSD) hooks the user-mode driver framework, so we have to
    //re-check for dangerous hooks after D3D has been initialized.
retryHookTestV2:
    if (!alreadyWarnedAboutModules)
    {
        if (OSIncompatibleModulesLoaded())
        {
            Log(TEXT("Incompatible modules (post-D3D) detected."));
            int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
            if (ret == IDABORT)
            {
                //FIXME: really need a better way to abort startup than this...
                delete network;
                delete GS;

                OSLeaveMutex (hStartupShutdownMutex);
                return;
            }
            else if (ret == IDRETRY)
            {
                goto retryHookTestV2;
            }
        }
    }

    //-------------------------------------------------------------

    mainVertexShader    = CreateVertexShaderFromFile(TEXT("shaders/DrawTexture.vShader"));
    mainPixelShader     = CreatePixelShaderFromFile(TEXT("shaders/DrawTexture.pShader"));

    solidVertexShader   = CreateVertexShaderFromFile(TEXT("shaders/DrawSolid.vShader"));
    solidPixelShader    = CreatePixelShaderFromFile(TEXT("shaders/DrawSolid.pShader"));

    if(!mainVertexShader || !mainPixelShader)
        CrashError(TEXT("Unable to load DrawTexture shaders"));

    if(!solidVertexShader || !solidPixelShader)
        CrashError(TEXT("Unable to load DrawSolid shaders"));

    //------------------------------------------------------------------

    CTSTR lpShader;
    if(CloseFloat(downscale, 1.0))
        lpShader = TEXT("shaders/DrawYUVTexture.pShader");
    else if(downscale < 2.01)
    {
        switch(downscaleType)
        {
            case 0: lpShader = TEXT("shaders/DownscaleBilinear1YUV.pShader"); break;
            case 1: lpShader = TEXT("shaders/DownscaleBicubicYUV.pShader"); break;
            case 2: lpShader = TEXT("shaders/DownscaleLanczos6tapYUV.pShader"); break;
            default: lpShader = TEXT("shaders/DownscaleBilinear1YUV.pShader"); break; //guard against an out-of-range filter setting
        }
    }
    else if(downscale < 3.01)
        lpShader = TEXT("shaders/DownscaleBilinear9YUV.pShader");
    else
        CrashError(TEXT("Invalid downscale value (must be either 1.0, 1.5, 2.0, 2.25, or 3.0)"));

    yuvScalePixelShader = CreatePixelShaderFromFile(lpShader);
    if (!yuvScalePixelShader)
        CrashError(TEXT("Unable to create shader from file %s"), lpShader);

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        mainRenderTextures[i] = CreateRenderTarget(baseCX, baseCY, GS_BGRA, FALSE);
        yuvRenderTextures[i]  = CreateRenderTarget(outputCX, outputCY, GS_BGRA, FALSE);
    }

    //-------------------------------------------------------------

    D3D10_TEXTURE2D_DESC td;
    zero(&td, sizeof(td));
    td.Width            = outputCX;
    td.Height           = outputCY;
    td.Format           = DXGI_FORMAT_B8G8R8A8_UNORM;
    td.MipLevels        = 1;
    td.ArraySize        = 1;
    td.SampleDesc.Count = 1;
    td.Usage            = D3D10_USAGE_STAGING;
    td.CPUAccessFlags   = D3D10_CPU_ACCESS_READ;
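    //STAGING usage with CPU read access keeps these textures off the pipeline; the render loop
    //copies each finished YUV frame into one and Map()s it on the CPU (the Unmap of copyTextures
    //in the shutdown path above is the other half of that pairing).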

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, &copyTextures[i]);
        if(FAILED(err))
        {
            CrashError(TEXT("Unable to create copy texture"));
            //todo - better error handling
        }
    }

    //------------------------------------------------------------------

    UINT format = AppConfig->GetInt(L"Audio Encoding", L"Format", 1);

    switch (format) {
    case 0: sampleRateHz = 44100; break;
    default:
    case 1: sampleRateHz = 48000; break;
    }

    Log(L"------------------------------------------");
    Log(L"Audio Format: %uhz", sampleRateHz);

    //------------------------------------------------------------------

    AudioDeviceList playbackDevices;
    GetAudioDevices(playbackDevices, ADT_PLAYBACK);

    String strPlaybackDevice = AppConfig->GetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
    if(strPlaybackDevice.IsEmpty() || !playbackDevices.HasID(strPlaybackDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
        strPlaybackDevice = TEXT("Default");
    }

    Log(TEXT("Playback device %s"), strPlaybackDevice.Array());
    playbackDevices.FreeData();

    desktopAudio = CreateAudioSource(false, strPlaybackDevice);

    if(!desktopAudio) {
        CrashError(TEXT("Cannot initialize desktop audio sound, more info in the log file."));
    }

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();

    String strDefaultMic;
    bool bHasDefault = GetDefaultMicID(strDefaultMic);

    if(strDevice.CompareI(TEXT("Disable")))
        EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    else
    {
        bool bUseDefault = strDevice.CompareI(TEXT("Default")) != 0;
        if(!bUseDefault || bHasDefault)
        {
            if(bUseDefault)
                strDevice = strDefaultMic;

            micAudio = CreateAudioSource(true, strDevice);

            if(!micAudio)
                MessageBox(hwndMain, Str("MicrophoneFailure"), NULL, 0);
            else
                micAudio->SetTimeOffset(AppConfig->GetInt(TEXT("Audio"), TEXT("MicTimeOffset"), 0));

            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), micAudio != NULL);
        }
        else
            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    }

    //-------------------------------------------------------------

    bool bDisableEncoding = false;

    if (bTestStream)
        bDisableEncoding = GlobalConfig->GetInt(TEXT("General"), TEXT("DisablePreviewEncoding"), false) != 0;

    //-------------------------------------------------------------

    UINT bitRate = (UINT)AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96);
    String strEncoder = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC"));

    if (bDisableEncoding)
        audioEncoder = CreateNullAudioEncoder();
    else
#ifdef USE_AAC
    if(strEncoder.CompareI(TEXT("AAC")))// && OSGetVersion() >= 7)
        audioEncoder = CreateAACEncoder(bitRate);
    else
#endif
        audioEncoder = CreateMP3Encoder(bitRate);
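    //Note: in builds without USE_AAC the preprocessor removes the AAC branch entirely, so the
    //"else" after the bDisableEncoding check binds straight to the MP3 encoder; the null encoder
    //still takes priority in both configurations.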

    //-------------------------------------------------------------

    desktopVol = AppConfig->GetFloat(TEXT("Audio"), TEXT("DesktopVolume"), 1.0f);
    micVol     = AppConfig->GetFloat(TEXT("Audio"), TEXT("MicVolume"),     1.0f);

    //-------------------------------------------------------------

    bRunning = true;

    if(sceneElement)
    {
        scene = CreateScene(sceneElement->GetString(TEXT("class")), sceneElement->GetElement(TEXT("data")));
        XElement *sources = sceneElement->GetElement(TEXT("sources"));
        if(sources)
        {
            UINT numSources = sources->NumElements();
            for(UINT i=0; i<numSources; i++)
            {
                SceneItem *item = scene->AddImageSource(sources->GetElementByID(i));
                if(item)
                {
                    if(ListView_GetItemState(GetDlgItem(hwndMain, ID_SOURCES), i, LVIS_SELECTED) > 0)
                        item->Select(true);
                }
            }
        }

        scene->BeginScene();
    }

    if(scene && scene->HasMissingSources())
        MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    //-------------------------------------------------------------

    int maxBitRate = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int bufferSize = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("BufferSize"), 1000);
    int quality    = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Quality"),    8);
    String preset  = AppConfig->GetString(TEXT("Video Encoding"), TEXT("Preset"),     TEXT("veryfast"));
    bUsing444      = false;//AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Use444"),     0) != 0;
    bUseCFR        = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCFR"), 1) != 0;

    //-------------------------------------------------------------

    bWriteToFile = networkMode == 1 || AppConfig->GetInt(TEXT("Publish"), TEXT("SaveToFile")) != 0;
    String strOutputFile = AppConfig->GetString(TEXT("Publish"), TEXT("SavePath"));

    strOutputFile.FindReplace(TEXT("\\"), TEXT("/"));

    if (bWriteToFile)
    {
        OSFindData ofd;
        HANDLE hFind = NULL;
        bool bUseDateTimeName = true;
        bool bOverwrite = GlobalConfig->GetInt(L"General", L"OverwriteRecordings", false) != 0;

        if(!bOverwrite && (hFind = OSFindFirstFile(strOutputFile, ofd)))
        {
            String strFileExtension = GetPathExtension(strOutputFile);
            String strFileWithoutExtension = GetPathWithoutExtension(strOutputFile);

            if(strFileExtension.IsValid() && !ofd.bDirectory)
            {
                String strNewFilePath;
                UINT curFile = 0;

                do 
                {
                    strNewFilePath.Clear() << strFileWithoutExtension << TEXT(" (") << FormattedString(TEXT("%02u"), ++curFile) << TEXT(").") << strFileExtension;
                } while(OSFileExists(strNewFilePath));

                strOutputFile = strNewFilePath;

                bUseDateTimeName = false;
            }

            if(ofd.bDirectory)
                strOutputFile.AppendChar('/');

            OSFindClose(hFind);
        }

        if(bUseDateTimeName)
        {
            String strFileName = GetPathFileName(strOutputFile);

            if(!strFileName.IsValid() || !IsSafeFilename(strFileName))
            {
                SYSTEMTIME st;
                GetLocalTime(&st);

                String strDirectory = GetPathDirectory(strOutputFile),
                       extension = GetPathExtension(strOutputFile);
                if(extension.IsEmpty())
                    extension = TEXT("mp4");
                strOutputFile = FormattedString(TEXT("%s/%u-%02u-%02u-%02u%02u-%02u.%s"), strDirectory.Array(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond, extension.Array());
            }
        }
    }
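    //Naming rules above, with illustrative paths: if "D:/rec/stream.mp4" already exists and
    //overwriting is off, the recording becomes "D:/rec/stream (01).mp4", then "(02)", and so on;
    //if the configured name is missing or unsafe, a timestamped name such as
    //"D:/rec/2014-05-02-2130-45.mp4" is generated, defaulting to .mp4 when no extension is set.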

    //-------------------------------------------------------------

    bufferingTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 700);
    Log(TEXT("Scene buffering time set to %u"), bufferingTime);

    //-------------------------------------------------------------

    bForceMicMono = AppConfig->GetInt(TEXT("Audio"), TEXT("ForceMicMono")) != 0;
    bRecievedFirstAudioFrame = false;

    //hRequestAudioEvent = CreateSemaphore(NULL, 0, 0x7FFFFFFFL, NULL);
    hSoundDataMutex = OSCreateMutex();
    hSoundThread = OSCreateThread((XTHREAD)OBS::MainAudioThread, NULL);

    //-------------------------------------------------------------

    StartBlankSoundPlayback(strPlaybackDevice);

    //-------------------------------------------------------------
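    //Color description handed to the encoder: limited range, BT.709 primaries, IEC 61966-2-1
    //(sRGB) transfer, with the matrix picked by output size below (HD-sized outputs use BT.709,
    //smaller SD outputs use SMPTE 170M / BT.601).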

    colorDesc.fullRange = false;
    colorDesc.primaries = ColorPrimaries_BT709;
    colorDesc.transfer  = ColorTransfer_IEC6196621;
    colorDesc.matrix    = outputCX >= 1280 || outputCY > 576 ? ColorMatrix_BT709 : ColorMatrix_SMPTE170M;

    videoEncoder = nullptr;
    if (bDisableEncoding)
        videoEncoder = CreateNullVideoEncoder();
    else if(AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0)
        videoEncoder = CreateQSVEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR);

    if(!videoEncoder)
        videoEncoder = CreateX264Encoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR);
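    //Encoder fallback chain: null encoder when preview encoding is disabled, otherwise QSV when
    //enabled; if CreateQSVEncoder returns NULL (or QSV is off) the x264 encoder is used instead.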


    //-------------------------------------------------------------

    // Ensure that the render frame is properly sized
    ResizeRenderFrame(true);

    //-------------------------------------------------------------

    if(!bTestStream && bWriteToFile && strOutputFile.IsValid())
    {
        String strFileExtension = GetPathExtension(strOutputFile);
        if(strFileExtension.CompareI(TEXT("flv")))
            fileStream = CreateFLVFileStream(strOutputFile);
        else if(strFileExtension.CompareI(TEXT("mp4")))
            fileStream = CreateMP4FileStream(strOutputFile);

        if(!fileStream)
        {
            Log(TEXT("Warning - OBSCapture::Start: Unable to create the file stream. Check the file path in Broadcast Settings."));
            MessageBox(hwndMain, Str("Capture.Start.FileStream.Warning"), Str("Capture.Start.FileStream.WarningCaption"), MB_OK | MB_ICONWARNING);        
        }
    }

    //-------------------------------------------------------------

    curFramePic = NULL;
    bShutdownVideoThread = false;
    bShutdownEncodeThread = false;
    //ResetEvent(hVideoThread);
    hEncodeThread = OSCreateThread((XTHREAD)OBS::EncodeThread, NULL);
    hVideoThread = OSCreateThread((XTHREAD)OBS::MainCaptureThread, NULL);

    if(bTestStream)
    {
        EnableWindow(GetDlgItem(hwndMain, ID_STARTSTOP), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_TESTSTREAM), Str("MainWindow.StopTest"));
    }
    else
    {
        EnableWindow(GetDlgItem(hwndMain, ID_TESTSTREAM), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_STARTSTOP), Str("MainWindow.StopStream"));
    }

    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), TRUE);

    //-------------------------------------------------------------

    ReportStartStreamTrigger();
    
    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 0, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS | ES_SYSTEM_REQUIRED | ES_AWAYMODE_REQUIRED | ES_DISPLAY_REQUIRED);

    UpdateRenderViewMessage();

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    OSLeaveMutex (hStartupShutdownMutex);
}
Ejemplo n.º 30
0
SystemException* CreateSystemResourceException()
{
  return new SystemException(FormattedString(ERRMSG_SYSTEM_RESOURCE_UNAVAILABLE));
}
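A hedged usage sketch only: SystemException's interface, the project's throw/catch conventions, and ERRMSG_SYSTEM_RESOURCE_UNAVAILABLE are defined elsewhere and not shown in this excerpt, so the call site below is an assumption for illustration.

//illustrative call site, not code from the project
HANDLE hEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
if (hEvent == NULL)
    throw CreateSystemResourceException(); //a system resource could not be acquired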