Example #1
static void Rule11Alloc(Rule *rule)
{
  static creal w[][nrules] = {
    { .0009903847688882167,  1.715006248224684,     1.936014978949526,
      .517082819560576,      2.05440450381852 },
    { .0084964717409851,     -.3755893815889209,    -.3673449403754268,
      .01445269144914044,     .013777599884901202 },
    { .00013587331735072814,  .1488632145140549,     .02929778657898176,
     -.3601489663995932,     -.576806291790441 },
    { .022982920777660364,   -.2497046640620823,    -.1151883520260315,
      .3628307003418485,      .03726835047700328 },
    { .004202649722286289,    .1792501419135204,     .05086658220872218,
      .007148802650872729,    .0068148789397772195 },
    { .0012671889041675774,   .0034461267589738897,  .04453911087786469,
     -.09222852896022966,     .057231697338518496 },
    { .0002109560854981544,  -.005140483185555825,  -.022878282571259,
      .01719339732471725,    -.044930187438112855 },
    { .016830857056410086,    .006536017839876424,   .02908926216345833,
     -.102141653746035,       .027292365738663484 },
    { .00021876823557504823, -.00065134549392297,   -.002898884350669207,
     -.007504397861080493,    .000354747395055699 },
    { .009690420479796819,   -.006304672433547204,  -.028059634133074954,
      .01648362537726711,     .01571366799739551 },
    { .030773311284628138,    .01266959399788263,    .05638741361145884,
      .05234610158469334,     .049900992192785674 },
    { .0084974310856038,     -.005454241018647931,  -.02427469611942451,
      .014454323316130661,    .0137791555266677 },
    { .0017749535291258914,   .004826995274768427,   .021483070341828822,
      .003019236275367777,    .0028782064230998723 }
  };

  static creal g[] = {
     .095,                    .25,
     .375,                    .4,
     .4975,                   .49936724991757,
     .38968518428362114,      .49998494965443835,
     .3951318612385894,       .22016983438253684,
     .4774686911397297,       .2189239229503431,
     .4830546566815374,       .2288552938881567 };

  enum { nsets = 13, ndim = 3 };

  TYPEDEFSET;

  count n, r;
  Set *first, *last, *s, *t;

  Allocate(first, nsets);
  Clear(first, nsets);

  last = first;
  n = last->n = 1;
  Copy(last->weight, w[0], nrules);

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[1], nrules);
  last->gen[0] = g[0];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[2], nrules);
  last->gen[0] = g[1];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[3], nrules);
  last->gen[0] = g[2];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[4], nrules);
  last->gen[0] = g[3];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[5], nrules);
  last->gen[0] = g[4];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[6], nrules);
  last->gen[0] = g[5];
  last->gen[1] = g[5];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[7], nrules);
  last->gen[0] = g[6];
  last->gen[1] = g[6];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2)/3;
  Copy(last->weight, w[8], nrules);
  last->gen[0] = g[7];
  last->gen[1] = g[7];
  last->gen[2] = g[7];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2)/3;
  Copy(last->weight, w[9], nrules);
  last->gen[0] = g[8];
  last->gen[1] = g[8];
  last->gen[2] = g[8];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2)/3;
  Copy(last->weight, w[10], nrules);
  last->gen[0] = g[9];
  last->gen[1] = g[9];
  last->gen[2] = g[9];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2);
  Copy(last->weight, w[11], nrules);
  last->gen[0] = g[10];
  last->gen[1] = g[11];
  last->gen[2] = g[11];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2);
  Copy(last->weight, w[12], nrules);
  last->gen[0] = g[12];
  last->gen[1] = g[12];
  last->gen[2] = g[13];

  rule->first = first;
  rule->last = last;
  rule->errcoeff[0] = 4;
  rule->errcoeff[1] = .5;
  rule->errcoeff[2] = 3;
  rule->n = n;

  /* Precompute the per-rule scale and normalization factors that are
     used later when the null rules are combined into an error estimate. */
  for( s = first; s <= last; ++s )
    for( r = 1; r < nrules - 1; ++r ) {
      creal scale = (s->weight[r] == 0) ? 100 :
        -s->weight[r + 1]/s->weight[r];
      real sum = 0;
      for( t = first; t <= last; ++t )
        sum += t->n*fabs(t->weight[r + 1] + scale*t->weight[r]);
      s->scale[r] = scale;
      s->norm[r] = 1/sum;
    }
}
Example #2
	void SetLPSLowerCase(const CMStringA &str)
	{
		SetUL(str.GetLength());
		BuffToLowerCase((LPSTR)Allocate(str.GetLength()), str, str.GetLength());
	}
Example #3
DataBuffer::DataBuffer(unsigned int uMaxSize)
{
	uSize = 0;
	Allocate(uMaxSize);
}
Example #4
	void SetUIDL(DWORDLONG dwData)
	{
		*(DWORDLONG*)Allocate(sizeof(dwData)) = dwData;
	}
Example #5
	void SetLPS(const CMStringA &str)
	{
		SetUL(str.GetLength());
		memcpy(Allocate(str.GetLength()), str, str.GetLength());
	}
Example #6
/** Get a scaled icon. */
ScaledIconNode *GetScaledIcon(IconNode *icon, long fg,
                              int rwidth, int rheight)
{

   XColor color;
   XImage *image;
   ScaledIconNode *np;
   GC maskGC;
   int x, y;
   int scalex, scaley;     /* Fixed point. */
   int srcx, srcy;         /* Fixed point. */
   int ratio;              /* Fixed point. */
   int nwidth, nheight;
   unsigned char *data;

   Assert(icon);
   Assert(icon->image);

   if(rwidth == 0) {
      rwidth = icon->image->width;
   }
   if(rheight == 0) {
      rheight = icon->image->height;
   }

   ratio = (icon->image->width << 16) / icon->image->height;
   nwidth = Min(rwidth, (rheight * ratio) >> 16);
   nheight = Min(rheight, (nwidth << 16) / ratio);
   nwidth = (nheight * ratio) >> 16;
   if(nwidth < 1) {
      nwidth = 1;
   }
   if(nheight < 1) {
      nheight = 1;
   }

   /* Check if this size already exists.
    * Note that XRender scales on the fly.
    */
   for(np = icon->nodes; np; np = np->next) {
#ifdef USE_XRENDER
      if(np->imagePicture != None) {
         np->width = nwidth;
         np->height = nheight;
         return np;
      }
#endif
      if(np->width == nwidth && np->height == nheight) {
         if(!icon->image->bitmap || np->fg == fg) {
            return np;
         }
      }
   }

   /* See if we can use XRender to create the icon. */
#ifdef USE_XRENDER
   if(haveRender) {
      np = CreateScaledRenderIcon(icon, fg, nwidth, nheight);

      /* Don't keep the image data around after creating the icon. */
      Release(icon->image->data);
      icon->image->data = NULL;

      return np;
   }
#endif

   /* Create a new ScaledIconNode the old-fashioned way. */
   np = Allocate(sizeof(ScaledIconNode));
   np->fg = fg;
   np->width = nwidth;
   np->height = nheight;
   np->next = icon->nodes;
#ifdef USE_XRENDER
   np->imagePicture = None;
#endif
   icon->nodes = np;

   /* Create a mask. */
   np->mask = JXCreatePixmap(display, rootWindow, nwidth, nheight, 1);
   maskGC = JXCreateGC(display, np->mask, 0, NULL);
   JXSetForeground(display, maskGC, 0);
   JXFillRectangle(display, np->mask, maskGC, 0, 0, nwidth, nheight);
   JXSetForeground(display, maskGC, 1);

   /* Create a temporary XImage for scaling. */
   image = JXCreateImage(display, rootVisual, rootDepth, ZPixmap, 0,
                         NULL, nwidth, nheight, 8, 0);
   image->data = Allocate(sizeof(unsigned long) * nwidth * nheight);

   /* Determine the scale factor. */
   scalex = (icon->image->width << 16) / nwidth;
   scaley = (icon->image->height << 16) / nheight;

   data = icon->image->data;
   srcy = 0;
   for(y = 0; y < nheight; y++) {
      const int yindex = (srcy >> 16) * icon->image->width;
      srcx = 0;
      for(x = 0; x < nwidth; x++) {
         if(icon->image->bitmap) {
            const int index = yindex + (srcx >> 16);
            const int offset = index >> 3;
            const int mask = 1 << (index & 7);
            if(data[offset] & mask) {
               JXDrawPoint(display, np->mask, maskGC, x, y);
               XPutPixel(image, x, y, fg);
            }
         } else {
            const int yindex = (srcy >> 16) * icon->image->width;
            const int index = 4 * (yindex + (srcx >> 16));
            color.red = data[index + 1];
            color.red |= color.red << 8;
            color.green = data[index + 2];
            color.green |= color.green << 8;
            color.blue = data[index + 3];
            color.blue |= color.blue << 8;
            GetColor(&color);
            XPutPixel(image, x, y, color.pixel);
            if(data[index] >= 128) {
               JXDrawPoint(display, np->mask, maskGC, x, y);
            }
         }
         srcx += scalex;
      }
      srcy += scaley;
   }
Example #7
void* D3DTexture::GetImage(BOOL bForce, void *lpInputPtr)
{
    if(!bForce || (bDynamic && (dwTexType != D3DTEXTURE_FRAME_BUFFER)))
    {
        if(dwTexType == D3DTEXTURE_FRAME_BUFFER)
            ErrOut(TEXT("Tried to query image for frame buffer"));
        if(!bDynamic)
            ErrOut(TEXT("Tried to query image for non-dynamic texture"));
    }
    else
    {
        IDirect3DSurface9 *RenderSurface=NULL, *CopySurface=NULL;
        BOOL bSuccess=FALSE;
        LPVOID lpData=NULL;

        D3DLOCKED_RECT d3dRect;
        int i,j;

        if(dwTexType == D3DTEXTURE_FRAME_BUFFER)
        {
            if(SUCCEEDED(GetTex()->GetSurfaceLevel(0, &RenderSurface)))
            {
                profileSegment("is it this?"); //3.14%
                if(SUCCEEDED(d3d->d3dDevice->CreateOffscreenPlainSurface(Width(), Height(), (D3DFORMAT)dwInternalFormat, D3DPOOL_SYSTEMMEM, &CopySurface, NULL)))
                {
                    profileSegment("or is it this?"); //5.48%
                    if(SUCCEEDED(d3d->d3dDevice->GetRenderTargetData(RenderSurface, CopySurface)))
                    {
                        profileSegment("or how about this?");
                        if(SUCCEEDED(CopySurface->LockRect(&d3dRect, NULL, 0)))
                            bSuccess = TRUE;
                    }
                }
            }

            if(RenderSurface)
                RenderSurface->Release();

            if(!bSuccess)
            {
                if(CopySurface)
                    CopySurface->Release();

                return NULL;
            }
        }
        else
        {
            if(!SUCCEEDED(GetTex()->GetSurfaceLevel(0, &CopySurface)))
                return NULL;
            if(!SUCCEEDED(CopySurface->LockRect(&d3dRect, NULL, 0)))
            {
                CopySurface->Release();
                return NULL;
            }
        }

        //-------------------------------------------------------

        if(dwFormat <= GS_GRAYSCALE)
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height());

            LPBYTE lpBits  = (LPBYTE)d3dRect.pBits;
            LPBYTE lpInput = (LPBYTE)lpData;

            for(i=0; i<texHeight; i++)
                mcpy(lpInput+(i*texWidth), lpBits+(i*(d3dRect.Pitch)), texWidth);
        }
        else if(dwFormat == GS_A8L8)
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*2);

            LPWORD lpBits  = (LPWORD)d3dRect.pBits;
            LPWORD lpInput = (LPWORD)lpData;
            DWORD widthX2 = texWidth*2;

            for(i=0; i<texHeight; i++)
                mcpy(lpInput+(i*widthX2), lpBits+(i*d3dRect.Pitch), widthX2);
        }
        else if(dwFormat == GS_RGB)
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*3);

            LPBYTE lpBits = (LPBYTE)d3dRect.pBits, lpInput = (LPBYTE)lpData;
            //DWORD widthX3 = texWidth*3;

            for(i=0; i<texHeight; i++)
            {
                //mcpy(lpBits+(i*d3dRect.Pitch), lpInput+(i*widthX3), widthX3);
                DWORD curY      = (i*texWidth*3);
                DWORD curD3DY   = (i*d3dRect.Pitch);

                for(j=0; j<texWidth; j++)
                {
                    DWORD curX      = curY+(j*3);
                    DWORD curD3DX   = curD3DY+(j*4);

                    lpInput[curX]   = lpBits[curD3DX];
                    lpInput[curX+1] = lpBits[curD3DX+1];
                    lpInput[curX+2] = lpBits[curD3DX+2];
                }
            }
        }
        else if(dwFormat == GS_DXT1)
        {
            LPBYTE lpBits = (LPBYTE)d3dRect.pBits;

            DWORD tempWidth  = (texWidth+3)/4;
            DWORD tempHeight = (texHeight+3)/4;

            lpData = lpInputPtr ? lpInputPtr : Allocate(tempWidth*tempHeight*8);
            mcpy(lpData, lpBits, tempWidth*tempHeight*8);
        }
        else if((dwFormat == GS_DXT3) || (dwFormat == GS_DXT5))
        {
            LPBYTE lpBits = (LPBYTE)d3dRect.pBits;

            DWORD tempWidth  = (texWidth+3)/4;
            DWORD tempHeight = (texHeight+3)/4;

            lpData = lpInputPtr ? lpInputPtr : Allocate(tempWidth*tempHeight*16);
            mcpy(lpData, lpBits, tempWidth*tempHeight*16);
        }
        else if((dwFormat == GS_RGBA) || (dwFormat == GS_RG16F))
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*4);

            LPBYTE lpBits = (LPBYTE)d3dRect.pBits, lpInput = (LPBYTE)lpData;
            DWORD widthX4 = texWidth*4;

            for(i=0; i<texHeight; i++)
                mcpy(lpInput+(i*widthX4), lpBits+(i*d3dRect.Pitch), widthX4);
        }
        else if((dwFormat == GS_RGBA16) || (dwFormat == GS_RG32F))
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*2*4);

            LPBYTE lpBits = (LPBYTE)d3dRect.pBits;
            LPWORD lpInput = (LPWORD)lpData;
            DWORD widthX8 = texWidth*8;

            for(i=0; i<texHeight; i++)
                mcpy(lpInput+(i*widthX8), lpBits+(i*d3dRect.Pitch), widthX8);
        }
        else if(dwFormat == GS_RGBA16F) //converts to 8bit RGBA
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*sizeof(Vect4));

            LPBYTE lpBits = (LPBYTE)d3dRect.pBits;
            Vect4* lpInput = (Vect4*)lpData;
            DWORD widthXVect = texWidth*sizeof(Vect4);

            for(i=0; i<texHeight; i++)
            {
                DWORD curY      = (i*texWidth);
                DWORD curD3DY   = (i*d3dRect.Pitch);

                for(j=0; j<texWidth; j++)
                {
                    DWORD curX      = curY+(j*4);
                    DWORD curD3DX   = curD3DY+(j*2*4);

                    lpInput[curY+j].x = ((FLOAT)*(D3DXFLOAT16*)(lpBits+(curD3DX)));
                    lpInput[curY+j].y = ((FLOAT)*(D3DXFLOAT16*)(lpBits+(curD3DX+2)));
                    lpInput[curY+j].z = ((FLOAT)*(D3DXFLOAT16*)(lpBits+(curD3DX+4)));
                    lpInput[curY+j].w = ((FLOAT)*(D3DXFLOAT16*)(lpBits+(curD3DX+6)));
                }
            }
        }
        else if(dwFormat == GS_RGBA32F) //converts to 8bit RGBA
        {
            lpData = lpInputPtr ? lpInputPtr : Allocate(Width()*Height()*sizeof(Vect4));

            LPBYTE lpBits = (LPBYTE)d3dRect.pBits;
            Vect4* lpInput = (Vect4*)lpData;
            DWORD widthXVect = texWidth*sizeof(Vect4);

            if(widthXVect == d3dRect.Pitch)
                mcpy(lpInput, lpBits, widthXVect*texHeight);
            else
            {
                for(i=0; i<texHeight; i++)
                    mcpy(lpInput+(i*texWidth), lpBits+(i*d3dRect.Pitch), widthXVect);
            }
        }

        //-------------------------------------------------------

        CopySurface->UnlockRect();
        CopySurface->Release();

        return lpData;
    }

    return textureData;
}
Example #8
KString::KString(LPCTSTR pStr, size_t szLength)
{
	Allocate(szLength);
	
	memcpy(GetDataPtr(), pStr, GetStreamCharsLength(szLength));
}
Example #9
bool CBaseTexture::LoadFromFile(const CStdString& texturePath, unsigned int maxWidth, unsigned int maxHeight,
                                bool autoRotate, unsigned int *originalWidth, unsigned int *originalHeight)
{
#ifdef TARGET_RASPBERRY_PI
    if (URIUtils::GetExtension(texturePath).Equals(".jpg") ||
            URIUtils::GetExtension(texturePath).Equals(".tbn")
            /*|| URIUtils::GetExtension(texturePath).Equals(".png")*/)
    {
        COMXImage omx_image;

        if(omx_image.ReadFile(texturePath))
        {
            // TODO: we only decode as half width and height. this is a workaround for the PI memory limitation
            if(omx_image.Decode(omx_image.GetWidth() / 2, omx_image.GetHeight() / 2))
            {
                Allocate(omx_image.GetDecodedWidth(), omx_image.GetDecodedHeight(), XB_FMT_A8R8G8B8);

                if(!m_pixels)
                {
                    CLog::Log(LOGERROR, "Texture manager (OMX) out of memory");
                    omx_image.Close();
                    return false;
                }

                if (originalWidth)
                    *originalWidth  = omx_image.GetOriginalWidth();
                if (originalHeight)
                    *originalHeight = omx_image.GetOriginalHeight();

                m_hasAlpha = omx_image.IsAlpha();

                if (autoRotate && omx_image.GetOrientation())
                    m_orientation = omx_image.GetOrientation() - 1;

                if(omx_image.GetDecodedData())
                {
                    int size = ( ( GetPitch() * GetRows() ) > omx_image.GetDecodedSize() ) ?
                               omx_image.GetDecodedSize() : ( GetPitch() * GetRows() );

                    memcpy(m_pixels, (unsigned char *)omx_image.GetDecodedData(), size);
                }

                omx_image.Close();

                return true;
            }
            else
            {
                omx_image.Close();
            }
        }
    }
#endif
    if (URIUtils::GetExtension(texturePath).Equals(".dds"))
    {   // special case for DDS images
        CDDSImage image;
        if (image.ReadFile(texturePath))
        {
            Update(image.GetWidth(), image.GetHeight(), 0, image.GetFormat(), image.GetData(), false);
            return true;
        }
        return false;
    }

    //ImageLib is so slow for jpegs. Try our own decoder first. If it fails, fall back to ImageLib.
    if (URIUtils::GetExtension(texturePath).Equals(".jpg") || URIUtils::GetExtension(texturePath).Equals(".tbn"))
    {
        CJpegIO jpegfile;
        if (jpegfile.Open(texturePath, maxWidth, maxHeight))
        {
            if (jpegfile.Width() > 0 && jpegfile.Height() > 0)
            {
                Allocate(jpegfile.Width(), jpegfile.Height(), XB_FMT_A8R8G8B8);
                if (jpegfile.Decode(m_pixels, GetPitch(), XB_FMT_A8R8G8B8))
                {
                    if (autoRotate && jpegfile.Orientation())
                        m_orientation = jpegfile.Orientation() - 1;
                    m_hasAlpha=false;
                    ClampToEdge();
                    return true;
                }
            }
        }
        CLog::Log(LOGDEBUG, "%s - Load of %s failed. Falling back to ImageLib", __FUNCTION__, texturePath.c_str());
    }

    DllImageLib dll;
    if (!dll.Load())
        return false;

    ImageInfo image;
    memset(&image, 0, sizeof(image));

    unsigned int width = maxWidth ? std::min(maxWidth, g_Windowing.GetMaxTextureSize()) : g_Windowing.GetMaxTextureSize();
    unsigned int height = maxHeight ? std::min(maxHeight, g_Windowing.GetMaxTextureSize()) : g_Windowing.GetMaxTextureSize();

    if(!dll.LoadImage(texturePath.c_str(), width, height, &image))
    {
        CLog::Log(LOGERROR, "Texture manager unable to load file: %s", texturePath.c_str());
        return false;
    }

    if (originalWidth)
        *originalWidth = image.originalwidth;
    if (originalHeight)
        *originalHeight = image.originalheight;

    LoadFromImage(image, autoRotate);
    dll.ReleaseImage(&image);

    return true;
}
Example #10
INT_PTR SettingsAdvanced::ProcMessage(UINT message, WPARAM wParam, LPARAM lParam)
{
    switch(message)
    {
        case WM_INITDIALOG:
            {
                LocalizeWindow(hwnd);

                //--------------------------------------------

                HWND hwndToolTip = CreateWindowEx(NULL, TOOLTIPS_CLASS, NULL, WS_POPUP|TTS_NOPREFIX|TTS_ALWAYSTIP,
                    CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
                    hwnd, NULL, hinstMain, NULL);

                TOOLINFO ti;
                zero(&ti, sizeof(ti));
                ti.cbSize = sizeof(ti);
                ti.uFlags = TTF_SUBCLASS|TTF_IDISHWND;
                ti.hwnd = hwnd;

                SendMessage(hwndToolTip, TTM_SETMAXTIPWIDTH, 0, 500);
                SendMessage(hwndToolTip, TTM_SETDELAYTIME, TTDT_AUTOPOP, 14000);

                //------------------------------------

                UINT sceneBufferingTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 400);
                SendMessage(GetDlgItem(hwnd, IDC_SCENEBUFFERTIME), UDM_SETRANGE32, 60, 20000);
                SendMessage(GetDlgItem(hwnd, IDC_SCENEBUFFERTIME), UDM_SETPOS32, 0, sceneBufferingTime);

                //------------------------------------

                bool bUseMTOptimizations = AppConfig->GetInt(TEXT("General"), TEXT("UseMultithreadedOptimizations"), TRUE) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_USEMULTITHREADEDOPTIMIZATIONS), BM_SETCHECK, bUseMTOptimizations ? BST_CHECKED : BST_UNCHECKED, 0);

                HWND hwndTemp = GetDlgItem(hwnd, IDC_PRIORITY);
                SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)Str("Settings.Advanced.Priority.High"));
                SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)Str("Settings.Advanced.Priority.AboveNormal"));
                SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)Str("Settings.Advanced.Priority.Normal"));
                SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)Str("Settings.Advanced.Priority.Idle"));

                CTSTR pStr = AppConfig->GetStringPtr(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
                if (scmpi(pStr, TEXT("Idle")) == 0)
                    SendMessage(hwndTemp, CB_SETCURSEL, 3, 0);
                else if (scmpi(pStr, TEXT("Above Normal")) == 0)
                    SendMessage(hwndTemp, CB_SETCURSEL, 1, 0);
                else if (scmpi(pStr, TEXT("High")) == 0)
                    SendMessage(hwndTemp, CB_SETCURSEL, 0, 0);
                else //Normal
                    SendMessage(hwndTemp, CB_SETCURSEL, 2, 0);

                //------------------------------------

                bool bDisablePreviewEncoding = GlobalConfig->GetInt(TEXT("General"), TEXT("DisablePreviewEncoding"), false) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_DISABLEPREVIEWENCODING), BM_SETCHECK, bDisablePreviewEncoding ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                bool bAllowOtherHotkeyModifiers = GlobalConfig->GetInt(TEXT("General"), TEXT("AllowOtherHotkeyModifiers"), true) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_ALLOWOTHERHOTKEYMODIFIERS), BM_SETCHECK, bAllowOtherHotkeyModifiers ? BST_CHECKED : BST_UNCHECKED, 0);

                //--------------------------------------------

                hwndTemp = GetDlgItem(hwnd, IDC_PRESET);
                static const CTSTR preset_names[7] = {TEXT("ultrafast"), TEXT("superfast"), TEXT("veryfast"), TEXT("faster"), TEXT("fast"), TEXT("medium"), TEXT("slow")};
                for(int i=0; i<7; i++)
                    SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)preset_names[i]);

                LoadSettingComboString(hwndTemp, TEXT("Video Encoding"), TEXT("Preset"), TEXT("veryfast"));

                ti.lpszText = (LPWSTR)Str("Settings.Advanced.VideoEncoderCPUTradeoffTooltip");
                ti.uId = (UINT_PTR)hwndTemp;
                SendMessage(hwndToolTip, TTM_ADDTOOL, 0, (LPARAM)&ti);

                //------------------------------------

                bool bUseCFR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCFR"), 0) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_USECFR), BM_SETCHECK, bUseCFR ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                bool bUseCustomX264Settings = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCustomSettings")) != 0;
                String strX264Settings = AppConfig->GetString(TEXT("Video Encoding"), TEXT("CustomSettings"));

                SendMessage(GetDlgItem(hwnd, IDC_USEVIDEOENCODERSETTINGS), BM_SETCHECK, bUseCustomX264Settings ? BST_CHECKED : BST_UNCHECKED, 0);
                SetWindowText(GetDlgItem(hwnd, IDC_VIDEOENCODERSETTINGS), strX264Settings);

                ti.lpszText = (LPWSTR)Str("Settings.Advanced.VideoEncoderSettingsTooltip");
                ti.uId = (UINT_PTR)GetDlgItem(hwnd, IDC_VIDEOENCODERSETTINGS);
                SendMessage(hwndToolTip, TTM_ADDTOOL, 0, (LPARAM)&ti);

                ti.uId = (UINT_PTR)GetDlgItem(hwnd, IDC_USEVIDEOENCODERSETTINGS);
                SendMessage(hwndToolTip, TTM_ADDTOOL, 0, (LPARAM)&ti);

                EnableWindow(GetDlgItem(hwnd, IDC_VIDEOENCODERSETTINGS), bUseCustomX264Settings);

                //--------------------------------------------

                bool bUnlockFPS = AppConfig->GetInt(TEXT("Video"), TEXT("UnlockFPS")) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_UNLOCKHIGHFPS), BM_SETCHECK, bUnlockFPS ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                bool bHasQSV = CheckQSVHardwareSupport(false);
                EnableWindow(GetDlgItem(hwnd, IDC_USEQSV), bHasQSV);

                bool bUseQSV = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_USEQSV), BM_SETCHECK, bUseQSV ? BST_CHECKED : BST_UNCHECKED, 0);

                bool bQSVUseVideoEncoderSettings = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("QSVUseVideoEncoderSettings")) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_QSVUSEVIDEOENCODERSETTINGS), BM_SETCHECK, bQSVUseVideoEncoderSettings ? BST_CHECKED : BST_UNCHECKED, 0);
                
                ti.lpszText = (LPWSTR)Str("Settings.Advanced.QSVUseVideoEncoderSettingsTooltip");
                ti.uId = (UINT_PTR)GetDlgItem(hwnd, IDC_QSVUSEVIDEOENCODERSETTINGS);
                SendMessage(hwndToolTip, TTM_ADDTOOL, 0, (LPARAM)&ti);

                EnableWindow(GetDlgItem(hwnd, IDC_QSVUSEVIDEOENCODERSETTINGS), bHasQSV && bUseQSV);

                //------------------------------------

                bool bSyncToVideoTime = AppConfig->GetInt(TEXT("Audio"), TEXT("SyncToVideoTime")) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_SYNCTOVIDEOTIME), BM_SETCHECK, bSyncToVideoTime ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                bool bUseMicQPC = GlobalConfig->GetInt(TEXT("Audio"), TEXT("UseMicQPC")) != 0;
                SendMessage(GetDlgItem(hwnd, IDC_USEMICQPC), BM_SETCHECK, bUseMicQPC ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                int bufferTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 400);

                int globalAudioTimeAdjust = GlobalConfig->GetInt(TEXT("Audio"), TEXT("GlobalAudioTimeAdjust"));
                SendMessage(GetDlgItem(hwnd, IDC_AUDIOTIMEADJUST), UDM_SETRANGE32, -bufferTime, 1000);
                SendMessage(GetDlgItem(hwnd, IDC_AUDIOTIMEADJUST), UDM_SETPOS32, 0, globalAudioTimeAdjust);

                //------------------------------------

                int lowLatencyFactor = AppConfig->GetInt(TEXT("Publish"), TEXT("LatencyFactor"), 20);
                SetDlgItemInt(hwnd, IDC_LATENCYTUNE, lowLatencyFactor, TRUE);

                int bLowLatencyAutoMethod = AppConfig->GetInt(TEXT("Publish"), TEXT("LowLatencyMethod"), 0);
                SendMessage(GetDlgItem(hwnd, IDC_LATENCYMETHOD), BM_SETCHECK, bLowLatencyAutoMethod ? BST_CHECKED : BST_UNCHECKED, 0);

                //------------------------------------

                MIB_IPADDRTABLE tempTable;
                DWORD dwSize = 0;
                if (GetIpAddrTable (&tempTable, &dwSize, TRUE) == ERROR_INSUFFICIENT_BUFFER)
                {
                    PMIB_IPADDRTABLE ipTable;

                    ipTable = (PMIB_IPADDRTABLE)Allocate(dwSize);

                    if (GetIpAddrTable (ipTable, &dwSize, TRUE) == NO_ERROR)
                    {
                        DWORD i;

                        hwndTemp = GetDlgItem(hwnd, IDC_BINDIP);
                        SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)TEXT("Default"));

                        for (i=0; i < ipTable->dwNumEntries; i++)
                        {
                            String strAddress;
                            DWORD strLength = 32;

                            // don't allow binding to localhost
                            if ((ipTable->table[i].dwAddr & 0xFF) == 127)
                                continue;

                            strAddress.SetLength(strLength);

                            SOCKADDR_IN IP;

                            IP.sin_addr.S_un.S_addr = ipTable->table[i].dwAddr;
                            IP.sin_family = AF_INET;
                            IP.sin_port = 0;
                            zero(&IP.sin_zero, sizeof(IP.sin_zero));

                            WSAAddressToString ((LPSOCKADDR)&IP, sizeof(IP), NULL, strAddress.Array(), &strLength);
                            SendMessage(hwndTemp, CB_ADDSTRING, 0, (LPARAM)strAddress.Array());
                        }

                        LoadSettingComboString(hwndTemp, TEXT("Publish"), TEXT("BindToIP"), TEXT("Default"));
                    }

                    Free(ipTable);
                }

                //need this as some of the dialog item sets above trigger the notifications
                ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_HIDE);
                SetChangedSettings(false);
                return TRUE;
            }

        case WM_COMMAND:
            switch(LOWORD(wParam))
            {
                case IDC_USEVIDEOENCODERSETTINGS:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        BOOL bUseVideoEncoderSettings = SendMessage((HWND)lParam, BM_GETCHECK, 0, 0) == BST_CHECKED;
                        EnableWindow(GetDlgItem(hwnd, IDC_VIDEOENCODERSETTINGS), bUseVideoEncoderSettings);

                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;

                case IDC_SCENEBUFFERTIME_EDIT:
                case IDC_AUDIOTIMEADJUST_EDIT:
                case IDC_VIDEOENCODERSETTINGS:
                case IDC_LATENCYTUNE:
                    if(HIWORD(wParam) == EN_CHANGE)
                    {
                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;

                /*case IDC_TIMER1:
                case IDC_TIMER2:
                case IDC_TIMER3:
                case IDC_DISABLED3DCOMPATIBILITY:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;*/

                case IDC_USESENDBUFFER:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        BOOL bUseSendBuffer = SendMessage((HWND)lParam, BM_GETCHECK, 0, 0) == BST_CHECKED;
                        EnableWindow(GetDlgItem(hwnd, IDC_SENDBUFFERSIZE), bUseSendBuffer);

                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;

                case IDC_PRESET:
                    if(HIWORD(wParam) == CBN_SELCHANGE)
                    {
                        HWND hwndTemp = (HWND)lParam;

                        String strNewPreset = GetCBText(hwndTemp);
                        if (scmp(strNewPreset.Array(), AppConfig->GetString(TEXT("Video Encoding"), TEXT("Preset"), TEXT("veryfast"))))
                        {
                            static BOOL bHasWarned = FALSE;
                            if (!bHasWarned && MessageBox(hwnd, Str("Settings.Advanced.PresetWarning"), NULL, MB_ICONEXCLAMATION | MB_YESNO) == IDNO)
                            {
                                LoadSettingComboString(hwndTemp, TEXT("Video Encoding"), TEXT("Preset"), TEXT("veryfast"));
                            }
                            else
                            {
                                bHasWarned = TRUE;
                                ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                                SetChangedSettings(true);
                            }
                        }
                    }
                    break;

                case IDC_SENDBUFFERSIZE:
                case IDC_PRIORITY:
                case IDC_BINDIP:
                    if(HIWORD(wParam) == CBN_SELCHANGE || HIWORD(wParam) == CBN_EDITCHANGE)
                    {
                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;

                case IDC_USEQSV:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        bool bUseQSV = SendMessage((HWND)lParam, BM_GETCHECK, 0, 0) == BST_CHECKED;
                        EnableWindow(GetDlgItem(hwnd, IDC_QSVUSEVIDEOENCODERSETTINGS), bUseQSV);
                    }
                case IDC_DISABLEPREVIEWENCODING:
                case IDC_ALLOWOTHERHOTKEYMODIFIERS:
                case IDC_USEMICQPC:
                case IDC_SYNCTOVIDEOTIME:
                case IDC_USECFR:
                case IDC_USEMULTITHREADEDOPTIMIZATIONS:
                case IDC_UNLOCKHIGHFPS:
                case IDC_LATENCYMETHOD:
                case IDC_QSVUSEVIDEOENCODERSETTINGS:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        ShowWindow(GetDlgItem(hwnd, IDC_INFO), SW_SHOW);
                        SetChangedSettings(true);
                    }
                    break;
            }

    }
    return FALSE;
}
Example #11
TCriticalSectionLocker::TCriticalSectionLocker(TCriticalSection& SCriticalSection)
{
	m_pCriticalSection = NULL;

	Allocate(SCriticalSection);
}
Example #12
static void Rule7Alloc(Rule *rule)
{
  static creal w[] = {
     .019417866674748388428,   -.40385257701150182546,
     .64485668767465982223,     .01177982690775806141,
    -.18041318740733609012,    -.088785828081335044443,
     .056328645808285941374,   -.0097089333373741942142,
    -.99129176779582358138,    -.17757165616267008889,
     .12359398032043233572,     .074978148702033690681,
     .55489147051423559776,     .088041241522692771226,
     .021118358455513385083,   -.0099302203239653333087,
    -.064100053285010904179,    .030381729038221007659,
     .0058899134538790307051,  -.0048544666686870971071,
     .35514331232534017777 };

  static creal g[] = {
     .47795365790226950619,     .20302858736911986780,
     .375,                      .34303789878087814570 };

  enum { nsets = 6 };

  TYPEDEFSET;

  ccount ndim = ndim_;
  ccount twondim = 1 << ndim;
  count dim, n, r;
  Set *first, *last, *s, *t;

  Allocate(first, nsets);
  Clear(first, nsets);

  last = first;
  n = last->n = 1;
  last->weight[0] = ndim*(ndim*w[0] + w[1]) + w[2];
  last->weight[1] = ndim*(ndim*w[3] + w[4]) - w[5];
  last->weight[2] = ndim*w[6] - last->weight[1];
  last->weight[3] = ndim*(ndim*w[7] + w[8]) - w[9];
  last->weight[4] = 1 - last->weight[0];

  ++last;
  n += last->n = 2*ndim;
  last->weight[0] = w[10];
  last->weight[1] = w[11];
  last->weight[2] = -w[10];
  last->weight[3] = w[12];
  last->weight[4] = -w[10];
  last->gen[0] = g[1];

  ++last;
  n += last->n = 2*ndim;
  last->weight[0] = w[13] - ndim*w[0];
  last->weight[1] = w[14] - ndim*w[3];
  last->weight[2] = w[15] - last->weight[1];
  last->weight[3] = w[16] - ndim*w[7];
  last->weight[4] = -last->weight[0];
  last->gen[0] = g[0];

  ++last;
  n += last->n = 2*ndim;
  last->weight[2] = w[17];
  last->gen[0] = g[2];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  last->weight[0] = -w[7];
  last->weight[1] = w[18];
  last->weight[2] = -w[18];
  last->weight[3] = w[19];
  last->weight[4] = w[7];
  last->gen[0] = g[0];
  last->gen[1] = g[0];

  ++last;
  n += last->n = twondim;
  last->weight[0] = w[20]/twondim;
  last->weight[1] = w[5]/twondim;
  last->weight[2] = -last->weight[1];
  last->weight[3] = w[9]/twondim;
  last->weight[4] = -last->weight[0];
  for( dim = 0; dim < ndim; ++dim )
    last->gen[dim] = g[3];

  rule->first = first;
  rule->last = last;
  rule->errcoeff[0] = 5;
  rule->errcoeff[1] = 1;
  rule->errcoeff[2] = 5;
  rule->n = n;

  for( s = first; s <= last; ++s )
    for( r = 1; r < nrules - 1; ++r ) {
      creal scale = (s->weight[r] == 0) ? 100 :
        -s->weight[r + 1]/s->weight[r];
      real sum = 0;
      for( t = first; t <= last; ++t )
        sum += t->n*fabs(t->weight[r + 1] + scale*t->weight[r]);
      s->scale[r] = scale;
      s->norm[r] = 1/sum;
    }
}
Example #13
static void Rule9Alloc(Rule *rule)
{
  static creal w[] = {
    -.0023611709677855117884,   .11415390023857325268,
    -.63833920076702389094,     .74849988504685208004,
    -.0014324017033399125142,   .057471507864489725949,
    -.14225104571434243234,    -.062875028738286979989,
     .254591133248959089,     -1.207328566678236261,
     .89567365764160676508,    -.36479356986049146661,
     .0035417564516782676826,  -.072609367395893679605,
     .10557491625218991012,     .0021486025550098687713,
    -.032268563892953949998,    .010636783990231217481,
     .014689102496143490175,    .51134708346467591431,
     .45976448120806344646,     .18239678493024573331,
    -.04508628929435784076,     .21415883524352793401,
    -.027351546526545644722,    .054941067048711234101,
     .11937596202570775297,     .65089519391920250593,
     .14744939829434460168,     .057693384490973483573,
     .034999626602143583822,  -1.3868627719278281436,
    -.2386668732575008879,      .015532417276607053264,
     .0035328099607090870236,   .09231719987444221619,
     .02254314464717892038,     .013675773263272822361,
    -.32544759695960125297,     .0017708782258391338413,
     .0010743012775049343856,   .25150011495314791996 };

  static creal g[] = {
     .47795365790226950619,     .20302858736911986780,
     .44762735462617812882,     .125,
     .34303789878087814570 };

  enum { nsets = 9 };

  TYPEDEFSET;

  ccount ndim = ndim_;
  ccount twondim = 1 << ndim;
  count dim, n, r;
  Set *first, *last, *s, *t;

  Allocate(first, nsets);
  Clear(first, nsets);

  last = first;
  n = last->n = 1;
  last->weight[0] = ndim*(ndim*(ndim*w[0] + w[1]) + w[2]) + w[3];
  last->weight[1] = ndim*(ndim*(ndim*w[4] + w[5]) + w[6]) - w[7];
  last->weight[2] = ndim*w[8] - last->weight[1];
  last->weight[3] = ndim*(ndim*w[9] + w[10]) - 1 + last->weight[0];
  last->weight[4] = ndim*w[11] + 1 - last->weight[0];

  ++last;
  n += last->n = 2*ndim;
  last->weight[0] = ndim*(ndim*w[12] + w[13]) + w[14];
  last->weight[1] = ndim*(ndim*w[15] + w[16]) + w[17];
  last->weight[2] = w[18] - last->weight[1];
  last->weight[3] = ndim*w[19] + w[20] + last->weight[0];
  last->weight[4] = w[21] - last->weight[0];
  last->gen[0] = g[0];

  ++last;
  n += last->n = 2*ndim;
  last->weight[0] = ndim*w[22] + w[23];
  last->weight[1] = ndim*w[24] + w[25];
  last->weight[2] = w[26] - last->weight[1];
  last->weight[3] = ndim*w[27] + w[28];
  last->weight[4] = -last->weight[0];
  last->gen[0] = g[1];

  ++last;
  n += last->n = 2*ndim;
  last->weight[0] = w[29];
  last->weight[1] = w[30];
  last->weight[2] = -w[29];
  last->weight[3] = w[31];
  last->weight[4] = -w[29];
  last->gen[0] = g[2];

  ++last;
  n += last->n = 2*ndim;
  last->weight[2] = w[32];
  last->gen[0] = g[3];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  last->weight[0] = w[33] - ndim*w[12];
  last->weight[1] = w[34] - ndim*w[15];
  last->weight[2] = -last->weight[1];
  last->weight[3] = w[35] + last->weight[0];
  last->weight[4] = -last->weight[0];
  last->gen[0] = g[0];
  last->gen[1] = g[0];

  ++last;
  n += last->n = 4*ndim*(ndim - 1);
  last->weight[0] = w[36];
  last->weight[1] = w[37];
  last->weight[2] = -w[37];
  last->weight[3] = w[38];
  last->weight[4] = -w[36];
  last->gen[0] = g[0];
  last->gen[1] = g[1];

  ++last;
  n += last->n = 4*ndim*(ndim - 1)*(ndim - 2)/3;
  last->weight[0] = w[39];
  last->weight[1] = w[40];
  last->weight[2] = -w[40];
  last->weight[3] = w[39];
  last->weight[4] = -w[39];
  last->gen[0] = g[0];
  last->gen[1] = g[0];
  last->gen[2] = g[0];

  ++last;
  n += last->n = twondim;
  last->weight[0] = w[41]/twondim;
  last->weight[1] = w[7]/twondim;
  last->weight[2] = -last->weight[1];
  last->weight[3] = last->weight[0];
  last->weight[4] = -last->weight[0];
  for( dim = 0; dim < ndim; ++dim )
    last->gen[dim] = g[4];

  rule->first = first;
  rule->last = last;
  rule->errcoeff[0] = 5;
  rule->errcoeff[1] = 1;
  rule->errcoeff[2] = 5;
  rule->n = n;

  for( s = first; s <= last; ++s )
    for( r = 1; r < nrules - 1; ++r ) {
      creal scale = (s->weight[r] == 0) ? 100 :
        -s->weight[r + 1]/s->weight[r];
      real sum = 0;
      for( t = first; t <= last; ++t )
        sum += t->n*fabs(t->weight[r + 1] + scale*t->weight[r]);
      s->scale[r] = scale;
      s->norm[r] = 1/sum;
    }
}
Example #14
static void Rule13Alloc(Rule *rule)
{
  static creal w[][nrules] = {
    { .00844923090033615,     .3213775489050763,     .3372900883288987,
     -.8264123822525677,      .6539094339575232 },
    { .023771474018994404,   -.1767341636743844,    -.1644903060344491,
      .306583861409436,      -.2041614154424632},
    { .02940016170142405,     .07347600537466073,    .07707849911634623,
      .002389292538329435,   -.174698151579499 },
    { .006644436465817374,   -.03638022004364754,   -.03804478358506311,
     -.1343024157997222,      .03937939671417803 },
    { .0042536044255016,      .021252979220987123,   .02223559940380806,
      .08833366840533902,     .006974520545933992 },
    { 0,                      .1460984204026913,     .1480693879765931,
      0,                      0 },
    { .0040664827465935255,   .017476132861520992,  4.467143702185815e-6,
      .0009786283074168292,   .0066677021717782585 },
    { .03362231646315497,     .1444954045641582,     .150894476707413,
     -.1319227889147519,      .05512960621544304 },
    { .033200804136503725,    .0001307687976001325, 3.6472001075162155e-5,
      .00799001220015063,     .05443846381278608 },
    { .014093686924979677,    .0005380992313941161,  .000577719899901388,
      .0033917470797606257,   .02310903863953934 },
    { .000977069770327625,    .0001042259576889814,  .0001041757313688177,
      .0022949157182832643,   .01506937747477189 },
    { .007531996943580376,   -.001401152865045733,  -.001452822267047819,
     -.01358584986119197,    -.060570216489018905 },
    { .02577183086722915,     .008041788181514763,   .008338339968783704,
      .04025866859057809,     .04225737654686337},
    { .015625,               -.1420416552759383,    -.147279632923196,
      .003760268580063992,    .02561989142123099 }
  };

  static creal g[] = {
     .12585646717265545,      .3506966822267133,
     .4795480315809981,       .4978005239276064,
     .25,                     .07972723291487795,
     .1904495567970094,       .3291384627633596,
     .43807365825146577,      .499121592026599,
     .4895111329084231,       .32461421628226944,
     .43637106005656195,      .1791307322940614,
     .2833333333333333,       .1038888888888889 };

  enum { nsets = 14, ndim = 2 };

  TYPEDEFSET;

  count n, r;
  Set *first, *last, *s, *t;

  Allocate(first, nsets);
  Clear(first, nsets);

  last = first;
  n = last->n = 1;
  Copy(last->weight, w[0], nrules);

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[1], nrules);
  last->gen[0] = g[0];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[2], nrules);
  last->gen[0] = g[1];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[3], nrules);
  last->gen[0] = g[2];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[4], nrules);
  last->gen[0] = g[3];

  ++last;
  n += last->n = 2*ndim;
  Copy(last->weight, w[5], nrules);
  last->gen[0] = g[4];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[6], nrules);
  last->gen[0] = g[5];
  last->gen[1] = g[5];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[7], nrules);
  last->gen[0] = g[6];
  last->gen[1] = g[6];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[8], nrules);
  last->gen[0] = g[7];
  last->gen[1] = g[7];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[9], nrules);
  last->gen[0] = g[8];
  last->gen[1] = g[8];

  ++last;
  n += last->n = 2*ndim*(ndim - 1);
  Copy(last->weight, w[10], nrules);
  last->gen[0] = g[9];
  last->gen[1] = g[9];

  ++last;
  n += last->n = 4*ndim*(ndim - 1);
  Copy(last->weight, w[11], nrules);
  last->gen[0] = g[10];
  last->gen[1] = g[11];

  ++last;
  n += last->n = 4*ndim*(ndim - 1);
  Copy(last->weight, w[12], nrules);
  last->gen[0] = g[12];
  last->gen[1] = g[13];

  ++last;
  n += last->n = 4*ndim*(ndim - 1);
  Copy(last->weight, w[13], nrules);
  last->gen[0] = g[14];
  last->gen[1] = g[15];

  rule->first = first;
  rule->last = last;
  rule->errcoeff[0] = 10;
  rule->errcoeff[1] = 1;
  rule->errcoeff[2] = 5;
  rule->n = n;

  for( s = first; s <= last; ++s )
    for( r = 1; r < nrules - 1; ++r ) {
      creal scale = (s->weight[r] == 0) ? 100 :
        -s->weight[r + 1]/s->weight[r];
      real sum = 0;
      for( t = first; t <= last; ++t )
        sum += t->n*fabs(t->weight[r + 1] + scale*t->weight[r]);
      s->scale[r] = scale;
      s->norm[r] = 1/sum;
    }
}
Example #15
void BaseMesh::CreateSphere(float radius, int slices, int stacks)
{
    Allocate(slices * (stacks - 1) + 2,slices * 2 * (stacks - 1));    //allocate space for the angular splits

    float PI_Stacks = Math::PIf / float(stacks);
    float PI2_Slices = 2.0f * Math::PIf / float(slices);

    float Theta, Phi,CosP,SinP;
    int i,i2,vc=0,ic=0;
    MeshVertex *V = Vertices();
    DWORD *I = Indices();
    MeshVertex MVtx(Vec3f::Origin, Vec3f::Origin, RGBColor::White, Vec2f::Origin);

    for(i=1;i < stacks;i++)
    {
        Phi = float(i) * PI_Stacks;
        CosP = cosf(Phi);
        SinP = sinf(Phi);

        for(i2=0;i2 < slices;i2++)
        {
            Theta = float(i2) * PI2_Slices;
            MVtx.Pos = Vec3f(radius * cosf(Theta) * SinP, radius * sinf(Theta) * SinP, radius * CosP);    //create the new vertex
            V[vc++] = MVtx;    //add the vertex to the mesh
        }
    }

    //add the top and bottom vertices to the mesh
    int TopVertex = vc,BottomVertex = vc+1;
    MVtx.Pos = Vec3f(0.0f,0.0f,radius);
    V[vc++] = MVtx;
    MVtx.Pos = Vec3f(0.0f,0.0f,-radius);
    V[vc++] = MVtx;

    //add the top and bottom triangles (all triangles involving the TopVertex and BottomVertex)
    int ip1,i2p1;
    for(i=0;i < slices;i++)
    {
        ip1 = i + 1;
        if(ip1 == slices) ip1 = 0;

        I[ic++] = i;
        I[ic++] = TopVertex;                    //top triangle
        I[ic++] = ip1;
        
        I[ic++] = ip1 + (stacks - 2) * slices;
        I[ic++] = BottomVertex;                    //bottom triangle
        I[ic++] = i + (stacks - 2) * slices;
    }

    //add all the remaining triangles
    for(i=0;i < stacks - 2;i++)
    {
        for(i2=0;i2 < slices;i2++)
        {
            i2p1 = i2 + 1;
            if(i2p1 == slices) i2p1 = 0;
            
            I[ic++] = (i+1) * slices + i2;
            I[ic++] = i * slices + i2;
            I[ic++] = i * slices + i2p1;

            
            I[ic++] = (i+1) * slices + i2;
            I[ic++] = i * slices + i2p1;
            I[ic++] = (i+1) * slices + i2p1;
        }
    }

    GenerateNormals();
}
Example #16
void PickCallbackFunction(vtkObject* caller, long unsigned int vtkNotUsed(eventId),
	void* clientData, void* vtkNotUsed(callData))
{
	vtkAreaPicker *areaPicker = static_cast<vtkAreaPicker*>(caller);
	iARenderer *ren = static_cast<iARenderer*>(clientData);
	ren->GetRenderWindow()->GetRenderers()->GetFirstRenderer()->RemoveActor(ren->selectedActor);

	auto extractSelection = vtkSmartPointer<vtkExtractSelectedFrustum>::New();
	extractSelection->SetInputData(0, ren->getRenderObserver()->GetImageData());
	extractSelection->PreserveTopologyOff();
	extractSelection->SetFrustum(areaPicker->GetFrustum());
	extractSelection->Update();

	if (!extractSelection->GetOutput()->GetNumberOfElements(vtkUnstructuredGrid::CELL))
	{
		ren->emitNoSelectedCells();
		return;
	}
	
	if (ren->GetInteractor()->GetControlKey() &&
		!ren->GetInteractor()->GetShiftKey())
	{
		// Adds cells to selection
		auto append = vtkSmartPointer<vtkAppendFilter>::New();
		append->AddInputData(ren->finalSelection);
		append->AddInputData(extractSelection->GetOutput());
		append->Update();
		ren->finalSelection->ShallowCopy(append->GetOutput());
	}
	else if (ren->GetInteractor()->GetControlKey() &&
		ren->GetInteractor()->GetShiftKey())
	{
		// Removes cells from selection 
		auto newfinalSel = vtkSmartPointer<vtkUnstructuredGrid>::New();
		newfinalSel->Allocate(1, 1);
		newfinalSel->SetPoints(ren->finalSelection->GetPoints());
		auto currSel = vtkSmartPointer<vtkUnstructuredGrid>::New();
		currSel->ShallowCopy(extractSelection->GetOutput());
		double f_Cell[DIM] = { 0,0,0 }, c_Cell[DIM] = { 0,0,0 };
		double* spacing = ren->getRenderObserver()->GetImageData()->GetSpacing();

		for (vtkIdType i = 0; i < ren->finalSelection->GetNumberOfCells(); ++i)
		{
			bool addCell = true;
			GetCellCenter(ren->finalSelection, i, f_Cell, spacing);
			for (vtkIdType j = 0; j < currSel->GetNumberOfCells(); ++j)
			{
				GetCellCenter(currSel, j, c_Cell, spacing);
				if (f_Cell[0] == c_Cell[0] &&
					f_Cell[1] == c_Cell[1] &&
					f_Cell[2] == c_Cell[2])
				{
					addCell = false;
					break;
				}
			}
			if (addCell)
				newfinalSel->InsertNextCell(ren->finalSelection->GetCell(i)->GetCellType(),
					ren->finalSelection->GetCell(i)->GetPointIds());
		}		
		ren->finalSelection->ShallowCopy(newfinalSel);
	}
	else
	{
		// New selection
		ren->finalSelection->ShallowCopy(extractSelection->GetOutput());
	}
	ren->selectedMapper->Update();
	ren->GetRenderWindow()->GetRenderers()->GetFirstRenderer()->AddActor(ren->selectedActor);
	ren->emitSelectedCells(ren->finalSelection);
}
Example #17
void asCString::SetLength(size_t len)
{
	Allocate(len, true);
}
Example #18
Entity* EntityCreate(Allocator* alloc)
{
	return Allocate(alloc, Entity);
}
Example #19
CZipAutoBuffer::CZipAutoBuffer(DWORD iSize, bool bZeroMemory)
{
	m_iSize = 0;
	m_pBuffer = NULL;
	Allocate(iSize, bZeroMemory);
}
Example #20
 void* Region::AllocateAndWrite(int byteCount, const void* data)
 {
     auto block = Allocate(byteCount);
     memcpy(block, data, byteCount);
     return block;
 }
Example #21
	void SetUL(DWORD dwData)
	{
		*(DWORD*)Allocate(sizeof(DWORD)) = dwData;
	}
Example #22
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    //------------------------------------------------
    // chroma key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strDeviceName.IsValid())
    {
        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDeviceName, L"DevicePath", strDeviceID);
        if(!deviceFilter)
        {
            AppWarning(TEXT("DShowPlugin: Invalid device: name '%s', path '%s'"), strDeviceName.Array(), strDeviceID.Array());
            goto cleanFinish;
        }
    }
    else
    {
        if(!strDevice.IsValid())
        {
            AppWarning(TEXT("DShowPlugin: Invalid device specified"));
            goto cleanFinish;
        }

        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDevice);
        if(!deviceFilter)
        {
            AppWarning(TEXT("DShowPlugin: Could not create device filter"));
            goto cleanFinish;
        }
    }

    devicePin = GetOutputPin(deviceFilter, &MEDIATYPE_Video);
    if(!devicePin)
    {
        AppWarning(TEXT("DShowPlugin: Could not get device video pin"));
        goto cleanFinish;
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType"));

    if(soundOutputType != 0)
    {
        err = capture->FindPin(deviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
        if(FAILED(err))
        {
            Log(TEXT("DShowPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data

    renderCX = renderCY = 0;
    frameInterval = 0;

    if(bUseCustomResolution)
    {
        renderCX = data->GetInt(TEXT("resolutionWidth"));
        renderCY = data->GetInt(TEXT("resolutionHeight"));
        frameInterval = data->GetInt(TEXT("frameInterval"));
    }
    else
    {
        SIZE size;
        if (!GetClosestResolution(outputList, size, frameInterval))
        {
            AppWarning(TEXT("DShowPlugin: Unable to find appropriate resolution"));
            renderCX = renderCY = 64;
            goto cleanFinish;
        }

        renderCX = size.cx;
        renderCY = size.cy;
    }

    if(!renderCX || !renderCY || !frameInterval)
    {
        AppWarning(TEXT("DShowPlugin: Invalid size/fps specified"));
        goto cleanFinish;
    }

    preferredOutputType = (data->GetInt(TEXT("usePreferredType")) != 0) ? data->GetInt(TEXT("preferredType")) : -1;

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    for(int i=0; i<numThreads; i++)
    {
        convertData[i].width  = renderCX;
        convertData[i].height = renderCY;
        convertData[i].sample = NULL;
        convertData[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertData[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertData[i].startY = 0;
        else
            convertData[i].startY = convertData[i-1].endY;

        if(i == (numThreads-1))
            convertData[i].endY = renderCY;
        else
            convertData[i].endY = ((renderCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bFirstFrame = true;

    //------------------------------------------------
    // get the closest media output for the settings used

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, renderCX, renderCY, preferredOutputType, frameInterval);
    if(!bestOutput)
    {
        AppWarning(TEXT("DShowPlugin: Could not find appropriate resolution to create device image source"));
        goto cleanFinish;
    }

    //------------------------------------------------
    // log video info

    {
        String strTest = FormattedString(TEXT("    device: %s,\r\n    device id %s,\r\n    chosen type: %s, usingFourCC: %s, res: %ux%u - %ux%u, frameIntervals: %llu-%llu"),
            strDevice.Array(), strDeviceID.Array(),
            EnumToName[(int)bestOutput->videoType],
            bestOutput->bUsingFourCC ? TEXT("true") : TEXT("false"),
            bestOutput->minCX, bestOutput->minCY, bestOutput->maxCX, bestOutput->maxCY,
            bestOutput->minFrameInterval, bestOutput->maxFrameInterval);

        BITMAPINFOHEADER *bmiHeader = GetVideoBMIHeader(bestOutput->mediaType);

        char fourcc[5];
        mcpy(fourcc, &bmiHeader->biCompression, 4);
        fourcc[4] = 0;

        if(bmiHeader->biCompression > 1000)
            strTest << FormattedString(TEXT(", fourCC: '%S'\r\n"), fourcc);
        else
            strTest << FormattedString(TEXT(", fourCC: %08lX\r\n"), bmiHeader->biCompression);

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // set up shaders and video output data

    expectedMediaType = bestOutput->mediaType->subtype;

    colorType = DeviceOutputType_RGB;
    if(bestOutput->videoType == VideoOutputType_I420)
        colorType = DeviceOutputType_I420;
    else if(bestOutput->videoType == VideoOutputType_YV12)
        colorType = DeviceOutputType_YV12;
    else if(bestOutput->videoType == VideoOutputType_YVYU)
        colorType = DeviceOutputType_YVYU;
    else if(bestOutput->videoType == VideoOutputType_YUY2)
        colorType = DeviceOutputType_YUY2;
    else if(bestOutput->videoType == VideoOutputType_UYVY)
        colorType = DeviceOutputType_UYVY;
    else if(bestOutput->videoType == VideoOutputType_HDYC)
        colorType = DeviceOutputType_HDYC;
    else
    {
        colorType = DeviceOutputType_RGB;
        expectedMediaType = MEDIASUBTYPE_RGB32;
    }

    strShader = ChooseShader();
    if(strShader.IsValid())
        colorConvertShader = CreatePixelShaderFromFile(strShader);

    if(colorType != DeviceOutputType_RGB && !colorConvertShader)
    {
        AppWarning(TEXT("DShowPlugin: Could not create color space conversion pixel shader"));
        goto cleanFinish;
    }

    if(colorType == DeviceOutputType_YV12 || colorType == DeviceOutputType_I420)
    {
        for(int i=0; i<numThreads; i++)
            hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, convertData+i);
    }

    //------------------------------------------------
    // set chroma details

    keyBaseColor = Color4().MakeFromRGBA(keyColor);
    Matrix4x4TransformVect(keyChroma, (colorType == DeviceOutputType_HDYC) ? (float*)yuv709Mat : (float*)yuvMat, keyBaseColor);
    keyChroma *= 2.0f;

    //------------------------------------------------
    // configure video pin

    if(FAILED(err = devicePin->QueryInterface(IID_IAMStreamConfig, (void**)&config)))
    {
        AppWarning(TEXT("DShowPlugin: Could not get IAMStreamConfig for device pin, result = %08lX"), err);
        goto cleanFinish;
    }

    AM_MEDIA_TYPE outputMediaType;
    CopyMediaType(&outputMediaType, bestOutput->mediaType);

    VIDEOINFOHEADER *vih  = reinterpret_cast<VIDEOINFOHEADER*>(outputMediaType.pbFormat);
    BITMAPINFOHEADER *bmi = GetVideoBMIHeader(&outputMediaType);
    vih->AvgTimePerFrame  = frameInterval;
    bmi->biWidth          = renderCX;
    bmi->biHeight         = renderCY;
    bmi->biSizeImage      = renderCX*renderCY*(bmi->biBitCount>>3);

    if(FAILED(err = config->SetFormat(&outputMediaType)))
    {
        if(err != E_NOTIMPL)
        {
            AppWarning(TEXT("DShowPlugin: SetFormat on device pin failed, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    FreeMediaType(outputMediaType);

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else
            soundOutputType = 0;
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio ccapture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }
        else //only touch the audio renderer if it was actually created
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000;
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
        {
            AppWarning(TEXT("DShowPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);
            goto cleanFinish;
        }

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(deviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    /*if(bNoBuffering)
    {
        IMediaFilter *mediaFilter;
        if(SUCCEEDED(graph->QueryInterface(IID_IMediaFilter, (void**)&mediaFilter)))
        {
            if(FAILED(mediaFilter->SetSyncSource(NULL)))
                AppWarning(TEXT("DShowPlugin: Failed to set sync source, result = %08lX"), err);

            Log(TEXT("Disabling buffering (hopefully)"));
            mediaFilter->Release();
        }
    }*/

    //THANK THE NINE DIVINES I FINALLY GOT IT WORKING
    bool bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, NULL, captureFilter));
    if(!bConnected)
    {
        if(FAILED(err = graph->Connect(devicePin, captureFilter->GetCapturePin())))
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the video device pin to the video capture pin, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType != 0)
    {
        bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, deviceFilter, NULL, audioFilter));
        if(!bConnected)
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);
        if(bAddedDevice)
            graph->RemoveFilter(deviceFilter);

        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //for YUV output (planar or packed), we can just use a regular RGB texture as the conversion target
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
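The startY/endY loop in the function above splits the image rows across the conversion worker threads, rounding every internal boundary down to an even row so each thread owns whole 4:2:0 chroma lines. A minimal standalone sketch of that partition (the helper name below is illustrative, not the plugin's own):

#include <cstdio>
#include <utility>
#include <vector>

// Illustrative helper mirroring the startY/endY computation in the loop above:
// rows are split into numThreads chunks and every boundary except the last is
// rounded down to an even row (the chroma planes are half-height).
static std::vector<std::pair<int, int>> PartitionRows(int height, int numThreads)
{
    std::vector<std::pair<int, int>> ranges(numThreads);
    for(int i = 0; i < numThreads; i++)
    {
        const int startY = (i == 0) ? 0 : ranges[i-1].second;
        const int endY   = (i == numThreads-1)
            ? height
            : int(((height/numThreads)*(i+1)) & 0xFFFFFFFE);
        ranges[i] = {startY, endY};
    }
    return ranges;
}

int main()
{
    for(const auto &r : PartitionRows(1080, 6))   // e.g. 1080 rows across 6 threads
        printf("rows [%d, %d)\n", r.first, r.second);
}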
Exemple #23
0
	void SetGUID(MRA_GUID guidData)
	{
		*(MRA_GUID*)Allocate(sizeof(guidData)) = guidData;
	}
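SetGUID above is the usual append-buffer pattern: Allocate(n) grows the buffer by n bytes and returns a pointer to the fresh tail, which the caller then writes through. A self-contained sketch of that pattern (an illustration only, not Miranda's actual MRA buffer class; it uses memcpy instead of the cast-and-assign in the original):

#include <cstdio>
#include <cstring>
#include <vector>

// Hypothetical append buffer: Allocate(n) reserves n bytes at the end and
// returns a pointer the caller fills in, just like the SetGUID pattern above.
struct AppendBuffer
{
    std::vector<unsigned char> data;

    void* Allocate(size_t n)
    {
        const size_t offset = data.size();
        data.resize(offset + n);          // grow by n bytes
        return data.data() + offset;      // caller writes into the new tail
    }
};

int main()
{
    AppendBuffer buf;
    const unsigned guidLike[2] = { 0x12345678, 0x9ABCDEF0 };   // stand-in for MRA_GUID
    std::memcpy(buf.Allocate(sizeof(guidLike)), guidLike, sizeof(guidLike));
    printf("buffer now holds %zu bytes\n", buf.data.size());
}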
Exemple #24
0
//
// ResCache::Load								- Chapter 8, page 228-230
//
shared_ptr<ResHandle> ResCache::Load(Resource *r)
{
	// Create a new resource and add it to the lru list and map

	shared_ptr<IResourceLoader> loader;
	shared_ptr<ResHandle> handle;

	for (ResourceLoaders::iterator it = m_resourceLoaders.begin(); it != m_resourceLoaders.end(); ++it)
	{
		shared_ptr<IResourceLoader> testLoader = *it;

		if (WildcardMatch(testLoader->VGetPattern().c_str(), r->m_name.c_str()))
		{
			loader = testLoader;
			break;
		}
	}

	if (!loader)
	{
		GCC_ASSERT(loader && _T("Default resource loader not found!"));
		return handle;		// Resource not loaded!
	}

	int rawSize = m_file->VGetRawResourceSize(*r);
	if (rawSize < 0)
	{
		GCC_ASSERT(rawSize > 0 && "Resource size returned -1 - Resource not found");
		return shared_ptr<ResHandle>();
	}

    int allocSize = rawSize + ((loader->VAddNullZero()) ? (1) : (0));
	char *rawBuffer = loader->VUseRawFile() ? Allocate(allocSize) : GCC_NEW char[allocSize];
	if (rawBuffer==NULL)
	{
		// resource cache out of memory
		return shared_ptr<ResHandle>();
	}
	memset(rawBuffer, 0, allocSize);

	if (m_file->VGetRawResource(*r, rawBuffer)==0)
	{
		// the resource could not be read from the resource file
		return shared_ptr<ResHandle>();
	}
	
	char *buffer = NULL;
	unsigned int size = 0;

	if (loader->VUseRawFile())
	{
		buffer = rawBuffer;
		handle = shared_ptr<ResHandle>(GCC_NEW ResHandle(*r, buffer, rawSize, this));
	}
	else
	{
		size = loader->VGetLoadedResourceSize(rawBuffer, rawSize);
        buffer = Allocate(size);
		if (rawBuffer==NULL || buffer==NULL)
		{
			// resource cache out of memory
			return shared_ptr<ResHandle>();
		}
		handle = shared_ptr<ResHandle>(GCC_NEW ResHandle(*r, buffer, size, this));
		bool success = loader->VLoadResource(rawBuffer, rawSize, handle);
		
		// [mrmike] - This was added after the chapter went to copy edit. It is used for those
		//            resources that are converted to a usable format upon load, such as a compressed
		//            file. If the raw buffer from the resource file isn't needed, it shouldn't take up
		//            any additional memory, so we release it.
		//
		if (loader->VDiscardRawBufferAfterLoad())
		{
			SAFE_DELETE_ARRAY(rawBuffer);
		}

		if (!success)
		{
			// resource cache out of memory
			return shared_ptr<ResHandle>();
		}
	}

	if (handle)
	{
		m_lru.push_front(handle);
		m_resources[r->m_name] = handle;
	}

	GCC_ASSERT(loader && _T("Default resource loader not found!"));
	return handle;		// ResCache is out of memory!
}
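Load walks m_resourceLoaders front to back and takes the first loader whose pattern wildcard-matches the resource name, so a catch-all default loader has to sit at the end of the list. A minimal sketch of that matching step, with a simplified stand-in for WildcardMatch (the loader patterns and helper below are illustrative, not the engine's own):

#include <cstdio>
#include <string>
#include <vector>

// Simplified stand-in for WildcardMatch: supports '*' and '?' only.
static bool WildcardMatchSketch(const char *pat, const char *str)
{
    if(*pat == 0)   return *str == 0;
    if(*pat == '*') return WildcardMatchSketch(pat+1, str) ||
                           (*str && WildcardMatchSketch(pat, str+1));
    if(*str == 0)   return false;
    return (*pat == '?' || *pat == *str) && WildcardMatchSketch(pat+1, str+1);
}

int main()
{
    // Hypothetical loader patterns: most specific first, catch-all last.
    const std::vector<std::string> patterns = { "*.ogg", "*.xml", "*" };
    const char *name = "music\\theme.ogg";

    for(const std::string &p : patterns)
    {
        if(WildcardMatchSketch(p.c_str(), name))
        {
            printf("resource '%s' handled by the '%s' loader\n", name, p.c_str());
            break;
        }
    }
}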
Exemple #25
0
	void SetLPSW(const CMStringW &str)
	{
		DWORD dwBytes = str.GetLength() * sizeof(WCHAR);
		SetUL(dwBytes);
		memcpy(Allocate(dwBytes), str, dwBytes);
	}
void BaseMesh::CreateLantern(float radius, float height, UINT slices, UINT stacks)
{
    //this code is almost identical to CreateCylinder(radius, height, slices, stacks) except it flips all non-border
    //edges; this gives a much worse triangulation of the surface that does not behave well numerically (which was
    //exactly what the project I was working on at the time needed.)
    //it also adds a cap on the top and bottom.

    Allocate(slices * (stacks + 1),2 * stacks * slices + 2*(slices-2));
    
    float PI2_Slices = 2.0f * Math::PIf / float(slices);
    float Theta;

    int vc=0, ic=0;
    MeshVertex *V = Vertices();
    DWORD *I = Indices();
    MeshVertex MVtx(Vec3f::Origin, Vec3f::Origin, RGBColor::White, Vec2f::Origin);

    for(UINT i = 0; i <= stacks; i++)
    {
        for(UINT i2 = 0; i2 < slices; i2++)
        {
            Theta = float(i2+0.5f*i) * PI2_Slices;
            MVtx.Pos = Vec3f(radius * cosf(Theta), radius * sinf(Theta), height * (float(i) / stacks - 0.5f));
            V[vc++] = MVtx;
        }
    }

    for(UINT i = 0; i < FaceCount(); i++)
    {
        I[i*3+0] = 0;
        I[i*3+1] = 1;
        I[i*3+2] = 2;
    }

    int i2m1;
    for(UINT i = 0; i < stacks - 1; i++)
        for(UINT i2 = 0; i2 < slices; i2++)
        {
            i2m1 = int(i2) - 1;
            if(i2m1 == -1) i2m1 = slices-1;
            
            I[ic++] = i * slices + i2;
            I[ic++] = (i+1) * slices + i2;
            I[ic++] = (i+2) * slices + i2m1;
            
            I[ic++] = i * slices + i2;
            I[ic++] = (i+2) * slices + i2m1;
            I[ic++] = (i+1) * slices + i2m1;
        }

    UINT i = 0;
    for(UINT i2 = 0; i2 < slices; i2++)
    {
        i2m1 = i2 - 1;
        if(i2m1 == -1) i2m1 = slices-1;

        I[ic++] = i * slices + i2m1;
        I[ic++] = i * slices + i2;
        I[ic++] = (i+1) * slices + i2m1;
    }

    i = stacks - 1;
    for(UINT i2 = 0; i2 < slices; i2++)
    {
        i2m1 = i2 - 1;
        if(i2m1 == -1)
        {
            i2m1 = slices - 1;
        }

        I[ic++] = (i + 1) * slices + i2m1;
        I[ic++] = i * slices + i2;
        I[ic++] = (i + 1) * slices + i2;
    }

    i = 0;
    for(UINT i2 = 1; i2 < slices - 1; i2++)
    {
        I[ic++] = i * slices + 0;
        I[ic++] = i * slices + i2 + 1;
        I[ic++] = i * slices + i2;
    }

    i = stacks;
    for(UINT i2 = 1; i2 < slices - 1; i2++)
    {
        I[ic++] = i * slices + 0;
        I[ic++] = i * slices + i2;
        I[ic++] = i * slices + i2 + 1;
    }

    GenerateNormals();
}
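The Allocate call at the top of CreateLantern reserves slices*(stacks+1) vertices and 2*stacks*slices + 2*(slices-2) triangles: every band of quads costs two triangles per slice, and each cap is a triangle fan over slices vertices (slices-2 triangles). A quick standalone check that this budget matches what the index loops above actually emit (counts only, no geometry):

#include <cassert>
#include <cstdio>

int main()
{
    const unsigned slices = 16, stacks = 8;

    // Budget reserved by Allocate() in CreateLantern above.
    const unsigned vertexCount   = slices * (stacks + 1);
    const unsigned triangleCount = 2 * stacks * slices + 2 * (slices - 2);

    // Triangles actually written by the loops: (stacks-1) interior bands emit
    // 2*slices triangles each, the two boundary rings emit slices each, and
    // the two fan caps emit (slices-2) each.
    const unsigned written = (stacks - 1) * 2 * slices   // interior bands
                           + 2 * slices                  // top and bottom rings
                           + 2 * (slices - 2);           // the two caps

    assert(written == triangleCount);
    printf("%u vertices, %u triangles\n", vertexCount, triangleCount);
}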
Exemple #27
0
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hMatrix   = yuvScalePixelShader->GetParameterByName(TEXT("yuvMat"));
    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //std::unique_ptr<ProfilerNode> encodeThreadProfiler;

    //----------------------------------------
    // time/timestamp stuff

    bool bWasLaggedFrame = false;

    totalStreamTime = 0;
    lastAudioTimestamp = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
    double bpsTime = 0.0;

    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;
        convertInfo[i].numThreads = numThreads;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bEncode;
    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirstEncode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD streamTimeStart  = GetQPCTimeNS();
    QWORD lastStreamTime   = 0;
    QWORD firstFrameTimeMS = streamTimeStart/1000000;
    QWORD frameLengthNS    = 1000000000/fps;

    while(WaitForSingleObject(hVideoEvent, INFINITE) == WAIT_OBJECT_0)
    {
        if (bShutdownVideoThread)
            break;

        QWORD renderStartTime = GetQPCTimeNS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/1000000);

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        QWORD renderStartTimeMS = renderStartTime/1000000;

        QWORD curStreamTime = latestVideoTimeNS;
        if (!lastStreamTime)
            lastStreamTime = curStreamTime-frameLengthNS;
        QWORD frameDelta = curStreamTime-lastStreamTime;
        //if (!lastStreamTime)
        //    lastStreamTime = renderStartTime-frameLengthNS;
        //QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.000000001;
        //lastStreamTime = renderStartTime;

        bool bUpdateBPS = false;

        profileIn("video thread frame");

        //Log(TEXT("Stream Time: %llu"), curStreamTime);
        //Log(TEXT("frameDelta: %lf"), fSeconds);

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if (bPleaseEnableProjector)
            ActuallyEnableProjector();
        else if(bPleaseDisableProjector)
            DisableProjector();

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if (bProjector) {
            SetRenderTarget(projectorTexture);

            Vect2 renderFrameSize, renderFrameOffset;
            Vect2 projectorSize = Vect2(float(projectorWidth), float(projectorHeight));

            float projectorAspect = (projectorSize.x / projectorSize.y);
            float baseAspect = (baseSize.x / baseSize.y);

            if (projectorAspect < baseAspect) {
                float fProjectorWidth = float(projectorWidth);

                renderFrameSize   = Vect2(fProjectorWidth, fProjectorWidth / baseAspect);
                renderFrameOffset = Vect2(0.0f, (projectorSize.y-renderFrameSize.y) * 0.5f);
            } else {
                float fProjectorHeight = float(projectorHeight);

                renderFrameSize   = Vect2(fProjectorHeight * baseAspect, fProjectorHeight);
                renderFrameOffset = Vect2((projectorSize.x-renderFrameSize.x) * 0.5f, 0.0f);
            }

            DrawPreview(renderFrameSize, renderFrameOffset, projectorSize, curRenderTarget, Preview_Projector);

            SetRenderTarget(NULL);
        }

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);
            DrawPreview(renderFrameSize, renderFrameOffset, renderFrameCtrlSize, curRenderTarget,
                    bFullscreenMode ? Preview_Fullscreen : Preview_Standard);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        switch(colorDesc.matrix)
        {
        case ColorMatrix_GBR:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[0] : (float*)yuvMat[0]);
            break;
        case ColorMatrix_YCgCo:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[1] : (float*)yuvMat[1]);
            break;
        case ColorMatrix_BT2020NCL:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[2] : (float*)yuvMat[2]);
            break;
        case ColorMatrix_BT709:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[3] : (float*)yuvMat[3]);
            break;
        case ColorMatrix_SMPTE240M:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[4] : (float*)yuvMat[4]);
            break;
        default:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[5] : (float*)yuvMat[5]);
        }

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if (bProjector && !copyWait)
            projectorSwap->Present(0, 0);

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("GPU download and conversion");

        bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (renderStartTimeMS-firstFrameTimeMS > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTimestamp = lastStreamTime/1000000;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        lastStreamTime = curStreamTime;

        if(bEncode)
        {
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirstEncode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirstEncode)
                                bFirstEncode = bEncode = false;
                        }
                        else
                        {
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }

                    if(bEncode)
                    {
                        //encodeThreadProfiler.reset(::new ProfilerNode(TEXT("EncodeThread"), true));
                        //encodeThreadProfiler->MonitorThread(hEncodeThread);
                        curFramePic = &picOut;
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        message << TEXT(" This error can also occur if you have enabled opencl in x264 custom settings.");

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream && totalStreamTime > 15000)
            {
                if (curStrain > 25)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        App->RemoveStreamInfo(adjustmentStreamId);
                        adjustmentStreamId = 0;

                        bUpdateBPS = true;

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;
        profileOut;
        profileOut; //frame

        //------------------------------------
        // frame sync

        QWORD renderStopTime = GetQPCTimeNS();

        if(bWasLaggedFrame = (frameDelta > frameLengthNS))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    DisableProjector();

    //encodeThreadProfiler.reset();

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirstEncode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of late frames: %d (%0.2f%%) (it's okay for some frames to be late)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
}
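The bpsTime block in the loop above keeps a three-entry ring of cumulative byte counters and reports bytesPerSec as the bytes sent over (at most) the last three one-second ticks, which smooths the status-bar rate. A compact, self-contained sketch of the same averaging (the cumulative byte totals are made up):

#include <cstdio>

int main()
{
    // Cumulative bytes sent, sampled once per second (made-up numbers).
    const unsigned long long samples[] = { 400000, 820000, 1300000, 1710000, 2200000, 2650000 };

    unsigned long long lastBytesSent[3] = {0, 0, 0};
    unsigned numSecondsWaited = 0;

    for(unsigned long long curBytesSent : samples)
    {
        if(numSecondsWaited < 3)
            ++numSecondsWaited;

        // Average over however many full seconds we have so far (at most 3).
        const unsigned long long bytesPerSec = (curBytesSent - lastBytesSent[0]) / numSecondsWaited;

        if(numSecondsWaited == 3)
        {
            lastBytesSent[0] = lastBytesSent[1];
            lastBytesSent[1] = lastBytesSent[2];
            lastBytesSent[2] = curBytesSent;
        }
        else
            lastBytesSent[numSecondsWaited] = curBytesSent;

        printf("%llu bytes/sec (%llu kbps)\n", bytesPerSec, bytesPerSec * 8 / 1000);
    }
}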
void BaseMesh::CreateClosedCylinder(float radius, float height, UINT slices, UINT stacks)
{
    //this code is almost identical to CreateCylinder(radius, height, slices, stacks) except it
    //adds on a cap (like CreateLantern).
    Allocate(slices * (stacks + 1),2 * stacks * slices + 2*(slices-2));
    
    float PI2_Slices = 2.0f * Math::PIf / float(slices);
    float Theta;

    int vc=0,ic=0;
    MeshVertex *V = Vertices();
    DWORD *I = Indices();
    MeshVertex MVtx(Vec3f::Origin, Vec3f::Origin, RGBColor::White, Vec2f::Origin);

    for(UINT i=0;i <= stacks;i++)
    {
        for(UINT i2=0;i2 < slices;i2++)
        {
            Theta = float(i2+0.5f*i) * PI2_Slices;
            MVtx.Pos = Vec3f(radius * cosf(Theta), radius * sinf(Theta), height * (float(i) / stacks - 0.5f));
            V[vc++] = MVtx;
        }
    }

    for(UINT i=0;i < FaceCount();i++)
    {
        I[i*3+0] = 0;
        I[i*3+1] = 1;
        I[i*3+2] = 2;
    }

    for(UINT i=0;i < stacks-1;i++)
        for(UINT i2=0;i2 < slices;i2++)
        {
            UINT i2m1 = i2 - 1;
            if(i2m1 == -1) i2m1 = slices-1;
            
            I[ic++] = i * slices + i2;
            I[ic++] = (i+1) * slices + i2;
            I[ic++] = (i+1) * slices + i2m1;
            
            I[ic++] = (i+1) * slices + i2m1;
            I[ic++] = (i+1) * slices + i2;
            I[ic++] = (i+2) * slices + i2m1;
        }

    UINT i = 0;
    for(UINT i2=0;i2 < slices;i2++)
    {
        UINT i2m1 = i2 - 1;
        if(i2m1 == -1)
        {
            i2m1 = slices-1;
        }

        I[ic++] = i * slices + i2m1;
        I[ic++] = i * slices + i2;
        I[ic++] = (i+1) * slices + i2m1;
    }

    i = stacks - 1;
    for(UINT i2 = 0; i2 < slices; i2++)
    {
        UINT i2m1 = i2 - 1;
        if(i2m1 == -1)
        {
            i2m1 = slices-1;
        }

        I[ic++] = (i+1) * slices + i2m1;
        I[ic++] = i * slices + i2;
        I[ic++] = (i+1) * slices + i2;
    }

    i = 0;
    for(UINT i2 = 1; i2 < slices - 1; i2++)
    {
        I[ic++] = i * slices + 0;
        I[ic++] = i * slices + i2+1;
        I[ic++] = i * slices + i2;
    }

    i = stacks;
    for(UINT i2 = 1; i2 < slices - 1; i2++)
    {
        I[ic++] = i * slices + 0;
        I[ic++] = i * slices + i2;
        I[ic++] = i * slices + i2+1;
    }

    GenerateNormals();
}
Exemple #29
0
void GLVertexBuffer::AddStatic( int n, const float* v )
{
	Allocate(n * sizeof(float), v, GL_STATIC_DRAW);
}
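Allocate is not shown in this example; judging by the GL_STATIC_DRAW usage flag it is handed, it presumably binds the buffer object and forwards the data to glBufferData. A hypothetical sketch of such a helper (the member names, the GL_ARRAY_BUFFER target, and the use of GLEW are all assumptions):

// Hypothetical sketch only: the real GLVertexBuffer::Allocate is not shown above,
// so the member names and the GL_ARRAY_BUFFER target are assumptions. Assumes a
// loader such as GLEW and a current GL context.
#include <GL/glew.h>

class GLVertexBufferSketch
{
public:
    void Allocate(GLsizeiptr sizeBytes, const void *data, GLenum usage)
    {
        glBindBuffer(GL_ARRAY_BUFFER, m_id);                    // bind the vertex buffer
        glBufferData(GL_ARRAY_BUFFER, sizeBytes, data, usage);   // size and upload the store
    }

    void AddStatic(int n, const float *v)
    {
        Allocate(n * sizeof(float), v, GL_STATIC_DRAW);           // same call shape as above
    }

private:
    GLuint m_id = 0;   // assumed to be created elsewhere with glGenBuffers
};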
Exemple #30
0
/** Startup color support. */
void StartupColors() {

   int x;
   int red, green, blue;
   XColor c;

   /* Determine how to convert between RGB triples and pixels. */
   Assert(rootVisual);
   switch(rootVisual->class) {
   case DirectColor:
   case TrueColor:
      ComputeShiftMask(rootVisual->red_mask, &redShift, &redMask);
      ComputeShiftMask(rootVisual->green_mask, &greenShift, &greenMask);
      ComputeShiftMask(rootVisual->blue_mask, &blueShift, &blueMask);
      map = NULL;
      break;
   default:

      /* Attempt to get 256 colors, pretend it worked. */
      redMask = 0xE0;
      greenMask = 0x1C;
      blueMask = 0x03;
      ComputeShiftMask(redMask, &redShift, &redMask);
      ComputeShiftMask(greenMask, &greenShift, &greenMask);
      ComputeShiftMask(blueMask, &blueShift, &blueMask);
      map = Allocate(sizeof(unsigned long) * 256);

      /* RGB: 3, 3, 2 */
      x = 0;
      for(red = 0; red < 8; red++) {
         for(green = 0; green < 8; green++) {
            for(blue = 0; blue < 4; blue++) {
               c.red = (unsigned short)(74898 * red / 8);
               c.green = (unsigned short)(74898 * green / 8);
               c.blue = (unsigned short)(87381 * blue / 4);
               c.flags = DoRed | DoGreen | DoBlue;
               JXAllocColor(display, rootColormap, &c);
               map[x] = c.pixel;
               ++x;
            }
         }
      }

      /* Compute the reverse pixel mapping (pixel -> 24-bit RGB). */
      rmap = Allocate(sizeof(unsigned long) * 256);
      for(x = 0; x < 256; x++) {
         c.pixel = x;
         JXQueryColor(display, rootColormap, &c);
         GetDirectPixel(&c);
         rmap[x] = c.pixel;
      }

      break;
   }

   /* Inherit unset colors from the tray for tray items. */
   if(names) {

      if(!names[COLOR_TASK_BG1]) {
         names[COLOR_TASK_BG1] = CopyString(names[COLOR_TRAY_BG]);
      }
      if(!names[COLOR_TASK_BG2]) {
         names[COLOR_TASK_BG2] = CopyString(names[COLOR_TRAY_BG]);
      }

      if(!names[COLOR_TRAYBUTTON_BG]) {
         names[COLOR_TRAYBUTTON_BG] = CopyString(names[COLOR_TRAY_BG]);
      }
      if(!names[COLOR_CLOCK_BG]) {
         names[COLOR_CLOCK_BG] = CopyString(names[COLOR_TRAY_BG]);
      }
      if(!names[COLOR_TASK_FG]) {
         names[COLOR_TASK_FG] = CopyString(names[COLOR_TRAY_FG]);
      }
      if(!names[COLOR_TRAYBUTTON_FG]) {
         names[COLOR_TRAYBUTTON_FG] = CopyString(names[COLOR_TRAY_FG]);
      }
      if(!names[COLOR_CLOCK_FG]) {
         names[COLOR_CLOCK_FG] = CopyString(names[COLOR_TRAY_FG]);
      }
   }

   /* Get color information used for JWM stuff. */
   for(x = 0; x < COLOR_COUNT; x++) {
      if(names && names[x]) {
         if(ParseColor(names[x], &c)) {
            colors[x] = c.pixel;
            rgbColors[x] = GetRGBFromXColor(&c);
         } else {
            SetDefaultColor(x);
         }
      } else {
         SetDefaultColor(x);
      }
   }

   if(names) {
      for(x = 0; x < COLOR_COUNT; x++) {
         if(names[x]) {
            Release(names[x]);
         }
      }
      Release(names);
      names = NULL;
   }

   LightenColor(COLOR_TRAY_BG, COLOR_TRAY_UP);
   DarkenColor(COLOR_TRAY_BG, COLOR_TRAY_DOWN);

   LightenColor(COLOR_TASK_BG1, COLOR_TASK_UP);
   DarkenColor(COLOR_TASK_BG1, COLOR_TASK_DOWN);

   LightenColor(COLOR_TASK_ACTIVE_BG1, COLOR_TASK_ACTIVE_UP);
   DarkenColor(COLOR_TASK_ACTIVE_BG1, COLOR_TASK_ACTIVE_DOWN);

   LightenColor(COLOR_MENU_BG, COLOR_MENU_UP);
   DarkenColor(COLOR_MENU_BG, COLOR_MENU_DOWN);

   LightenColor(COLOR_MENU_ACTIVE_BG1, COLOR_MENU_ACTIVE_UP);
   DarkenColor(COLOR_MENU_ACTIVE_BG1, COLOR_MENU_ACTIVE_DOWN);

}
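For the pseudocolor fallback above, colors are forced into a 3-3-2 layout (redMask 0xE0, greenMask 0x1C, blueMask 0x03): red in the top three bits, green in the next three, blue in the low two, with map[]/rmap[] translating between that 8-bit index and server pixels. A standalone sketch of just the packing arithmetic (the helper names are illustrative, not JWM's own):

#include <cstdio>

// Pack 8-bit r/g/b into a 3-3-2 index: rrrgggbb (mirrors redMask=0xE0,
// greenMask=0x1C, blueMask=0x03 above).
static unsigned Pack332(unsigned r, unsigned g, unsigned b)
{
    return (r & 0xE0) | ((g & 0xE0) >> 3) | ((b & 0xC0) >> 6);
}

// Expand a 3-3-2 index back to approximate 8-bit channels.
static void Unpack332(unsigned index, unsigned &r, unsigned &g, unsigned &b)
{
    r = index & 0xE0;
    g = (index & 0x1C) << 3;
    b = (index & 0x03) << 6;
}

int main()
{
    unsigned r, g, b;
    const unsigned index = Pack332(200, 120, 60);
    Unpack332(index, r, g, b);
    printf("index %u -> (%u, %u, %u)\n", index, r, g, b);
}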