void LocalUpdateHandler::updateVideoRegion() { ServerConfig *srvConf = Configurator::getInstance()->getServerConfig(); unsigned int interval = srvConf->getVideoRecognitionInterval(); DateTime curTime = DateTime::now(); if ((curTime - m_lastVidUpdTime).getTime() > interval) { m_lastVidUpdTime = DateTime::now(); m_vidRegion.clear(); AutoLock al(srvConf); StringVector *classNames = srvConf->getVideoClassNames(); std::vector<HWND> hwndVector; std::vector<HWND>::iterator hwndIter; WindowFinder::findWindowsByClass(classNames, &hwndVector); for (hwndIter = hwndVector.begin(); hwndIter != hwndVector.end(); hwndIter++) { HWND videoHWND = *hwndIter; if (videoHWND != 0) { WINDOWINFO wi; wi.cbSize = sizeof(WINDOWINFO); if (GetWindowInfo(videoHWND, &wi)) { Rect videoRect(wi.rcClient.left, wi.rcClient.top, wi.rcClient.right, wi.rcClient.bottom); videoRect.move(-GetSystemMetrics(SM_XVIRTUALSCREEN), -GetSystemMetrics(SM_YVIRTUALSCREEN)); m_vidRegion.addRect(&videoRect); } } } } }
// Stores the new window and video rectangles. The video rect is kept
// relative to the window origin. A size or video-rect change is pushed to
// the sub-picture allocator and invalidates the queue; a position or
// video-rect change triggers a repaint.
STDMETHODIMP_(void) CSubPicAllocatorPresenterImpl::SetPosition(RECT w, RECT v)
{
    const bool posChanged = !!(m_windowRect != w);
    const bool sizeChanged = !!(m_windowRect.Size() != CRect(w).Size());
    m_windowRect = w;

    CRect relVideoRect(v);
    relVideoRect.OffsetRect(-m_windowRect.TopLeft());
    const bool vidRectChanged = !!(m_videoRect != relVideoRect);
    m_videoRect = relVideoRect;

    if (sizeChanged || vidRectChanged) {
        if (m_pAllocator) {
            m_pAllocator->SetCurSize(m_windowRect.Size());
            m_pAllocator->SetCurVidRect(m_videoRect);
        }
        if (m_pSubPicQueue) {
            m_pSubPicQueue->Invalidate();
        }
    }

    if (posChanged || vidRectChanged) {
        Paint(false);
    }
}
// Mouse press handler. Left button: if the click lands inside the video
// frame, start a rubber-band selection anchored at the press point.
// Right button: start tracking immediately with a fixed-size rectangle
// centered on the cursor, clamped to the frame bounds.
void VideoDisplay::mousePressEvent(QMouseEvent *event)
{
    if (!m_worker->m_pFrame)
        return;

    m_IsMouseOn = true;

    const int mx = event->x();
    const int my = event->y();

    if (event->buttons() & Qt::LeftButton) {
        QRect frameRect(0, 0, m_Config._config.frmWidth, m_Config._config.frmHeight);
        if (frameRect.contains(mx, my)) {
            m_worker->m_IsTracking = false;
            _isSelecting = true;
            // Anchor the selection at the press point (frame coordinates).
            trackingRect.left = mx - frameRect.left();
            trackingRect.top = my - frameRect.top();
            // Start with a degenerate rectangle at the press point; it grows
            // as the mouse moves.
            _toBeTracked.setLeft(mx);
            _toBeTracked.setTop(my);
            _toBeTracked.setRight(mx);
            _toBeTracked.setBottom(my);
        }
    } else if (event->buttons() & Qt::RightButton) {
        // Fixed-size rectangle centered on the cursor.
        trackingRect.left = mx - m_rectWidthInit / 2;
        trackingRect.top = my - m_rectHeightInit / 2;
        trackingRect.right = mx + m_rectWidthInit / 2;
        trackingRect.bottom = my + m_rectHeightInit / 2;
        // Clamp to the frame bounds.
        if (trackingRect.left < 0)
            trackingRect.left = 0;
        if (trackingRect.right > m_Config._config.frmWidth - 1)
            trackingRect.right = m_Config._config.frmWidth - 1;
        if (trackingRect.top < 0)
            trackingRect.top = 0;
        if (trackingRect.bottom > m_Config._config.frmHeight - 1)
            trackingRect.bottom = m_Config._config.frmHeight - 1;
        ResetRectCurrent();
        m_worker->StartTracking(trackingRect);
    }

    QMainWindow::mousePressEvent(event);
}
// Mouse release handler. Completes a left-button rubber-band selection:
// the release point becomes the opposite corner, the rectangle is
// normalized (left <= right, top <= bottom), clamped to the frame bounds,
// and handed to the tracking worker.
void VideoDisplay::mouseReleaseEvent(QMouseEvent *event)
{
    if (!m_IsMouseOn)
        return;
    if (!m_worker->m_pFrame)
        return;
    m_IsMouseOn = false;

    if (_isSelecting) {
        QRect frameRect(0, 0, m_Config._config.frmWidth, m_Config._config.frmHeight);
        // Opposite corner of the selection (frame coordinates).
        trackingRect.right = event->x() - frameRect.left();
        trackingRect.bottom = event->y() - frameRect.top();

        // Normalize: the user may have dragged up/left from the anchor.
        if (trackingRect.right < trackingRect.left) {
            short swapTmp = trackingRect.right;
            trackingRect.right = trackingRect.left;
            trackingRect.left = swapTmp;
        }
        if (trackingRect.bottom < trackingRect.top) {
            short swapTmp = trackingRect.bottom;
            trackingRect.bottom = trackingRect.top;
            trackingRect.top = swapTmp;
        }

        // Clamp to the frame bounds.
        if (trackingRect.left < 0)
            trackingRect.left = 0;
        if (trackingRect.right > m_Config._config.frmWidth - 1)
            trackingRect.right = m_Config._config.frmWidth - 1;
        if (trackingRect.top < 0)
            trackingRect.top = 0;
        if (trackingRect.bottom > m_Config._config.frmHeight - 1)
            trackingRect.bottom = m_Config._config.frmHeight - 1;

        ResetRectCurrent();
        m_worker->StartTracking(trackingRect);
        _isSelecting = false;
    }

    QMainWindow::mouseReleaseEvent(event);
}
void AndroidMediaLayer::UpdatePosition(const gfxRect& aRect, float aZoomLevel) { if (!mVisible) return; std::map<void*, SurfaceData*>::iterator it; for (it = mVideoSurfaces.begin(); it != mVideoSurfaces.end(); it++) { SurfaceData* data = it->second; // The video window dimension we get is not adjusted by zoom factor (unlike the // content window). Fix it up here. gfxRect scaledDimensions = data->dimensions; scaledDimensions.Scale(aZoomLevel); gfxRect videoRect(aRect.x + scaledDimensions.x, aRect.y + scaledDimensions.y, scaledDimensions.width, scaledDimensions.height); AndroidBridge::Bridge()->ShowSurface(data->surface, videoRect, mInverted, false); } if (EnsureContentSurface()) { AndroidBridge::Bridge()->ShowSurface(mContentData.surface, aRect, mInverted, true); } }
// Computes the source rectangle (in subtitle texture space) and destination
// rectangle (in window space) for blitting the dirty sub-picture region.
// @param rcWindow            full window rectangle
// @param rcVideo             video rectangle inside the window
// @param pRcSource           [out] source rect in texture coordinates
// @param pRcDest             [out] destination rect in window coordinates
// @param videoStretchFactor  horizontal stretch applied to the video (1.0 = none)
// @param xOffsetInPixels     horizontal shift applied in texture space
// @return S_OK on success, E_POINTER for null out-params, E_INVALIDARG when
//         the sub-picture has no valid size.
STDMETHODIMP CSubPicImpl::GetSourceAndDest(RECT rcWindow, RECT rcVideo, RECT* pRcSource, RECT* pRcDest, const double videoStretchFactor /*= 1.0*/, int xOffsetInPixels /*= 0*/) const
{
    CheckPointer(pRcSource, E_POINTER);
    CheckPointer(pRcDest, E_POINTER);
    if (m_size.cx > 0 && m_size.cy > 0) {
        CPoint offset(0, 0);
        double scaleX, scaleY;
        // Enable best fit only for HD contents since SD contents
        // are often anamorphic and thus break the auto-fit logic
        if (m_relativeTo == BEST_FIT && m_virtualTextureSize.cx > 720) {
            double scaleFactor;
            // Undo the horizontal stretch so aspect-ratio math is done on the
            // unstretched video rectangle.
            CRect videoRect(rcVideo);
            LONG stretch = lround(videoRect.Width() * (1.0 - videoStretchFactor) / 2.0);
            videoRect.left += stretch;
            videoRect.right -= stretch;
            CSize szVideo = videoRect.Size();
            // Compare subtitle and video aspect ratios to decide whether the
            // subtitle texture should be fitted by height or by width.
            double subtitleAR = double(m_virtualTextureSize.cx) / m_virtualTextureSize.cy;
            double videoAR = double(szVideo.cx) / szVideo.cy;
            // Candidate sizes when matching the video height/width respectively.
            double dCRVideoWidth = szVideo.cy * subtitleAR;
            double dCRVideoHeight = szVideo.cx / subtitleAR;
            if ((dCRVideoHeight > dCRVideoWidth) != (videoAR > subtitleAR)) {
                // Fit by height; center vertically.
                scaleFactor = dCRVideoHeight / m_virtualTextureSize.cy;
                offset.y = lround((szVideo.cy - dCRVideoHeight) / 2.0);
            } else {
                // Fit by width; center horizontally.
                scaleFactor = dCRVideoWidth / m_virtualTextureSize.cx;
                offset.x = lround((szVideo.cx - dCRVideoWidth) / 2.0);
            }
            // Uniform scale preserves the subtitle aspect ratio.
            scaleX = scaleY = scaleFactor;
            offset += videoRect.TopLeft();
        } else {
            // Plain mode: scale the virtual texture directly onto either the
            // window or the video rectangle.
            CRect rcTarget = (m_relativeTo == WINDOW) ? rcWindow : rcVideo;
            CSize szTarget = rcTarget.Size();
            scaleX = double(szTarget.cx) / m_virtualTextureSize.cx;
            scaleY = double(szTarget.cy) / m_virtualTextureSize.cy;
            offset += rcTarget.TopLeft();
        }
        // Source is the raw dirty rect in texture coordinates.
        CRect rcTemp = m_rcDirty;
        *pRcSource = rcTemp;
        // Destination: shift into virtual-texture space, scale, then offset
        // into window coordinates.
        rcTemp.OffsetRect(m_virtualTextureTopLeft + CPoint(xOffsetInPixels, 0));
        rcTemp = CRect(lround(rcTemp.left * scaleX), lround(rcTemp.top * scaleY), lround(rcTemp.right * scaleX), lround(rcTemp.bottom * scaleY));
        rcTemp.OffsetRect(offset);
        // Re-apply the horizontal stretch (inverse of the earlier removal).
        LONG stretch = lround(rcTemp.Width() * (1.0 - 1.0 / videoStretchFactor) / 2.0);
        rcTemp.left += stretch;
        rcTemp.right -= stretch;
        *pRcDest = rcTemp;
        return S_OK;
    }
    return E_INVALIDARG;
}
//--------------------------------------------------------------
// Per-frame draw: drains pending OSC sync messages, draws either the video
// players or the poster images (depending on videoPause/imageDisplay),
// renders on-screen debug text, and reports the current frame over OSC.
// NOTE(review): heavily stateful; `i` is the member index of the active
// video player and is both read in the OSC handler and advanced below.
void ofApp::draw(){
    int frameDiff; // NOTE(review): unused local — candidate for removal
    // ---- Drain all queued OSC sync messages ----
    while (oscReceiver.hasWaitingMessages()) {
        cout << "New msg is coming + ";
        ofxOscMessage m;
        oscReceiver.getNextMessage(&m);
        if (m.getAddress() == "/sync/play/FW_SH_02_HBD_A/frameJump") {
            frameJump = m.getArgAsInt32(0);
            cout << "Frame sync, jump(" << frameJump << ") frames\r\n";
            // do move forward
            // NOTE(review): jumps the currently active player (member `i`).
            videoPlayers[i]->setFrame(frameJump);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingFile") {
            // can I get all the file list from the beginning?
            loadVideo(m.getArgAsString(0));
        }
        else if (m.getAddress() == "/sync/play/nowPlayingStart") {
            isCellStart = m.getArgAsInt32(0);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingStop") {
            isCellStop = m.getArgAsInt32(0);
        }
        else if (m.getAddress() == "/sync/play/type") {
            mediaType = m.getArgAsString(0);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingKickTime") {
            cellKickTime = m.getArgAsInt32(0);
        }
        //dumpOSC(m);
    }
    // Compose the fruit-counter caption used by the (disabled) text overlay.
    strFruitString = strFruitPrefix + ofToString(currentAppleAmount) + strUnit;
    // ---- Video rendering (active unless paused) ----
    if (!videoPause) {
        // if (((videoPlayers[i]->getPosition() * videoPlayers[i]->getDuration()) - videoPlayers[i]->getDuration()) == 0){
        //
        // if (i < N_VIDEO_PLAYERS-1) {
        // i++;
        // }
        // else {
        // i = 0;
        // loopCounter++;
        // }
        // }
        //for(auto p : videoPlayers) {
        // p->draw(ofMap(i++, 0, videoPlayers.size(), 0, ofGetWidth()), ofGetHeight()/2 - 108*2, 192*4, 108*4);
        //ofPushMatrix();
        //ofSetColor(ofRandom(255), 0, 0);
        // ofRect(0,0,ofGetWidth(),ofGetHeight());
        // ofEnableAlphaBlending();
        // ofSetColor(255,255,255);
        //
#if 0 // for 1080 x 3840 video
        //---------------------------------------------------------- draw video texture to fullscreen.
        // Single-video path (disabled): scale the active player to fill the
        // whole screen, preserving aspect ratio by expanding.
        ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight());
        ofRectangle videoRect(0, 0, videoPlayers[i]->getWidth(), videoPlayers[i]->getHeight());
        ofRectangle videoFullscreenRect = videoRect;
        videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
        videoPlayers[i]->draw(0, 0, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
#endif
#if 1 // for 2 1080 x 1920 videos
        //---------------------------------------------------------- draw video texture to fullscreen.
        // Two-video path (active): player 1 fills the top half of the
        // screen, player 0 the bottom half.
        for (int videoNum = 0; videoNum < videoPlayers.size(); videoNum++) {
            if (videoNum == 1) {
                //top
                ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight()/2);
                ofRectangle videoRect(0, 0, videoPlayers[videoNum]->getWidth(), videoPlayers[videoNum]->getHeight());
                ofRectangle videoFullscreenRect = videoRect;
                videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
                videoPlayers[videoNum]->draw(0, 0, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
            }
            else if (videoNum == 0) {
                //bottom
                ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight()/2);
                ofRectangle videoRect(0, 0, videoPlayers[videoNum]->getWidth(), videoPlayers[videoNum]->getHeight());
                ofRectangle videoFullscreenRect = videoRect;
                videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
                videoPlayers[videoNum]->draw(0, ofGetHeight()/2, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
            }
        }
        // Advance the active-player index, wrapping at the end.
        if (i < N_VIDEO_PLAYERS-1) {
            i++;
        }
        else {
            i = 0;
            //loopCounter++;
        }
#endif
        //videoPlayers[i]->draw(0, 0, 900, 1400);
        //p->draw(ofMap(i++, 0, videoPlayers.size(), 0, ofGetWidth()), ofGetHeight()/2 - 108*2, 192*4, 108*4);
        // 2304x4096 sumsung tv @1
        // 2304x8192 ofScreen
        // 2026x3840 video
        // cout << "w:" << w << "\n";
        // p->draw(0, 0);
        //ofPopMatrix();
        //}
    }
    // ---- Poster image rendering ----
    if (imageDisplay) {
        ofRectangle screenRect(0, 0, ofGetWidth()/2, ofGetHeight()/2);
        ofRectangle videoRect(0, 0, imgTop->width, imgTop->height);
        ofRectangle videoFullscreenRect = videoRect;
        videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
        if (imgTopPosters.size() == 1) {
            ofSetColor(255, 255, 255); // very important, don't delete set color
            imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
        }
        if (dbgImg) {
            // Debug mode: draw only the image bounds in blue.
            ofSetColor(0, 0, 255);
            imgTop->drawBounds(0, 0, imgTop->width, imgTop->height);
            imgBottom->drawBounds(0, 4096, imgBottom->width, imgBottom->height);
        }
        else {
            ofPushMatrix();
            ofEnableAlphaBlending();
            ofSetColor(255, 255, 255); // very important, don't delete set color
            // ensure first image is showing
            //imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
            // Rotate through the top posters roughly every 2 seconds.
            if ((ofGetElapsedTimef() - initTime) > 2) {
                //videoPause = !videoPause;
                //imageDisplay = !imageDisplay;
                initTime = ofGetElapsedTimef();
                if (imgTopx && !isUpdateImg) {
                    //ofColor(255, 0, 0, 100);
                    int size = imgTopPosters.size();
                    // size limitation is 2 by design
                    if (size > 0) {
                        imgTopPosters[imgRotateIndex]->draw(0, 0, imgTopPosters[imgRotateIndex]->width, imgTopPosters[imgRotateIndex]->height);
                        imgRotateIndex++;
                        if (imgRotateIndex >= size)
                            imgRotateIndex = 0;
                        //ofSleepMillis(3000);// well...
                    }
                    // for (auto i = imgTopPosters.begin(); i!= imgTopPosters.end(); ++i) {
                    // //(*i)->draw(0, 0, imgTopx->width, imgTopx->height);
                    // (*i)->draw(0, 0, (*i)->width, (*i)->height);
                    // }
                    //imgTopx->draw(0, 0, imgTopx->width, imgTopx->height);
                }
                else{
                    // No updated poster yet: keep showing the first one.
                    imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
                    //imgTop->draw(0, 0, imgTop->width, imgTop->height);
                }
            }
            // Decorative circle + poster caption over the top image.
            ofNoFill();
            ofSetColor(100, 0, 100, 100);
            ofCircle(imgTop->width/2, imgTop->height/3+100, 400);
            ofDisableAlphaBlending();
            //ofRectangle bbox;
            float fontSize = 300;
            //ofTranslate(100, 2* ofGetHeight()/3);
            //ofRotateY(50 * ofGetElapsedTimef());
            ofSetColor(100, 0, 100, 128);
            FZLfont.draw(strPoster, fontSize, imgTop->width/2-100+0.1*x, imgTop->height/3+100+0.1*y);
            //bbox = unicodeFont.getStringBoundingBox(strFruitPrefix, 100, 2* ofGetHeight()/3);
            //ofSetColor(0, 200, 0);
            //ofFill();
            //ofRotateZ(-5);
            ofEnableAlphaBlending();
            ofPopMatrix();
            // Bottom poster
            //ofColor(255, 255, 255);
            //ofTranslate(0, 4096); // samgung tv
            ofSetColor(255, 255, 255); // very important, don't delete set color
            ofTranslate(0, 7680/2); // pptv
            imgBottom->draw(0, 0, imgBottom->width, imgBottom->height);
        }
    }
    // ---- On-screen debug overlay ----
    ofDrawBitmapStringHighlight("FPS: " + ofToString(fps), 20, 360);
    ofDrawBitmapStringHighlight("Frame " + ofToString(videoPlayers[i]->getCurrentFrame()) + "/" + ofToString(videoPlayers[i]->getTotalNumFrames()), 20, 380);
    ofDrawBitmapStringHighlight("Duration " + ofToString(videoPlayers[i]->getPosition() * videoPlayers[i]->getDuration(), 2) + "/" + ofToString(videoPlayers[0]->getDuration(), 2), 20, 400);
    ofDrawBitmapStringHighlight("Speed " + ofToString(videoPlayers[i]->getSpeed(), 2), 20, 420);
    ofDrawBitmapStringHighlight("Canvas W:" + ofToString(ofGetWidth()) + " H:" + ofToString(ofGetHeight()), 20, 440);
    ofDrawBitmapString("Total Loop #" + ofToString(loopCounter) + " \nClip #" + ofToString(i), 20, 460);
    // send out frame number information
    msgSend.setAddress("/sync/play/FW_SH_02_HBD_A/currentFrame");
    msgSend.addIntArg(videoPlayers[i]->getCurrentFrame());
    oscSender.sendMessage(msgSend);
    // ---- Disabled: fruit-counter text overlay, demo-mode toggling, and
    // ---- poster download/update over HTTP ----
#if 0
#if 0
    ofPushMatrix();
    ofRectangle bbox;
    ofSetColor(255, 0, 0, 32);
    float fontSize = 20 /*134*/;
    //TIME_SAMPLE_START("bbox");
    //ofTranslate(100, 2* ofGetHeight()/3);
    bbox = unicodeFont.getBBox(strFruitString, fontSize, 500, 500);
    //TIME_SAMPLE_STOP("bbox");
    ofRect(bbox);
    ofPopMatrix();
#endif
    ofPushMatrix();
    ofRectangle bbox;
    float fontSize = 134;
    //ofTranslate(100, 2* ofGetHeight()/3);
    //ofRotateY(50 * ofGetElapsedTimef());
    ofSetColor(0xd3, 0xd3, 0xd3, 200);
    unicodeFont.draw(strFruitString, fontSize, 100, 2* ofGetHeight()/3);
    bbox = unicodeFont.getStringBoundingBox(strFruitPrefix, 100, 2* ofGetHeight()/3);
    ofSetColor(0, 200, 0);
    //ofFill();
    ofEnableAlphaBlending();
    ofRect(bbox);
    //ofRotateZ(-5);
    ofPopMatrix();
    // text background
    ofSetColor(153, 153, 153, 100);
    ofRect(100, 2* ofGetHeight()/3 - 130, ofGetWidth()-100, 160);
    if (isDemoMode) {
        if ((ofGetElapsedTimef() - initTimeDbg) > 3.0) {
            videoPause = !videoPause;
            imageDisplay = !imageDisplay;
            initTimeDbg = ofGetElapsedTimef();
        }
    }
    if (isDownloadImg){
        isDownloadImg = !isDownloadImg;
        cout << "Downloading..." << "\n";
        ofSetColor(100, 0, 100, 128);
        if (isUpdateImg) {
            string str = "Poster Updating...";
            FZLfont.draw(str, 200, ofGetWidth()/2, ofGetHeight()/4*2);
        }
        // read file name from iOS client
        char serverPath[512] = "http://192.168.43.155:8080/";
        if (strUpdateFileDate.length() != 0) {
            sprintf(updateURL, "%s%s.jpg", serverPath, strUpdateFileDate.c_str());
            cout << "Update URL: " << updateURL << "\n";
            ofSaveURLAsync(updateURL, "images/L1.jpg");
        }
        else{
            cout << "ERROR: No update date" << "\n";
        }
    }
    if (isUpdateImg) {
        // reload image here
        if (imgTopx) delete imgTopx;
        imgTopx = new ofxGiantImage();
        imgTopx->loadImage("images/L1.jpg");
        imgTopPosters.push_back(imgTopx); // push into image queues
        isUpdateImg = !isUpdateImg;
    }
#endif
}
// Content rectangle reported to the Qt scene graph: simply the current
// video rectangle.
QRectF QuickFBORenderer::contentRect() const
{
    return this->videoRect();
}