// Builds the vCard dialog for a contact: fills the name, phone, timezone
// and location fields, and fetches the avatar if one is referenced but
// not yet cached in the photo storage.
VCardDialog::VCardDialog (const UserInfo& info, PhotoStorage *storage, GeoResolver *geo, ICoreProxy_ptr proxy, QWidget *parent)
: QDialog (parent)
, Proxy_ (proxy)
, Info_ (info)
, Storage_ (storage)
{
	Ui_.setupUi (this);
	setAttribute (Qt::WA_DeleteOnClose);

	Ui_.FirstName_->setText (info.FirstName_);
	Ui_.LastName_->setText (info.LastName_);
	Ui_.Nickname_->setText (info.Nick_);

	Ui_.Birthday_->setDate (info.Birthday_);
	// Year 1800 appears to be the "year unknown" sentinel — hide it then.
	Ui_.Birthday_->setDisplayFormat (info.Birthday_.year () != 1800 ? "dd MMMM yyyy" : "dd MMMM");

	if (info.Gender_)
		Ui_.Gender_->setText (info.Gender_ == 1 ? tr ("female") : tr ("male"));

	Ui_.HomePhone_->setText (info.HomePhone_);
	Ui_.MobilePhone_->setText (info.MobilePhone_);

	auto tzText = QString::number (info.Timezone_) + " GMT";
	if (info.Timezone_ > 0)
		tzText.prepend ('+');
	Ui_.Timezone_->setText (tzText);

	// Geo lookups are asynchronous; the QPointer guards against the dialog
	// being destroyed (WA_DeleteOnClose) before a callback fires.
	QPointer<VCardDialog> safeThis (this);
	if (info.Country_ > 0)
		geo->GetCountry (info.Country_,
				[safeThis, this] (const QString& countryName)
				{
					if (safeThis)
						Ui_.Country_->setText (countryName);
				});
	if (info.City_ > 0)
		geo->GetCity (info.City_,
				[safeThis, this] (const QString& cityName)
				{
					if (safeThis)
						Ui_.City_->setText (cityName);
				});

	if (!info.BigPhoto_.isValid ())
		return;

	const auto& photo = storage->GetImage (info.BigPhoto_);
	if (photo.isNull ())
		// Not cached yet — listen for the storage to fetch it.
		connect (storage,
				SIGNAL (gotImage (QUrl)),
				this,
				SLOT (handleImage (QUrl)));
	else
		Ui_.PhotoLabel_->setPixmap (QPixmap::fromImage (photo)
				.scaled (Ui_.PhotoLabel_->size (),
						Qt::KeepAspectRatio,
						Qt::SmoothTransformation));
}
void VideoWidget::clear() { m_images.clear(); QImage img(m_width, m_height, QImage::Format_RGB32); img.fill(0xff000000); handleImage(img, RENDER_FLAG_FLUSH); }
// Entry point: starts the worker thread that consumes images, then hands
// control to ROS until the node is shut down.
int main(int argc, char** argv)
{
    boost::thread imageWorker(&handleImageThread);

    ros::init(argc, argv, "kinectImage");
    SubscriberKinectImage imageSubscriber;
    ros::spin();

    return 0;
}
void VideoWidget::clear() { QMutexLocker locker(&m_mutex); m_images.clear(); QImage img(m_width, m_height, QImage::Format_RGB32); img.fill(Qt::black); handleImage(img); handleFlush(); }
int main(int argc, char *argv[]) { QImageReader reader("image.png"); QImage image = reader.read(); if (image.isNull()) { qWarning() << "Error reading image" << reader.errorString(); return 1; } handleImage(image); return 0; }
// Wires the renderer to its video widget and allocates the scratch
// buffer used for raw frame pixels.
Renderer::Renderer(VideoWidget *video) :
    m_background(0, 0)
{
    m_video = video;
    m_mode = 3;
    m_backgroundFrame = true;
    m_rawFrame.m_pixels = new uint8_t[0x10000];

    // Queued connections by default; the commented-out arguments record a
    // previously tried blocking connection type.
    connect(this, SIGNAL(image(QImage)), m_video, SLOT(handleImage(QImage))); // Qt::BlockingQueuedConnection);
    connect(this, SIGNAL(flushImage()), m_video, SLOT(handleFlush())); //, Qt::BlockingQueuedConnection);
}
int main(int argc, char *argv[]) { ImageIO reader("image.png"); auto result = reader.read(); ImageIOResult ok = result.first; if (!ok) { qWarning() << "Error reading image:" << ok.toString(); return 1; } ImageContents contents = result.second; handleImage(contents.image()); return 0; }
// Emits every floating image anchored exactly at the current character
// position, advancing myNextFloatImageInfoIndex past consumed entries.
void OleStreamParser::processFloatImage(OleMainStream &stream) {
	const OleMainStream::FloatImageInfoList &infoList = stream.getFloatImageInfoList();
	if (infoList.empty()) {
		return;
	}

	// Skip entries anchored before myCurCharPos — not all entries are
	// real pictures, so the index may lag behind the text position.
	for (; myNextFloatImageInfoIndex < infoList.size() && infoList.at(myNextFloatImageInfoIndex).first < myCurCharPos; ++myNextFloatImageInfoIndex) {
	}

	// Render each image anchored exactly at the current position.
	for (; myNextFloatImageInfoIndex < infoList.size() && infoList.at(myNextFloatImageInfoIndex).first == myCurCharPos; ++myNextFloatImageInfoIndex) {
		const OleMainStream::FloatImageInfo imageInfo = infoList.at(myNextFloatImageInfoIndex).second;
		const ZLFileImage::Blocks blocks = stream.getFloatImage(imageInfo.ShapeId);
		if (!blocks.empty()) {
			handleImage(blocks);
		}
	}
}
/** * Handle widget events. * @param event A MoSync event data structure. */ void NativeScreen::customEvent(const MAEvent& event) { // Get the information sent by the widget. MAWidgetEventData* widgetEventData = (MAWidgetEventData*) event.data; if ( event.type == EVENT_TYPE_IMAGE_PICKER) { if ( event.imagePickerState == 1 ) { char checkboxBuffer[BUF_SIZE]; maWidgetGetProperty(mEventReturnTypeCheckbox, MAW_CHECK_BOX_CHECKED, checkboxBuffer, BUF_SIZE); MAUtil::String value = MAUtil::lowerString(checkboxBuffer); int resCode = -1; if ( strcmp(value.c_str(),"true") == 0 ) { resCode = handleImageData(event.imagePickerItem); } else { resCode = handleImage(event.imagePickerItem); } if (resCode == RES_OK) setLabelText(mLabel, "Preview is available"); else setLabelText(mLabel, "Preview is not available"); } else { setLabelText(mLabel, "The user canceled the image selection"); } } // Check that the event was a click (touch) event. if (widgetEventData->eventType == MAW_EVENT_CLICKED) { // Handle the event emitted by the widget widgetClicked(widgetEventData->widgetHandle); } }
// Worker loop: periodically drains the shared image queue in batches and
// processes each image until mRunning is cleared.
//
// Improvements over the original: images are moved (not copied) out of the
// queue, the batch is iterated by const reference (copying a shared_ptr
// costs an atomic refcount increment/decrement per element), and the inner
// lock no longer shadows the outer one's name.
void operator()() {
	mRunning = true;
	while (mRunning) {
		// Wake periodically even without a notification so a cleared
		// mRunning flag is noticed within ~100 ms.
		std::unique_lock<std::mutex> lock(mConditionMutex);
		mCondition.wait_for(lock, std::chrono::milliseconds(100));

		// Move the pending images out under the data lock, then process
		// them without holding it.
		std::vector<std::shared_ptr<bot_core::image_t> > workQueue;
		{
			std::unique_lock<std::mutex> dataLock(mDataMutex);
			workQueue.reserve(mDataQueue.size());
			while (!mDataQueue.empty()) {
				workQueue.push_back(std::move(mDataQueue.front()));
				mDataQueue.pop_front();
			}
		}

		for (const auto& img : workQueue) {
			handleImage(img);
			++mProcessedCount;
			std::cout << mChannelTransmit << ": received " << mReceivedCount
					<< ", processed " << mProcessedCount << std::endl;
		}
	}
}
// Connects the renderer's image/flush signals to the video widget,
// allocates the frame buffer, and opens every channel threshold to its
// full range.
Renderer::Renderer(VideoWidget *video)
{
    m_video = video;
    m_frameData = new uint8_t[0x20000];

    m_backgroundFrame = true;
    m_lut = NULL;
    m_mode = 3;

    // Wide-open threshold defaults: accept the whole range of each channel.
    m_hmin = 0x00; m_hmax = 0xff;
    m_smin = 0x00; m_smax = 0xff;
    m_vmin = 0x00; m_vmax = 0xff;
    m_cmin = 0x00; m_cmax = 0xff;

    // Queued connections; the commented-out arguments record a previously
    // tried blocking connection type.
    connect(this, SIGNAL(image(QImage)), m_video, SLOT(handleImage(QImage))); // Qt::BlockingQueuedConnection);
    connect(this, SIGNAL(flushImage()), m_video, SLOT(handleFlush())); //, Qt::BlockingQueuedConnection);
}
/*
 * wvParse special-character callback: reacts to control characters found
 * in the document text stream.
 *
 * Field markers (19 / 0x13 begin, 20 / 0x14 separator, 21 / 0x15 end)
 * only update the parser's field-nesting state (fieldstate, fieldmiddle);
 * field contents are otherwise not handled yet — see the TODO below.
 *
 * 0x01 marks an inline picture: the PICF header is read from ps->data at
 * achp->fcPic_fcObj_lTagObj, the blip extracted with wv0x01() and passed
 * to handleImage() with its dxaGoal/dyaGoal extents; the original stream
 * position is restored afterwards. Embedded OLE2 objects are skipped with
 * a trace message.
 *
 * 0x08 marks a floating (drawn) object: for Word 8 files the FSPA for the
 * current CP is looked up and the blip rendered via handleImage() with
 * extents derived from the FSPA bounding box. Pre-Word8 files are not
 * supported here — the FDOA is fetched but left unused.
 *
 * Returns 0 in all cases.
 */
static int specCharProc (wvParseStruct * ps, U16 eachchar, CHP * achp) { Blip blip; wvStream *fil; long pos; FSPA *fspa; PICF picf; FDOA *fdoa; switch (eachchar) { case 19: /* field begin */ ps->fieldstate++; ps->fieldmiddle = 0; return 0; case 20: /* field separator */ if (achp->fOle2) { wvTrace (("Field has an embedded OLE2 object\n")); } ps->fieldmiddle = 1; return 0; case 21: /* field end */ ps->fieldstate--; ps->fieldmiddle = 0; return 0; default: break; } /* TODO: properly handle fields */ if (ps->fieldstate) { if (eachchar == 0x13 || eachchar == 0x14) return 0; } /* image handling */ switch (eachchar) { case 0x01: if (achp->fOle2) { wvTrace (("embedded OLE2 component. currently unsupported")); return 0; } pos = wvStream_tell (ps->data); wvStream_goto (ps->data, achp->fcPic_fcObj_lTagObj); wvGetPICF (wvQuerySupported (&ps->fib, NULL), &picf, ps->data); fil = picf.rgb; if (wv0x01 (&blip, fil, picf.lcb - picf.cbHeader)) { handleImage (&blip, picf.dxaGoal, picf.dyaGoal); } else { wvTrace (("Dom: strange no graphic data 1\n")); } wvStream_goto (ps->data, pos); return 0; break; case 0x08: if (wvQuerySupported (&ps->fib, NULL) == WORD8) { if (ps->nooffspa > 0) { fspa = wvGetFSPAFromCP (ps->currentcp, ps->fspa, ps->fspapos, ps->nooffspa); if (!fspa) { wvError ( ("No fspa! Panic and Insanity Abounds!\n")); return 0; } if (wv0x08 (&blip, fspa->spid, ps)) { handleImage (&blip, fspa->xaRight - fspa->xaLeft, fspa->yaBottom - fspa->yaTop); } else { wvTrace (("Dom: strange no graphic data 2\n")); return 0; } } else { wvTrace (("nooffspa was <=0 -- ignoring")); } } else { wvError ( ("pre Word8 0x08 graphic -- unsupported at the moment")); fdoa = wvGetFDOAFromCP (ps->currentcp, NULL, ps->fdoapos, ps->nooffdoa); } } return 0; }