Example #1
const QIcon& CIconProvider::iconFor(const CFileSystemObject& object)
{
	const qulonglong objectHash = hash(object);
	if (_iconForObject.count(objectHash) == 0)
	{
		const QIcon icon = _provider->iconFor(object);
		assert_r(!icon.isNull());

		// Hash the icon's pixel data so that visually identical icons share a single cache entry.
		const auto qimage = icon.pixmap(icon.availableSizes().front()).toImage();
		const qulonglong iconHash = fasthash64((const char*)qimage.constBits(), qimage.bytesPerLine() * qimage.height(), 0);

		// Crude eviction: once the cache grows past 300 entries, drop everything and start over.
		if (_iconCache.size() > 300)
		{
			_iconCache.clear();
			_iconForObject.clear();
		}

		const auto iconInContainer = _iconCache.insert(std::make_pair(iconHash, icon)).first;
		_iconForObject[objectHash] = iconHash;
		
		return iconInContainer->second;
	}

	return _iconCache[_iconForObject[objectHash]];
}
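
A minimal standalone sketch of the content-hashing step above, assuming the same fasthash64(buffer, size, seed) helper; it hashes a QImage through constBits(), which gives read-only access without detaching the shared pixel data (note that bytesPerLine() * height() also covers any per-row padding):

quint64 imageContentHash(const QImage& image)
{
	// constBits() does not detach, so hashing leaves the shared image data untouched.
	const uchar* pixels = image.constBits();
	const qulonglong size = qulonglong(image.bytesPerLine()) * image.height();
	return fasthash64((const char*)pixels, size, 0);
}
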
Example #2
AudioFrame AudioFrame::to(const AudioFormat &fmt) const
{
    if (!isValid() || !constBits(0))
        return AudioFrame();
    //if (fmt == format())
      //  return clone(); //FIXME: clone a frame from ffmpeg is not enough?
    Q_D(const AudioFrame);
    // TODO: use a pool
    AudioResampler *conv = d->conv;
    QScopedPointer<AudioResampler> c;
    if (!conv) {
        conv = AudioResampler::create(AudioResamplerId_FF);
        if (!conv)
            conv = AudioResampler::create(AudioResamplerId_Libav);
        if (!conv) {
            qWarning("no audio resampler is available");
            return AudioFrame();
        }
        c.reset(conv);
    }
    conv->setInAudioFormat(format());
    conv->setOutAudioFormat(fmt);
    //conv->prepare(); // already called in setIn/OutFormat
    conv->setInSampesPerChannel(samplesPerChannel()); //TODO
    if (!conv->convert((const quint8**)d->planes.constData())) {
        qWarning() << "AudioFrame::to error: " << format() << "=>" << fmt;
        return AudioFrame();
    }
    AudioFrame f(conv->outData(), fmt);
    f.setSamplesPerChannel(conv->outSamplesPerChannel());
    f.setTimestamp(timestamp());
    f.d_ptr->metadata = d->metadata; // need metadata?
    return f;
}
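
A hypothetical caller for the conversion above; the AudioFormat setters used here are assumed to match the usual QtAV ones and are not taken from this snippet:

AudioFormat out = frame.format();
out.setSampleFormat(AudioFormat::SampleFormat_Signed16); // e.g. for an S16-only audio sink
out.setSampleRate(44100);
const AudioFrame converted = frame.to(out);
if (!converted.isValid())
    qWarning("audio conversion failed");
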
Example #3
VideoFrame VideoFrame::to(const VideoFormat &fmt, const QSize& dstSize, const QRectF& roi) const
{
    if (!isValid() || !constBits(0)) { // hw surface: map it to host memory (only packed RGB formats are supported for now)
        Q_D(const VideoFrame);
        const QVariant v = d->metadata.value(QStringLiteral("surface_interop"));
        if (!v.isValid())
            return VideoFrame();
        VideoSurfaceInteropPtr si = v.value<VideoSurfaceInteropPtr>();
        if (!si)
            return VideoFrame();
        VideoFrame f;
        f.setDisplayAspectRatio(displayAspectRatio());
        f.setTimestamp(timestamp());
        if (si->map(HostMemorySurface, fmt, &f)) {
            if ((!dstSize.isValid() || dstSize == QSize(width(), height())) && (!roi.isValid() || roi == QRectF(0, 0, width(), height()))) // roi is not supported now
                return f;
            return f.to(fmt, dstSize, roi);
        }
        return VideoFrame();
    }
    const int w = dstSize.width() > 0 ? dstSize.width() : width();
    const int h = dstSize.height() > 0 ? dstSize.height() : height();
    if (fmt.pixelFormatFFmpeg() == pixelFormatFFmpeg()
            && w == width() && h == height()
            // TODO: roi check.
            )
        return *this;
    Q_D(const VideoFrame);
    ImageConverterSWS conv;
    conv.setInFormat(pixelFormatFFmpeg());
    conv.setOutFormat(fmt.pixelFormatFFmpeg());
    conv.setInSize(width(), height());
    conv.setOutSize(w, h);
    conv.setInRange(colorRange());
    if (!conv.convert(d->planes.constData(), d->line_sizes.constData())) {
        qWarning() << "VideoFrame::to error: " << format() << "=>" << fmt;
        return VideoFrame();
    }
    VideoFrame f(w, h, fmt, conv.outData());
    f.setBits(conv.outPlanes());
    f.setBytesPerLine(conv.outLineSizes());
    if (fmt.isRGB()) {
        f.setColorSpace(fmt.isPlanar() ? ColorSpace_GBR : ColorSpace_RGB);
    } else {
        f.setColorSpace(ColorSpace_Unknown);
    }
    // TODO: color range
    f.setTimestamp(timestamp());
    f.setDisplayAspectRatio(displayAspectRatio());
    f.d_ptr->metadata = d->metadata; // need metadata?
    return f;
}
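
A hypothetical caller that converts a decoded frame to packed RGB and wraps the result in a QImage; the accessor names follow their usage elsewhere in these snippets, so treat them as assumptions:

const VideoFrame rgb = frame.to(VideoFormat(VideoFormat::Format_RGB32), QSize(), QRectF());
if (rgb.isValid()) {
    // The QImage wraps the frame's buffer; copy() it if it has to outlive 'rgb'.
    const QImage img(rgb.constBits(0), rgb.width(), rgb.height(),
                     rgb.bytesPerLine(0), QImage::Format_RGB32);
}
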
Example #4
   Subscriber subscribe(const std::string &, uint32_t, void(T::*fun)(M), T *obj) {
      struct Thread : QThread {
         Thread(QObject *p) : QThread(p) {}
         ~Thread() override { quit(); wait(); }
      };
      static QPointer<Thread> thread = new Thread(qApp);
      thread->start(); // no-op if already started
      auto *timer = new QTimer;
      timer->start(1000/60);
      timer->moveToThread(thread);
      QObject::connect(timer, &QTimer::timeout, [timer, obj, fun]{
         auto const msec = QTime::currentTime().msecsSinceStartOfDay();
         auto val = timer->property("name");
         QVariant f;
         auto img = takeProperty<QImage>(timer, "image", 256, 256, QImage::Format_ARGB32_Premultiplied);
//         else img = {256, 256, QImage::Format_ARGB32_Premultiplied};
         Q_ASSERT(img.isDetached());
         qDebug() << val.isDetached() << timer->property("image").constData() << (void*)img.constBits();
//         QImage img{256, 256, QImage::Format_ARGB32_Premultiplied};
         img.fill(Qt::white);
         QPainter p{&img};
         constexpr int period = 3000;
         p.scale(img.width()/2.0, img.height()/2.0);
         p.translate(1.0, 1.0);
         p.rotate((msec % period) * 360.0/period);
         p.setPen({Qt::darkBlue, 0.1});
         p.drawLine(QLineF{{-1., 0.}, {1., 0.}});
         p.end();
         setProperty(timer, "image", img);
         img = std::move(img).convertToFormat(QImage::Format_RGB888).rgbSwapped();

         sensor_msgs::ImageConstPtr ptr{new sensor_msgs::Image{
               {img.constBits(), img.constBits() + img.sizeInBytes()},
               sensor_msgs::image_encodings::BGR8,
                     (uint32_t)img.height(), (uint32_t)img.width()}};
         (*obj.*fun)(ptr);
      });
      return {};
   }
Example #5
void AudioFrame::setSamplesPerChannel(int samples)
{
    Q_D(AudioFrame);
    if (!d->format.isValid()) {
        qWarning() << "can not set spc for an invalid format: " << d->format;
        return;
    }
    d->samples_per_ch = samples;
    const int nb_planes = d->format.planeCount();
    // Bytes per plane line: reuse the known line size if present, otherwise derive it from the
    // sample count (packed layouts interleave all channels in plane 0, hence the channel factor).
    const int bpl(d->line_sizes[0] > 0 ? d->line_sizes[0] : d->samples_per_ch*d->format.bytesPerSample() * (d->format.isPlanar() ? 1 : d->format.channels()));
    for (int i = 0; i < nb_planes; ++i) {
        setBytesPerLine(bpl, i);
    }
    if (d->data.isEmpty())
        return;
    if (!constBits(0)) {
        setBits((quint8*)d->data.constData(), 0);
    }
    for (int i = 1; i < nb_planes; ++i) {
        if (!constBits(i)) {
            setBits((uchar*)constBits(i-1) + bpl, i);
        }
    }
}
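
For context, one hypothetical way such a frame could be prepared from a raw buffer before calling setSamplesPerChannel(); the constructor argument order mirrors "AudioFrame f(conv->outData(), fmt)" above, and the format values are made-up examples:

AudioFormat fmt;
fmt.setSampleFormat(AudioFormat::SampleFormat_FloatPlanar);
fmt.setSampleRate(48000);
fmt.setChannels(2);
const int samples = 1024;
QByteArray buf(fmt.bytesPerSample() * fmt.channels() * samples, 0);
AudioFrame af(buf, fmt);
// Computes bytesPerLine for each plane and, where unset, points the planes into the buffer.
af.setSamplesPerChannel(samples);
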
Example #6
VideoFrame VideoFrame::clone() const
{
    Q_D(const VideoFrame);
    if (!d->format.isValid())
        return VideoFrame();

    // data may not be set (e.g. by the FFmpeg decoder)
    if (d->planes.isEmpty() || !d->planes.at(0)) {//d->data.size() < width()*height()) { // at least width*height
        // the data may live in GPU memory, in which case bits() is not set
        qDebug("frame data not valid. size: %d", d->data.size());
        VideoFrame f(width(), height(), d->format);
        f.d_ptr->metadata = d->metadata; // need metadata?
        f.setTimestamp(d->timestamp);
        f.setDisplayAspectRatio(d->displayAspectRatio);
        return f;
    }
    int bytes = 0;
    for (int i = 0; i < d->format.planeCount(); ++i) {
        bytes += bytesPerLine(i)*planeHeight(i);
    }

    QByteArray buf(bytes, 0);
    char *dst = buf.data(); // must be taken before buf is shared with the frame, otherwise data() would detach
    VideoFrame f(width(), height(), d->format, buf);
    const int nb_planes = d->format.planeCount();
    for (int i = 0; i < nb_planes; ++i) {
        f.setBits((quint8*)dst, i);
        f.setBytesPerLine(bytesPerLine(i), i);
        const int plane_size = bytesPerLine(i)*planeHeight(i);
        memcpy(dst, constBits(i), plane_size);
        dst += plane_size;
    }
    f.d_ptr->metadata = d->metadata; // need metadata?
    f.setTimestamp(d->timestamp);
    f.setDisplayAspectRatio(d->displayAspectRatio);
    f.setColorSpace(d->color_space);
    f.setColorRange(d->color_range);
    return f;
}

void Basic2DWindowOpenGLDisplayPlugin::customizeContext() {
#if defined(Q_OS_ANDROID)
    qreal dpi = getFullscreenTarget()->physicalDotsPerInch();
    _virtualPadPixelSize = dpi * VirtualPad::Manager::BASE_DIAMETER_PIXELS / VirtualPad::Manager::DPI;

    if (!_virtualPadStickTexture) {
        auto iconPath = PathUtils::resourcesPath() + "images/analog_stick.png";
        auto image = QImage(iconPath);
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
        if ((image.width() > 0) && (image.height() > 0)) {
            image = image.scaled(_virtualPadPixelSize, _virtualPadPixelSize, Qt::KeepAspectRatio);

            _virtualPadStickTexture = gpu::Texture::createStrict(
                    gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA),
                    image.width(), image.height(),
                    gpu::Texture::MAX_NUM_MIPS,
                    gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR));
            _virtualPadStickTexture->setSource("virtualPad stick");
            auto usage = gpu::Texture::Usage::Builder().withColor().withAlpha();
            _virtualPadStickTexture->setUsage(usage.build());
            _virtualPadStickTexture->setStoredMipFormat(gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA));
            _virtualPadStickTexture->assignStoredMip(0, image.byteCount(), image.constBits());
            _virtualPadStickTexture->setAutoGenerateMips(true);
        }
    }

    if (!_virtualPadStickBaseTexture) {
        auto iconPath = PathUtils::resourcesPath() + "images/analog_stick_base.png";
        auto image = QImage(iconPath);
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
        if ((image.width() > 0) && (image.height() > 0)) {
            image = image.scaled(_virtualPadPixelSize, _virtualPadPixelSize, Qt::KeepAspectRatio);

            _virtualPadStickBaseTexture = gpu::Texture::createStrict(
                    gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA),
                    image.width(), image.height(),
                    gpu::Texture::MAX_NUM_MIPS,
                    gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR));
            _virtualPadStickBaseTexture->setSource("virtualPad base");
            auto usage = gpu::Texture::Usage::Builder().withColor().withAlpha();
            _virtualPadStickBaseTexture->setUsage(usage.build());
            _virtualPadStickBaseTexture->setStoredMipFormat(gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA));
            _virtualPadStickBaseTexture->assignStoredMip(0, image.byteCount(), image.constBits());
            _virtualPadStickBaseTexture->setAutoGenerateMips(true);
        }
    }

    if (_virtualPadButtons.size() == 0) {
        _virtualPadButtons.append(VirtualPadButton(
                dpi * VirtualPad::Manager::BTN_FULL_PIXELS / VirtualPad::Manager::DPI,
                PathUtils::resourcesPath() + "images/fly.png",
                VirtualPad::Manager::Button::JUMP));
        _virtualPadButtons.append(VirtualPadButton(
                dpi * VirtualPad::Manager::BTN_FULL_PIXELS / VirtualPad::Manager::DPI,
                PathUtils::resourcesPath() + "images/handshake.png",
                VirtualPad::Manager::Button::HANDSHAKE));
    }
#endif
    Parent::customizeContext();
}