Example #1
int main(int argc, char *argv[])
{
    SET_HANDLERS();

    Q_INIT_RESOURCE(main);

    SYNC_PRINT(("Starting cloudView...\n"));
    QApplication app(argc, argv);
    ImageViewMainWindow mainWindow;

    QTG12Loader::registerMyself();
    QTRGB24Loader::registerMyself();

    if (argc > 1)
    {
        qDebug("Main: %s", argv[1]);
        mainWindow.loadImage(QString(argv[1]));

#if 0
        RGB48Buffer *buffer = PPMLoader().rgb48BufferCreateFromPPM(str);
        if (buffer == NULL)
        {
            qDebug("Can't' open file: %s", str.c_str());
        } else {
            mainWindow.input = buffer;
        }
#endif
    }

    mainWindow.show();
    app.exec();
    SYNC_PRINT(("Exiting ImageView application...\n"));
}
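Every example in this listing logs through the SYNC_PRINT macro with doubled parentheses. That idiom lets one macro parameter carry an entire printf-style argument list; a minimal sketch of such a wrapper, assuming a mutex-guarded printf (illustrative only, not the library's actual definition):

// Hypothetical sketch of a SYNC_PRINT-style macro: the doubled parentheses make the
// whole printf argument list a single macro argument, and a mutex serializes output.
#include <cstdio>
#include <mutex>

static std::mutex g_printMutex;

#define SYNC_PRINT(X)                                        \
    do {                                                     \
        std::lock_guard<std::mutex> lock(g_printMutex);      \
        printf X;                                            \
    } while (0)

// Usage matches the calls above, e.g.:
//   SYNC_PRINT(("Starting cloudView...\n"));
//   SYNC_PRINT(("Loaded %d of %d frames\n", done, total));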
Example #2
AbstractFileCapture::AbstractFileCapture(QString const &params)
    : mDelay(0)
    , mSpin(NULL)
    , mShouldSkipUnclaimed(false)
    , shouldStopSpinThread(false)
    , mTimeStamp(0)
{
    //     Group numbers                  1      2 3      4       5 6         7
    QRegExp deviceStringPattern(QString("^([^,]*)(,(\\d*)/(\\d*))?(,(\\d*)ms)?(,skip)?$"));
    static const int filenamePatternGroup  = 1;
    static const int fpsNumGroup           = 3;
    static const int fpsDenumGroup         = 4;
    static const int delayGroup            = 6;
    static const int shouldSkipGroup       = 7;

    SYNC_PRINT(("Input string %s\n", params.toLatin1().constData()));
    int result = deviceStringPattern.indexIn(params);
    if (result == -1)
    {
        printf("Error in device string format\n");
        return;
    }

    SYNC_PRINT((
        "Parsed data:\n"
        "  | - Filename Pattern: <%s>\n"
        "  | - FPS: <%s/%s>\n"
        "  | - Delay: <%s>\n"
        "  \\ - Skip: <%s>\n",
        deviceStringPattern.cap(filenamePatternGroup).toLatin1().constData(),
        deviceStringPattern.cap(fpsNumGroup)         .toLatin1().constData(),
        deviceStringPattern.cap(fpsDenumGroup)       .toLatin1().constData(),
        deviceStringPattern.cap(delayGroup)          .toLatin1().constData(),
        deviceStringPattern.cap(shouldSkipGroup)     .toLatin1().constData()
        ));

    // store the given path format for further usage
    mPathFmt = deviceStringPattern.cap(filenamePatternGroup).toLatin1().constData();

    bool err = false;
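    // Note: QString::toInt(bool *ok) sets the flag to true on success, so despite its
    // name, "err" here really means "conversion succeeded".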
    int fpsnum = deviceStringPattern.cap(fpsNumGroup).toInt(&err);
    if (!err || fpsnum < 0)
        fpsnum = -1;

    int fpsdenum = deviceStringPattern.cap(fpsDenumGroup).toInt(&err);
    if (!err || fpsdenum <= 0)
        fpsdenum = -1;

    mDelay = deviceStringPattern.cap(delayGroup).toInt(&err);
    if (!err /*|| mDelay < 0*/)
        mDelay = capDefaultDelay;

    if (fpsnum != -1 && fpsdenum != -1 && fpsdenum != 0)
        mDelay = 1000 * fpsnum / fpsdenum;

    if (deviceStringPattern.cap(shouldSkipGroup) == QString(",skip"))
        mShouldSkipUnclaimed = true;
}
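For reference, the pattern above accepts strings of the form <filename pattern>[,<num>/<denum>][,<N>ms][,skip]. A few hypothetical inputs and the capture groups they would populate (file names are made up for illustration):

// Hypothetical device strings matched by deviceStringPattern above:
//   "frames/frame_%04d.pgm"                 -> only filenamePatternGroup is set
//   "frames/frame_%04d.pgm,25/1"            -> fpsNumGroup = "25", fpsDenumGroup = "1"
//   "frames/frame_%04d.pgm,25/1,40ms,skip"  -> additionally delayGroup = "40", shouldSkipGroup = ",skip"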
Example #3
RTSPCapture::~RTSPCapture()
{
    SYNC_PRINT(("RTSPCapture::~RTSPCapture(): called\n"));

    av_free(mFrame);
    avcodec_close(mCodecContext);
    avformat_close_input(&mFormatContext);

    SYNC_PRINT(("RTSPCapture::~RTSPCapture(): exited\n"));
}
Example #4
ImageCaptureInterface::CapErrorCode RTSPCapture::startCapture()
{
    SYNC_PRINT(("RTSPCapture::startCapture(): called\n"));
    mIsPaused = false;

    frame_data_t frameData;
    frameData.timestamp = 0;
    SYNC_PRINT(("RTSPCapture::startCapture(): sending notification\n"));
    notifyAboutNewFrame(frameData);

    spin.start();

    SYNC_PRINT(("RTSPCapture::startCapture(): exited\n"));
    return ImageCaptureInterface::SUCCESS;
}
template <>
void BinaryReader::visit<void *, PointerField>(void * &field, const PointerField *fieldDescriptor)
{
    CORE_UNUSED(field);
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    SYNC_PRINT(("%s : NYI\n", __FUNCTION__));
}
template <>
void BinaryReader::visit<double, DoubleVectorField>(std::vector<double> &field, const DoubleVectorField *fieldDescriptor)
{
    CORE_UNUSED(field);
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    SYNC_PRINT(("%s : NYI\n", __FUNCTION__));
}
template <>
void BinaryReader::visit<int,    IntField>(int &field, const IntField *fieldDescriptor)
{
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    stream->read((char *) &field, sizeof(field));
    SYNC_PRINT(("BinaryReader::visit<int,IntField>(): read %d\n", field));
}
template <>
void BinaryReader::visit<double, DoubleField>(double &field, const DoubleField *fieldDescriptor)
{
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    stream->read((char *) &field, sizeof(field));
    SYNC_PRINT(("BinaryReader::visit<double, DoubleField>():read %lf\n", field));
}
template <>
void BinaryReader::visit<std::wstring, WStringField>(std::wstring &field, const WStringField *fieldDescriptor)
{
    CORE_UNUSED(field);
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    SYNC_PRINT(("%s : NYI\n", __FUNCTION__));
}
template <>
void BinaryReader::visit<std::wstring>(std::wstring &stringField, std::wstring /*defaultValue*/, const char *fieldName)
{
    CORE_UNUSED(stringField);
    CORE_UNUSED(fieldName);
    if (stream == NULL) return;
    SYNC_PRINT(("%s : NYI\n", __FUNCTION__));
}
Example #11
ImageCaptureInterface::FramePair RTSPCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    FramePair result = fcb.dequeue();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    emit newStatisticsReady(stats);

    if (!mIsPaused)
    {        
        frame_data_t frameData;
        frameData.timestamp = fcb.secondFrameTimestamp();
        //SYNC_PRINT(("RTSPCapture::getFrame(): sending notification ts = %d\n", frameData.timestamp));
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("RTSPCapture::getFrame(): Paused\n"));
    }

    return result;
}
template <>
void BinaryReader::visit<int>(int &intField, int defaultValue, const char *fieldName)
{
    CORE_UNUSED(defaultValue);
    CORE_UNUSED(fieldName);
    if (stream == NULL) return;
    stream->read((char *) &intField, sizeof(intField));
    SYNC_PRINT(("BinaryReader::visit<int>(): read %d\n", intField));
}
template <>
void BinaryReader::visit<double>(double &doubleField, double defaultValue, const char *fieldName)
{
    CORE_UNUSED(defaultValue);
    CORE_UNUSED(fieldName);
    if (stream == NULL) return;
    stream->read((char *) &doubleField, sizeof(doubleField));
    SYNC_PRINT(("BinaryReader::visit<double>(): read %lf\n", doubleField));
}
Example #14
OpenCVCaptureInterface::FramePair OpenCVCaptureInterface::getFrame()
{
    SYNC_PRINT(("OpenCVCaptureInterface::SpinThread::getFrame(): called"));
    protectFrame.lock();
        FramePair result = current.clone();
    protectFrame.unlock();
    return result;
}
Example #15
ImageCaptureInterface::CapErrorCode AviCapture::startCapture()
{
//  return ImageCaptureInterface::CapSuccess1Cam;
    SYNC_PRINT(("AviCapture::startCapture(): called\n"));
    frame_data_t frameData;

    //mIsPaused = false;

    count++;
    frameData.timestamp = (count * 10);
    SYNC_PRINT(("AviCapture::startCapture(): sending notification\n"));
    notifyAboutNewFrame(frameData);


    SYNC_PRINT(("AviCapture::startCapture(): exited\n"));
    return ImageCaptureInterface::SUCCESS;
}
template <>
void BinaryReader::visit<std::string, StringField>(std::string &field, const StringField *fieldDescriptor)
{
    CORE_UNUSED(field);
    CORE_UNUSED(fieldDescriptor);
    if (stream == NULL) return;
    SYNC_PRINT(("%s : NYI\n", __FUNCTION__));
    //stream->read((char *) &field, sizeof(field));
}
Example #17
ImageCaptureInterface::CapErrorCode RTSPCapture::nextFrame()
{
    frame_data_t frameData;
    frameData.timestamp = fcb.firstFrameTimestamp();

    SYNC_PRINT(("RTSPCapture::nextFrame(): sending notification ts = %" PRIu64 "\n", frameData.timestamp));
    notifyAboutNewFrame(frameData);
    return ImageCaptureInterface::SUCCESS;
}
Example #18
RTSPCapture::RTSPCapture(QString const &params):
       mName(params.toStdString())
     , mFormatContext(NULL)
     , mCodecContext(NULL)
     , mCodec(NULL)
     , mIsPaused(false)
     , mFrame(NULL)
     , spin(this)
     , fcb(this)
{
    SYNC_PRINT(("RTSPCapture::RTSPCapture(%s): called\n", params.toLatin1().constData()));
    SYNC_PRINT(("Registering the codecs...\n"));

    av_register_all();
    avformat_network_init();

    SYNC_PRINT(("RTSPCapture::RTSPCapture(): exited\n"));
}
Example #19
ImageCaptureInterface::CapErrorCode RTSPCapture::initCapture()
{
    SYNC_PRINT(("RTSPCapture::initCapture(): called\n"));

    int res;
    res = avformat_open_input(&mFormatContext, mName.c_str(), NULL, NULL);
    if (res < 0) {
        SYNC_PRINT(("RTSPCapture::initCapture(): failed to open file"));
        return ImageCaptureInterface::FAILURE;
    }

    res = avformat_find_stream_info(mFormatContext, NULL);
    if (res < 0) {
        SYNC_PRINT(("RTSPCapture::initCapture(): Unable to find stream info\n"));
        return ImageCaptureInterface::FAILURE;
    }

    SYNC_PRINT(("Stream seem to have %d streams\n", mFormatContext->nb_streams));

    // Dump information about file onto standard error
    av_dump_format(mFormatContext, 0, mName.c_str(), 0);

    // Find the first video stream
    for (mVideoStream = 0; mVideoStream < mFormatContext->nb_streams; mVideoStream++) {
        if (mFormatContext->streams[mVideoStream]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            break;
        }
    }

    if (mVideoStream == mFormatContext->nb_streams) {
        SYNC_PRINT(("RTSPCapture::initCapture(): Unable to find video stream among %d streams\n", mFormatContext->nb_streams));
        return ImageCaptureInterface::FAILURE;
    }

    SYNC_PRINT(("RTSPCapture::initCapture(): Video Stream found\n"));
    mCodecContext = mFormatContext->streams[mVideoStream]->codec;
    mCodec = avcodec_find_decoder(mCodecContext->codec_id);
    res = avcodec_open2(mCodecContext, mCodec, NULL);
    if (res < 0) {
        SYNC_PRINT(("RTSPCapture::initCapture(): Unable to open codec\n"));
        return ImageCaptureInterface::FAILURE;
    }
    SYNC_PRINT(("RTSPCapture::initCapture(): Video codec found\n"));

    mFrame = avcodec_alloc_frame();
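    // Note: avcodec_alloc_frame() is deprecated in newer FFmpeg releases; av_frame_alloc() is its replacement.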

    SYNC_PRINT(("RTSPCapture::initCapture(): exited\n"));
    return ImageCaptureInterface::SUCCESS;
}
Example #20
ImageCaptureInterface::CapErrorCode AviCapture::nextFrame()
{
    count++;
    frame_data_t frameData;
    frameData.timestamp = (count * 10);
    SYNC_PRINT(("AviCapture::nextFrame(): sending notification\n"));
    notifyAboutNewFrame(frameData);

    return ImageCaptureInterface::SUCCESS;
}
V4L2CaptureInterface::~V4L2CaptureInterface()
{
   // spinRunning.lock();
    cout << "V4L2CaptureInterface::Request for killing the thread" << endl;

    shouldStopSpinThread = true;
    bool result = spinRunning.tryLock(1000);

    if (result) {
        SYNC_PRINT(("V4L2CaptureInterface::Camera thread killed\n"));
    } else {
        SYNC_PRINT(("V4L2CaptureInterface::Unable to exit Camera thread\n"));
    }

    for (int i = 0; i < MAX_INPUTS_NUMBER; i++) {
        SYNC_PRINT(("V4L2CaptureInterface::Stopping cameras\n"));
        camera[i].stop();
    }
}
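The destructor above relies on a handshake between the shouldStopSpinThread flag and the spinRunning mutex. A minimal, self-contained sketch of the spin loop such a handshake assumes (hypothetical names, not the library's actual thread code):

// Hypothetical illustration of the shutdown handshake used above: the worker thread
// holds a mutex for as long as it runs and releases it on exit, so the owner can use
// tryLock() with a timeout to detect whether the thread finished in time.
#include <QMutex>
#include <QThread>
#include <atomic>

class SpinWorker : public QThread
{
public:
    QMutex            spinRunning;
    std::atomic<bool> shouldStopSpinThread { false };

protected:
    void run() override
    {
        spinRunning.lock();
        while (!shouldStopSpinThread) {
            msleep(10);           // stand-in for "capture and decode one frame"
        }
        spinRunning.unlock();     // releasing the mutex signals "loop has exited"
    }
};

// Owner side, mirroring the destructor above:
//   worker.shouldStopSpinThread = true;
//   bool finished = worker.spinRunning.tryLock(1000);
//   SYNC_PRINT((finished ? "thread stopped\n" : "thread still running\n"));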
Example #22
AviCapture::AviCapture(const std::string &params)
    : /*AbstractFileCapture(params),*/
       mName(params)
     , mFormatContext(NULL)
     , mCodecContext(NULL)
     , mCodec(NULL)
     , mIsPaused(false)
     , mFrame(NULL)
     , count(1)
{
    SYNC_PRINT(("AviCapture::AviCapture(%s): called\n", params.c_str()));
    if (!avCodecInited)
    {
        SYNC_PRINT(("Registering the codecs...\n"));
        av_register_all();
        avCodecInited = true;
    }

    SYNC_PRINT(("AviCapture::AviCapture(): exited\n"));
}
Example #23
ImageCaptureInterface::CapErrorCode AviCapture::pauseCapture()
{
    mIsPaused = !mIsPaused;
    SYNC_PRINT(("AviCapture::pauseCapture(): called. Pause is %s\n", mIsPaused ? "ON" : "OFF"));
    if (!mIsPaused)
    {
        nextFrame();
    }

    return ImageCaptureInterface::SUCCESS;
}
Example #24
AviCapture::~AviCapture()
{
    SYNC_PRINT(("AviCapture::~AviCapture(): called\n"));

    if (mFrame != NULL) {
        av_free(mFrame);
        mFrame = NULL;
    }

    if (mCodecContext != NULL) {
        avcodec_close(mCodecContext);
        mCodecContext = NULL;
    }

    if (mFormatContext != NULL) {
        avformat_close_input(&mFormatContext);
        mFormatContext = NULL;
    }

    SYNC_PRINT(("AviCapture::~AviCapture(): exited\n"));
}
Example #25
RGB24Buffer* QTRGB24Loader::load(string name)
{
    SYNC_PRINT(("QTRGB24Loader::load(%s): called\n", name.c_str()));
    QString qtName = QString::fromStdString(name);
    QImage image(qtName);

    if (image.isNull()) {
        return NULL;
    }

    return QTFileLoader::RGB24BufferFromQImage(&image);
}
template <>
void BinaryReader::visit<std::string>(std::string &stringField, std::string /*defaultValue*/, const char *fieldName)
{
    CORE_UNUSED(fieldName);
    if (stream == NULL) return;
    uint32_t length = 0;
    stream->read((char *)&length, sizeof(length));
    char* data = new char[length + 1];
    stream->read(data, length);
    data[length] = 0;
    stringField = data;
    delete[] data;      // the temporary buffer is copied into stringField and no longer needed
    SYNC_PRINT(("BinaryReader::visit<std::string>():read %s\n", stringField.c_str()));
}
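The string field above is stored as a 32-bit length prefix followed by the raw bytes. A matching writer for that layout, inferred only from the read code above (not taken from the library's BinaryWriter), could look like this:

// Hypothetical mirror of the read path above: 32-bit length prefix, then raw bytes.
#include <cstdint>
#include <ostream>
#include <string>

static void writeLengthPrefixedString(std::ostream &stream, const std::string &value)
{
    uint32_t length = (uint32_t)value.size();
    stream.write((const char *)&length, sizeof(length));
    stream.write(value.data(), length);
}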
void V4L2CaptureInterface::decodeDataRGB24(V4L2CameraDescriptor *camera, V4L2BufferDescriptor *buffer, RGB24Buffer **output)
{
    if (!buffer->isFilled)
    {
        *output = new RGB24Buffer(formatH, formatW);
        return;
    }

    uint8_t *ptrL = (uint8_t*)(camera->buffers[buffer->index].start);
    PreciseTimer timer;
    switch(decoder)
    {
        case UNCOMPRESSED:
            *output = new RGB24Buffer(formatH, formatW);
//            printf("Decoding image...");
            timer = PreciseTimer::currentTime();
            (*output)->fillWithYUYV(ptrL);
//            printf("Delay: %i\n", timer.usecsToNow());
            break;
        case COMPRESSED_JPEG:
        {
            uint16_t *ptrDecoded = decodeMjpeg(ptrL);
            *output = new RGB24Buffer(formatH, formatW, false);
            (*output)->fillWithYUYV((uint8_t *)ptrDecoded);
            free(ptrDecoded);
           // SYNC_PRINT(("V4L2CaptureInterface::decodeDataRGB24(): COMPRESSED_JPEG not supported"));
        }
        break;
        case CODEC_NUMBER:
            SYNC_PRINT(("V4L2CaptureInterface::decodeDataRGB24(): CODEC_NUMBER not supported\n"));
            break;
        case COMPRESSED_FAST_JPEG:
            SYNC_PRINT(("V4L2CaptureInterface::decodeDataRGB24(): COMPRESSED_FAST_JPEG not supported\n"));
            break;
        default:
            SYNC_PRINT(("V4L2CaptureInterface::decodeDataRGB24(): %d decoder not supported\n", decoder));
            break;
    }
}
bool Mesh3DDecorated::verify( void )
{
    if (faces.size() != texId.size() || faces.size() != normalCoords.size())
    {
        SYNC_PRINT(("Wrong face/texId/normalId index\n"));
        return false;
    }

    for (size_t i = 0; i < faces.size(); i++) {
        for (int j = 0; j < 3; j++) {
            if (faces[i][j] > (int)vertexes.size() ) {
                SYNC_PRINT(("Wrong face index\n"));
                return false;
            }
        }
    }

    for (size_t i = 0; i < texId.size(); i++) {
        for (int j = 0; j < 3; j++) {
            if (texId[i][j] > (int)textureCoords.size() ) {
                SYNC_PRINT(("Wrong texture index\n"));
                return false;
            }
        }
    }

    for (size_t i = 0; i < normalId.size(); i++) {
        for (int j = 0; j < 3; j++) {
            if (normalId[i][j] > (int)normalCoords.size() && normalId[i][j] != -1) {
                SYNC_PRINT(("Wrong normal index for face %u - [%d %d %d]\n",
                     i, normalId[i][0], normalId[i][1], normalId[i][2]));
                return false;
            }
        }
    }
    return true;
}
uint16_t *V4L2CaptureInterface::decodeMjpeg(unsigned char *data)
{
    int width  = 800;
    int height = 600;
    unsigned char * framebuffer = 0;

    MjpegDecoder decoder;
    int result = decoder.decode(&framebuffer, data, &width, &height);
    if (result != 0)
    {
        SYNC_PRINT(("MJPEG decoder error, code %d\n", result));
    }
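    // Ownership note: the buffer returned here is released by the caller with free(),
    // as the COMPRESSED_JPEG branch of decodeDataRGB24() above does.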

    return (uint16_t*)framebuffer;
}
void V4L2CaptureInterface::setCaptureDeviceParameters(const int handle, const int prop,
                                                      const int32_t val, int &res, const char* text) const
{
    v4l2_control request;
    if (handle)
    {
        request.id = prop;
        request.value = val;
        int result = ioctl( handle, VIDIOC_S_CTRL, &request);
        if ( result != 0 )
        {
            SYNC_PRINT(( "VIDIOC_S_CTRL for %s with value %d failed. (%s)\n", text, val, strerror(errno) ));
        }

        res |= result;
    }
    else {
        res = -1;
    }
}
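A hypothetical call site for the helper above, using a standard V4L2 control id (deviceHandle and the value 128 are illustrative):

// Hypothetical usage, inside another V4L2CaptureInterface member function:
//
//     int res = 0;
//     setCaptureDeviceParameters(deviceHandle, V4L2_CID_BRIGHTNESS, 128, res, "brightness");
//     if (res != 0) {
//         SYNC_PRINT(("Unable to set brightness\n"));
//     }
//
// V4L2_CID_BRIGHTNESS is defined in <linux/videodev2.h>; deviceHandle stands for the
// file descriptor obtained by open()-ing the camera device node.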