Q_FOREACH (QObject *obj, ordered) { IInfo *ii = qobject_cast<IInfo*> (obj); try { qDebug () << "Initializing" << ii->GetName (); emit loadProgress (tr ("Initializing %1: stage one...").arg (ii->GetName ())); ii->Init (ICoreProxy_ptr (new CoreProxy ())); } catch (const std::exception& e) { qWarning () << Q_FUNC_INFO << "while initializing" << obj << "got" << e.what (); return obj; } catch (...) { qWarning () << Q_FUNC_INFO << "while initializing" << obj << "caught unknown exception"; return obj; } }
/// Load point cloud in text format, assuming fields XYZ bool PointArray::loadText(QString fileName, size_t maxPointCount, std::vector<GeomField>& fields, V3d& offset, size_t& npoints, uint64_t& totPoints, Imath::Box3d& bbox, V3d& centroid) { V3d Psum(0); // Use C file IO here, since it's about 40% faster than C++ streams for // large text files (tested on linux x86_64, gcc 4.6.3). FILE* inFile = fopen(fileName.toUtf8(), "r"); if (!inFile) return false; fseek(inFile, 0, SEEK_END); const size_t numBytes = ftell(inFile); fseek(inFile, 0, SEEK_SET); std::vector<Imath::V3d> points; Imath::V3d p; size_t readCount = 0; // Read three doubles; "%*[^\n]" discards up to just before end of line while (fscanf(inFile, " %lf %lf %lf%*[^\n]", &p.x, &p.y, &p.z) == 3) { points.push_back(p); ++readCount; if (readCount % 10000 == 0) emit loadProgress(int(100*ftell(inFile)/numBytes)); } fclose(inFile); totPoints = points.size(); npoints = points.size(); // Zero points + nonzero bytes => bad text file if (totPoints == 0 && numBytes != 0) return false; if (totPoints > 0) offset = points[0]; fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints)); V3f* position = (V3f*)fields[0].as<float>(); for (size_t i = 0; i < npoints; ++i) { position[i] = points[i] - offset; bbox.extendBy(points[i]); Psum += points[i]; } if (npoints > 0) centroid = (1.0/npoints)*Psum; return true; }
// moc-generated meta-call dispatcher for FacebookLoginDialog — maps a
// relative method id onto the class's own signals/slots after the base
// class has consumed its ids.  Do not hand-edit the case table; it must
// match the signal/slot declaration order in the class definition.
int FacebookLoginDialog::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    // Let QDialog handle its own meta-methods first; a negative result
    // means the id was fully consumed by a base class.
    _id = QDialog::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod)
    {
        // _a[1..] hold type-erased pointers to the marshalled arguments.
        switch (_id)
        {
        case 0: done(); break;
        case 1: loadProgress((*reinterpret_cast< int(*)>(_a[1]))); break;
        case 2: loadStarted(); break;
        case 3: loadFinished((*reinterpret_cast< bool(*)>(_a[1]))); break;
        case 4: message((*reinterpret_cast< const QString(*)>(_a[1]))); break;
        case 5: urlDidChange((*reinterpret_cast< const QUrl(*)>(_a[1]))); break;
        default: ;
        }
        // Rebase the id for any further-derived classes (6 methods here).
        _id -= 6;
    }
    return _id;
}
// Slot invoked when the active tab changes: re-routes per-view signal
// forwarding from the previously current web view to the newly current one,
// refreshes the shared web actions, and syncs title/progress/status UI.
void WBTabWidget::currentChanged(int index)
{
    WBWebView *webView = this->webView(index);
    if (!webView)
        return;
    // The line-edit stack must mirror the tab pages one-to-one.
    Q_ASSERT(mLineEdits->count() == count());
    // mLineEdits->currentIndex() still refers to the *previous* tab at this
    // point, so this fetches the view we are switching away from.
    WBWebView *oldWebView = this->webView(mLineEdits->currentIndex());
    if (oldWebView) {
        // Stop forwarding the old view's signals through this widget.
        disconnect(oldWebView, SIGNAL(statusBarMessage(const QString&)), this, SIGNAL(showStatusBarMessage(const QString&)));
        disconnect(oldWebView->page(), SIGNAL(linkHovered(const QString&, const QString&, const QString&)), this, SIGNAL(linkHovered(const QString&)));
        disconnect(oldWebView, SIGNAL(loadProgress(int)), this, SIGNAL(loadProgress(int)));
        disconnect(oldWebView, SIGNAL(loadFinished(bool)), this, SIGNAL(loadFinished(bool)));
    }
    // Forward the new current view's signals (signal-to-signal relays).
    connect(webView, SIGNAL(statusBarMessage(const QString&)), this, SIGNAL(showStatusBarMessage(const QString&)));
    connect(webView->page(), SIGNAL(linkHovered(const QString&, const QString&, const QString&)), this, SIGNAL(linkHovered(const QString&)));
    connect(webView, SIGNAL(loadProgress(int)), this, SIGNAL(loadProgress(int)));
    connect(webView, SIGNAL(loadFinished (bool)), this, SIGNAL(loadFinished(bool)));
    // Point every mapped web action at the new page.
    for (int i = 0; i < mWebActions.count(); ++i) {
        WBWebActionMapper *mapper = mWebActions[i];
        mapper->updateCurrent(webView->page());
    }
    // Push the new view's current state into the surrounding UI.
    emit setCurrentTitle(webView->title());
    mLineEdits->setCurrentIndex(index);
    emit loadProgress(webView->progress());
    emit showStatusBarMessage(webView->lastStatusBarText());
    // Empty URL => fresh tab: focus the address bar; otherwise the page.
    if (webView->url().isEmpty())
        mLineEdits->currentWidget()->setFocus();
    else
        webView->setFocus();
}
void WebTab::createPreviewSelectorBar(int index) { if (m_previewSelectorBar.isNull()) { m_previewSelectorBar = new PreviewSelectorBar(index, this); qobject_cast<QVBoxLayout *>(layout())->insertWidget(0, m_previewSelectorBar.data()); m_previewSelectorBar.data()->animatedShow(); } else { disconnect(m_previewSelectorBar.data()); m_previewSelectorBar.data()->setIndex(index); m_previewSelectorBar.data()->animatedHide(); } connect(this, SIGNAL(loadStarted()), m_previewSelectorBar.data(), SLOT(loadProgress()), Qt::UniqueConnection); connect(this, SIGNAL(loadProgress(int)), m_previewSelectorBar.data(), SLOT(loadProgress()), Qt::UniqueConnection); connect(this, SIGNAL(loadFinished(bool)), m_previewSelectorBar.data(), SLOT(loadFinished()), Qt::UniqueConnection); connect(this, SIGNAL(urlChanged(QUrl)), m_previewSelectorBar.data(), SLOT(verifyUrl()), Qt::UniqueConnection); }
// Worker entry point: searches every page for m_text, starting at
// m_beginAtPage and wrapping around to cover the whole document.
// Emits resultsReady() per page and progressChanged() as it advances;
// progress is reset to 0 when the task finishes or is cancelled.
void SearchTask::run()
{
    // `index` runs over count() consecutive values starting at
    // m_beginAtPage - 1; `index % m_pages.count()` wraps it back into
    // range so the search covers all pages beginning at the current one.
    for(int index = m_beginAtPage - 1; index < m_pages.count() + m_beginAtPage - 1; ++index)
    {
        if(testCancellation())
        {
            break;
        }
        const QList< QRectF > results = m_pages.at(index % m_pages.count())->search(m_text, m_matchCase, m_wholeWords);
        emit resultsReady(index % m_pages.count(), results);
        // (index + 1 - m_beginAtPage + 1) counts pages processed so far,
        // running 1..count() — i.e. progress goes 100/count .. 100.
        releaseProgress(100 * (index + 1 - m_beginAtPage + 1) / m_pages.count());
        emit progressChanged(loadProgress());
    }
    // Clear progress once done (or cancelled).
    releaseProgress(0);
}
// Synthesize a load-in-progress notification: emits the standard
// loadStarted()/loadProgress() pair so listeners (progress bars, tab
// spinners) react as if a real page load had reached `progress` percent.
void WebView::fakeLoadingProgress(int progress)
{
    emit loadStarted();
    emit loadProgress(progress);
}
// Load a point cloud from `fileName`, decimating to at most `maxPointCount`
// points where the format loader supports it, then sort the points into
// octree order and build the octree used for LoD rendering.
// Emits loadStepStarted()/loadProgress() so the UI can track the stages.
// Returns false if the file can't be loaded or has no "position" field.
bool PointArray::loadFile(QString fileName, size_t maxPointCount)
{
    QTime loadTimer;
    loadTimer.start();
    setFileName(fileName);
    // Read file into point data fields.  Use very basic file type detection
    // based on extension.
    uint64_t totPoints = 0;
    Imath::Box3d bbox;
    V3d offset(0);
    V3d centroid(0);
    emit loadStepStarted("Reading file");
    if (fileName.endsWith(".las") || fileName.endsWith(".laz"))
    {
        if (!loadLas(fileName, maxPointCount, m_fields, offset, m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
    else if (fileName.endsWith(".ply"))
    {
        if (!loadPly(fileName, maxPointCount, m_fields, offset, m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
#if 0
    else if (fileName.endsWith(".dat"))
    {
        // Load crappy db format for debugging
        std::ifstream file(fileName.toUtf8(), std::ios::binary);
        file.seekg(0, std::ios::end);
        totPoints = file.tellg()/(4*sizeof(float));
        file.seekg(0);
        m_fields.push_back(GeomField(TypeSpec::vec3float32(), "position", totPoints));
        m_fields.push_back(GeomField(TypeSpec::float32(), "intensity", totPoints));
        float* position = m_fields[0].as<float>();
        float* intensity = m_fields[1].as<float>();
        for (size_t i = 0; i < totPoints; ++i)
        {
            file.read((char*)position, 3*sizeof(float));
            file.read((char*)intensity, 1*sizeof(float));
            bbox.extendBy(V3d(position[0], position[1], position[2]));
            position += 3;
            intensity += 1;
        }
        m_npoints = totPoints;
    }
#endif
    else
    {
        // Last resort: try loading as text
        if (!loadText(fileName, maxPointCount, m_fields, offset, m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
    // Search for position field: must be named "position" with 3 components.
    m_positionFieldIdx = -1;
    for (size_t i = 0; i < m_fields.size(); ++i)
    {
        if (m_fields[i].name == "position" && m_fields[i].spec.count == 3)
        {
            m_positionFieldIdx = (int)i;
            break;
        }
    }
    if (m_positionFieldIdx == -1)
    {
        g_logger.error("No position field found in file %s", fileName);
        return false;
    }
    m_P = (V3f*)m_fields[m_positionFieldIdx].as<float>();
    setBoundingBox(bbox);
    setOffset(offset);
    setCentroid(centroid);
    emit loadProgress(100);
    g_logger.info("Loaded %d of %d points from file %s in %.2f seconds",
                  m_npoints, totPoints, fileName, loadTimer.elapsed()/1000.0);
    if (totPoints == 0)
    {
        // Empty cloud: install a trivial root node so rendering code has
        // something valid to traverse.
        m_rootNode.reset(new OctreeNode(V3f(0), 1));
        return true;
    }
    // Sort points into octree order.  `inds` starts as the identity
    // permutation and is permuted by makeTree().
    emit loadStepStarted("Sorting points");
    std::unique_ptr<size_t[]> inds(new size_t[m_npoints]);
    for (size_t i = 0; i < m_npoints; ++i)
        inds[i] = i;
    // Expand the bound so that it's cubic.  Not strictly required here, but
    // cubic nodes tend to work better: points are better distributed for
    // LoD and splitting is unbiased.
    Imath::Box3f rootBound(bbox.min - offset, bbox.max - offset);
    V3f diag = rootBound.size();
    float rootRadius = std::max(std::max(diag.x, diag.y), diag.z) / 2;
    ProgressFunc progressFunc(*this);
    m_rootNode.reset(makeTree(0, &inds[0], 0, m_npoints, &m_P[0],
                              rootBound.center(), rootRadius, progressFunc));
    // Reorder point fields into octree order using the permutation from
    // makeTree(); this also drops any over-allocated tail space.
    emit loadStepStarted("Reordering fields");
    for (size_t i = 0; i < m_fields.size(); ++i)
    {
        g_logger.debug("Reordering field %d: %s", i, m_fields[i]);
        reorder(m_fields[i], inds.get(), m_npoints);
        emit loadProgress(int(100*(i+1)/m_fields.size()));
    }
    // reorder() may reallocate the field storage, so refresh the cached
    // position pointer.
    m_P = (V3f*)m_fields[m_positionFieldIdx].as<float>();
    return true;
}
// Load an Atlas project description from the XML file at `file_path` into
// `project`.  The <header> element is parsed first (it configures the rest
// of the load), then terrain/model/grass sections are loaded in document
// order.  Emits loadProgress() with a cumulative percentage after each
// stage.  Returns false on any I/O, XML or section-load failure.
bool ProjectHandler::load(Project *project, const QString &file_path)
{
    this->project=project;
    QFile file(file_path);
    if(!file.open(QIODevice::ReadOnly | QIODevice::Text))
    {
        qDebug()<<"Unable to read project file "+file_path;
        return false;
    }
    QFileInfo fi(file_path);
    project->root_dir=fi.absoluteDir();
    project->project_file=fi.absoluteFilePath();
    project->name="Unknown";
    QString error_str;
    int error_line, error_column;
    QDomDocument doc("AtlasProject");
    if(!doc.setContent(&file,false,&error_str,&error_line,&error_column))
    {
        // Bug fix: a parse failure used to be logged and then ignored,
        // letting the loader continue on an empty/undefined document.
        // Abort instead, and report where the parse failed.
        qDebug()<<"Error in xml file "+file_path+" : "+qPrintable(error_str)
                <<"at line"<<error_line<<"column"<<error_column;
        return false;
    }
    QDomElement root=doc.documentElement();
    if(root.tagName() != "project")
    {
        return false;
    }
    QDomNode child=root.firstChild();
    QString tag_name;
    // Header must be loaded first for setup — exactly one <header> element
    // is required.
    QDomNodeList header_list=root.elementsByTagName("header");
    if(header_list.count()==1)
    {
        QDomElement header=header_list.at(0).toElement();
        if(!parseHeader(header))
            return false;
    }
    else
    {
        qDebug()<<"Project header not found!"<<header_list.count();
        return false;
    }
    completed_percent+=10;
    emit loadProgress(completed_percent);
    QDir dir=project->root_dir;
    // Load the remaining sections in document order; each advances the
    // progress by a fixed share (terrain 50, model 10, grass 20).
    while(!child.isNull())
    {
        tag_name=child.toElement().tagName();
        if(tag_name=="terrain")
        {
            if(!project->terrain_io.load(dir,child.toElement(),&project->map))
                return false;
            completed_percent+=50;
            emit loadProgress(completed_percent);
        }
        else if(tag_name=="model")
        {
            if(!project->model_io.load(dir,child.toElement(),&project->map))
                return false;
            completed_percent+=10;
            emit loadProgress(completed_percent);
        }
        else if(tag_name=="grass")
        {
            if(!project->grass_io.load(dir,child.toElement(),&project->map))
                return false;
            completed_percent+=20;
            emit loadProgress(completed_percent);
        }
        child = child.nextSibling();
    }
    file.close();
    completed_percent=100;
    emit loadProgress(completed_percent);
    return true;
}
/* Load the fields selected by the `fieldsToLoad` bitmask from the torrent's
 * bencoded .resume file into `tor`.  Returns a bitmask of the fields that
 * were actually found and applied (0 if the file couldn't be parsed).
 * The torrent's dirty flag is restored afterwards so that applying resume
 * data never triggers a redundant re-save of the same information. */
static uint64_t loadFromFile(tr_torrent* tor, uint64_t fieldsToLoad)
{
    TR_ASSERT(tr_isTorrent(tor));

    size_t len;
    int64_t i;
    char const* str;
    char* filename;
    tr_variant top;
    bool boolVal;
    uint64_t fieldsLoaded = 0;
    bool const wasDirty = tor->isDirty;
    tr_error* error = NULL;

    filename = getResumeFilename(tor);

    if (!tr_variantFromFile(&top, TR_VARIANT_FMT_BENC, filename, &error))
    {
        tr_logAddTorDbg(tor, "Couldn't read \"%s\": %s", filename, error->message);
        tr_error_free(error);
        tr_free(filename);
        return fieldsLoaded;
    }

    tr_logAddTorDbg(tor, "Read resume file \"%s\"", filename);

    /* Each block below is gated twice: the caller must have requested the
     * field, AND the key must be present in the resume dict. */
    if ((fieldsToLoad & TR_FR_CORRUPT) != 0 && tr_variantDictFindInt(&top, TR_KEY_corrupt, &i))
    {
        tor->corruptPrev = i;
        fieldsLoaded |= TR_FR_CORRUPT;
    }

    if ((fieldsToLoad & (TR_FR_PROGRESS | TR_FR_DOWNLOAD_DIR)) != 0 &&
        tr_variantDictFindStr(&top, TR_KEY_destination, &str, &len) && str != NULL && *str != '\0')
    {
        /* If currentDir pointed at the old downloadDir, keep it in sync
         * with the replacement string. */
        bool const is_current_dir = tor->currentDir == tor->downloadDir;
        tr_free(tor->downloadDir);
        tor->downloadDir = tr_strndup(str, len);

        if (is_current_dir)
        {
            tor->currentDir = tor->downloadDir;
        }

        fieldsLoaded |= TR_FR_DOWNLOAD_DIR;
    }

    if ((fieldsToLoad & (TR_FR_PROGRESS | TR_FR_INCOMPLETE_DIR)) != 0 &&
        tr_variantDictFindStr(&top, TR_KEY_incomplete_dir, &str, &len) && str != NULL && *str != '\0')
    {
        /* Same currentDir bookkeeping as above, for the incomplete dir. */
        bool const is_current_dir = tor->currentDir == tor->incompleteDir;
        tr_free(tor->incompleteDir);
        tor->incompleteDir = tr_strndup(str, len);

        if (is_current_dir)
        {
            tor->currentDir = tor->incompleteDir;
        }

        fieldsLoaded |= TR_FR_INCOMPLETE_DIR;
    }

    if ((fieldsToLoad & TR_FR_DOWNLOADED) != 0 && tr_variantDictFindInt(&top, TR_KEY_downloaded, &i))
    {
        tor->downloadedPrev = i;
        fieldsLoaded |= TR_FR_DOWNLOADED;
    }

    if ((fieldsToLoad & TR_FR_UPLOADED) != 0 && tr_variantDictFindInt(&top, TR_KEY_uploaded, &i))
    {
        tor->uploadedPrev = i;
        fieldsLoaded |= TR_FR_UPLOADED;
    }

    if ((fieldsToLoad & TR_FR_MAX_PEERS) != 0 && tr_variantDictFindInt(&top, TR_KEY_max_peers, &i))
    {
        tor->maxConnectedPeers = i;
        fieldsLoaded |= TR_FR_MAX_PEERS;
    }

    /* The file stores "paused", the torrent stores "running" — inverted. */
    if ((fieldsToLoad & TR_FR_RUN) != 0 && tr_variantDictFindBool(&top, TR_KEY_paused, &boolVal))
    {
        tor->isRunning = !boolVal;
        fieldsLoaded |= TR_FR_RUN;
    }

    if ((fieldsToLoad & TR_FR_ADDED_DATE) != 0 && tr_variantDictFindInt(&top, TR_KEY_added_date, &i))
    {
        tor->addedDate = i;
        fieldsLoaded |= TR_FR_ADDED_DATE;
    }

    if ((fieldsToLoad & TR_FR_DONE_DATE) != 0 && tr_variantDictFindInt(&top, TR_KEY_done_date, &i))
    {
        tor->doneDate = i;
        fieldsLoaded |= TR_FR_DONE_DATE;
    }

    if ((fieldsToLoad & TR_FR_ACTIVITY_DATE) != 0 && tr_variantDictFindInt(&top, TR_KEY_activity_date, &i))
    {
        tr_torrentSetActivityDate(tor, i);
        fieldsLoaded |= TR_FR_ACTIVITY_DATE;
    }

    if ((fieldsToLoad & TR_FR_TIME_SEEDING) != 0 && tr_variantDictFindInt(&top, TR_KEY_seeding_time_seconds, &i))
    {
        tor->secondsSeeding = i;
        fieldsLoaded |= TR_FR_TIME_SEEDING;
    }

    if ((fieldsToLoad & TR_FR_TIME_DOWNLOADING) != 0 && tr_variantDictFindInt(&top, TR_KEY_downloading_time_seconds, &i))
    {
        tor->secondsDownloading = i;
        fieldsLoaded |= TR_FR_TIME_DOWNLOADING;
    }

    if ((fieldsToLoad & TR_FR_BANDWIDTH_PRIORITY) != 0 &&
        tr_variantDictFindInt(&top, TR_KEY_bandwidth_priority, &i) && tr_isPriority(i))
    {
        tr_torrentSetPriority(tor, i);
        fieldsLoaded |= TR_FR_BANDWIDTH_PRIORITY;
    }

    /* Compound fields are delegated to their own loaders, each of which
     * returns the bits it managed to load. */
    if ((fieldsToLoad & TR_FR_PEERS) != 0)
    {
        fieldsLoaded |= loadPeers(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_FILE_PRIORITIES) != 0)
    {
        fieldsLoaded |= loadFilePriorities(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_PROGRESS) != 0)
    {
        fieldsLoaded |= loadProgress(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_DND) != 0)
    {
        fieldsLoaded |= loadDND(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_SPEEDLIMIT) != 0)
    {
        fieldsLoaded |= loadSpeedLimits(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_RATIOLIMIT) != 0)
    {
        fieldsLoaded |= loadRatioLimits(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_IDLELIMIT) != 0)
    {
        fieldsLoaded |= loadIdleLimits(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_FILENAMES) != 0)
    {
        fieldsLoaded |= loadFilenames(&top, tor);
    }

    if ((fieldsToLoad & TR_FR_NAME) != 0)
    {
        fieldsLoaded |= loadName(&top, tor);
    }

    /* loading the resume file triggers a lot of changes,
     * but none of them needs to trigger a re-saving of the
     * same resume information... */
    tor->isDirty = wasDirty;

    tr_variantFree(&top);
    tr_free(filename);
    return fieldsLoaded;
}
// Load a LAS/LAZ point cloud via PDAL or LASlib (selected at compile time
// by DISPLAZ_USE_PDAL).  If the file holds more than maxPointCount points
// it is decimated by keeping one (randomized) point per `decimate`-sized
// block.  Outputs: per-point fields (position, intensity, return info,
// classification, optional RGB), offset applied to stored positions,
// npoints actually stored, totalPoints in the file, bounding box and
// centroid.  Emits loadProgress() during the read.
bool PointArray::loadLas(QString fileName, size_t maxPointCount,
                         std::vector<GeomField>& fields, V3d& offset,
                         size_t& npoints, uint64_t& totalPoints,
                         Imath::Box3d& bbox, V3d& centroid)
{
    V3d Psum(0);
#ifdef DISPLAZ_USE_PDAL
    // Open file
    if (!pdal::FileUtils::fileExists(fileName.toLatin1().constData()))
    {
        g_logger.info("File \"%s\" does not exist", fileName.toStdString() );
        return false;
    }
    std::unique_ptr<pdal::PipelineManager> manager(new pdal::PipelineManager);
    pdal::StageFactory factory;
    std::string driver = factory.inferReaderDriver(fileName.toStdString());
    manager->addReader(driver);
    pdal::Stage* reader = static_cast<pdal::Reader*>(manager->getStage());
    pdal::Options options;
    pdal::Option fname("filename", fileName.toStdString());
    options.add(fname);
    reader->setOptions(options);
    // Runs the whole pipeline eagerly; buffers are fetched below.
    manager->execute();
    pdal::PointBufferSet pbSet = manager->buffers();
    pdal::PointContext context = manager->context();
    bool hasColor = context.hasDim(pdal::Dimension::Id::Red);
    pdal::QuickInfo quickinfo = reader->preview();
    // Figure out how much to decimate the point cloud.
    totalPoints = quickinfo.m_pointCount;
    size_t decimate = totalPoints == 0 ? 1 : 1 + (totalPoints - 1) / maxPointCount;
    if(decimate > 1)
    {
        g_logger.info("Decimating \"%s\" by factor of %d", fileName.toStdString(), decimate);
    }
    npoints = (totalPoints + decimate - 1) / decimate;
    // Offset positions about the bounding box centre so they can be stored
    // as float32 without losing precision.
    pdal::BOX3D pdal_bounds = quickinfo.m_bounds;
    offset = V3d(0.5*(pdal_bounds.minx + pdal_bounds.maxx),
                 0.5*(pdal_bounds.miny + pdal_bounds.maxy),
                 0.5*(pdal_bounds.minz + pdal_bounds.maxz));
    // Attempt to place all data on the same vertical scale, but allow
    // other offsets if the magnitude of z is too large (and we would
    // therefore loose noticable precision by storing the data as floats)
    if (fabs(offset.z) < 10000)
        offset.z = 0;
    // Allocate all arrays
    fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints));
    fields.push_back(GeomField(TypeSpec::uint16_i(), "intensity", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "returnNumber", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "numberOfReturns", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "pointSourceId", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "classification", npoints));
    // Output iterators for the output arrays
    V3f* position = (V3f*)fields[0].as<float>();
    uint16_t* intensity = fields[1].as<uint16_t>();
    uint8_t* returnNumber = fields[2].as<uint8_t>();
    uint8_t* numReturns = fields[3].as<uint8_t>();
    uint8_t* pointSourceId = fields[4].as<uint8_t>();
    uint8_t* classification = fields[5].as<uint8_t>();
    uint16_t* color = 0;
    if (hasColor)
    {
        fields.push_back(GeomField(TypeSpec(TypeSpec::Uint,2,3,TypeSpec::Color), "color", npoints));
        color = fields.back().as<uint16_t>();
    }
    // Decimation state: all points contribute to bbox/centroid, but only
    // the point whose 1-based index equals nextStore is stored.
    size_t readCount = 0;
    size_t storeCount = 0;
    size_t nextDecimateBlock = 1;
    size_t nextStore = 1;
    for (auto st = pbSet.begin(); st != pbSet.end(); ++st)
    // while (size_t numRead = chunkIter->read(buf))
    {
        pdal::PointBufferPtr buf = *st;
        for (size_t i = 0; i < buf->size(); ++i)
        {
            ++readCount;
            V3d P = V3d(buf->getFieldAs<double>(pdal::Dimension::Id::X, i),
                        buf->getFieldAs<double>(pdal::Dimension::Id::Y, i),
                        buf->getFieldAs<double>(pdal::Dimension::Id::Z, i));
            // V3d P = V3d(xDim.applyScaling(buf.getField<int32_t>(xDim, i)),
            //             yDim.applyScaling(buf.getField<int32_t>(yDim, i)),
            //             zDim.applyScaling(buf.getField<int32_t>(zDim, i)));
            bbox.extendBy(P);
            Psum += P;
            if(readCount < nextStore)
                continue;
            ++storeCount;
            // Store the point
            *position++ = P - offset;
            *intensity++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Intensity, i);
            *returnNumber++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::ReturnNumber, i);
            *numReturns++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::NumberOfReturns, i);
            *pointSourceId++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::PointSourceId, i);
            *classification++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::Classification, i);
            // Extract point RGB
            if (hasColor)
            {
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Red, i);
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Green, i);
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Blue, i);
            }
            // Figure out which point will be the next stored point.
            nextDecimateBlock += decimate;
            nextStore = nextDecimateBlock;
            if(decimate > 1)
            {
                // Randomize selected point within block to avoid repeated patterns
                nextStore += (qrand() % decimate);
                if(nextDecimateBlock <= totalPoints && nextStore > totalPoints)
                    nextStore = totalPoints;
            }
        }
        emit loadProgress(100*readCount/totalPoints);
    }
#else
    LASreadOpener lasReadOpener;
#ifdef _WIN32
    // Hack: liblas doesn't like forward slashes as path separators on windows
    fileName = fileName.replace('/', '\\');
#endif
    lasReadOpener.set_file_name(fileName.toLatin1().constData());
    std::unique_ptr<LASreader> lasReader(lasReadOpener.open());
    if(!lasReader)
    {
        g_logger.error("Couldn't open file \"%s\"", fileName);
        return false;
    }
    //std::ofstream dumpFile("points.txt");
    // Figure out how much to decimate the point cloud.  The extended count
    // covers LAS 1.4 files with more than 2^32 points.
    totalPoints = std::max<uint64_t>(lasReader->header.extended_number_of_point_records,
                                     lasReader->header.number_of_point_records);
    size_t decimate = totalPoints == 0 ? 1 : 1 + (totalPoints - 1) / maxPointCount;
    if(decimate > 1)
    {
        g_logger.info("Decimating \"%s\" by factor of %d", fileName.toStdString(), decimate);
    }
    npoints = (totalPoints + decimate - 1) / decimate;
    offset = V3d(lasReader->header.min_x, lasReader->header.min_y, 0);
    // Attempt to place all data on the same vertical scale, but allow other
    // offsets if the magnitude of z is too large (and we would therefore loose
    // noticable precision by storing the data as floats)
    if (fabs(lasReader->header.min_z) > 10000)
        offset.z = lasReader->header.min_z;
    fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints));
    fields.push_back(GeomField(TypeSpec::uint16_i(), "intensity", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "returnNumber", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "numberOfReturns", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "pointSourceId", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "classification", npoints));
    if (totalPoints == 0)
    {
        g_logger.warning("File %s has zero points", fileName);
        return true;
    }
    // Iterate over all points & pull in the data.
    V3f* position = (V3f*)fields[0].as<float>();
    uint16_t* intensity = fields[1].as<uint16_t>();
    uint8_t* returnNumber = fields[2].as<uint8_t>();
    uint8_t* numReturns = fields[3].as<uint8_t>();
    uint8_t* pointSourceId = fields[4].as<uint8_t>();
    uint8_t* classification = fields[5].as<uint8_t>();
    uint64_t readCount = 0;
    uint64_t nextDecimateBlock = 1;
    uint64_t nextStore = 1;
    size_t storeCount = 0;
    // Prime the reader with the first point so have_rgb can be inspected
    // before allocating the optional color field.
    if (!lasReader->read_point())
        return false;
    const LASpoint& point = lasReader->point;
    uint16_t* color = 0;
    if (point.have_rgb)
    {
        fields.push_back(GeomField(TypeSpec(TypeSpec::Uint,2,3,TypeSpec::Color), "color", npoints));
        color = fields.back().as<uint16_t>();
    }
    do
    {
        // Read a point from the las file
        ++readCount;
        if(readCount % 10000 == 0)
            emit loadProgress(100*readCount/totalPoints);
        V3d P = V3d(point.get_x(), point.get_y(), point.get_z());
        bbox.extendBy(P);
        Psum += P;
        if(readCount < nextStore)
            continue;
        ++storeCount;
        // Store the point
        *position++ = P - offset;
        // float intens = float(point.scan_angle_rank) / 40;
        *intensity++ = point.intensity;
        *returnNumber++ = point.return_number;
#       if LAS_TOOLS_VERSION >= 140315
        *numReturns++ = point.number_of_returns;
#       else
        *numReturns++ = point.number_of_returns_of_given_pulse;
#       endif
        *pointSourceId++ = point.point_source_ID;
        // Put flags back in classification byte to avoid memory bloat
        *classification++ = point.classification | (point.synthetic_flag << 5) |
                            (point.keypoint_flag << 6) | (point.withheld_flag << 7);
        // Extract point RGB
        if (color)
        {
            *color++ = point.rgb[0];
            *color++ = point.rgb[1];
            *color++ = point.rgb[2];
        }
        // Figure out which point will be the next stored point.
        nextDecimateBlock += decimate;
        nextStore = nextDecimateBlock;
        if(decimate > 1)
        {
            // Randomize selected point within block to avoid repeated patterns
            nextStore += (qrand() % decimate);
            if(nextDecimateBlock <= totalPoints && nextStore > totalPoints)
                nextStore = totalPoints;
        }
    } while(lasReader->read_point());
    lasReader->close();
#endif
    // Header promised more points than the file delivered: shrink all
    // fields to what was actually stored.
    if (readCount < totalPoints)
    {
        g_logger.warning("Expected %d points in file \"%s\", got %d",
                         totalPoints, fileName, readCount);
        npoints = storeCount;
        // Shrink all fields to fit - these will have wasted space at the end,
        // but that will be fixed during reordering.
        for (size_t i = 0; i < fields.size(); ++i)
            fields[i].size = npoints;
        totalPoints = readCount;
    }
    if (totalPoints > 0)
        centroid = (1.0/totalPoints)*Psum;
    return true;
}
/* Load the fields selected by the `fieldsToLoad` bitmask from the torrent's
 * bencoded resume file into `tor`, falling back to (and migrating from) the
 * legacy fastresume format when the benc file can't be parsed.  Returns a
 * bitmask of the fields actually loaded. */
static uint64_t
loadFromFile( tr_torrent * tor,
              uint64_t     fieldsToLoad )
{
    int64_t      i;
    const char * str;
    uint64_t     fieldsLoaded = 0;
    char *       filename;
    tr_benc      top;

    filename = getResumeFilename( tor );

    /* tr_bencLoadFile returns nonzero on failure. */
    if( tr_bencLoadFile( filename, &top ) )
    {
        tr_tordbg( tor, "Couldn't read \"%s\"; trying old format.", filename );
        fieldsLoaded = tr_fastResumeLoad( tor, fieldsToLoad );

        /* A full load (all fields requested) that succeeded from the old
         * format is migrated: save in the new format, remove the old file. */
        if( ( fieldsLoaded != 0 ) && ( fieldsToLoad == ~(uint64_t)0 ) )
        {
            tr_torrentSaveResume( tor );
            tr_fastResumeRemove( tor );
            tr_tordbg( tor, "Migrated resume file to \"%s\"", filename );
        }

        tr_free( filename );
        return fieldsLoaded;
    }

    tr_tordbg( tor, "Read resume file \"%s\"", filename );

    /* Each field is applied only when requested AND present in the dict. */
    if( ( fieldsToLoad & TR_FR_CORRUPT )
      && tr_bencDictFindInt( &top, KEY_CORRUPT, &i ) )
    {
        tor->corruptPrev = i;
        fieldsLoaded |= TR_FR_CORRUPT;
    }

    if( ( fieldsToLoad & ( TR_FR_PROGRESS | TR_FR_DOWNLOAD_DIR ) )
      && tr_bencDictFindStr( &top, KEY_DOWNLOAD_DIR, &str ) )
    {
        tr_free( tor->downloadDir );
        tor->downloadDir = tr_strdup( str );
        fieldsLoaded |= TR_FR_DOWNLOAD_DIR;
    }

    if( ( fieldsToLoad & TR_FR_DOWNLOADED )
      && tr_bencDictFindInt( &top, KEY_DOWNLOADED, &i ) )
    {
        tor->downloadedPrev = i;
        fieldsLoaded |= TR_FR_DOWNLOADED;
    }

    if( ( fieldsToLoad & TR_FR_UPLOADED )
      && tr_bencDictFindInt( &top, KEY_UPLOADED, &i ) )
    {
        tor->uploadedPrev = i;
        fieldsLoaded |= TR_FR_UPLOADED;
    }

    if( ( fieldsToLoad & TR_FR_MAX_PEERS )
      && tr_bencDictFindInt( &top, KEY_MAX_PEERS, &i ) )
    {
        tor->maxConnectedPeers = i;
        fieldsLoaded |= TR_FR_MAX_PEERS;
    }

    /* The file stores "paused"; the torrent stores "running" — inverted. */
    if( ( fieldsToLoad & TR_FR_RUN )
      && tr_bencDictFindInt( &top, KEY_PAUSED, &i ) )
    {
        tor->isRunning = i ? 0 : 1;
        fieldsLoaded |= TR_FR_RUN;
    }

    if( ( fieldsToLoad & TR_FR_ADDED_DATE )
      && tr_bencDictFindInt( &top, KEY_ADDED_DATE, &i ) )
    {
        tor->addedDate = i;
        fieldsLoaded |= TR_FR_ADDED_DATE;
    }

    if( ( fieldsToLoad & TR_FR_DONE_DATE )
      && tr_bencDictFindInt( &top, KEY_DONE_DATE, &i ) )
    {
        tor->doneDate = i;
        fieldsLoaded |= TR_FR_DONE_DATE;
    }

    if( ( fieldsToLoad & TR_FR_ACTIVITY_DATE )
      && tr_bencDictFindInt( &top, KEY_ACTIVITY_DATE, &i ) )
    {
        tor->activityDate = i;
        fieldsLoaded |= TR_FR_ACTIVITY_DATE;
    }

    /* Compound fields delegate to their own loaders, which return the bits
     * they managed to load. */
    if( fieldsToLoad & TR_FR_PEERS )
        fieldsLoaded |= loadPeers( &top, tor );

    if( fieldsToLoad & TR_FR_PRIORITY )
        fieldsLoaded |= loadPriorities( &top, tor );

    if( fieldsToLoad & TR_FR_PROGRESS )
        fieldsLoaded |= loadProgress( &top, tor );

    if( fieldsToLoad & TR_FR_DND )
        fieldsLoaded |= loadDND( &top, tor );

    if( fieldsToLoad & TR_FR_SPEEDLIMIT )
        fieldsLoaded |= loadSpeedLimits( &top, tor );

    tr_bencFree( &top );
    tr_free( filename );
    return fieldsLoaded;
}
void FrameLoaderClientQt::postProgressEstimateChangedNotification() { if (m_webFrame && m_frame->page()) emit loadProgress(qRound(m_frame->page()->progress()->estimatedProgress() * 100)); }
void CWizFileReader::run() { int nTotal = m_files.count(); for (int i = 0; i < nTotal; i++) { QString strFile = m_files.at(i); QFileInfo fi(strFile); QString strHtml; QStringList textExtList, imageExtList, rtfExtList, docExtList, htmlExtList; textExtList << "txt" << "md" << "markdown" << "mht" << "cpp" << "h"; imageExtList << "jpg" << "png" << "gif" << "tiff" << "jpeg" << "bmp" << "svg"; rtfExtList << "rtf"; docExtList << "doc" << "docx" << "pages"; htmlExtList << "html" << "htm"; #ifdef Q_OS_MAC QStringList webExtList; webExtList << "webarchive"; #endif bool addAttach = false; QString docType = fi.suffix(); if (textExtList.contains(docType,Qt::CaseInsensitive)) { strHtml = loadTextFileToHtml(strFile); } else if (imageExtList.contains(docType,Qt::CaseInsensitive)) { strHtml = loadImageFileToHtml(strFile); } else if (htmlExtList.contains(docType, Qt::CaseInsensitive)) { strHtml = loadHtmlFileToHtml(strFile); QString strTitle = WizExtractFileName(strFile); emit htmlFileloaded(strFile, strHtml, strTitle); continue; } #ifdef Q_OS_MAC else if (rtfExtList.contains(docType, Qt::CaseInsensitive)) { if (!documentToHtml(strFile, RTFTextDocumentType, strHtml)) continue; WizGetBodyContentFromHtml(strHtml, true); addAttach = true; } else if (docExtList.contains(docType)) { if (!documentToHtml(strFile, DocFormatTextDocumentType, strHtml)) continue; WizGetBodyContentFromHtml(strHtml, true); addAttach = true; } else if (webExtList.contains(docType)) { if (!documentToHtml(strFile, WebArchiveTextDocumentType, strHtml)) continue; WizGetBodyContentFromHtml(strHtml, true); } else { emit fileLoadFailed(strFile); } #endif QString strTitle = WizExtractFileName(strFile); if (addAttach) { emit richTextFileLoaded(strHtml, strTitle, strFile); } else if (!strHtml.isEmpty()) { emit fileLoaded(strHtml, strTitle); } emit loadProgress(nTotal, i + 1); } emit loadFinished(); m_files.clear(); }
// moc-generated meta-call dispatcher for QWebView — routes method
// invocations and property read/write/reset calls after QWidget has
// consumed its own ids.  Do not hand-edit the case tables; their order
// must match the signal/slot and property declaration order in the class.
int QWebView::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    // Base class first; negative means the id was fully consumed there.
    _id = QWidget::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod)
    {
        // _a[1..] hold type-erased pointers to the marshalled arguments.
        switch (_id)
        {
        case 0: loadStarted(); break;
        case 1: loadProgress((*reinterpret_cast< int(*)>(_a[1]))); break;
        case 2: loadFinished((*reinterpret_cast< bool(*)>(_a[1]))); break;
        case 3: titleChanged((*reinterpret_cast< const QString(*)>(_a[1]))); break;
        case 4: statusBarMessage((*reinterpret_cast< const QString(*)>(_a[1]))); break;
        case 5: linkClicked((*reinterpret_cast< const QUrl(*)>(_a[1]))); break;
        case 6: selectionChanged(); break;
        case 7: iconChanged(); break;
        case 8: urlChanged((*reinterpret_cast< const QUrl(*)>(_a[1]))); break;
        case 9: stop(); break;
        case 10: back(); break;
        case 11: forward(); break;
        case 12: reload(); break;
        case 13: print((*reinterpret_cast< QPrinter*(*)>(_a[1]))); break;
        case 14: d->_q_pageDestroyed(); break;
        default: ;
        }
        // Rebase the id for further-derived classes (15 methods here).
        _id -= 15;
    }
#ifndef QT_NO_PROPERTIES
    else if (_c == QMetaObject::ReadProperty)
    {
        // _a[0] points at storage for the property value being read.
        void *_v = _a[0];
        switch (_id)
        {
        case 0: *reinterpret_cast< QString*>(_v) = title(); break;
        case 1: *reinterpret_cast< QUrl*>(_v) = url(); break;
        case 2: *reinterpret_cast< QIcon*>(_v) = icon(); break;
        case 3: *reinterpret_cast< QString*>(_v) = selectedText(); break;
        case 4: *reinterpret_cast< bool*>(_v) = isModified(); break;
        case 5: *reinterpret_cast< qreal*>(_v) = textSizeMultiplier(); break;
        case 6: *reinterpret_cast< qreal*>(_v) = zoomFactor(); break;
        case 7: *reinterpret_cast<int*>(_v) = QFlag(renderHints()); break;
        }
        _id -= 8;
    }
    else if (_c == QMetaObject::WriteProperty)
    {
        // Only the writable subset of the 8 properties appears here.
        void *_v = _a[0];
        switch (_id)
        {
        case 1: setUrl(*reinterpret_cast< QUrl*>(_v)); break;
        case 5: setTextSizeMultiplier(*reinterpret_cast< qreal*>(_v)); break;
        case 6: setZoomFactor(*reinterpret_cast< qreal*>(_v)); break;
        case 7: setRenderHints(QFlag(*reinterpret_cast<int*>(_v))); break;
        }
        _id -= 8;
    }
    else if (_c == QMetaObject::ResetProperty)
    {
        _id -= 8;
    }
    else if (_c == QMetaObject::QueryPropertyDesignable)
    {
        _id -= 8;
    }
    else if (_c == QMetaObject::QueryPropertyScriptable)
    {
        _id -= 8;
    }
    else if (_c == QMetaObject::QueryPropertyStored)
    {
        _id -= 8;
    }
    else if (_c == QMetaObject::QueryPropertyEditable)
    {
        _id -= 8;
    }
    else if (_c == QMetaObject::QueryPropertyUser)
    {
        _id -= 8;
    }
#endif // QT_NO_PROPERTIES
    return _id;
}
// Forwards the network reply's raw byte counters as int-based progress
// signals (maximum first, then the current value).
void AMPWebView::slotReplyDownloadProgress( qint64 progress, qint64 total )
{
    const int maximum = static_cast<int>(total);
    const int current = static_cast<int>(progress);

    emit progressMaximum( maximum );
    emit loadProgress( current );
}
/** * The main import function... */ int CachegrindLoader::loadInternal(TraceData* data, QIODevice* device, const QString& filename) { if (!data || !device) return 0; _data = data; _filename = filename; _lineNo = 0; loadStart(_filename); FixFile file(device, _filename); if (!file.exists()) { loadFinished(QStringLiteral("File does not exist")); return 0; } int statusProgress = 0; #if USE_FIXCOST // FixCost Memory Pool FixPool* pool = _data->fixPool(); #endif _part = 0; partsAdded = 0; prepareNewPart(); FixString line; char c; // current position nextLineType = SelfCost; // default if there is no "positions:" line hasLineInfo = true; hasAddrInfo = false; while (file.nextLine(line)) { _lineNo++; #if TRACE_LOADER qDebug() << "[CachegrindLoader] " << _filename << ":" << _lineNo << " - '" << QString(line) << "'"; #endif // if we cannot strip a character, this was an empty line if (!line.first(c)) continue; if (c <= '9') { if (c == '#') continue; // parse position(s) if (!parsePosition(line, currentPos)) { error(QStringLiteral("Invalid position specification '%1'").arg(line)); continue; } // go through after big switch } else { // if (c > '9') line.stripFirst(c); /* in order of probability */ switch(c) { case 'f': // fl= if (line.stripPrefix("l=")) { setFile(line); // this is the default for new functions currentFunctionFile = currentFile; continue; } // fi=, fe= if (line.stripPrefix("i=") || line.stripPrefix("e=")) { setFile(line); continue; } // fn= if (line.stripPrefix("n=")) { if (currentFile != currentFunctionFile) currentFile = currentFunctionFile; setFunction(line); // on a new function, update status int progress = (int)(100.0 * file.current() / file.len() +.5); if (progress != statusProgress) { statusProgress = progress; /* When this signal is connected, it most probably * should lead to GUI update. Thus, when multiple * "long operations" (like file loading) are in progress, * this can temporarly switch to another operation. 
*/ loadProgress(statusProgress); } continue; } break; case 'c': // cob= if (line.stripPrefix("ob=")) { setCalledObject(line); continue; } // cfi= / cfl= if (line.stripPrefix("fl=") || line.stripPrefix("fi=")) { setCalledFile(line); continue; } // cfn= if (line.stripPrefix("fn=")) { setCalledFunction(line); continue; } // calls= if (line.stripPrefix("alls=")) { // ignore long lines... line.stripUInt64(currentCallCount); nextLineType = CallCost; continue; } // cmd: if (line.stripPrefix("md:")) { QString command = QString(line).trimmed(); if (!_data->command().isEmpty() && _data->command() != command) { error(QStringLiteral("Redefined command, was '%1'").arg(_data->command())); } _data->setCommand(command); continue; } // creator: if (line.stripPrefix("reator:")) { // ignore ... continue; } break; case 'j': // jcnd= if (line.stripPrefix("cnd=")) { bool valid; valid = line.stripUInt64(jumpsFollowed) && line.stripPrefix("/") && line.stripUInt64(jumpsExecuted) && parsePosition(line, targetPos); if (!valid) { error(QStringLiteral("Invalid line after 'jcnd'")); } else nextLineType = CondJump; continue; } if (line.stripPrefix("ump=")) { bool valid; valid = line.stripUInt64(jumpsExecuted) && parsePosition(line, targetPos); if (!valid) { error(QStringLiteral("Invalid line after 'jump'")); } else nextLineType = BoringJump; continue; } // jfi= if (line.stripPrefix("fi=")) { currentJumpToFile = compressedFile(line); continue; } // jfn= if (line.stripPrefix("fn=")) { if (!currentJumpToFile) { // !=0 as functions needs file currentJumpToFile = currentFile; } currentJumpToFunction = compressedFunction(line, currentJumpToFile, currentObject); continue; } break; case 'o': // ob= if (line.stripPrefix("b=")) { setObject(line); continue; } break; case '#': continue; case 'a': // "arch: arm" if (line.stripPrefix("rch: arm")) { TraceData::Arch a = _data->architecture(); if ((a != TraceData::ArchUnknown) && (a != TraceData::ArchARM)) { error(QStringLiteral("Redefined architecture!")); } 
_data->setArchitecture(TraceData::ArchARM); continue; } break; case 't': // totals: if (line.stripPrefix("otals:")) continue; // thread: if (line.stripPrefix("hread:")) { prepareNewPart(); _part->setThreadID(QString(line).toInt()); continue; } // timeframe (BB): if (line.stripPrefix("imeframe (BB):")) { _part->setTimeframe(line); continue; } break; case 'd': // desc: if (line.stripPrefix("esc:")) { line.stripSurroundingSpaces(); // desc: Trigger: if (line.stripPrefix("Trigger:")) { _part->setTrigger(line); } continue; } break; case 'e': // events: if (line.stripPrefix("vents:")) { prepareNewPart(); mapping = _data->eventTypes()->createMapping(line); _part->setEventMapping(mapping); continue; } // event:<name>[=<formula>][:<long name>] if (line.stripPrefix("vent:")) { line.stripSurroundingSpaces(); FixString e, f, l; if (!line.stripName(e)) { error(QStringLiteral("Invalid event")); continue; } line.stripSpaces(); if (!line.stripFirst(c)) continue; if (c=='=') f = line.stripUntil(':'); line.stripSpaces(); // add to known cost types if (line.isEmpty()) line = e; EventType::add(new EventType(e,line,f)); continue; } break; case 'p': // part: if (line.stripPrefix("art:")) { prepareNewPart(); _part->setPartNumber(QString(line).toInt()); continue; } // pid: if (line.stripPrefix("id:")) { prepareNewPart(); _part->setProcessID(QString(line).toInt()); continue; } // positions: if (line.stripPrefix("ositions:")) { prepareNewPart(); QString positions(line); hasLineInfo = positions.contains(QStringLiteral("line")); hasAddrInfo = positions.contains(QStringLiteral("instr")); continue; } break; case 'v': // version: if (line.stripPrefix("ersion:")) { // ignore for now continue; } break; case 's': // summary: if (line.stripPrefix("ummary:")) { if (!mapping) { error(QStringLiteral("No event line found. 
Skipping file")); delete _part; return false; } _part->totals()->set(mapping, line); continue; } case 'r': // rcalls= (deprecated) if (line.stripPrefix("calls=")) { // handle like normal calls: we need the sum of call count // recursive cost is discarded in cycle detection line.stripUInt64(currentCallCount); nextLineType = CallCost; warning(QStringLiteral("Old file format using deprecated 'rcalls'")); continue; } break; default: break; } error(QStringLiteral("Invalid line '%1%2'").arg(c).arg(line)); continue; } if (!mapping) { error(QStringLiteral("No event line found. Skipping file")); delete _part; return false; } // for a cost line, we always need a current function ensureFunction(); if (!currentFunctionSource || (currentFunctionSource->file() != currentFile)) { currentFunctionSource = currentFunction->sourceFile(currentFile, true); } #if !USE_FIXCOST if (hasAddrInfo) { if (!currentInstr || (currentInstr->addr() != currentPos.fromAddr)) { currentInstr = currentFunction->instr(currentPos.fromAddr, true); if (!currentInstr) { error(QString("Invalid address '%1'").arg(currentPos.fromAddr.toString())); continue; } currentPartInstr = currentInstr->partInstr(_part, currentPartFunction); } } if (hasLineInfo) { if (!currentLine || (currentLine->lineno() != currentPos.fromLine)) { currentLine = currentFunctionSource->line(currentPos.fromLine, true); currentPartLine = currentLine->partLine(_part, currentPartFunction); } if (hasAddrInfo && currentInstr) currentInstr->setLine(currentLine); } #endif #if TRACE_LOADER qDebug() << _filename << ":" << _lineNo; qDebug() << " currentInstr " << (currentInstr ? qPrintable(currentInstr->toString()) : "."); qDebug() << " currentLine " << (currentLine ? qPrintable(currentLine->toString()) : ".") << "( file " << currentFile->name() << ")"; qDebug() << " currentFunction " << qPrintable(currentFunction->prettyName()); qDebug() << " currentCalled " << (currentCalledFunction ? 
qPrintable(currentCalledFunction->prettyName()) : "."); #endif // create cost item if (nextLineType == SelfCost) { #if USE_FIXCOST new (pool) FixCost(_part, pool, currentFunctionSource, currentPos, currentPartFunction, line); #else if (hasAddrInfo) { TracePartInstr* partInstr; partInstr = currentInstr->partInstr(_part, currentPartFunction); if (hasLineInfo) { // we need to set <line> back after reading for the line int l = line.len(); const char* s = line.ascii(); partInstr->addCost(mapping, line); line.set(s,l); } else partInstr->addCost(mapping, line); } if (hasLineInfo) { TracePartLine* partLine; partLine = currentLine->partLine(_part, currentPartFunction); partLine->addCost(mapping, line); } #endif if (!line.isEmpty()) { error(QStringLiteral("Garbage at end of cost line ('%1')").arg(line)); } } else if (nextLineType == CallCost) { nextLineType = SelfCost; TraceCall* calling = currentFunction->calling(currentCalledFunction); TracePartCall* partCalling = calling->partCall(_part, currentPartFunction, currentCalledPartFunction); #if USE_FIXCOST FixCallCost* fcc; fcc = new (pool) FixCallCost(_part, pool, currentFunctionSource, hasLineInfo ? currentPos.fromLine : 0, hasAddrInfo ? 
currentPos.fromAddr : Addr(0), partCalling, currentCallCount, line); fcc->setMax(_data->callMax()); _data->updateMaxCallCount(fcc->callCount()); #else if (hasAddrInfo) { TraceInstrCall* instrCall; TracePartInstrCall* partInstrCall; instrCall = calling->instrCall(currentInstr); partInstrCall = instrCall->partInstrCall(_part, partCalling); partInstrCall->addCallCount(currentCallCount); if (hasLineInfo) { // we need to set <line> back after reading for the line int l = line.len(); const char* s = line.ascii(); partInstrCall->addCost(mapping, line); line.set(s,l); } else partInstrCall->addCost(mapping, line); // update maximum of call cost _data->callMax()->maxCost(partInstrCall); _data->updateMaxCallCount(partInstrCall->callCount()); } if (hasLineInfo) { TraceLineCall* lineCall; TracePartLineCall* partLineCall; lineCall = calling->lineCall(currentLine); partLineCall = lineCall->partLineCall(_part, partCalling); partLineCall->addCallCount(currentCallCount); partLineCall->addCost(mapping, line); // update maximum of call cost _data->callMax()->maxCost(partLineCall); _data->updateMaxCallCount(partLineCall->callCount()); } #endif currentCalledFile = 0; currentCalledPartFile = 0; currentCalledObject = 0; currentCalledPartObject = 0; currentCallCount = 0; if (!line.isEmpty()) { error(QStringLiteral("Garbage at end of call cost line ('%1')").arg(line)); } } else { // (nextLineType == BoringJump || nextLineType == CondJump) TraceFunctionSource* targetSource; if (!currentJumpToFunction) currentJumpToFunction = currentFunction; targetSource = (currentJumpToFile) ? currentJumpToFunction->sourceFile(currentJumpToFile, true) : currentFunctionSource; #if USE_FIXCOST new (pool) FixJump(_part, pool, /* source */ hasLineInfo ? currentPos.fromLine : 0, hasAddrInfo ? currentPos.fromAddr : 0, currentPartFunction, currentFunctionSource, /* target */ hasLineInfo ? targetPos.fromLine : 0, hasAddrInfo ? 
targetPos.fromAddr : Addr(0), currentJumpToFunction, targetSource, (nextLineType == CondJump), jumpsExecuted, jumpsFollowed); #else if (hasAddrInfo) { TraceInstr* jumpToInstr; TraceInstrJump* instrJump; TracePartInstrJump* partInstrJump; jumpToInstr = currentJumpToFunction->instr(targetPos.fromAddr, true); instrJump = currentInstr->instrJump(jumpToInstr, (nextLineType == CondJump)); partInstrJump = instrJump->partInstrJump(_part); partInstrJump->addExecutedCount(jumpsExecuted); if (nextLineType == CondJump) partInstrJump->addFollowedCount(jumpsFollowed); } if (hasLineInfo) { TraceLine* jumpToLine; TraceLineJump* lineJump; TracePartLineJump* partLineJump; jumpToLine = targetSource->line(targetPos.fromLine, true); lineJump = currentLine->lineJump(jumpToLine, (nextLineType == CondJump)); partLineJump = lineJump->partLineJump(_part); partLineJump->addExecutedCount(jumpsExecuted); if (nextLineType == CondJump) partLineJump->addFollowedCount(jumpsFollowed); } #endif if (0) { qDebug() << _filename << ":" << _lineNo << " - jump from 0x" << currentPos.fromAddr.toString() << " (line " << currentPos.fromLine << ") to 0x" << targetPos.fromAddr.toString() << " (line " << targetPos.fromLine << ")"; if (nextLineType == BoringJump) qDebug() << " Boring Jump, count " << jumpsExecuted.pretty(); else qDebug() << " Cond. Jump, followed " << jumpsFollowed.pretty() << ", executed " << jumpsExecuted.pretty(); } nextLineType = SelfCost; currentJumpToFunction = 0; currentJumpToFile = 0; if (!line.isEmpty()) { error(QStringLiteral("Garbage at end of jump cost line ('%1')").arg(line)); } } } loadFinished(); if (mapping) { _part->invalidate(); _part->totals()->clear(); _part->totals()->addCost(_part); data->addPart(_part); partsAdded++; } else { delete _part; } device->close(); return partsAdded; }
/* Restore torrent state from its .resume file.
 *
 * Only the fields named in `fieldsToLoad` are applied to `tor`.
 * Returns a bitmask of the fields that were actually found in the
 * file and loaded; 0 if the resume file could not be read. */
static uint64_t
loadFromFile( tr_torrent * tor, uint64_t fieldsToLoad )
{
    uint64_t      loaded = 0;
    int64_t       intVal;
    const char *  strVal;
    tr_bool       boolVal;
    tr_benc       dict;
    char *        path;
    const tr_bool dirtyBefore = tor->isDirty;

    assert( tr_isTorrent( tor ) );

    path = getResumeFilename( tor );

    /* a nonzero return means the bencoded file couldn't be loaded */
    if( tr_bencLoadFile( &dict, TR_FMT_BENC, path ) )
    {
        tr_tordbg( tor, "Couldn't read \"%s\"", path );
        tr_free( path );
        return loaded;
    }

    tr_tordbg( tor, "Read resume file \"%s\"", path );

    if( ( fieldsToLoad & TR_FR_CORRUPT )
      && tr_bencDictFindInt( &dict, KEY_CORRUPT, &intVal ) )
    {
        tor->corruptPrev = intVal;
        loaded |= TR_FR_CORRUPT;
    }

    if( ( fieldsToLoad & ( TR_FR_PROGRESS | TR_FR_DOWNLOAD_DIR ) )
      && ( tr_bencDictFindStr( &dict, KEY_DOWNLOAD_DIR, &strVal ) )
      && ( strVal && *strVal ) )
    {
        tr_free( tor->downloadDir );
        tor->downloadDir = tr_strdup( strVal );
        loaded |= TR_FR_DOWNLOAD_DIR;
    }

    if( ( fieldsToLoad & ( TR_FR_PROGRESS | TR_FR_INCOMPLETE_DIR ) )
      && ( tr_bencDictFindStr( &dict, KEY_INCOMPLETE_DIR, &strVal ) )
      && ( strVal && *strVal ) )
    {
        tr_free( tor->incompleteDir );
        tor->incompleteDir = tr_strdup( strVal );
        loaded |= TR_FR_INCOMPLETE_DIR;
    }

    if( ( fieldsToLoad & TR_FR_DOWNLOADED )
      && tr_bencDictFindInt( &dict, KEY_DOWNLOADED, &intVal ) )
    {
        tor->downloadedPrev = intVal;
        loaded |= TR_FR_DOWNLOADED;
    }

    if( ( fieldsToLoad & TR_FR_UPLOADED )
      && tr_bencDictFindInt( &dict, KEY_UPLOADED, &intVal ) )
    {
        tor->uploadedPrev = intVal;
        loaded |= TR_FR_UPLOADED;
    }

    if( ( fieldsToLoad & TR_FR_MAX_PEERS )
      && tr_bencDictFindInt( &dict, KEY_MAX_PEERS, &intVal ) )
    {
        tor->maxConnectedPeers = intVal;
        loaded |= TR_FR_MAX_PEERS;
    }

    /* the file stores "paused", the torrent stores "running" */
    if( ( fieldsToLoad & TR_FR_RUN )
      && tr_bencDictFindBool( &dict, KEY_PAUSED, &boolVal ) )
    {
        tor->isRunning = !boolVal;
        loaded |= TR_FR_RUN;
    }

    if( ( fieldsToLoad & TR_FR_ADDED_DATE )
      && tr_bencDictFindInt( &dict, KEY_ADDED_DATE, &intVal ) )
    {
        tor->addedDate = intVal;
        loaded |= TR_FR_ADDED_DATE;
    }

    if( ( fieldsToLoad & TR_FR_DONE_DATE )
      && tr_bencDictFindInt( &dict, KEY_DONE_DATE, &intVal ) )
    {
        tor->doneDate = intVal;
        loaded |= TR_FR_DONE_DATE;
    }

    if( ( fieldsToLoad & TR_FR_ACTIVITY_DATE )
      && tr_bencDictFindInt( &dict, KEY_ACTIVITY_DATE, &intVal ) )
    {
        tr_torrentSetActivityDate( tor, intVal );
        loaded |= TR_FR_ACTIVITY_DATE;
    }

    if( ( fieldsToLoad & TR_FR_TIME_SEEDING )
      && tr_bencDictFindInt( &dict, KEY_TIME_SEEDING, &intVal ) )
    {
        tor->secondsSeeding = intVal;
        loaded |= TR_FR_TIME_SEEDING;
    }

    if( ( fieldsToLoad & TR_FR_TIME_DOWNLOADING )
      && tr_bencDictFindInt( &dict, KEY_TIME_DOWNLOADING, &intVal ) )
    {
        tor->secondsDownloading = intVal;
        loaded |= TR_FR_TIME_DOWNLOADING;
    }

    if( ( fieldsToLoad & TR_FR_BANDWIDTH_PRIORITY )
      && tr_bencDictFindInt( &dict, KEY_BANDWIDTH_PRIORITY, &intVal )
      && tr_isPriority( intVal ) )
    {
        tr_torrentSetPriority( tor, intVal );
        loaded |= TR_FR_BANDWIDTH_PRIORITY;
    }

    /* the compound fields have dedicated loaders */
    if( fieldsToLoad & TR_FR_PEERS )
        loaded |= loadPeers( &dict, tor );

    if( fieldsToLoad & TR_FR_FILE_PRIORITIES )
        loaded |= loadFilePriorities( &dict, tor );

    if( fieldsToLoad & TR_FR_PROGRESS )
        loaded |= loadProgress( &dict, tor );

    if( fieldsToLoad & TR_FR_DND )
        loaded |= loadDND( &dict, tor );

    if( fieldsToLoad & TR_FR_SPEEDLIMIT )
        loaded |= loadSpeedLimits( &dict, tor );

    if( fieldsToLoad & TR_FR_RATIOLIMIT )
        loaded |= loadRatioLimits( &dict, tor );

    if( fieldsToLoad & TR_FR_IDLELIMIT )
        loaded |= loadIdleLimits( &dict, tor );

    /* loading the resume file triggers a lot of changes, but none of
     * them needs to trigger a re-saving of the same resume information,
     * so restore the dirty flag we saw on entry */
    tor->isDirty = dirtyBefore;

    tr_bencFree( &dict );
    tr_free( path );
    return loaded;
}
bool SpriteState::load(const QString& filename, SpriteStateError* err) { setError(err, ErrNone); if (filename.isEmpty()) { setError(err, ErrNullFilename); return false; } QFile file(filename); if (!file.exists()) { setError(err, ErrFileDoesNotExist); return false; } if (!file.open(QFile::ReadOnly | QFile::Text)) { qDebug() << "Error: SpriteState::load(): could not open" << filename << "in ro mode"; setError(err, ErrCantOpenReadMode); return false; } clear(); enum { StCheckVersion = 0, StNoToken = 1, StTokenImages = 2, StTokenFrames = 3, StTokenAnimations = 4, StTokenAframes = 5, StTokenHeader = 6, StError = 999, } state = StCheckVersion; QTextStream stream(&file); QString line; QStringList tokens; int lineNumber = -1; InputImage tmpImage; LvkFrame tmpFrame; LvkAnimation tmpAni; LvkAframe tmpAframe; Id currentAniId = NullId; do { line = stream.readLine().trimmed(); ++lineNumber; if (line.isNull()) { break; /* end of stream */ } if (line.isEmpty() && state != StTokenHeader) { continue; } if (line.startsWith('#') && state != StTokenHeader) { continue; } if (state == StCheckVersion) { if (line == HEADER_VER_01 || line == HEADER_VER_02 || line == HEADER_VER_03 || line == HEADER_VER_04) { state = StNoToken; continue; } else { qDebug() << "Error: SpriteState::load(): Invalid LvkSprite file format" << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; break; } } switch (state) { case StNoToken: if (line == "images(") { state = StTokenImages; } else if (line == "frames(") { state = StTokenFrames; } else if (line == "animations(") { state = StTokenAnimations; } else if (line == "custom_header(") { state = StTokenHeader; } else if (line == "aframes(") { qDebug() << "Error: SpriteState::load(): Unspected token" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } else { qDebug() << "Error: SpriteState::load(): Unknown token" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } break; 
case StTokenImages: if (line == ")") { state = StNoToken; } else { if (tmpImage.fromString(line)) { emit(loadProgress(tr("Image ") + tmpImage.filename)); addImage(tmpImage); } else { qDebug() << "Error: SpriteState::load(): invalid image entry" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } } break; case StTokenFrames: if (line == ")") { state = StNoToken; } else { if (tmpFrame.fromString(line)) { addFrame(tmpFrame); emit(loadProgress(tr("Frame ") + tmpImage.filename)); } else { qDebug() << "Error: SpriteState::load(): invalid frame entry" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } } break; case StTokenAnimations: if (line == ")") { currentAniId = NullId; state = StNoToken; } else if (line == "aframes(") { if (currentAniId != NullId) { state = StTokenAframes; } else { qDebug() << "Error: SpriteState::load(): null animation id" << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } } else { if (tmpAni.fromString(line)) { currentAniId = tmpAni.id; addAnimation(tmpAni); emit(loadProgress(tr("Animation ") + tmpAni.name)); } else { qDebug() << "Error: SpriteState::load(): invalid animation entry" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } } break; case StTokenAframes: if (line == ")") { state = StTokenAnimations; } else { if (tmpAframe.fromString(line)) { addAframe(tmpAframe, currentAniId); } else { qDebug() << "Error: SpriteState::load(): invalid aframe entry" << line << "at line" << lineNumber; setError(err, ErrInvalidFormat); state = StError; } } break; case StTokenHeader: if (line == ")") { state = StNoToken; } else { _customHeader.append(line).append("\n"); } break; default: qDebug() << "Warning: SpriteState::load(): Unhandled state " << (int)state << "at line" << lineNumber; break; } } while (true); file.close(); return (state != StError); }