int Pdfsync::SourceToDoc(const WCHAR* srcfilename, UINT line, UINT col, UINT* page, Vec<RectI>& rects) {
    if (IsIndexDiscarded())
        if (RebuildIndex() != PDFSYNCERR_SUCCESS)
            return PDFSYNCERR_SYNCFILE_CANNOT_BE_OPENED;

    Vec<size_t> found_records;
    UINT ret = SourceToRecord(srcfilename, line, col, found_records);
    if (ret != PDFSYNCERR_SUCCESS || found_records.size() == 0)
        return ret;

    rects.Reset();

    // records have been found for the desired source position:
    // we now find the page and positions in the PDF corresponding to these found records
    UINT firstPage = UINT_MAX;
    for (size_t i = 0; i < points.size(); i++) {
        if (!found_records.Contains(points.at(i).record))
            continue;
        if (firstPage != UINT_MAX && firstPage != points.at(i).page)
            continue;
        firstPage = *page = points.at(i).page;
        RectD rc(SYNC_TO_PDF_COORDINATE(points.at(i).x), SYNC_TO_PDF_COORDINATE(points.at(i).y), MARK_SIZE, MARK_SIZE);
        // PdfSync coordinates are y-inversed
        RectD mbox = engine->PageMediabox(firstPage);
        rc.y = mbox.dy - (rc.y + rc.dy);
        rects.Push(rc.Round());
    }

    if (rects.size() > 0)
        return PDFSYNCERR_SUCCESS;
    // the record does not correspond to any point in the PDF: this is possible...
    return PDFSYNCERR_NOSYNCPOINT_FOR_LINERECORD;
}
int Pdfsync::DocToSource(UINT pageNo, PointI pt, AutoFreeW& filename, UINT* line, UINT* col) {
    if (IsIndexDiscarded())
        if (RebuildIndex() != PDFSYNCERR_SUCCESS)
            return PDFSYNCERR_SYNCFILE_CANNOT_BE_OPENED;

    // find the entry in the index corresponding to this page
    if (pageNo <= 0 || pageNo >= sheetIndex.size() || pageNo > (UINT)engine->PageCount())
        return PDFSYNCERR_INVALID_PAGE_NUMBER;

    // PdfSync coordinates are y-inversed
    RectI mbox = engine->PageMediabox(pageNo).Round();
    pt.y = mbox.dy - pt.y;

    // distance to the closest pdf location (in the range <PDFSYNC_EPSILON_SQUARE)
    UINT closest_xydist = UINT_MAX;
    UINT selected_record = UINT_MAX;
    // If no record is found within a distance^2 of PDFSYNC_EPSILON_SQUARE
    // (selected_record == -1) then we pick up the record that is closest
    // vertically to the hit-point.
    UINT closest_ydist = UINT_MAX;        // vertical distance between the hit point and the vertically-closest record
    UINT closest_xdist = UINT_MAX;        // horizontal distance between the hit point and the vertically-closest record
    UINT closest_ydist_record = UINT_MAX; // vertically-closest record

    // read all the sections of 'p' declarations for this pdf sheet
    for (size_t i = sheetIndex.at(pageNo); i < points.size() && points.at(i).page == pageNo; i++) {
        // check whether it is closer than the closest point found so far
        UINT dx = abs(pt.x - (int)SYNC_TO_PDF_COORDINATE(points.at(i).x));
        UINT dy = abs(pt.y - (int)SYNC_TO_PDF_COORDINATE(points.at(i).y));
        UINT dist = dx * dx + dy * dy;
        if (dist < PDFSYNC_EPSILON_SQUARE && dist < closest_xydist) {
            selected_record = points.at(i).record;
            closest_xydist = dist;
        } else if ((closest_xydist == UINT_MAX) && dy < PDFSYNC_EPSILON_Y &&
                   (dy < closest_ydist || (dy == closest_ydist && dx < closest_xdist))) {
            closest_ydist_record = points.at(i).record;
            closest_ydist = dy;
            closest_xdist = dx;
        }
    }

    if (selected_record == UINT_MAX)
        selected_record = closest_ydist_record;
    if (selected_record == UINT_MAX)
        return PDFSYNCERR_NO_SYNC_AT_LOCATION; // no record was found close enough to the hit point

    // We have a record number, we need to find its declaration ('l ...') in the syncfile
    PdfsyncLine cmp;
    cmp.record = selected_record;
    PdfsyncLine* found =
        (PdfsyncLine*)bsearch(&cmp, lines.LendData(), lines.size(), sizeof(PdfsyncLine), cmpLineRecords);
    AssertCrash(found);
    if (!found)
        return PDFSYNCERR_NO_SYNC_AT_LOCATION;

    filename.SetCopy(srcfiles.at(found->file));
    *line = found->line;
    *col = found->column;

    return PDFSYNCERR_SUCCESS;
}
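// A minimal, self-contained sketch of the two-tier hit-point matching used in
// DocToSource above: an exact squared-distance match within the epsilon-square
// threshold wins, otherwise the vertically closest point (within a y-epsilon)
// is used as a fallback, with ties broken by horizontal distance. SyncPoint and
// PickClosestRecord are hypothetical names; the thresholds mirror
// PDFSYNC_EPSILON_SQUARE / PDFSYNC_EPSILON_Y but are passed in here.
#include <climits>
#include <cstdlib>
#include <vector>

struct SyncPoint {
    unsigned record;
    int x, y; // already converted to PDF coordinates, y-inverted
};

// returns UINT_MAX when nothing is close enough to the hit point
unsigned PickClosestRecord(const std::vector<SyncPoint>& pts, int hitX, int hitY,
                           unsigned epsilonSquare, unsigned epsilonY) {
    unsigned bestXY = UINT_MAX, selected = UINT_MAX;
    unsigned bestY = UINT_MAX, bestX = UINT_MAX, fallback = UINT_MAX;
    for (const SyncPoint& p : pts) {
        unsigned dx = std::abs(hitX - p.x);
        unsigned dy = std::abs(hitY - p.y);
        unsigned dist = dx * dx + dy * dy;
        if (dist < epsilonSquare && dist < bestXY) {
            // first tier: true 2D proximity
            selected = p.record;
            bestXY = dist;
        } else if (bestXY == UINT_MAX && dy < epsilonY &&
                   (dy < bestY || (dy == bestY && dx < bestX))) {
            // second tier: vertically close, prefer smaller horizontal distance on ties
            fallback = p.record;
            bestY = dy;
            bestX = dx;
        }
    }
    return selected != UINT_MAX ? selected : fallback;
}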
static bool IsInRange(Vec<PageRange>& ranges, int pageNo) {
    for (size_t i = 0; i < ranges.size(); i++) {
        if (ranges.at(i).start <= pageNo && pageNo <= ranges.at(i).end)
            return true;
    }
    return false;
}
static void BenchFile(const WCHAR* filePath, const WCHAR* pagesSpec) {
    if (!file::Exists(filePath)) {
        return;
    }

    // ad-hoc: if enabled times layout instead of rendering and does layout
    // using all text rendering methods, so that we can compare and find
    // docs that take a long time to load
    if (Doc::IsSupportedFile(filePath) && !gGlobalPrefs->ebookUI.useFixedPageUI) {
        BenchEbookLayout(filePath);
        return;
    }

    if (ChmModel::IsSupportedFile(filePath) && !gGlobalPrefs->chmUI.useFixedPageUI) {
        BenchChmLoadOnly(filePath);
        return;
    }

    Timer total;
    logbench(L"Starting: %s", filePath);

    Timer t;
    BaseEngine* engine = EngineManager::CreateEngine(filePath);
    if (!engine) {
        logbench(L"Error: failed to load %s", filePath);
        return;
    }

    double timeMs = t.Stop();
    logbench(L"load: %.2f ms", timeMs);
    int pages = engine->PageCount();
    logbench(L"page count: %d", pages);

    if (nullptr == pagesSpec) {
        for (int i = 1; i <= pages; i++) {
            BenchLoadRender(engine, i);
        }
    }

    AssertCrash(!pagesSpec || IsBenchPagesInfo(pagesSpec));
    Vec<PageRange> ranges;
    if (ParsePageRanges(pagesSpec, ranges)) {
        for (size_t i = 0; i < ranges.size(); i++) {
            for (int j = ranges.at(i).start; j <= ranges.at(i).end; j++) {
                if (1 <= j && j <= pages)
                    BenchLoadRender(engine, j);
            }
        }
    }

    delete engine;
    total.Stop();

    logbench(L"Finished (in %.2f ms): %s", total.GetTimeInMs(), filePath);
}
// Select random files to test. We want to test each file type equally, so
// we first group them by file extension and then select up to maxPerType
// for each extension, randomly, and inter-leave the files with different
// extensions, so their testing is evenly distributed.
// Returns result in <files>.
static void RandomizeFiles(WStrVec& files, int maxPerType) {
    WStrVec fileExts;
    Vec<WStrVec*> filesPerType;

    for (size_t i = 0; i < files.size(); i++) {
        const WCHAR* file = files.at(i);
        const WCHAR* ext = path::GetExt(file);
        CrashAlwaysIf(!ext);
        int typeNo = fileExts.FindI(ext);
        if (-1 == typeNo) {
            fileExts.Append(str::Dup(ext));
            filesPerType.Append(new WStrVec());
            typeNo = (int)filesPerType.size() - 1;
        }
        filesPerType.at(typeNo)->Append(str::Dup(file));
    }

    for (size_t j = 0; j < filesPerType.size(); j++) {
        WStrVec* all = filesPerType.at(j);
        WStrVec* random = new WStrVec();

        for (int n = 0; n < maxPerType && all->size() > 0; n++) {
            int idx = rand() % all->size();
            WCHAR* file = all->at(idx);
            random->Append(file);
            all->RemoveAtFast(idx);
        }

        filesPerType.at(j) = random;
        delete all;
    }

    files.Reset();

    bool gotAll = false;
    while (!gotAll) {
        gotAll = true;
        for (size_t j = 0; j < filesPerType.size(); j++) {
            WStrVec* random = filesPerType.at(j);
            if (random->size() > 0) {
                gotAll = false;
                WCHAR* file = random->at(0);
                files.Append(file);
                random->RemoveAtFast(0);
            }
        }
    }

    for (size_t j = 0; j < filesPerType.size(); j++) {
        delete filesPerType.at(j);
    }
}
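// A standalone sketch of the same strategy using standard containers: group by
// extension, keep at most maxPerType random picks per group, then interleave the
// groups round-robin. GetExtLower and InterleaveByType are assumed names; the real
// code above works on WStrVec and the project's path helpers instead.
#include <algorithm>
#include <cctype>
#include <map>
#include <random>
#include <string>
#include <vector>

static std::string GetExtLower(const std::string& path) {
    size_t dot = path.find_last_of('.');
    std::string ext = (dot == std::string::npos) ? "" : path.substr(dot);
    std::transform(ext.begin(), ext.end(), ext.begin(), ::tolower);
    return ext;
}

std::vector<std::string> InterleaveByType(std::vector<std::string> files, size_t maxPerType) {
    std::map<std::string, std::vector<std::string>> byExt;
    for (auto& f : files)
        byExt[GetExtLower(f)].push_back(f);

    std::mt19937 rng(std::random_device{}());
    for (auto& kv : byExt) {
        std::shuffle(kv.second.begin(), kv.second.end(), rng);
        if (kv.second.size() > maxPerType)
            kv.second.resize(maxPerType); // cap each file type
    }

    // round-robin: take one file of each type per pass so testing is evenly distributed
    std::vector<std::string> out;
    bool gotAll = false;
    for (size_t round = 0; !gotAll; round++) {
        gotAll = true;
        for (auto& kv : byExt) {
            if (round < kv.second.size()) {
                out.push_back(kv.second[round]);
                gotAll = false;
            }
        }
    }
    return out;
}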
// add offset to coordinate in dimension d of index i
long addCoord(long i, long d, long offset) const {
    assert(i >= 0 && i < size);

    offset = offset % dims.at(d);
    if (offset < 0)
        offset += dims.at(d);

    long i_d = getCoord(i, d);
    long i_d1 = (i_d + offset) % dims.at(d);

    long i1 = i + (i_d1 - i_d) * prods.at(d + 1);

    return i1;
}
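// The index arithmetic above (and getCoord/getProd further below) treats a flat
// index as coordinates in a small hypercube: prods[d] is the product of
// dims[d..n-1] with prods[n] == 1, so coordinate d of index i is
// (i % prods[d]) / prods[d+1]. A self-contained sketch with assumed names
// (CubeDims, coordOf, shiftCoord); e.g. for dims {2,3,4}, index 17 has
// coordinates (1,1,1), and shifting dimension 1 by +2 wraps it to (1,0,1) == 13.
#include <cassert>
#include <vector>

struct CubeDims {
    std::vector<long> dims;  // size of each dimension
    std::vector<long> prods; // prods[d] = dims[d]*...*dims[n-1], prods[n] = 1

    explicit CubeDims(std::vector<long> d) : dims(std::move(d)), prods(dims.size() + 1, 1) {
        for (long i = (long)dims.size() - 1; i >= 0; i--)
            prods[i] = dims[i] * prods[i + 1];
    }

    long size() const { return prods[0]; }

    long coordOf(long i, long d) const { return (i % prods[d]) / prods[d + 1]; }

    // add offset (mod dims[d]) to coordinate d of index i, as in addCoord above
    long shiftCoord(long i, long d, long offset) const {
        assert(i >= 0 && i < size());
        offset %= dims[d];
        if (offset < 0)
            offset += dims[d];
        long c = coordOf(i, d);
        long c1 = (c + offset) % dims[d];
        return i + (c1 - c) * prods[d + 1];
    }
};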
template <class T>
void applyPermToVec(Vec<T>& out, const Vec<T>& in, const Permut& p1) {
    assert(&out != &in); // NOT an in-place procedure
    out.SetLength(p1.length());
    for (long i = 0; i < p1.length(); i++)
        out[i] = in.at(p1[i]);
}
// removes file history entries which shouldn't be saved anymore
// (see the loop below for the details)
void FileHistory::Purge(bool alwaysUseDefaultState) {
    // minOpenCount is set to the number of times a file must have been
    // opened to be kept (provided that there is no other valuable
    // information about the file to be remembered)
    int minOpenCount = 0;
    if (alwaysUseDefaultState) {
        Vec<DisplayState*> frequencyList;
        GetFrequencyOrder(frequencyList);
        if (frequencyList.size() > FILE_HISTORY_MAX_RECENT)
            minOpenCount = frequencyList.at(FILE_HISTORY_MAX_FREQUENT)->openCount / 2;
    }

    for (size_t j = states->size(); j > 0; j--) {
        DisplayState* state = states->at(j - 1);
        // never forget pinned documents, documents we've remembered a password for and
        // documents for which there are favorites
        if (state->isPinned || state->decryptionKey != nullptr || state->favorites->size() > 0)
            continue;
        // forget about missing documents without valuable state
        if (state->isMissing && (alwaysUseDefaultState || state->useDefaultState))
            states->RemoveAt(j - 1);
        // forget about files last opened longer ago than the last FILE_HISTORY_MAX_FILES ones
        else if (j > FILE_HISTORY_MAX_FILES)
            states->RemoveAt(j - 1);
        // forget about files that were hardly used (and without valuable state)
        else if (alwaysUseDefaultState && state->openCount < minOpenCount && j > FILE_HISTORY_MAX_RECENT)
            states->RemoveAt(j - 1);
        else
            continue;
        DeleteDisplayState(state);
    }
}
// Find a record corresponding to the given source file, line number and optionally column number.
// (at the moment the column parameter is ignored)
//
// If there are several *consecutively declared* records for the same line then they are all returned.
// The list of records is added to the vector 'records'
//
// If there is no record for that line, the record corresponding to the nearest line is selected
// (within a range of EPSILON_LINE)
//
// The function returns PDFSYNCERR_SUCCESS if a matching record was found.
UINT Pdfsync::SourceToRecord(const WCHAR* srcfilename, UINT line, UINT col, Vec<size_t>& records) {
    UNUSED(col);
    if (!srcfilename)
        return PDFSYNCERR_INVALID_ARGUMENT;

    AutoFreeW srcfilepath;
    // convert the source file to an absolute path
    if (PathIsRelative(srcfilename))
        srcfilepath.Set(PrependDir(srcfilename));
    else
        srcfilepath.SetCopy(srcfilename);
    if (!srcfilepath)
        return PDFSYNCERR_OUTOFMEMORY;

    // find the source file entry
    size_t isrc;
    for (isrc = 0; isrc < srcfiles.size(); isrc++)
        if (path::IsSame(srcfilepath, srcfiles.at(isrc)))
            break;
    if (isrc == srcfiles.size())
        return PDFSYNCERR_UNKNOWN_SOURCEFILE;

    if (fileIndex.at(isrc).start == fileIndex.at(isrc).end)
        return PDFSYNCERR_NORECORD_IN_SOURCEFILE; // there is no record declaration for that particular source file

    // look for sections belonging to the specified file
    // starting with the first section that is declared within the scope of the file.
    UINT min_distance = EPSILON_LINE; // distance to the closest record
    size_t lineIx = (size_t)-1;       // closest record-line index

    for (size_t isec = fileIndex.at(isrc).start; isec < fileIndex.at(isrc).end; isec++) {
        // does this section belong to the desired file?
        if (lines.at(isec).file != isrc)
            continue;

        UINT d = abs((int)lines.at(isec).line - (int)line);
        if (d < min_distance) {
            min_distance = d;
            lineIx = isec;
            if (0 == d)
                break; // We have found a record for the requested line!
        }
    }
    if (lineIx == (size_t)-1)
        return PDFSYNCERR_NORECORD_FOR_THATLINE;

    // we read all the consecutive records until we reach a record belonging to another line
    for (size_t i = lineIx; i < lines.size() && lines.at(i).line == lines.at(lineIx).line; i++)
        records.Push(lines.at(i).record);

    return PDFSYNCERR_SUCCESS;
}
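// A compact sketch of the matching policy above: scan the records declared for a
// source file, keep the one whose line number is nearest to the requested line
// (but no farther than an EPSILON_LINE-style tolerance), then return every
// consecutively declared record that shares that line. LineRecord and
// RecordsForLine are assumed names; the real code walks the pdfsync index instead.
#include <cstdlib>
#include <vector>

struct LineRecord {
    unsigned line; // source line number
    size_t record; // pdfsync record id
};

std::vector<size_t> RecordsForLine(const std::vector<LineRecord>& recs, unsigned wantedLine,
                                   unsigned epsilonLine) {
    unsigned bestDist = epsilonLine;
    size_t bestIx = (size_t)-1;
    for (size_t i = 0; i < recs.size(); i++) {
        unsigned d = (unsigned)std::abs((int)recs[i].line - (int)wantedLine);
        if (d < bestDist) {
            bestDist = d;
            bestIx = i;
            if (0 == d)
                break; // exact match
        }
    }
    std::vector<size_t> out;
    if (bestIx == (size_t)-1)
        return out; // nothing within epsilonLine lines
    // collect all consecutive records declared for the same source line
    for (size_t i = bestIx; i < recs.size() && recs[i].line == recs[bestIx].line; i++)
        out.push_back(recs[i].record);
    return out;
}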
// For easy access, we try to show favorites in the menu, similar to a list of
// recently opened files.
// The first menu items are for the currently opened file (up to MAX_FAV_MENUS), based
// on the assumption that the user is usually interested in navigating the current file.
// Then we have a submenu for each file for which there are bookmarks (up to
// MAX_FAV_SUBMENUS), each having up to MAX_FAV_MENUS menu items.
// If not all favorites can be shown, we also enable a "Show all favorites" menu which
// will provide a way to see all favorites.
// Note: not sure if that's the best layout. Maybe we should always use submenu and
// put the submenu for current file as the first one (potentially named as "Current file"
// or some such, to make it stand out from other submenus)
static void AppendFavMenus(HMENU m, const WCHAR* currFilePath) {
    // To minimize mouse movement when navigating current file via favorites
    // menu, put favorites for current file first
    DisplayState* currFileFav = nullptr;
    if (currFilePath) {
        currFileFav = gFavorites.GetFavByFilePath(currFilePath);
    }

    // sort the files with favorites by base file name of file path
    Vec<const WCHAR*> filePathsSorted;
    if (HasPermission(Perm_DiskAccess)) {
        // only show favorites for other files, if we're allowed to open them
        GetSortedFilePaths(filePathsSorted, currFileFav);
    }
    if (currFileFav && currFileFav->favorites->size() > 0) {
        filePathsSorted.InsertAt(0, currFileFav->filePath);
    }

    if (filePathsSorted.size() == 0) {
        return;
    }

    AppendMenu(m, MF_SEPARATOR, 0, nullptr);

    gFavorites.ResetMenuIds();
    UINT menuId = IDM_FAV_FIRST;

    size_t menusCount = filePathsSorted.size();
    if (menusCount > MAX_FAV_MENUS) {
        menusCount = MAX_FAV_MENUS;
    }

    for (size_t i = 0; i < menusCount; i++) {
        const WCHAR* filePath = filePathsSorted.at(i);
        DisplayState* f = gFavorites.GetFavByFilePath(filePath);
        CrashIf(!f);
        HMENU sub = m;
        bool combined = (f->favorites->size() == 1);
        if (!combined) {
            sub = CreateMenu();
        }
        AppendFavMenuItems(sub, f, menuId, combined, f == currFileFav);
        if (!combined) {
            if (f == currFileFav) {
                AppendMenu(m, MF_POPUP | MF_STRING, (UINT_PTR)sub, _TR("Current file"));
            } else {
                AutoFreeW tmp;
                tmp.SetCopy(path::GetBaseName(filePath));
                auto fileName = win::menu::ToSafeString(tmp);
                AppendMenuW(m, MF_POPUP | MF_STRING, (UINT_PTR)sub, fileName);
            }
        }
    }
}
// don't emit multiple spaces and don't emit spaces
// at the beginning of the line
static bool CanEmitElasticSpace(float currX, float NewLineX, float maxCurrX, Vec<DrawInstr>& currLineInstr) {
    if (NewLineX == currX || 0 == currLineInstr.size())
        return false;
    // prevent elastic spaces from being flushed to the
    // beginning of the next line
    if (currX > maxCurrX)
        return false;
    DrawInstr& di = currLineInstr.Last();
    // don't add a space if only an anchor would be in between them
    if (InstrAnchor == di.type && currLineInstr.size() > 1)
        di = currLineInstr.at(currLineInstr.size() - 2);
    return (InstrElasticSpace != di.type) && (InstrFixedSpace != di.type);
}
// For each position in the data vector, compute how many slots it should be
// shifted inside its small permutation.
// Return value is zero if all the shift amounts are zero, nonzero otherwise.
long ColPerm::getShiftAmounts(Vec<long>& out) const {
    long sz = getSize();
    out.SetLength(sz);
    long nonZero = 0;

    for (long k = 0; k < sz; k++) {
        long i = getCoord(k, dim);
        long pi_i = at(k);
        if (i != pi_i)
            nonZero = 1;
        out.at(addCoord(k, dim, pi_i - i)) = i - pi_i;
    }
    return nonZero;
}
static VerticalLayout* VerticalLayoutFromDef(ParsedMui& parsed, TxtNode* structDef) {
    CrashIf(!structDef->IsStructWithName("VerticalLayout"));
    VerticalLayoutDef* def = DeserializeVerticalLayoutDef(structDef);
    VerticalLayout* l = new VerticalLayout();
    l->SetName(def->name);
    Vec<DirectionalLayoutDataDef*>* children = def->children;
    DirectionalLayoutData ld;
    for (size_t i = 0; children && i < children->size(); i++) {
        SetDirectionalLayouData(ld, parsed, children->at(i));
        l->Add(ld);
    }
    FreeVerticalLayoutDef(def);
    return l;
}
static bool HasPreviousLineSingleImage(Vec<DrawInstr>& instrs) {
    REAL imageY = -1;
    for (size_t idx = instrs.size(); idx > 0; idx--) {
        DrawInstr& i = instrs.at(idx - 1);
        if (!IsVisibleDrawInstr(i))
            continue;
        if (-1 != imageY) {
            // if another visible item precedes the image,
            // it must be completely above it (previous line)
            return i.bbox.Y + i.bbox.Height <= imageY;
        }
        if (InstrImage != i.type)
            return false;
        imageY = i.bbox.Y;
    }
    return imageY != -1;
}
RectF MeasureTextQuick(Graphics* g, Font* f, const WCHAR* s, int len) {
    CrashIf(0 >= len);

    static Vec<Font*> fontCache;
    static Vec<bool> fixCache;

    RectF bbox;
    g->MeasureString(s, len, f, PointF(0, 0), &bbox);
    int idx = fontCache.Find(f);
    if (-1 == idx) {
        LOGFONTW lfw;
        Status ok = f->GetLogFontW(g, &lfw);
        bool isItalicOrMonospace = Ok != ok || lfw.lfItalic || str::Eq(lfw.lfFaceName, L"Courier New") ||
                                   str::Find(lfw.lfFaceName, L"Consol") || str::EndsWith(lfw.lfFaceName, L"Mono") ||
                                   str::EndsWith(lfw.lfFaceName, L"Typewriter");
        fontCache.Append(f);
        fixCache.Append(isItalicOrMonospace);
        idx = (int)fontCache.size() - 1;
    }
    // most documents look good enough with these adjustments
    if (!fixCache.at(idx)) {
        REAL correct = 0;
        for (int i = 0; i < len; i++) {
            switch (s[i]) {
                case 'i':
                case 'l':
                    correct += 0.2f;
                    break;
                case 't':
                case 'f':
                case 'I':
                    correct += 0.1f;
                    break;
                case '.':
                case ',':
                case '!':
                    correct += 0.1f;
                    break;
            }
        }
        bbox.Width *= (1.0f - correct / len) * 0.99f;
    }
    bbox.Height *= 0.95f;
    return bbox;
}
void PopulateFavTreeIfNeeded(WindowInfo* win) {
    HWND hwndTree = win->hwndFavTree;
    if (TreeView_GetCount(hwndTree) > 0) {
        return;
    }

    Vec<const WCHAR*> filePathsSorted;
    GetSortedFilePaths(filePathsSorted);

    SendMessage(hwndTree, WM_SETREDRAW, FALSE, 0);
    for (size_t i = 0; i < filePathsSorted.size(); i++) {
        DisplayState* f = gFavorites.GetFavByFilePath(filePathsSorted.at(i));
        bool isExpanded = win->expandedFavorites.Contains(f);
        HTREEITEM node = InsertFavTopLevelNode(hwndTree, f, isExpanded);
        if (f->favorites->size() > 1) {
            InsertFavSecondLevelNodes(hwndTree, node, f);
        }
    }

    SendMessage(hwndTree, WM_SETREDRAW, TRUE, 0);
    UINT fl = RDW_ERASE | RDW_FRAME | RDW_INVALIDATE | RDW_ALLCHILDREN;
    RedrawWindow(hwndTree, nullptr, nullptr, fl);
}
void PaintTransparentRectangles(HDC hdc, RectI screenRc, Vec<RectI>& rects, COLORREF selectionColor, BYTE alpha,
                                int margin) {
    using namespace Gdiplus;

    // create path from rectangles
    GraphicsPath path(FillModeWinding);
    screenRc.Inflate(margin, margin);
    for (size_t i = 0; i < rects.size(); i++) {
        RectI rc = rects.at(i).Intersect(screenRc);
        if (!rc.IsEmpty())
            path.AddRectangle(rc.ToGdipRect());
    }

    // fill path (and draw optional outline margin)
    Graphics gs(hdc);
    Color c(alpha, GetRValueSafe(selectionColor), GetGValueSafe(selectionColor), GetBValueSafe(selectionColor));
    SolidBrush tmpBrush(c);
    gs.FillPath(&tmpBrush, &path);
    if (margin) {
        path.Outline(nullptr, 0.2f);
        Pen tmpPen(Color(alpha, 0, 0, 0), (REAL)margin);
        gs.DrawPath(&tmpPen, &path);
    }
}
// removes thumbnails that don't belong to any frequently used item in file history
void CleanUpThumbnailCache(const FileHistory& fileHistory) {
    AutoFreeW thumbsPath(AppGenDataFilename(THUMBNAILS_DIR_NAME));
    if (!thumbsPath)
        return;
    AutoFreeW pattern(path::Join(thumbsPath, L"*.png"));

    WStrVec files;
    WIN32_FIND_DATA fdata;

    HANDLE hfind = FindFirstFile(pattern, &fdata);
    if (INVALID_HANDLE_VALUE == hfind)
        return;
    do {
        if (!(fdata.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY))
            files.Append(str::Dup(fdata.cFileName));
    } while (FindNextFile(hfind, &fdata));
    FindClose(hfind);

    Vec<DisplayState*> list;
    fileHistory.GetFrequencyOrder(list);
    for (size_t i = 0; i < list.size() && i < FILE_HISTORY_MAX_FREQUENT * 2; i++) {
        AutoFreeW bmpPath(GetThumbnailPath(list.at(i)->filePath));
        if (!bmpPath)
            continue;
        int idx = files.Find(path::GetBaseName(bmpPath));
        if (idx != -1) {
            CrashIf(idx < 0 || files.size() <= (size_t)idx);
            free(files.PopAt(idx));
        }
    }

    for (size_t i = 0; i < files.size(); i++) {
        AutoFreeW bmpPath(path::Join(thumbsPath, files.at(i)));
        file::Delete(bmpPath);
    }
}
UINT RenderCache::Paint(HDC hdc, RectI bounds, DisplayModel* dm, int pageNo, PageInfo* pageInfo,
                        bool* renderOutOfDateCue) {
    AssertCrash(pageInfo->shown && 0.0 != pageInfo->visibleRatio);

    if (!dm->ShouldCacheRendering(pageNo)) {
        int rotation = dm->GetRotation();
        float zoom = dm->GetZoomReal(pageNo);
        bounds = pageInfo->pageOnScreen.Intersect(bounds);

        RectD area = bounds.Convert<double>();
        area.Offset(-pageInfo->pageOnScreen.x, -pageInfo->pageOnScreen.y);
        area = dm->GetEngine()->Transform(area, pageNo, zoom, rotation, true);

        RenderedBitmap* bmp = dm->GetEngine()->RenderBitmap(pageNo, zoom, rotation, &area);
        bool success = bmp && bmp->GetBitmap() && bmp->StretchDIBits(hdc, bounds);
        delete bmp;

        return success ? 0 : RENDER_DELAY_FAILED;
    }

    int rotation = dm->GetRotation();
    float zoom = dm->GetZoomReal();
    USHORT targetRes = GetTileRes(dm, pageNo);
    USHORT maxRes = GetMaxTileRes(dm, pageNo, rotation);
    if (maxRes < targetRes)
        maxRes = targetRes;

    Vec<TilePosition> queue;
    queue.Append(TilePosition(0, 0, 0));
    UINT renderDelayMin = RENDER_DELAY_UNDEFINED;
    bool neededScaling = false;

    while (queue.size() > 0) {
        TilePosition tile = queue.PopAt(0);
        RectI tileOnScreen = GetTileOnScreen(dm->GetEngine(), pageNo, rotation, zoom, tile, pageInfo->pageOnScreen);
        if (tileOnScreen.IsEmpty()) {
            // display an error message when only empty tiles should be drawn (i.e. on page loading errors)
            renderDelayMin = std::min(RENDER_DELAY_FAILED, renderDelayMin);
            continue;
        }
        tileOnScreen = pageInfo->pageOnScreen.Intersect(tileOnScreen);
        RectI isect = bounds.Intersect(tileOnScreen);
        if (isect.IsEmpty())
            continue;

        bool isTargetRes = tile.res == targetRes;
        UINT renderDelay = PaintTile(hdc, isect, dm, pageNo, tile, tileOnScreen, isTargetRes, renderOutOfDateCue,
                                     isTargetRes ? &neededScaling : nullptr);
        if (!(isTargetRes && 0 == renderDelay) && tile.res < maxRes) {
            queue.Append(TilePosition(tile.res + 1, tile.row * 2, tile.col * 2));
            queue.Append(TilePosition(tile.res + 1, tile.row * 2, tile.col * 2 + 1));
            queue.Append(TilePosition(tile.res + 1, tile.row * 2 + 1, tile.col * 2));
            queue.Append(TilePosition(tile.res + 1, tile.row * 2 + 1, tile.col * 2 + 1));
        }
        if (isTargetRes && renderDelay > 0)
            neededScaling = true;
        renderDelayMin = std::min(renderDelay, renderDelayMin);
        // paint tiles from left to right from top to bottom
        if (tile.res > 0 && queue.size() > 0 && tile.res < queue.at(0).res)
            queue.Sort(cmpTilePosition);
    }

#ifdef CONSERVE_MEMORY
    if (!neededScaling) {
        if (renderOutOfDateCue)
            *renderOutOfDateCue = false;
        // free tiles with different resolution
        TilePosition tile(targetRes, (USHORT)-1, 0);
        FreePage(dm, pageNo, &tile);
    }
    FreeNotVisible();
#endif

    return renderDelayMin;
}
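// The tile queue above refines rendering progressively: starting from a single
// tile (0,0,0), a tile that isn't ready at the target resolution is replaced by
// its four children one level deeper (rows and columns doubled), so level res
// corresponds to a 2^res x 2^res grid over the page. A tiny sketch of that
// subdivision; Tile and ChildTiles are assumed names, and the real TilePosition
// also carries the ordering used by queue.Sort(cmpTilePosition).
#include <array>
#include <cstdint>

struct Tile {
    uint16_t res; // subdivision level
    uint16_t row;
    uint16_t col;
};

// the four tiles that cover the same page area one resolution level deeper
std::array<Tile, 4> ChildTiles(const Tile& t) {
    uint16_t r = (uint16_t)(t.res + 1);
    return {{
        {r, (uint16_t)(t.row * 2), (uint16_t)(t.col * 2)},
        {r, (uint16_t)(t.row * 2), (uint16_t)(t.col * 2 + 1)},
        {r, (uint16_t)(t.row * 2 + 1), (uint16_t)(t.col * 2)},
        {r, (uint16_t)(t.row * 2 + 1), (uint16_t)(t.col * 2 + 1)},
    }};
}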
// - Note
// operator[] overload that supports index-based access
const time_type& operator[](int idx) const noexcept(false) { return times.at(idx); }
// reference to element at position i, with bounds check
T& at(long i) { return data.at(i); }
//////////////////////////////////////////////////
// start the scan parsing of a mzXML file
void FTPeakDetectController::startScanParsing(Vec datavec) {
    // set the title of the current LC_MS run:
    string name = "tmplcms";

    // create a new LC/MS:
    lcms_ = new LCMS(name);
    lcms_->set_spectrum_ID((int) this->lcmsRuns_.size());

    ProcessData* dataProcessor = new ProcessData();

    unsigned int i;
    for (i = 0; i < datavec.size(); i++) {
        Map it = datavec.at(i);
        dataProcessor->setMaxScanDistance(0);
        if ((it.first >= SuperHirnParameters::instance()->getMinTR())
            && (it.first <= SuperHirnParameters::instance()->getMaxTR())) {
            SuperHirnParameters::instance()->getScanTRIndex()->insert(std::pair<int, float>(i, (float) it.first));
            // centroid it:
            CentroidData cd(SuperHirnParameters::instance()->getCentroidWindowWidth(), it.second, it.first,
                            SuperHirnParameters::instance()->centroidDataModus());
            // store it:
            dataProcessor->add_scan_raw_data(i, it.first, &cd);
        }
    }

    //////////////////////////////////////
    // post processing of mzXML data of a file:
    // !!! MS1 LEVEL !!!
    process_MS1_level_data_structure(dataProcessor);

    // !!! MS2 LEVEL !!!
    //process_MS2_level_data_structure( reader );

    lcms_->order_by_mass();

    if (SuperHirnParameters::instance()->ms1FeatureClustering()) {
        MS1FeatureMerger* merg = new MS1FeatureMerger(lcms_);
        merg->startFeatureMerging();
        delete merg;
    }

    lcms_->show_info();

    /* Debug output, commented out for brutus */
    /*
    string SEP = "";
    FILE *file;
    file = fopen("ffsh-features.txt", "w+");
    fprintf(file, "%s", "Features\n");
    vector<feature>::iterator p = lcms_->get_feature_list_begin();
    while (p != lcms_->get_feature_list_end()) {
        fprintf(file, "MS1 Feature#:%d,%s", (*p).get_feature_ID(), SEP.c_str());
        fprintf(file, "m/z:%0.5f%s", (*p).get_MZ(), SEP.c_str());
        fprintf(file, "[+%d],%s", (*p).get_charge_state(), SEP.c_str());
        fprintf(file, "Area:%0.2f%s", (*p).get_peak_area(), SEP.c_str());
        fprintf(file, ",apex:%0.2f[%0.2f:%0.2f][%d:%d:%d],s/n:%0.2f,%0.2f%s", (*p).get_retention_time(),
                (*p).get_retention_time_START(), (*p).get_retention_time_END(), (*p).get_scan_start(),
                (*p).get_scan_number(), (*p).get_scan_end(), (*p).getSignalToNoise(), (*p).get_peak_score(),
                SEP.c_str());
        fprintf(file, ",matches:%d%s", (*p).get_replicate_match_nb(), SEP.c_str());
        fprintf(file, ",LCMS-ID: %d", (*p).get_spectrum_ID());
        fprintf(file, "%s", "\n");
        p++;
    }
    fclose(file);
    */

    lcmsRuns_.push_back(*lcms_);
    delete dataProcessor;
}
// read-only reference to element at position i, with bounds check
const T& at(long i) const { return data.at(i); }
// size of dimension d
long getDim(long d) const { return dims.at(d); }
// product of sizes of dimensions d, d+1, ...
long getProd(long d) const { return prods.at(d); }
// get coordinate in dimension d of index i
long getCoord(long i, long d) const {
    assert(i >= 0 && i < size);
    return (i % prods.at(d)) / prods.at(d + 1);
}
bool StressTest::OpenFile(const WCHAR* fileName) {
    wprintf(L"%s\n", fileName);
    fflush(stdout);

    LoadArgs args(fileName);
    args.forceReuse = rand() % 3 != 1;
    WindowInfo* w = LoadDocument(args);
    if (!w)
        return false;

    if (w == win) { // WindowInfo reused
        if (!win->IsDocLoaded())
            return false;
    } else if (!w->IsDocLoaded()) { // new WindowInfo
        CloseWindow(w, false);
        return false;
    }

    // transfer ownership of stressTest object to a new window and close the
    // current one
    AssertCrash(this == win->stressTest);
    if (w != win) {
        if (win->IsDocLoaded()) {
            // try to provoke a crash in RenderCache cleanup code
            ClientRect rect(win->hwndFrame);
            rect.Inflate(rand() % 10, rand() % 10);
            SendMessage(win->hwndFrame, WM_SIZE, 0, MAKELONG(rect.dx, rect.dy));
            if (win->AsFixed())
                win->cbHandler->RequestRendering(1);
            win->RepaintAsync();
        }

        WindowInfo* toClose = win;
        w->stressTest = win->stressTest;
        win->stressTest = nullptr;
        win = w;
        CloseWindow(toClose, false);
    }
    if (!win->IsDocLoaded())
        return false;

    win->ctrl->SetDisplayMode(DM_CONTINUOUS);
    win->ctrl->SetZoomVirtual(ZOOM_FIT_PAGE, nullptr);
    win->ctrl->GoToFirstPage();
    if (win->tocVisible || gGlobalPrefs->showFavorites)
        SetSidebarVisibility(win, win->tocVisible, gGlobalPrefs->showFavorites);

    currPage = pageRanges.at(0).start;
    win->ctrl->GoToPage(currPage, false);

    currPageRenderTime.Start();
    ++filesCount;

    pageForSearchStart = (rand() % win->ctrl->PageCount()) + 1;
    // search immediately in single page documents
    if (1 == pageForSearchStart) {
        // use text that is unlikely to be found, so that we search all pages
        win::SetText(win->hwndFindBox, L"!z_yt");
        FindTextOnThread(win, TextSearchDirection::Forward, true);
    }

    int secs = SecsSinceSystemTime(stressStartTime);
    AutoFreeW tm(FormatTime(secs));
    AutoFreeW s(str::Format(L"File %d: %s, time: %s", filesCount, fileName, tm));
    win->ShowNotification(s, NOS_PERSIST, NG_STRESS_TEST_SUMMARY);

    return true;
}
// see http://itexmac.sourceforge.net/pdfsync.html for the specification
int Pdfsync::RebuildIndex() {
    OwnedData data(file::ReadFile(syncfilepath));
    if (!data.data) {
        return PDFSYNCERR_SYNCFILE_CANNOT_BE_OPENED;
    }
    // convert the file data into a list of zero-terminated strings
    str::TransChars(data.data, "\r\n", "\0\0");

    // parse preamble (jobname and version marker)
    char* line = data.data;
    char* dataEnd = data.data + data.size;

    // replace star by spaces (TeX uses stars instead of spaces in filenames)
    str::TransChars(line, "*/", " \\");
    AutoFreeW jobName(str::conv::FromAnsi(line));
    jobName.Set(str::Join(jobName, L".tex"));
    jobName.Set(PrependDir(jobName));

    line = Advance0Line(line, dataEnd);
    UINT versionNumber = 0;
    if (!line || !str::Parse(line, "version %u", &versionNumber) || versionNumber != 1) {
        return PDFSYNCERR_SYNCFILE_CANNOT_BE_OPENED;
    }

    // reset synchronizer database
    srcfiles.Reset();
    lines.Reset();
    points.Reset();
    fileIndex.Reset();
    sheetIndex.Reset();

    Vec<size_t> filestack;
    UINT page = 1;
    sheetIndex.Append(0);

    // add the initial tex file to the source file stack
    filestack.Push(srcfiles.size());
    srcfiles.Append(jobName.StealData());
    PdfsyncFileIndex findex = {0};
    fileIndex.Append(findex);

    PdfsyncLine psline;
    PdfsyncPoint pspoint;

    // parse data
    UINT maxPageNo = engine->PageCount();
    while ((line = Advance0Line(line, dataEnd)) != nullptr) {
        if (!line)
            break;
        switch (*line) {
            case 'l':
                psline.file = filestack.Last();
                if (str::Parse(line, "l %u %u %u", &psline.record, &psline.line, &psline.column))
                    lines.Append(psline);
                else if (str::Parse(line, "l %u %u", &psline.record, &psline.line)) {
                    psline.column = 0;
                    lines.Append(psline);
                }
                // else dbg("Bad 'l' line in the pdfsync file");
                break;

            case 's':
                if (str::Parse(line, "s %u", &page))
                    sheetIndex.Append(points.size());
                // else dbg("Bad 's' line in the pdfsync file");
                // if (0 == page || page > maxPageNo)
                //     dbg("'s' line with invalid page number in the pdfsync file");
                break;

            case 'p':
                pspoint.page = page;
                if (0 == page || page > maxPageNo)
                    /* ignore point for invalid page number */;
                else if (str::Parse(line, "p %u %u %u", &pspoint.record, &pspoint.x, &pspoint.y))
                    points.Append(pspoint);
                else if (str::Parse(line, "p* %u %u %u", &pspoint.record, &pspoint.x, &pspoint.y))
                    points.Append(pspoint);
                // else dbg("Bad 'p' line in the pdfsync file");
                break;

            case '(': {
                AutoFreeW filename(str::conv::FromAnsi(line + 1));
                // if the filename contains quotes then remove them
                // TODO: this should never happen!?
                if (filename[0] == '"' && filename[str::Len(filename) - 1] == '"')
                    filename.Set(str::DupN(filename + 1, str::Len(filename) - 2));
                // undecorate the filepath: replace * by space and / by \ (backslash)
                str::TransChars(filename, L"*/", L" \\");
                // if the file name extension is not specified then add the suffix '.tex'
                if (str::IsEmpty(path::GetExt(filename)))
                    filename.Set(str::Join(filename, L".tex"));
                // ensure that the path is absolute
                if (PathIsRelative(filename))
                    filename.Set(PrependDir(filename));

                filestack.Push(srcfiles.size());
                srcfiles.Append(filename.StealData());
                findex.start = findex.end = lines.size();
                fileIndex.Append(findex);
            } break;

            case ')':
                if (filestack.size() > 1)
                    fileIndex.at(filestack.Pop()).end = lines.size();
                // else dbg("Unbalanced ')' line in the pdfsync file");
                break;

            default:
                // dbg("Ignoring invalid pdfsync line starting with '%c'", *line);
                break;
        }
    }

    fileIndex.at(0).end = lines.size();
    AssertCrash(filestack.size() == 1);

    return Synchronizer::RebuildIndex();
}
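// For reference, an illustrative input that the parser above accepts (not taken
// from a real TeX run; the exact semantics are defined by the spec linked above).
// The preamble names the job, then 'l' lines map records to source lines (with an
// optional column), '(' / ')' push and pop included source files, 's' starts a PDF
// sheet (page), and 'p' / 'p*' lines place records on that sheet; the coordinate
// values below are placeholders only:
//
//   main
//   version 1
//   l 1 10
//   l 2 12 5
//   (chapter1
//   l 3 4
//   )
//   s 1
//   p 1 5906817 4736286
//   p* 2 5906817 5100000
//   p 3 5906817 5463714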