// Convert a narrow std::string to a tString (basic_string of TCHAR).
// C2T allocates the TCHAR buffer; SAFE_ARRYDELETE releases it.
void StringTotString(tString& tstrDest, const std::string& strSrc)
{
    TCHAR* pwszStr = NULL;
    C2T(&pwszStr, strSrc.c_str());
    if (pwszStr != NULL)            // guard against a failed conversion
        tstrDest = pwszStr;
    SAFE_ARRYDELETE(pwszStr);
}

// Overload for raw ANSI C strings.
void StringTotString(tString& tstrDest, LPCSTR lpcSrc)
{
    TCHAR* pwszStr = NULL;
    C2T(&pwszStr, lpcSrc);
    if (pwszStr != NULL)
        tstrDest = pwszStr;
    SAFE_ARRYDELETE(pwszStr);
}
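// The helpers above assume a C2T routine that allocates a TCHAR copy of an
// ANSI string with new[] (hence SAFE_ARRYDELETE, presumably a delete[] wrapper).
// The following is only a sketch of what such a helper might look like; the
// real C2T in this code base may differ. SketchC2T is an invented name.
#include <windows.h>
#include <cstring>

static BOOL SketchC2T(TCHAR** ppDest, const char* pSrc)
{
    *ppDest = NULL;
    if (pSrc == NULL)
        return FALSE;
#ifdef UNICODE
    // Ask MultiByteToWideChar for the required size (including the NUL) first.
    int cch = MultiByteToWideChar(CP_ACP, 0, pSrc, -1, NULL, 0);
    if (cch <= 0)
        return FALSE;
    *ppDest = new WCHAR[cch];
    MultiByteToWideChar(CP_ACP, 0, pSrc, -1, *ppDest, cch);
#else
    // ANSI build: TCHAR is char, a plain copy is enough.
    size_t len = strlen(pSrc) + 1;
    *ppDest = new char[len];
    memcpy(*ppDest, pSrc, len);
#endif
    return TRUE;
}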
BOOL CPicDowloadThread::Run()
{
    PicInfo* pInfo = (PicInfo*)m_pPara;

    GetDownloadUrl();

    // Notify the UI so it can render the avatar pictures that are already available.
    PostMessage(pInfo->m_hWnd, WM_SETHEADPICTURE,
                MAKELPARAM(m_downloadType, 0), (LPARAM)this);

    HINSTANCE hDll = NULL;
#ifdef USE_WININET_DOWNLOAD
#ifdef _DEBUG
    hDll = LoadLibrary(_T("HtttpDownload_d.dll"));
#else
    hDll = LoadLibrary(_T("HtttpDownload.dll"));
#endif
#endif

    for (map<std::string, tString>::iterator it = pInfo->m_mapIdNotHadPic.begin();
         it != pInfo->m_mapIdNotHadPic.end(); ++it)
    {
        TCHAR tszPicPath[MAX_PATH] = {0x0};
        LPTSTR lptId = NULL;
        C2T(&lptId, it->first.c_str());
        GetLinkmanPicPath(tszPicPath, MAX_PATH, it->second.c_str(), lptId);

        if (URLDownloadPicture(it->second.c_str(), tszPicPath, hDll))
            it->second = tszPicPath;
        else // download failed
            it->second = _T("");

        SAFE_ARRYDELETE(lptId);
    }

#ifdef USE_WININET_DOWNLOAD
    if (hDll != NULL)
        FreeLibrary(hDll);
#endif

    // Notify the UI that the downloaded pictures are ready.
    PostMessage(pInfo->m_hWnd, WM_SETHEADPICTURE,
                MAKELPARAM(m_downloadType, 1), (LPARAM)this);
    return FALSE;
}
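// The thread above posts WM_SETHEADPICTURE twice: once with phase 0 (cached
// avatars are ready) and once with phase 1 (downloads finished), packing
// (downloadType, phase) into wParam via MAKELPARAM. A hypothetical receiving
// window procedure might unpack it as shown below; the real handler and the
// definition of WM_SETHEADPICTURE are not part of this snippet.
LRESULT CALLBACK SketchWndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    if (uMsg == WM_SETHEADPICTURE)
    {
        int downloadType = LOWORD(wParam);   // which avatar download this is
        int phase        = HIWORD(wParam);   // 0 = cached pictures, 1 = downloads done
        CPicDowloadThread* pThread = (CPicDowloadThread*)lParam;
        // ... walk the thread's id -> picture-path map and refresh the avatar controls ...
        (void)downloadType; (void)phase; (void)pThread;
        return 0;
    }
    return DefWindowProc(hWnd, uMsg, wParam, lParam);
}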
void BuildIndex(const vector<string>& chrom_files, const int& indicator,
                const string& output_file, uint32_t& size_of_index) {
  switch (indicator) {
    case 0:
      fprintf(stderr, "[BUILD INDEX FOR FORWARD STRAND (C->T)]\n");
      break;
    case 1:
      fprintf(stderr, "[BUILD INDEX FOR REVERSE STRAND (C->T)]\n");
      break;
    case 2:
      fprintf(stderr, "[BUILD INDEX FOR FORWARD STRAND (G->A)]\n");
      break;
    case 3:
      fprintf(stderr, "[BUILD INDEX FOR REVERSE STRAND (G->A)]\n");
      break;
  }

  Genome genome;
  HashTable hash_table;

  ReadGenome(chrom_files, genome);

  // Odd indicators build the index for the reverse strand.
  if (indicator % 2) {
    ReverseComplementGenome(genome);
  }

  // Apply the bisulfite conversion: C->T for indicators 0/1, G->A for 2/3.
  if (indicator == 0 || indicator == 1) {
    C2T(genome.sequence);
  } else {
    G2A(genome.sequence);
  }

  set<uint32_t> extremal_large_bucket;
  CountBucketSize(genome, hash_table, extremal_large_bucket);
  HashToBucket(genome, hash_table, extremal_large_bucket);
  SortHashTableBucket(genome, hash_table);
  WriteIndex(output_file, genome, hash_table);

  size_of_index =
      hash_table.index_size > size_of_index ? hash_table.index_size : size_of_index;
}
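// BuildIndex relies on C2T(genome.sequence) and G2A(genome.sequence) to apply
// the bisulfite conversion in place before hashing. A minimal sketch of what
// such helpers might do on a plain std::string follows (invented names); the
// real functions may also handle lowercase bases or Ns differently.
#include <string>

inline void SketchC2TInPlace(std::string& seq) {
  for (size_t i = 0; i < seq.size(); ++i) {
    if (seq[i] == 'C') seq[i] = 'T';   // forward-strand bisulfite conversion
  }
}

inline void SketchG2AInPlace(std::string& seq) {
  for (size_t i = 0; i < seq.size(); ++i) {
    if (seq[i] == 'G') seq[i] = 'A';   // complement-strand conversion
  }
}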
term hlist(register term H, register term regs, stack wam)
{
  no i;
  cell xval;
  bp_long ival;
  byte stamp;

#if TRACE>0
  fprintf(STD_err,"entering hlist, wam=%d, bboard=%d H=%d\n",
          wam,g.shared[BBoardStk].base,H);
  bbcheck(wam);
#endif

  if(!INTEGER(X(1))) return NULL;   /* first arg: stamp */
  stamp=(byte)(OUTPUT_INT(X(1)));

  xval=X(2);                        /* second arg: starting arity of listed terms */
  if(!INTEGER(xval)) return NULL;
  ival=OUTPUT_INT(xval);

  for(i=0; i<HMAX; i++)
    if(hstamp[i]>=stamp && HUSED())
      {
        term xref=C2T(g.predmark);
        if(hstamp[i]<=RUNTIME)
          { /* gets preds of arity < ival `represented' as g.predmark */
            if(g.predmark!=htable[i].pred || GETARITY(htable[i].fun)<(no)ival)
              continue;
            xval=g.predmark;
          }
        else
          { /* gets RUNTIME data of arity > ival */
            cell v=htable[i].val;
            if(NULL==(term)v) continue;
            if(VAR(v) && !( ONSTACK(g.shared[BBoardStk],v)
                         || ONSTACK(g.shared[InstrStk],v)
                         /*|| ON(HeapStk,v) */
                          ))
              {
#if TRACE>0
                fprintf(STD_err,
                        "unexpected data in htable[%d]=>\n<%s,%s>->%s\n",i,
                        smartref(htable[i].pred,wam),
                        smartref(htable[i].fun,wam),
                        smartref(v,wam));
#endif
                /* continue; */
              }
            FDEREF(v);
            if((INTEGER(xval) && ival>0) || VAR(xval)
               || (GETARITY(xval) < (no)ival) || xval==g.empty)
              continue;
            if(COMPOUND(xval)) xval=T2C(xref);
          }

        IF_OVER("COPY_KEYS",(term *)H,HeapStk,bp_halt(9));
        SAVE_FUN(htable[i].pred);
        SAVE_FUN(htable[i].fun);
#if 0
        ASSERT2(( ATOMIC(xval)
               || ONSTACK(g.shared[BBoardStk],xval)
               || ON(HeapStk,xval)),   /* will fail with multiple engines */
               xval);
#endif
        PUSH_LIST(xval);
      }

  PUSH_NIL();
  return H;
}
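// hlist scans the blackboard hash table, keeps entries whose stamp is at least
// the requested one, filters them by arity, and pushes the surviving values
// onto the heap as a Prolog list. The BinProlog macros (FDEREF, SAVE_FUN,
// PUSH_LIST, ...) hide most of that machinery; the fragment below is only a
// language-neutral restatement of the filtering loop with invented types, not
// BinProlog code.
#include <cstdint>
#include <vector>

struct SketchEntry {
  uint8_t  stamp;
  uint32_t arity;
  intptr_t value;   // 0 means "unused slot"
};

std::vector<intptr_t> SketchCollect(const std::vector<SketchEntry>& table,
                                    uint8_t min_stamp, uint32_t min_arity) {
  std::vector<intptr_t> out;
  for (size_t i = 0; i < table.size(); ++i) {
    if (table[i].stamp < min_stamp) continue;   // entry too old
    if (table[i].value == 0) continue;          // empty slot
    if (table[i].arity < min_arity) continue;   // arity filter
    out.push_back(table[i].value);              // "PUSH_LIST"
  }
  return out;                                   // "PUSH_NIL" terminates the list
}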
void PairEndMapping(const string& org_read, const Genome& genome,
                    const HashTable& hash_table, const char& strand,
                    const bool& AG_WILDCARD, const uint32_t& max_mismatches,
                    TopCandidates& top_match) {
  uint32_t read_len = org_read.size();
  if (read_len < MINIMALREADLEN) {
    return;
  }

  /* return the maximal seed length for a particular read length */
  uint32_t seed_pattern_repeats = (read_len - SEEPATTERNLEN + 1) / SEEPATTERNLEN;
  uint32_t seed_len = seed_pattern_repeats * SEEPATTERNCAREDWEIGHT;

  string read;
  if (AG_WILDCARD) {
    G2A(org_read, read_len, read);
  } else {
    C2T(org_read, read_len, read);
  }

  uint32_t cur_max_mismatches = max_mismatches;
  for (uint32_t seed_i = 0; seed_i < SEEPATTERNLEN; ++seed_i) {
    /* all exact matches are covered by the first seed */
    if (!top_match.Empty() && top_match.Full() && top_match.Top().mismatch == 0
        && seed_i)
      break;

#if defined(SEEDPATTERN3) || defined(SEEDPATTERN5)
    /* all matches with 1 mismatch are covered by the first two seeds */
    if (!top_match.Empty() && top_match.Full() && top_match.Top().mismatch == 1
        && seed_i >= 2)
      break;
#endif

#ifdef SEEDPATTERN7
    /* all matches with 1 mismatch are covered by the first four seeds */
    if (!top_match.Empty() && top_match.Full() && top_match.Top().mismatch == 1
        && seed_i >= 4)
      break;
#endif

    string read_seed = read.substr(seed_i);
    uint32_t hash_value = getHashValue(read_seed.c_str());
    pair<uint32_t, uint32_t> region;
    region.first = hash_table.counter[hash_value];
    region.second = hash_table.counter[hash_value + 1];

    if (region.first == region.second)
      continue;

    IndexRegion(read_seed, genome, hash_table, seed_len, region);
    if (region.second - region.first + 1 > 5000) {
      continue;
    }

    for (uint32_t j = region.first; j <= region.second; ++j) {
      uint32_t genome_pos = hash_table.index[j];
      uint32_t chr_id = getChromID(genome.start_index, genome_pos);
      if (genome_pos - genome.start_index[chr_id] < seed_i)
        continue;
      genome_pos = genome_pos - seed_i;
      if (genome_pos + read_len >= genome.start_index[chr_id + 1])
        continue;

      /* check the position */
      uint32_t num_of_mismatch = 0;
      uint32_t num_of_nocared = seed_pattern_repeats * SEEPATTERNNOCAREDWEIGHT
                                + seed_i;
      for (uint32_t p = 0;
           p < num_of_nocared && num_of_mismatch <= cur_max_mismatches; ++p) {
        if (genome.sequence[genome_pos + F2NOCAREDPOSITION[seed_i][p]]
            != read[F2NOCAREDPOSITION[seed_i][p]]) {
          num_of_mismatch++;
        }
      }
      for (uint32_t p = seed_pattern_repeats * SEEPATTERNLEN + seed_i;
           p < read_len && num_of_mismatch <= cur_max_mismatches; ++p) {
        if (genome.sequence[genome_pos + p] != read[p]) {
          num_of_mismatch++;
        }
      }

      if (num_of_mismatch > max_mismatches) {
        continue;
      }
      top_match.Push(CandidatePosition(genome_pos, strand, num_of_mismatch));
      if (top_match.Full()) {
        cur_max_mismatches = top_match.Top().mismatch;
      }
    }
  }
}
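// TopCandidates is used above as a bounded container that keeps the best
// candidate positions and exposes Empty/Full/Top/Push, with Top() returning
// the worst retained candidate (largest mismatch count) so the caller can
// tighten cur_max_mismatches. Its real definition is not in this snippet; a
// minimal sketch of such a structure, built on std::priority_queue with
// invented names, could look like this:
#include <cstdint>
#include <queue>
#include <vector>

struct SketchCandidate {
  uint32_t genome_pos;
  char strand;
  uint32_t mismatch;
};

struct SketchCandidateCmp {
  bool operator()(const SketchCandidate& a, const SketchCandidate& b) const {
    return a.mismatch < b.mismatch;   // max-heap keyed on mismatch count
  }
};

class SketchTopCandidates {
 public:
  explicit SketchTopCandidates(size_t capacity) : capacity_(capacity) {}
  bool Empty() const { return heap_.empty(); }
  bool Full() const { return heap_.size() >= capacity_; }
  const SketchCandidate& Top() const { return heap_.top(); }
  void Push(const SketchCandidate& c) {
    if (heap_.size() < capacity_) {
      heap_.push(c);
    } else if (c.mismatch < heap_.top().mismatch) {
      heap_.pop();                    // drop the current worst candidate
      heap_.push(c);
    }
  }

 private:
  size_t capacity_;
  std::priority_queue<SketchCandidate, std::vector<SketchCandidate>,
                      SketchCandidateCmp> heap_;
};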
Node TarjanHD::hd(const Digraph& g,
                  const DoubleArcMap& w,
                  SubDigraph& subG,
                  NodeNodeMap& mapToOrgG,
                  NodeNodeMap& G2T,
                  const ArcList& sortedArcs,
                  int i)
{
  assert(i >= 0);

  int m = static_cast<int>(sortedArcs.size());
  assert(m == lemon::countArcs(subG));

  int r = m - i;
  if (r == 0 || r == 1)
  {
    // add to _T a subtree rooted at node r,
    // labeled with w(e_m) and having n children labeled with
    // the vertices of subG
    Node root = _T.addNode();
    _label[root] = w[sortedArcs.back()];
    _T2OrgG[root] = lemon::INVALID;

    for (SubNodeIt v(subG); v != lemon::INVALID; ++v)
    {
      Node vv = G2T[v];
      if (vv == lemon::INVALID)
      {
        vv = _T.addNode();
        _label[vv] = -1;
        if (mapToOrgG[v] != lemon::INVALID)
        {
          _orgG2T[mapToOrgG[v]] = vv;
          _T2OrgG[vv] = mapToOrgG[v];
        }
      }
      _T.addArc(vv, root);
    }
    return root;
  }
  else
  {
    int j = (i + m) % 2 == 0 ? (i + m) / 2 : (i + m) / 2 + 1;

    // remove arcs j+1 .. m
    ArcListIt arcEndIt, arcIt = sortedArcs.begin();
    for (int k = 1; k <= m; ++k)
    {
      if (k == j + 1)
      {
        arcEndIt = arcIt;
      }
      if (k > j)
      {
        subG.disable(*arcIt);
      }
      ++arcIt;
    }

    // compute SCCs
    IntNodeMap comp(g, -1);
    int numSCC = lemon::stronglyConnectedComponents(subG, comp);
    if (numSCC == 1)
    {
      ArcList newSortedArcs(sortedArcs.begin(), arcEndIt);
      // _subG is strongly connected
      return hd(g, w, subG, mapToOrgG, G2T, newSortedArcs, i);
    }
    else
    {
      // determine strongly connected components
      NodeHasher<Digraph> hasher(g);
      NodeSetVector components(numSCC, NodeSet(42, hasher));
      for (SubNodeIt v(subG); v != lemon::INVALID; ++v)
      {
        components[comp[v]].insert(v);
      }

      NodeVector roots(numSCC, lemon::INVALID);
      double w_i = i > 0 ? w[getArcByRank(sortedArcs, i)]
                         : -std::numeric_limits<double>::max();
      for (int k = 0; k < numSCC; ++k)
      {
        const NodeSet& component = components[k];
        if (component.size() > 1)
        {
          // construct new sorted arc list for component: O(m) time
          ArcList newSortedArcs;
          for (ArcListIt arcIt = sortedArcs.begin(); arcIt != arcEndIt; ++arcIt)
          {
            Node u = g.source(*arcIt);
            Node v = g.target(*arcIt);

            bool u_in_comp = component.find(u) != component.end();
            bool v_in_comp = component.find(v) != component.end();

            if (u_in_comp && v_in_comp)
            {
              newSortedArcs.push_back(*arcIt);
            }
          }

          // remove nodes not in component from the graph
          for (NodeIt v(g); v != lemon::INVALID; ++v)
          {
            subG.status(v, component.find(v) != component.end());
          }

          // find new_i, i.e. largest k such that w(e'_k) <= w(e_i)
          // if i == 0 or i > 0 but no such k exists => new_i := 0
          int new_i = get_i(newSortedArcs, w, w_i);

          // recurse on strongly connected component
          roots[k] = hd(g, w, subG, mapToOrgG, G2T, newSortedArcs, new_i);
        }

        // enable all nodes again
        for (int k = 0; k < numSCC; ++k)
        {
          const NodeSet& component = components[k];
          for (NodeSetIt nodeIt = component.begin();
               nodeIt != component.end(); ++nodeIt)
          {
            subG.enable(*nodeIt);
          }
        }
      }

      // construct the condensed graph:
      // each strongly connected component is collapsed into a single node, and
      // from the resulting sets of multiple arcs retain only those with minimum weight
      Digraph c;
      DoubleArcMap ww(c);
      NodeNodeMap mapCToOrgG(c);
      NodeNodeMap C2T(c);
      ArcList newSortedArcs;

      int new_i = constructCondensedGraph(g, w, mapToOrgG, G2T, sortedArcs,
                                          comp, components, roots, j,
                                          c, ww, mapCToOrgG, C2T, newSortedArcs);

      BoolArcMap newArcFilter(c, true);
      BoolNodeMap newNodeFilter(c, true);
      SubDigraph subC(c, newNodeFilter, newArcFilter);

      Node root = hd(c, ww, subC, mapCToOrgG, C2T, newSortedArcs, new_i);
      return root;
    }
  }

  return lemon::INVALID;
}
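// get_i and getArcByRank are not shown in this snippet. Assuming the arc lists
// are kept sorted by non-decreasing weight, get_i only has to report how many
// arcs have weight <= w_i (0 if none, e.g. when i == 0). The sketch below
// restates that rank computation on a plain vector of weights with invented
// names; the actual ArcList appears to be a linked list, where a linear scan
// would serve the same purpose.
#include <algorithm>
#include <vector>

// Returns the largest 1-based rank k such that sortedWeights[k-1] <= w_i,
// or 0 if no such arc exists.
inline int SketchGetI(const std::vector<double>& sortedWeights, double w_i)
{
  return static_cast<int>(std::upper_bound(sortedWeights.begin(),
                                           sortedWeights.end(),
                                           w_i) - sortedWeights.begin());
}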