Example #1
int main()
{
    StringListTest t1;
    StringHashTest t2;
    QTest::qExec(&t1);
    QTest::qExec(&t2);


    RandomFunction f[3];
    StringHash h;
    h.setFunc(&f[0]);

    int answer = -1;
    int x = 0;
    string s;

    cout << "Commands:\n0 - exit\n";
    cout << "1 - add string to hash\n";
    cout << "2 - remove string from hash\n";
    cout << "3 - find string from hash\n";
    cout << "4 - get status\n";
    cout << "5 - set func(1..3)\n";

    while (answer != 0)
    {
        cout << "Command: ";
        cin >> answer;
        if (answer == 1)
        {
            cout << "Input string\n";
            cin >> s;
            h.add(s);

        }
        else if (answer == 2)
        {
            cout << "Input string\n";
            cin >> s;
            h.remove(s);    // assumed StringHash member, by analogy with add()
        }
        else if (answer == 3)
        {
            cout << "Input string\n";
            cin >> s;
            cout << (h.find(s) ? "found\n" : "not found\n");    // assumed member
        }
        else if (answer == 4)
        {
            cout << h.getStatus() << "\n";    // assumed member
        }
        else if (answer == 5)
        {
            cout << "Func number (1..3): ";
            cin >> x;
            if (x >= 1 && x <= 3)
                h.setFunc(&f[x - 1]);
        }
    }

    return 0;
}
Example #2
bool isValid(StringHash &aHS, StringHash &bHS, int L){
  vector<u64> as = aHS.getAll(L);
  vector<u64> bs = bHS.getAll(L); 
  sort(as.begin(), as.end());
  for (size_t i = 0; i < bs.size(); i++){
    if (binary_search(as.begin(), as.end(), bs[i])) return true;
  }
  return false;
}
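isValid() above asks whether the two hash sets share any value of length L: it sorts one side once and binary-searches it for every value from the other side. A self-contained sketch of the same check over plain vectors (u64 and StringHash::getAll() are taken on faith from this excerpt):

#include <algorithm>
#include <cstdint>
#include <vector>

// Same sort + binary_search pattern as isValid(): true if the two
// (unsorted) hash lists share at least one value.
bool sharesAnyHash(std::vector<uint64_t> as, const std::vector<uint64_t>& bs)
{
    std::sort(as.begin(), as.end());
    for (uint64_t b : bs)
        if (std::binary_search(as.begin(), as.end(), b))
            return true;
    return false;
}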
Example #3
/**
crearConfigServer
@param past: Administrator Program - Transaction Server port.
@param pcst: Client Program - Transaction Server port.
@param maxClients: Maximum number of clients.
@param bdNombre: Database name.
@param bdDriver: Driver name.
@param bdHost: Host name.
@param bdPuerto: Port name.
@param usuario: User name.
@param password: User password.
*/
QPDocumentoXML SbXmlConfig::crearConfigServer(StringHash argumentos)
{
	QPDocumentoXML conf;
	QDomElement raiz = conf.createElement( "QPSConfig" );
	conf.appendChild( raiz );
	
	QDomElement puertoAST = conf.createElement("PuertoAST");
	QDomText pastTxt = conf.createTextNode( *argumentos.find("PuertoAST") );
	puertoAST.appendChild(pastTxt);
	raiz.appendChild(puertoAST);
	
	QDomElement puertoCST = conf.createElement("PuertoCST");
	QDomText pcstTxt = conf.createTextNode(*argumentos.find("PuertoCST") );
	puertoCST.appendChild(pcstTxt);
	raiz.appendChild(puertoCST);

	QDomElement clientes = conf.createElement("MaxClients");
	QDomText mcTxt = conf.createTextNode(*argumentos.find("MaxClients"));
	clientes.appendChild(mcTxt);
	raiz.appendChild(clientes);
	
	QDomElement conexion = conf.createElement("QPConexion");
	raiz.appendChild(conexion);
	
	QDomElement bd = conf.createElement("QPBD");
	conexion.appendChild(bd);
	
	QDomElement bdname = conf.createElement("bdnombre");
	QDomText bdnameTxt = conf.createTextNode(*argumentos.find("bdnombre"));
	bdname.appendChild(bdnameTxt);
	bd.appendChild(bdname);
			
	QDomElement bddriver = conf.createElement("bddriver");
	QDomText bddriverTxt = conf.createTextNode(*argumentos.find("bddriver"));
	bddriver.appendChild(bddriverTxt);
	bd.appendChild(bddriver);

	QDomElement bdhost = conf.createElement("bdhost");
	QDomText bdhostTxt = conf.createTextNode(*argumentos.find("bdhost"));
	bdhost.appendChild(bdhostTxt);
	bd.appendChild(bdhost);
	
	QDomElement bdpuerto = conf.createElement("bdpuerto");
	QDomText bdpuertoTxt = conf.createTextNode(*argumentos.find("bdpuerto"));
	bdpuerto.appendChild(bdpuertoTxt);
	bd.appendChild(bdpuerto);	
		
	QDomElement user = conf.createElement("usuario");
	QDomText usuarioTxt = conf.createTextNode(*argumentos.find("usuario"));
	user.appendChild(usuarioTxt);
	bd.appendChild(user);
	
	QDomElement pass = conf.createElement("password");
	QDomText passTxt = conf.createTextNode(*argumentos.find("password"));
	pass.appendChild(passTxt);
	bd.appendChild(pass);
		
	return conf;
}
Example #4
void PluginFrame::OnPluginframeBtnOpenClick( wxCommandEvent& event )
{
////@begin wxEVT_COMMAND_BUTTON_CLICKED event handler for ID_PLUGINFRAME_BTN_OPEN in PluginFrame.
    // Before editing this code, remove the block markers.
    // wxMessageBox(_("To be implemented"));

    StringHash pluginHash = GetAvailablePlugins();

    // explicit type cast to avoid warning on conversion from size_t to int;
    // we reckon we'll never have over 4 billion (!) plugins.
    int n_pluginLibrary = (int)pluginHash.size();

    if(n_pluginLibrary == 0)
    {
        wxMessageBox(NO_PLUGIN_FOUND);
    }
    else
    {
        //
        wxString *choices = new wxString [n_pluginLibrary];

        //
        StringHash::iterator j = pluginHash.begin();
        for(int i = 0; i < n_pluginLibrary; i++, j++)
        {
            choices[i] = j->first;

            if (m_pluginSelected == choices[i])
                m_indexPlugin = i;
        }

        //
        wxSingleChoiceDialog dialog(this,
                                    CHOOSE_PLUGIN_CAPTION,
                                    CHOOSE_PLUGIN,
                                    n_pluginLibrary, choices);

        //dialog.SetSelection(m_indexPlugin);

        if (dialog.ShowModal() == wxID_OK)
        {
            wxString choice = choices[dialog.GetSelection()];
            //Hide();	// done so that there is no more than one instance of this window visible at any time

            LoadPlugin(pluginHash[choice]);
        }
        // Delete the choices (after the dialog modal, no more need of them)
        delete [] choices;
    }

////@end wxEVT_COMMAND_BUTTON_CLICKED event handler for ID_PLUGINFRAME_BTN_OPEN in PluginFrame.
}
Example #5
StringHash Tools::splitStyle(const QString &style)
{
    StringHash hash;
    if (style.isEmpty())
        return hash;
    QStringList list = removeEdgeSpaces(style).split(";", QString::SkipEmptyParts);
    for (int i = 0; i < list.count(); ++i) {
        QString attr = list.at(i);
        int pos = attr.indexOf(QLatin1Char(':'));
        if (pos != -1)
            hash.insert(removeEdgeSpaces(attr.mid(0, pos)), removeEdgeSpaces(attr.mid(pos+1)));
    }
    return hash;
}
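A minimal, self-contained sketch of the same "name:value;name:value" parsing done by splitStyle(), using QHash<QString, QString> directly and QString::trimmed() in place of the project's StringHash typedef and removeEdgeSpaces() helper (both stand-ins are assumptions about this excerpt):

#include <QDebug>
#include <QHash>
#include <QString>
#include <QStringList>

int main()
{
    const QString style = "fill: #ff0000 ; stroke:none;";
    QHash<QString, QString> hash;
    const QStringList parts = style.trimmed().split(';', QString::SkipEmptyParts);
    for (const QString &attr : parts) {
        const int pos = attr.indexOf(QLatin1Char(':'));
        if (pos != -1)
            hash.insert(attr.left(pos).trimmed(), attr.mid(pos + 1).trimmed());
    }
    qDebug() << hash.value("fill");   // "#ff0000"
    qDebug() << hash.value("stroke"); // "none"
    return 0;
}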
Example #6
// DirPath should be a directory with a trailing slash.
// Returns all files in all search paths, as unique relative paths.
// Subdirectories will have a trailing slash.
// All files and directories that start with . are skipped.
StringHash< String > RelativeDirectoryFileList( const Array< String > & searchPaths, const char * RelativeDirPath )
{
	//Check each of the mirrors in searchPaths and build up a list of unique strings
	StringHash< String >	uniqueStrings;

	const int numSearchPaths = searchPaths.GetSizeI();
	for ( int index = 0; index < numSearchPaths; ++index )
	{
		const String fullPath = searchPaths[index] + String( RelativeDirPath );

		DIR * dir = opendir( fullPath.ToCStr() );
		if ( dir != NULL )
		{
			struct dirent * entry;
			while ( ( entry = readdir( dir ) ) != NULL )
			{
				if ( entry->d_name[ 0 ] == '.' )
				{
					continue;
				}
				if ( entry->d_type == DT_DIR )
				{
					String s( RelativeDirPath );
					s += entry->d_name;
					s += "/";
					uniqueStrings.SetCaseInsensitive( s, s );
				}
				else if ( entry->d_type == DT_REG )
				{
					String s( RelativeDirPath );
					s += entry->d_name;
					uniqueStrings.SetCaseInsensitive( s, s );
				}
			}
			closedir( dir );
		}
	}

	return uniqueStrings;
}
Example #7
void Node::SetNetParentAttr(const PODVector<unsigned char>& value)
{
    Scene* scene = GetScene();
    if (!scene)
        return;

    MemoryBuffer buf(value);
    // If nothing in the buffer, parent is the root node
    if (buf.IsEof())
    {
        scene->AddChild(this);
        return;
    }

    unsigned baseNodeID = buf.ReadNetID();
    Node* baseNode = scene->GetNode(baseNodeID);
    if (!baseNode)
    {
        LOGWARNING("Failed to find parent node " + String(baseNodeID));
        return;
    }

    // If buffer contains just an ID, the parent is replicated and we are done
    if (buf.IsEof())
        baseNode->AddChild(this);
    else
    {
        // Else the parent is local and we must find it recursively by name hash
        StringHash nameHash = buf.ReadStringHash();
        Node* parentNode = baseNode->GetChild(nameHash, true);
        if (!parentNode)
            LOGWARNING("Failed to find parent node with name hash " + nameHash.ToString());
        else
            parentNode->AddChild(this);
    }
}
Example #8
void SceneNode::addComponent(Component* component)
{
	StringHash type = component->getType();

	if (hasComponent(type))
	{
		Logger::warn("Scene node " + name_ + " already has a component of type " + std::to_string(type.getValue()));
		return;
	}

	components_[type] = component;

	component->node_ = this;
	component->onNodeSet();
}
Example #9
StringHash StringHashRegister::RegisterString(const StringHash& hash, const char* string)
{
    if (mutex_)
        mutex_->Acquire();

    auto iter = map_.find(hash);
    if (iter == map_.end())
    {
        map_[hash] = string;
    }
    else if (iter->compare(string, Qt::CaseInsensitive) != 0)
    {
        URHO3D_LOGWARNING(QString::asprintf("StringHash collision detected! Both \"%s\" and \"%s\" have hash #%s",
            string, qPrintable(*iter), qPrintable(hash.ToString())));
    }

    if (mutex_)
        mutex_->Release();

    return hash;
}
Example #10
StringHash StringHashRegister::RegisterString(const StringHash& hash, const char* string)
{
    if (mutex_)
        mutex_->Acquire();

    auto iter = map_.Find(hash);
    if (iter == map_.End())
    {
        map_.Populate(hash, string);
    }
    else if (iter->second_.Compare(string, false) != 0)
    {
        URHO3D_LOGWARNINGF("StringHash collision detected! Both \"%s\" and \"%s\" have hash #%s",
            string, iter->second_.CString(), hash.ToString().CString());
    }

    if (mutex_)
        mutex_->Release();

    return hash;
}
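The warning above fires when two distinct strings map to the same 32-bit value. For intuition, here is a minimal sketch of an SDBM-style case-insensitive string hash, which to my understanding is what Urho3D's StringHash computes (the exact folding and constants are assumptions, not a drop-in replacement):

#include <cstdio>

// SDBM-style 32-bit hash with ASCII lowercasing; sketch only.
static unsigned SdbmHash(const char* str)
{
    unsigned hash = 0;
    while (*str)
    {
        unsigned char c = (unsigned char)*str++;
        if (c >= 'A' && c <= 'Z')
            c += 'a' - 'A';
        hash = c + (hash << 6) + (hash << 16) - hash;
    }
    return hash;
}

int main()
{
    // RegisterString() would log a collision if two different inputs
    // ever produced the same value here.
    printf("Position -> %08x\n", SdbmHash("Position"));
    printf("Rotation -> %08x\n", SdbmHash("Rotation"));
    return 0;
}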
Example #11
void Technique::RemovePass(StringHash type)
{
    passes_.Erase(type.Value());
}
Example #12
bool BackgroundLoader::QueueResource(StringHash type, const ea::string& name, bool sendEventOnFailure, Resource* caller)
{
    StringHash nameHash(name);
    ea::pair<StringHash, StringHash> key = ea::make_pair(type, nameHash);

    MutexLock lock(backgroundLoadMutex_);

    // Check if already exists in the queue
    if (backgroundLoadQueue_.find(key) != backgroundLoadQueue_.end())
        return false;

    BackgroundLoadItem& item = backgroundLoadQueue_[key];
    item.sendEventOnFailure_ = sendEventOnFailure;

    // Make sure the pointer is non-null and is a Resource subclass
    item.resource_ = DynamicCast<Resource>(owner_->GetContext()->CreateObject(type));
    if (!item.resource_)
    {
        URHO3D_LOGERROR("Could not load unknown resource type " + type.ToString());

        if (sendEventOnFailure && Thread::IsMainThread())
        {
            using namespace UnknownResourceType;

            VariantMap& eventData = owner_->GetEventDataMap();
            eventData[P_RESOURCETYPE] = type;
            owner_->SendEvent(E_UNKNOWNRESOURCETYPE, eventData);
        }

        backgroundLoadQueue_.erase(key);
        return false;
    }

    URHO3D_LOGDEBUG("Background loading resource " + name);

    item.resource_->SetName(name);
    item.resource_->SetAsyncLoadState(ASYNC_QUEUED);

    // If this is a resource calling for the background load of more resources, mark the dependency as necessary
    if (caller)
    {
        ea::pair<StringHash, StringHash> callerKey = ea::make_pair(caller->GetType(), caller->GetNameHash());
        auto j = backgroundLoadQueue_.find(callerKey);
        if (j != backgroundLoadQueue_.end())
        {
            BackgroundLoadItem& callerItem = j->second;
            item.dependents_.insert(callerKey);
            callerItem.dependencies_.insert(key);
        }
        else
            URHO3D_LOGWARNING("Resource " + caller->GetName() +
                       " requested for a background loaded resource but was not in the background load queue");
    }

    // Start the background loader thread now
    if (!IsStarted())
        Run();

    return true;
}
Example #13
bool Serializer::WriteStringHash(const StringHash& value)
{
    return WriteUInt(value.Value());
}
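The read side is symmetric: the hash travels as a plain 32-bit unsigned. A sketch assuming Urho3D's Deserializer::ReadUInt() and the explicit StringHash(unsigned) constructor (named ...Sketch to make clear it is not the engine's own API; if I recall correctly, Deserializer also exposes a matching ReadStringHash()):

// Sketch of the deserialization counterpart to WriteStringHash().
// Assumes Deserializer::ReadUInt() and StringHash(unsigned), as in Urho3D.
StringHash ReadStringHashSketch(Deserializer& source)
{
    return StringHash(source.ReadUInt());
}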
Example #14
int main(int argc, char ** argv)
{
    printf("glfMultiples -- SNP calls based on .glf or .glz files\n");
    printf("(c) 2008-2011 Goncalo Abecasis, Sebastian Zoellner, Yun Li\n\n");

    String pedfile;
    String positionfile;
    String callfile;
    String glfAliases;
    String glfPrefix;
    String glfSuffix;
    ParameterList pl;

    double posterior = 0.50;
    int    mapQuality = 0;
    int    minTotalDepth = 1;
    int    maxTotalDepth = INT_MAX;
    bool   verbose = false;
    bool   mapQualityStrict = false;
    bool   hardFilter = false;
    bool   smartFilter = false;
    bool   softFilter = true;
    bool   robustPrior = true;
    bool   uniformPrior = false;
    String xLabel("X"), yLabel("Y"), mitoLabel("MT");
    int    xStart = 2699520, xStop = 154931044;

    BEGIN_LONG_PARAMETERS(longParameters)
        LONG_PARAMETER_GROUP("Pedigree File")
        LONG_STRINGPARAMETER("ped", &pedfile)
        LONG_PARAMETER_GROUP("Map Quality Filter")
        LONG_INTPARAMETER("minMapQuality", &mapQuality)
        LONG_PARAMETER("strict", &mapQualityStrict)
        LONG_PARAMETER_GROUP("Total Depth Filter")
        LONG_INTPARAMETER("minDepth", &minTotalDepth)
        LONG_INTPARAMETER("maxDepth", &maxTotalDepth)
        LONG_PARAMETER_GROUP("Position Filter")
        LONG_STRINGPARAMETER("positionFile", &positionfile)
        LONG_PARAMETER_GROUP("Chromosome Labels")
        LONG_STRINGPARAMETER("xChr", &xLabel)
        LONG_STRINGPARAMETER("yChr", &yLabel)
        LONG_STRINGPARAMETER("mito", &mitoLabel)
        LONG_INTPARAMETER("xStart", &xStart)
        LONG_INTPARAMETER("xStop", &xStop)
        LONG_PARAMETER_GROUP("Filtering Options")
        EXCLUSIVE_PARAMETER("hardFilter", &hardFilter)
        EXCLUSIVE_PARAMETER("smartFilter", &smartFilter)
        EXCLUSIVE_PARAMETER("softFilter", &softFilter)
        LONG_PARAMETER_GROUP("Prior Options")
        EXCLUSIVE_PARAMETER("uniformPrior", &uniformPrior)
        EXCLUSIVE_PARAMETER("robustPrior", &robustPrior)
        LONG_PARAMETER_GROUP("Output")
        LONG_PARAMETER("verbose", &verbose)
        LONG_PARAMETER_GROUP("Sample Names")
        LONG_STRINGPARAMETER("glfAliases", &glfAliases)
        LONG_PARAMETER_GROUP("Prefixes and Suffixes")
        LONG_STRINGPARAMETER("glfPrefix",&glfPrefix)
        LONG_STRINGPARAMETER("glfSuffix",&glfSuffix)
        END_LONG_PARAMETERS();

    pl.Add(new StringParameter('b', "Base Call File", callfile));
    pl.Add(new DoubleParameter('p', "Posterior Threshold", posterior));
    pl.Add(new LongParameters("Additional Options", longParameters));
    int argstart = pl.ReadWithTrailer(argc, argv) + 1;
    pl.Status();

    if (posterior < 0.50)
        error("Posterior threshold for genotype calls (-p option) must be > 0.50.");

    time_t t;
    time(&t);

    printf("Analysis started on %s\n", ctime(&t));
    fflush(stdout);

    int n = argc - argstart;
    argv += argstart;

    Pedigree ped;
    if (!pedfile.IsEmpty())
    {
        ped.pd.AddStringColumn("glfFile");
        ped.Load(pedfile);

        n = ped.count;
    }
    else
        if (n == 0)
            error("No pedigree file present and no glf files listed at the end of command line\n");

    // Prior for finding difference from the reference at a particular site
    //BgzfFileType::setRequireEofBlock(false);

    double prior = 0.0;
    for (int i = 1; i <= 2 * n; i++)
        prior += 1.0 / i;
    prior *= 0.001;

    glfHandler * glf = new glfHandler[n];

    int firstGlf = n;
    if (ped.count)
    {
        bool warn = false;

        for (int i = n - 1; i >= 0; i--)
        {
            if (!glf[i].Open(ped[i].strings[0]))
            {
                printf("Failed to open genotype likelihood file [%s] for individual %s:%s\n",
                       (const char *) ped[i].strings[0],
                       (const char *) ped[i].famid,
                       (const char *) ped[i].pid);

                glf[i].OpenStub();
                warn = true;
            }
            else
                firstGlf = i;
        }

        if (warn)
            printf("\n");

        if (firstGlf == n)
            error("No genotype likelihood files could be opened");
    }
    else
    {
        for (int i = firstGlf = 0; i < n; i++)
        {
            String glfName = glfPrefix + String(argv[i]) + glfSuffix;
            if (!glf[i].Open(glfName))
                error("Failed to open genotype likelihood file [%s]\n", glfName.c_str());
        }
    }

    StringAlias aliases;
    aliases.ReadFromFile(glfAliases);

    printf("Calling genotypes for files ...\n");
    for (int i = 0; i < n; i++)
        printf("%s\n", ped.count ? (const char *) ped[i].strings[0] : argv[i]);
    printf("\n");

    baseCalls = fopen(callfile, "wt");

    if (baseCalls != NULL)
    {
        fprintf(baseCalls, "##fileformat=VCFv4.0\n");
        ReportDate(baseCalls);
        fprintf(baseCalls, "##source=glfMultiples\n");
        fprintf(baseCalls, "##minDepth=%d\n", minTotalDepth);
        fprintf(baseCalls, "##maxDepth=%d\n", maxTotalDepth);
        fprintf(baseCalls, "##minMapQuality=%d\n", mapQuality);
        fprintf(baseCalls, "##minPosterior=%.4f\n", posterior);
        fprintf(baseCalls, "##INFO=<ID=DP,Number=1,Type=Integer,Description=\"Total Depth\">\n");
        fprintf(baseCalls, "##INFO=<ID=MQ,Number=1,Type=Integer,Description=\"Root Mean Squared Mapping Quality\">\n");
        fprintf(baseCalls, "##INFO=<ID=NS,Number=1,Type=Integer,Description=\"Number of samples with coverage\">\n");
        fprintf(baseCalls, "##INFO=<ID=AN,Number=1,Type=Integer,Description=\"Total number of alleles (with coverage)\">\n");
        fprintf(baseCalls, "##INFO=<ID=AC,Number=.,Type=Integer,Description=\"Alternative allele count (with coverage)\">\n");
        fprintf(baseCalls, "##INFO=<ID=AF,Number=.,Type=Float,Description=\"Alternate allele frequency\">\n");
        fprintf(baseCalls, "##INFO=<ID=AB,Number=1,Type=Float,Description=\"Estimated allele balance between the alleles\">\n");
	if ( mapQuality > 0 ) {
	  fprintf(baseCalls, "##FILTER=<ID=mq%d,Description=\"Mapping Quality less than %d\">\n",mapQuality,mapQuality);
	}
	if ( minTotalDepth > 1 ) {
	  fprintf(baseCalls, "##FILTER=<ID=dp%d,Description=\"Total Read Depth less than %d\">\n",minTotalDepth,minTotalDepth);
	}
	if ( maxTotalDepth < INT_MAX ) {
	  fprintf(baseCalls, "##FILTER=<ID=DP%d,Description=\"Total Read Depth greater than %d\">\n",maxTotalDepth,maxTotalDepth);
	}
        fprintf(baseCalls, "##FORMAT=<ID=GT,Number=1,Type=String,Description=\"Most Likely Genotype\">\n");
        fprintf(baseCalls, "##FORMAT=<ID=GQ,Number=1,Type=Integer,Description=\"Genotype Call Quality\">\n");
        fprintf(baseCalls, "##FORMAT=<ID=DP,Number=1,Type=Integer,Description=\"Read Depth\">\n");
        fprintf(baseCalls, "##FORMAT=<ID=PL,Number=3,Type=Integer,Description=\"Genotype Likelihoods for Genotypes 0/0,0/1,1/1\">\n");
        fprintf(baseCalls, "##FORMAT=<ID=PL3,Number=6,Type=Integer,Description=\"Genotype Likelihoods for Genotypes 0/0,0/1,1/1,0/2,1/2,2/2\">\n");
        fprintf(baseCalls, "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT");
        for (int i = 0; i < n; i++)
            fprintf(baseCalls, "\t%s", ped.count ?
                    (const char *) (ped[i].famid + ":" + ped[i].pid) :
                    (const char *) aliases.GetAlias(argv[i]));

        fprintf(baseCalls, "\n");
    }

    StringArray buffer, tokens;
    StringHash  positions;

    buffer.Read(positionfile);

    for (int i = 0; i < buffer.Length(); i++)
    {
        tokens.ReplaceTokens(buffer[i], " \t:");

        if (tokens.Length() != 2) continue;

        positions.Add(tokens[0] + ":" + (int(tokens[1].AsInteger() - 1)));
    }

    int chromosomeType = 0;

    while (glf[firstGlf].NextSection())
    {
        for (int i = firstGlf + 1; i < n; i++)
        {
            if (glf[i].isStub) continue;

            glf[i].NextSection();

            if (glf[firstGlf].maxPosition != glf[i].maxPosition || glf[firstGlf].label != glf[i].label)
            {
                error("Genotype files '%s' and '%s' are not compatible ...\n"
                      "    File '%s' has section %s with %d entries ...\n"
                      "    File '%s' section %s with %d entries ...\n",
                      ped.count ? (const char *) ped[firstGlf].strings[0] : argv[firstGlf],
                      ped.count ? (const char *) ped[i].strings[0] : argv[i],
                      ped.count ? (const char *) ped[firstGlf].strings[0] : argv[firstGlf],
                      (const char *) glf[firstGlf].label, glf[firstGlf].maxPosition,
                      ped.count ? (const char *) ped[i].strings[0] : argv[i],
                      (const char *) glf[i].label, glf[i].maxPosition);
            }
        }

        chromosomeType = CT_AUTOSOME;

        if (ped.count)
        {
            if (glf[firstGlf].label == xLabel) chromosomeType = CT_CHRX;
            if (glf[firstGlf].label == yLabel) chromosomeType = CT_CHRY;
            if (glf[firstGlf].label == mitoLabel) chromosomeType = CT_MITO;
        }

        printf("Processing section %s with %d entries\n",
               (const char *) glf[firstGlf].label, glf[firstGlf].maxPosition);

        int refBase = 0;
        int position = 0;
        int mapQualityFilter = 0;
        int depthFilter = 0;
        int homozygousReference = 0;
        int transitions = 0;
        int transversions = 0;
        int otherPolymorphisms = 0;
        int sinkFilter = 0;
        int smartFilterHits = 0;
        int baseCounts[5] = {0, 0, 0, 0, 0};

        String filter;

        while (true)
        {
            if (position > 0)
            {
                // Check whether we have reached the end of the current chromosome
                bool done = true;
                for (int i = 0; i < n; i++)
                    if (glf[i].data.recordType != 0)
                        done = false;
                if (done) break;
            }

            // Advance to the next position where needed
            for (int i = 0; i < n; i++)
                if (glf[i].position == position)
                    glf[i].NextBaseEntry();

            // Figure out the current analysis position
            refBase = glf[0].data.refBase;
            position = glf[0].position;
            for (int i = 1; i < n; i++)
                if (position > glf[i].position)
                {
                    position = glf[i].position;
                    refBase = glf[i].data.refBase;
                }

            // Avoid alignments that extend past the end of the chromosome
            if (position >= glf[firstGlf].maxPosition)
                break;

            baseCounts[(int)refBase]++;

            // These lines can be uncommented for debugging purposes
            // for (int i = 0; i < n; i++)
            //   printf("GLF %d : position %d, refBase %d\n", i, position, refBase);
            // printf("Position: %d, refBase: %d\n", position, refBase);

            if (positions.Entries())
            {
                filter = glf[firstGlf].label + ":" + position;

                if (positions.Find(filter) < 0) continue;
            }

            if (refBase == 0) continue;

            // Corrected calculation of root-mean-square Map Quality score 
            // and check if we have at least one sample with good quality data

            int     currentDepth = 0, totalDepth = 0, numCovered = 0;
            double  currentQuality = 0.0, averageMapQuality = 0.0;
            bool    passMapQualityFilter = false;

            for (int i = 0; i < n; i++)
            {
                currentDepth = glf[i].GetDepth(position);
                if (currentDepth != 0)
                {
                    totalDepth += currentDepth;
                    numCovered++;     // not currently used -- will be "NS"
                    currentQuality = glf[i].GetMapQuality(position);
                    averageMapQuality += 
                        currentDepth * currentQuality * currentQuality;
                    if (currentQuality >= mapQuality)
                        passMapQualityFilter = true;
                }
            }
            averageMapQuality = sqrt(averageMapQuality / totalDepth);


            filter.Clear();

            if (!passMapQualityFilter)
            {
                if (filter.Length() == 0) mapQualityFilter++;
                if (hardFilter) continue;
                filter.catprintf("%smq%d", filter.Length() ? ";" : "", mapQuality);
            }

            if (totalDepth < minTotalDepth)
            {
                if (filter.Length() == 0) depthFilter++;
                if (hardFilter) continue;
                filter.catprintf("%sdp%d", filter.Length() ? ";" : "", minTotalDepth);
            }

            if (totalDepth > maxTotalDepth)
            {
                if (filter.Length() == 0) depthFilter++;
                if (hardFilter) continue;
                filter.catprintf("%sDP%d", filter.Length() ? ";" : "", maxTotalDepth);
            }

            // Create convenient aliases for each base
            unsigned char transition = (((refBase - 1) ^ 2) + 1);
            unsigned char transvers1 = (((refBase - 1) ^ 3) + 1);
            unsigned char transvers2 = (((refBase - 1) ^ 1) + 1);

            int homRef = glf[0].GenotypeIndex(refBase, refBase);

            // Calculate likelihood assuming everyone is homozygous for the reference
            double lRef = log(1.0 - prior);
            for (int i = 0; i < n; i++)
                lRef += log(glf[i].GetLikelihoods(position)[homRef]);

            // Calculate maximum likelihood for a variant
            if (smartFilter)
            {
                double anyVariant = log(prior) + FilteringLikelihood(glf, n, position, refBase);

                if (exp(lRef - anyVariant) > (1.0 - posterior)/posterior)
                {
                    smartFilterHits++;
                    continue;
                }
            }

	    //fprintf(stderr,"position = %d\n",position);

            double pTs = uniformPrior ? 1./3. : 2./3.;
            double pTv = uniformPrior ? 1./3. : 1./6.;

            // Calculate likelihoods for the most likely SNP configurations
            double refTransition = log(prior * pTs) + PolymorphismLikelihood(glf, n, position, refBase, transition);
            double refTransvers1 = log(prior * pTv) + PolymorphismLikelihood(glf, n, position, refBase, transvers1);
            double refTransvers2 = log(prior * pTv) + PolymorphismLikelihood(glf, n, position, refBase, transvers2);

            // Calculate likelihoods for less likely SNP configurations
            double transitiontv1 = log(prior * 0.001) + PolymorphismLikelihood(glf, n, position, transition, transvers1);
            double transitiontv2 = log(prior * 0.001) + PolymorphismLikelihood(glf, n, position, transition, transvers2);
            double transvers1tv2 = log(prior * 0.001) + PolymorphismLikelihood(glf, n, position, transvers1, transvers2);

            // Calculate the likelihood for unusual configurations where everyone is heterozygous ...
            double sink = n > 10 ? log(prior * 1e-8) + SinkLikelihood(glf, n, position) : -1e100;

            double lmax = max(
                              max(max(lRef, refTransition),max(refTransvers1, refTransvers2)),
                              max(max(transitiontv1, transitiontv2), max(transvers1tv2, sink)));

            double sum = exp(lRef - lmax) + exp(refTransition -lmax) +
                exp(refTransvers1 - lmax) + exp(refTransvers2 - lmax) +
                exp(transitiontv1 - lmax) + exp(transitiontv2 - lmax) +
                exp(transvers1tv2 - lmax) + exp(sink - lmax);

            if (sum == 0.0) continue;

            if (exp(lRef - lmax)/sum > 1.0 - prior)
            {
                if (filter.Length() == 0) homozygousReference++;

                if (positions.Entries())
                    ReportSNP(glf, n, position, refBase, refBase, refBase, filter, totalDepth, averageMapQuality, lRef / sum);

                continue;
            }

            double quality = 1.0 - exp(lRef - lmax) / sum;

            if (verbose)
            {
                DumpDetails(glf, n, position, refBase);

                printf("%.3f %.3f %.3f %.3f %.3f %.3f %.3f\n",
                       lRef, refTransition, refTransvers1, refTransvers2,
                       transitiontv1, transitiontv2, transvers1tv2);
            }

            if (exp(refTransition - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, refBase, transition,
                          filter, totalDepth, averageMapQuality, quality /* refTransition/sum */);
                if (filter.Length() == 0) transitions++;
            }
            else if (exp(refTransvers1 - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, refBase, transvers1,
                          filter, totalDepth, averageMapQuality, quality /* refTransvers1/sum */);
                if (filter.Length() == 0) transversions++;
            }
            else if (exp(refTransvers2 - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, refBase, transvers2,
                          filter, totalDepth, averageMapQuality, quality /* refTransvers2/sum */);
                if (filter.Length() == 0) transversions++;
            }
            else if (exp(transitiontv1 - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, transition, transvers1,
                          filter, totalDepth, averageMapQuality, quality /* transitiontv1/sum */);
                if (filter.Length() == 0) otherPolymorphisms++;
            }
            else if (exp(transitiontv2 - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, transition, transvers2,
                          filter, totalDepth, averageMapQuality, quality /* transitiontv2/sum */);
                if (filter.Length() == 0) otherPolymorphisms++;
            }
            else if (exp(transvers1tv2 - lmax)/sum > posterior)
            {
                ReportSNP(glf, n, position, refBase, transvers1, transvers2,
                          filter, totalDepth, averageMapQuality, quality /* transvers1tv2/sum */);
                if (filter.Length() == 0) otherPolymorphisms++;
            }
            else if (exp(sink - lmax)/sum > posterior)
                sinkFilter++;
        }

        int actualBases = glf[firstGlf].maxPosition - baseCounts[0];

        printf("          Missing bases = %9d (%.3f%%)\n",
               baseCounts[0], baseCounts[0] * 100. / glf[firstGlf].maxPosition);
        printf("        Reference bases = %9d (%.3f%%)\n",
               glf[firstGlf].maxPosition - baseCounts[0], (glf[firstGlf].maxPosition - baseCounts[0]) * 100. / glf[firstGlf].maxPosition);

        printf("              A/T bases = %9d (%.3f%%, %d A, %d T)\n",
               baseCounts[1] + baseCounts[4],
               (baseCounts[1] + baseCounts[4]) * 100. / actualBases,
               baseCounts[1], baseCounts[4]);

        printf("              G/C bases = %9d (%.3f%%, %d G, %d C)\n",
               baseCounts[3] + baseCounts[2],
               (baseCounts[3] + baseCounts[2]) * 100. / actualBases,
               baseCounts[3], baseCounts[2]);

        printf("           Depth Filter = %9d bases (%.3f%%)\n",
               depthFilter, depthFilter * 100. / actualBases);

        printf("     Map Quality Filter = %9d bases (%.3f%%)\n",
               mapQualityFilter, mapQualityFilter * 100. / actualBases);

        printf("        Non-Polymorphic = %9d bases (%.3f%%)\n",
               homozygousReference, homozygousReference * 100. / actualBases);

        printf("            Transitions = %9d bases (%.3f%%)\n",
               transitions, transitions * 100. / actualBases);

        printf("          Transversions = %9d bases (%.3f%%)\n",
               transversions, transversions * 100. / actualBases);

        printf("    Other Polymorphisms = %9d bases (%.3f%%)\n",
               otherPolymorphisms, otherPolymorphisms * 100. / actualBases);

        if (n > 10)
            printf("          Homology Sink = %9d bases (%.3f%%)\n",
                   sinkFilter, sinkFilter * 100. / actualBases);

        if (smartFilter)
            printf("           Smart Filter = %9d bases (%.3f%%)\n",
                   smartFilterHits, smartFilterHits * 100. / actualBases);

        int noCalls = actualBases - homozygousReference - transitions - transversions - otherPolymorphisms - sinkFilter;
        printf("                No call = %9d bases (%.3f%%)\n",
               noCalls, noCalls * 100. / actualBases);

        fflush(stdout);
    }

    if (baseCalls != NULL)
        fclose(baseCalls);

    time(&t);
    printf("\nAnalysis completed on %s\n", ctime(&t));
    fflush(stdout);
}
Example #15
	DllExport
	unsigned urho_stringhash_from_string (const char *p)
	{
		StringHash foo (p);
		return foo.Value ();
	}
Example #16
void JSEventHelper::HandleEvent(StringHash eventType, VariantMap& eventData)
{
    if (object_.Null())
        return;

    JSVM* vm = JSVM::GetJSVM(0);
    duk_context* ctx = vm->GetJSContext();

    duk_idx_t top = duk_get_top(ctx);

    js_push_class_object_instance(ctx, this);

    duk_get_prop_string(ctx, -1, "__eventHelperFunctions");

    assert(duk_is_object(ctx, -1));

    duk_get_prop_string(ctx, -1, eventType.ToString().CString());

    if (duk_is_function(ctx, -1))
    {
        // look in the variant map cache
        duk_push_global_stash(ctx);
        duk_get_prop_index(ctx, -1, JS_GLOBALSTASH_VARIANTMAP_CACHE);
        duk_push_pointer(ctx, (void*) &eventData);
        duk_get_prop(ctx, -2);

        if (!duk_is_object(ctx, -1))
        {
            // pop result
            duk_pop(ctx);

            // we need to push a new variant map and store to cache
            // the cache object will be cleared at the send end in  the
            // global listener above
            js_push_variantmap(ctx, eventData);
            duk_push_pointer(ctx, (void*) &eventData);
            duk_dup(ctx, -2);
            duk_put_prop(ctx, -4);

        }

        duk_remove(ctx, -2); // vmap cache
        duk_remove(ctx, -2); // global stash

        if (duk_pcall(ctx, 1) != 0)
        {
            vm->SendJSErrorEvent();
        }
        else
        {
            // For widget events, need to check return value
            // and set whether handled
            if (eventType == E_WIDGETEVENT)
            {
                if (duk_is_boolean(ctx, -1))
                {
                    if (duk_to_boolean(ctx, -1))
                    {
                        eventData[WidgetEvent::P_HANDLED] = true;
                    }
                }
            }
        }
    }

    duk_set_top(ctx, top);

}
Example #17
int main(int argc, char **argv) {
  
#ifdef _MEMMGR
  HeapManager heap;
#endif
  
  clock_t from, to;
  double diff;
  
  LongHash lhash;
  LongMap lmap;
  StringHash shash;
  StringMap smap;
  
  srand(clock());
  
  int nelems = 10;
  int naccess = 100;
  
  if (argc >= 2) {
    sscanf(argv[1], "%d", &nelems);
  }
  if (argc >= 3) {
    sscanf(argv[2], "%d", &naccess);
  }
  
  const char *strpool[] = {
    "hello",
    "_",
    "world",
    "012",
    "before",
    "after",
    "if",
    "then",
    "else",
    "for",
    "while",
    "do",
    "@",
    "~",
    "+",
    "-",
    "function",
    "class"
  };
  size_t nstrings = sizeof(strpool) / sizeof(const char*);
  
  // fill elements
  std::vector<std::string> skeys(nelems);
  std::vector<long> lkeys(nelems);
  std::cout << "Buid hash and map with " << nelems << " elements" << std::endl;
  for (int i=0; i<nelems; ++i) {
    long k = rand();
    lkeys[i] = k;
    int p = rand() % nstrings;
    int s = rand() % nstrings;
    skeys[i]  = strpool[p];
    skeys[i] += strpool[s];
  }
  
  from = clock();
  for (int i=0; i<nelems; ++i) {
    lhash.insert(lkeys[i], rand());
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "LongHash fill " << nelems << " values: " << diff << " (s)" << std::endl;
  from = clock();
  for (int i=0; i<nelems; ++i) {
    lmap[lkeys[i]] = rand();
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "LongMap fill " << nelems << " values: " << diff << " (s)" << std::endl;
  from = clock();
  for (int i=0; i<nelems; ++i) {
    shash.insert(skeys[i], rand());
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "StringHash fill " << nelems << " values: " << diff << " (s)" << std::endl;
  from = clock();
  for (int i=0; i<nelems; ++i) {
    smap[skeys[i]] = rand();
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "StringMap fill " << nelems << " values: " << diff << " (s)" << std::endl;
  
  
  // check data
  /*
  std::cout << "Check data" << std::endl;
  for (int k=0; k<nelems; ++k) {
    long v0 = lhash.getValue(lkeys[k]);
    long v1 = lmap[lkeys[k]];
    if (v0 != v1) {
      std::cout << "Data mismatch at long key \"" << lkeys[k] << "\"" << std::endl;
    }
    v0 = shash.getValue(skeys[k]);
    v1 = smap[skeys[k]];
    if (v0 != v1) {
      std::cout << "Data mismatch at std::string key \"" << skeys[k] << "\"" << std::endl;
    }
  }
  */
  
  // test long map
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rand() % nelems;
    long v = lhash.getValue(lkeys[k]);
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "LongHash " << naccess << " random access: " << diff << " (s)" << std::endl;
  
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rand() % nelems;
    long v = lmap[lkeys[k]];
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "LongMap " << naccess << " random access: " << diff << " (s)" << std::endl;
  
  // test string map
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rand() % nelems;
    long v = shash.getValue(skeys[k]);
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "StringHash " << naccess << " random access: " << diff << " (s)" << std::endl;
  
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rand() % nelems;
    long v = smap[skeys[k]];
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  std::cout << "StringMap " << naccess << " random access: " << diff << " (s)" << std::endl;
  
  // test random keys
  // should make it so we have around 50% chances to find the element
  // build a new key array
  size_t numhits = 0;
  double hitperc = 0.0;
  std::vector<long> rkeys(2*nelems);
  for (size_t i=0; i<lkeys.size(); ++i) {
    rkeys[i] = lkeys[i];
  }
  for (size_t i=nelems; i<size_t(2 * nelems); ++i) {
    rkeys[i] = rand();
  }
  
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rkeys[rand() % (2 * nelems)];
    long v;
    if (lhash.getValue(k, v)) {
    //if (lhash.hasKey(k)) {
      ++numhits;
      //long v = lhash.getValue(k);
    }
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  hitperc = double(numhits) / double(naccess);
  std::cout << "LongHash " << naccess << " random keys: "
            << diff << " (s) [" << (hitperc*100) << " % hit]" << std::endl;
  
  /*
  HashMap<long, long>::iterator hit;
  numhits = 0;
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rkeys[rand() % (2 * nelems)];
    hit = lhash.find(k);
    if (hit != lhash.end()) {
      ++numhits;
      long v = hit->second;
    }
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  hitperc = double(numhits) / double(naccess);
  std::cout << "LongHash " << naccess << " random keys: "
            << diff << " (s) [" << (hitperc*100) << " % hit, using iterators]" << std::endl;
  */
  
  std::map<long,long>::iterator mit;
  numhits = 0;
  from = clock();
  for (int i=0; i<naccess; ++i) {
    long k = rkeys[rand() % (2 * nelems)];
    mit = lmap.find(k);
    if (mit != lmap.end()) {
      ++numhits;
      long v = mit->second;
    }
  }
  to = clock();
  diff = double(to - from) / CLOCKS_PER_SEC;
  hitperc = double(numhits) / double(naccess);
  std::cout << "LongMap " << naccess << " random keys: "
            << diff << " (s) [" << (hitperc*100) << " % hit]" << std::endl;
  
  return 0;
}