Example #1
void
Master::distributeSynapses(const Mapper& mapper, const network::Generator& net)
{
	typedef std::vector<Synapse> svector;
	svector input; // dummy receive buffer: the master keeps none of the synapses itself
	std::vector<svector> output(m_world.size()); // one outgoing buffer per rank
	unsigned queued = 0;

	//! \todo pass this in
	const unsigned bufferSize = 2 << 11; // 4096 synapses per batch

	for(network::synapse_iterator s = net.synapse_begin(); s != net.synapse_end(); ++s, ++queued) {
		int sourceRank = mapper.rankOf(s->source);
		int targetRank = mapper.rankOf(s->target());
		output.at(sourceRank).push_back(*s);
		/* a synapse whose source and target live on different ranks is sent to both */
		if(sourceRank != targetRank) {
			output.at(targetRank).push_back(*s);
		}
		/* flush once a full batch has been queued */
		if(queued == bufferSize) {
			flushBuffer(SYNAPSE_VECTOR, input, output, m_world);
			queued = 0;
		}
	}
	/* flush any partially filled batch, then signal the end of synapse distribution */
	flushBuffer(SYNAPSE_VECTOR, input, output, m_world);
	int tag = SYNAPSES_END;
	broadcast(m_world, tag, MASTER);
}
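
flushBuffer is not shown in these examples. As a rough sketch only, assuming boost::mpi (which the broadcast(m_world, tag, MASTER) call suggests) and Boost.Serialization-enabled payload types, it could broadcast the payload tag, scatter one buffer to each rank, and then clear the outgoing buffers for reuse; the actual helper may well differ.

#include <boost/mpi.hpp>
#include <vector>

/* Illustrative sketch, not the library's implementation. MASTER is the
 * master's rank, as in the code above. */
template<typename T>
void
flushBuffer(int tag,
		std::vector<T>& input,                  /* the master's own slot in the scatter */
		std::vector< std::vector<T> >& output,  /* one outgoing buffer per rank */
		boost::mpi::communicator& world)
{
	boost::mpi::broadcast(world, tag, MASTER);          /* announce what is being sent */
	boost::mpi::scatter(world, output, input, MASTER);  /* rank r receives output[r] */
	for(unsigned r = 0; r < output.size(); ++r) {
		output.at(r).clear();                           /* ready for the next batch */
	}
}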
Example #2
ConnectivityMatrix::ConnectivityMatrix(
    const network::Generator& net,
    const ConfigurationImpl& conf,
    const mapper_t& mapper,
    const bool verifySources) :
    m_mapper(mapper),
    m_fractionalBits(conf.fractionalBits()),
    m_maxDelay(0),
    m_writeOnlySynapses(conf.writeOnlySynapses())
{
    /* only set up STDP processing if the configuration specifies an STDP function */
    if(conf.stdpFunction()) {
        m_stdp = StdpProcess(conf.stdpFunction().get(), m_fractionalBits);
    }

    /* accumulator for the reverse connectivity matrix, built up alongside the
     * forward matrix and indexed by (local) target neuron */
    construction::RCM<nidx_t, RSynapse, 32> m_racc(conf, net, RSynapse(~0U, 0));
    network::synapse_iterator i = net.synapse_begin();
    network::synapse_iterator i_end = net.synapse_end();

    for( ; i != i_end; ++i) {
        /* translate global network indices to local indices */
        nidx_t source = mapper.localIdx(i->source);
        nidx_t target = mapper.localIdx(i->target());
        sidx_t sidx = addSynapse(source, target, *i);
        m_racc.addSynapse(target, RSynapse(source, i->delay), *i, sidx);
    }

    //! \todo avoid two passes here
    finalizeForward(mapper, verifySources);
    /* convert the accumulated reverse matrix into its runtime form */
    m_rcm.reset(new runtime::RCM(m_racc));
}
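
The constructor caches conf.fractionalBits() and passes it to StdpProcess, which suggests that weights are handled in fixed-point form further down (presumably inside addSynapse and finalizeForward, which are not shown). Purely as an illustration of that representation, and not the library's own helper, a conversion might look like this, where fx_t and toFixedPoint are assumed names:

#include <cstdint>

/* Hypothetical illustration only: encode a floating-point weight using the
 * given number of fractional bits */
typedef std::int32_t fx_t;

inline fx_t
toFixedPoint(float weight, unsigned fractionalBits)
{
    /* scale by 2^fractionalBits; the cast truncates towards zero */
    return static_cast<fx_t>(weight * static_cast<float>(1u << fractionalBits));
}

With, say, 16 fractional bits a weight of 0.5f maps to 32768, and the low-order bits carry the sub-integer precision.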
Example #3
void
Master::distributeNeurons(const Mapper& mapper, const network::Generator& net)
{
	typedef std::vector< std::pair<nidx_t, Neuron<float> > > nvector;
	nvector input; // dummy receive buffer: the master keeps none of the neurons itself
	std::vector<nvector> output(m_world.size()); // one outgoing buffer per rank
	unsigned queued = 0;

	//! \todo pass this in
	const unsigned bufferSize = 2 << 11; // 4096 neurons per batch

	for(network::neuron_iterator n = net.neuron_begin(); n != net.neuron_end(); ++n, ++queued) {
		/* each neuron is sent only to the rank that will simulate it */
		output.at(mapper.rankOf(n->first)).push_back(*n);
		/* flush once a full batch has been queued */
		if(queued == bufferSize) {
			flushBuffer(NEURON_VECTOR, input, output, m_world);
			queued = 0;
		}
	}

	/* flush any partially filled batch, then signal the end of neuron distribution */
	flushBuffer(NEURON_VECTOR, input, output, m_world);
	int tag = NEURONS_END;
	broadcast(m_world, tag, MASTER);
}
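
distributeNeurons follows the same master-side pattern as distributeSynapses in Example #1: fill one buffer per rank, flush in fixed-size batches, and finally broadcast an end-of-stream tag. The matching worker-side loop does not appear in these examples; assuming the tag-then-scatter protocol sketched after Example #1, it might look roughly as follows, where Worker, receiveNeurons and the m_world member are assumed names:

#include <boost/mpi.hpp>
#include <utility>
#include <vector>

/* Rough sketch of a possible worker-side counterpart; not taken from the
 * library. nidx_t, Neuron, NEURONS_END and MASTER are as in the code above. */
void
Worker::receiveNeurons(std::vector< std::pair<nidx_t, Neuron<float> > >& neurons)
{
	int tag = 0;
	while(true) {
		boost::mpi::broadcast(m_world, tag, MASTER); /* what is the master sending next? */
		if(tag == NEURONS_END) {
			break;                                   /* distribution complete */
		}
		std::vector< std::pair<nidx_t, Neuron<float> > > batch;
		boost::mpi::scatter(m_world, batch, MASTER); /* receive this rank's share of the batch */
		neurons.insert(neurons.end(), batch.begin(), batch.end());
	}
}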