// Dump every layer's weight matrix, either to files W1.txt, W2.txt, ...
// (when `file` is true and the file opens) or to the console as a fallback.
// Matrices are written transposed: one output line per input column (width),
// one value per neuron row (height).
void ANNetwork::saveWs(bool file) const
{
    FILE* fp;
    wchar_t fname[256] = L"";
    for (unsigned int i = 0; i < m_layers.size(); i++) {
        // Sized swprintf: the legacy two-argument overload is non-standard and
        // unbounded; %u matches the unsigned argument.
        swprintf(fname, sizeof(fname) / sizeof(fname[0]), L"W%u.txt", i + 1);
        if (file && (fp = _wfopen(fname, L"wt"))) {
            const vec2D& tW = *m_layers[i]->m_tW;
            for (unsigned int x = 0; x < tW.width(); x++) {
                for (unsigned int y = 0; y < tW.height(); y++)
                    fwprintf(fp, L" %g\t", tW(y, x));
                fwprintf(fp, L"\n");
            }
            fclose(fp);
        } else {
            // File mode disabled or open failed: print to the console instead.
            const vec2D& tW = *m_layers[i]->m_tW;
            wprintf(L"W%u %ux%u\n", i + 1, tW.width(), tW.height());
            for (unsigned int x = 0; x < tW.width(); x++) {
                for (unsigned int y = 0; y < tW.height(); y++)
                    wprintf(L" %8.4g", tW(y, x));
                wprintf(L"\n");
            }
        }
    }
}
// Build a wavelet-tree-based LF structure from a plain BWT sequence.
//
// BWT     : random-access iterator over the n BWT symbols
// rn      : number of symbols
// rtmpgen : generator for temporary file names used while the tree is streamed to disk
// rmaxval : optional lower bound on the maximum symbol value (widens the alphabet)
ImpWTLF ( iterator BWT, uint64_t const rn, ::libmaus2::util::TempFileNameGenerator & rtmpgen, uint64_t const rmaxval = 0)
: n(rn)
{
    if ( n )
    {
        // Scan the input to find the largest symbol; numbits gives bits per symbol.
        uint64_t maxval = rmaxval;
        for ( uint64_t i = 0; i < n; ++i )
            maxval = std::max ( maxval, static_cast<uint64_t>(BWT[i]) );
        uint64_t const b = ::libmaus2::math::numbits(maxval);

        // Stream all symbols through the external wavelet tree generator.
        ::libmaus2::wavelet::ImpExternalWaveletGenerator IEWG(b,rtmpgen);
        for ( uint64_t i = 0; i < n; ++i )
            IEWG.putSymbol(BWT[i]);

        // Serialise the finished tree to a temp file, load it back into W,
        // then delete the temp file.
        std::string const tmpfilename = rtmpgen.getFileName();
        IEWG.createFinalStream(tmpfilename);
        std::ifstream istr(tmpfilename.c_str(),std::ios::binary);
        wt_ptr_type tW(new wt_type(istr));
        W = UNIQUE_PTR_MOVE(tW);
        istr.close();
        remove ( tmpfilename.c_str() );

        // Build the C array for LF mapping: first store per-symbol occurrence
        // counts over the whole sequence (rank up to position n-1), then turn
        // them into cumulative counts via prefixSums.
        D = ::libmaus2::autoarray::AutoArray < uint64_t >((1ull<<W->getB())+1);
        for ( uint64_t i = 0; i < (1ull<<W->getB()); ++i )
            D [ i ] = W->rank(i,n-1);
        D.prefixSums();
    }
}
//////////////////////////////////////////init neuron weights/////////////////////////////////////////////////////// void ANNetwork::init_weights(unsigned int rseed) const { int w; srand(rseed); //input layer remains with w=1.0 for (unsigned int l = 0; l < m_layers.size(); l++) { vec2D& tW = *m_layers[l]->m_tW; for (unsigned int n = 0; n < tW.height(); n++) { for (unsigned int i = 0; i < tW.width(); i++) { w = 0xFFF & rand(); w -= 0x800; tW(n, i) = (float)w / 2048.0f; } } } }
// Build a wavelet-tree-based LF structure by decoding a run-length encoded BWT.
//
// decoder : RL decoder positioned at the start of the BWT; supplies n via getN()
// b       : bits per symbol for the wavelet tree (caller-supplied, not scanned)
// rtmpgen : generator for temporary file names used while the tree is streamed to disk
ImpWTLF (::libmaus2::huffman::RLDecoder & decoder, uint64_t const b, ::libmaus2::util::TempFileNameGenerator & rtmpgen)
: n(decoder.getN())
{
    if ( n )
    {
        // Stream each decoded symbol through the external wavelet tree generator.
        ::libmaus2::wavelet::ImpExternalWaveletGenerator IEWG(b,rtmpgen);
        for ( uint64_t i = 0; i < n; ++i )
            IEWG.putSymbol(decoder.decode());

        // Serialise the finished tree to a temp file, load it back into W,
        // then delete the temp file.
        std::string const tmpfilename = rtmpgen.getFileName();
        IEWG.createFinalStream(tmpfilename);
        std::ifstream istr(tmpfilename.c_str(),std::ios::binary);
        wt_ptr_type tW(new wt_type(istr));
        W = UNIQUE_PTR_MOVE(tW);
        istr.close();
        remove ( tmpfilename.c_str() );

        // Build the C array for LF mapping: per-symbol occurrence counts over
        // the whole sequence (rank up to n-1), then cumulative via prefixSums.
        D = ::libmaus2::autoarray::AutoArray < uint64_t >((1ull<<W->getB())+1);
        for ( uint64_t i = 0; i < (1ull<<W->getB()); ++i )
            D [ i ] = W->rank(i,n-1);
        D.prefixSums();
    }
}
//////////////////////////////////////////////////constructor/destructor//////////////////////////////////////////////////////// ANNetwork::ANNetwork(const wchar_t* fname) : m_status(-1), m_nrule(0.2f), m_alpha(0.7f), m_input_vec(0), m_add_vec(0), m_mul_vec(0) { int res = 0; unsigned int neurons_num = 0; float w = 0.0f; unsigned int layers_num = 0; FILE* fp = _wfopen(fname, L"rt"); if (fp != 0) { //get num of layers//////////////////////////////////// if ((res = fwscanf(fp, L"%d", &layers_num)) != 1) { fclose(fp); m_status = -1; return; } m_neurons.resize(layers_num); //get num of Neurons per layer///////////////////////// for (unsigned int l = 0; l < layers_num; l++) { if ((res = fwscanf(fp, L"%d", &neurons_num)) != 1) { fclose(fp); m_status = -2; return; } else m_neurons[l] = neurons_num; } for (unsigned int l = 0; l < layers_num - 1; l++) m_layers.push_back(new AnnLayer(m_neurons[l] + 1, m_neurons[l+1])); //activation function for hidden/output layers///////////////////// wchar_t function[256]; for (unsigned int l = 0; l < layers_num; l++) { if ((res = fwscanf(fp, L"%s", function)) != 1) { fclose(fp); m_status = -3; return; } else { if (std::wstring(function) == L"linear") m_actfuns.push_back(AnnLayer::LINEAR); else if (std::wstring(function) == L"sigmoid") m_actfuns.push_back(AnnLayer::SIGMOID); else if (std::wstring(function) == L"tanh") m_actfuns.push_back(AnnLayer::TANH); else m_actfuns.push_back(AnnLayer::LINEAR); } } //normalization params//////////////////////////////////////////// m_add_vec = new vec2D(1, m_neurons[0]); m_mul_vec = new vec2D(1, m_neurons[0]); m_input_vec = new vec2D(1, m_neurons[0]); for (unsigned int n = 0; n < m_neurons[0]; n++) { float add, mul; if ((res = fwscanf(fp, L"%g %g", &add, &mul)) != 2) { //blank network file? 
for (unsigned int m = 0; m < m_neurons[0]; m++) { (*m_add_vec)[m] = 0.0f; //default add = 0 (*m_mul_vec)[m] = 1.0f; //default mult = 1 } break; } (*m_add_vec)[n] = add; (*m_mul_vec)[n] = mul; } //load weights///////////////////////////////////////////////////// for (unsigned int l = 0; l < m_layers.size(); l++) { //load all weights except input layer vec2D& tW = *m_layers[l]->m_tW; for (unsigned int n = 0; n < tW.height(); n++) { //num of Neurons in layer for (unsigned int i = 0; i < tW.width(); i++) { //num of inputs in Neuron if ((res = fwscanf(fp, L"%g", &w)) != 1) { //blank network file? fclose(fp); m_status = 1; init_weights((unsigned int)time(0)); //init to random values return; } else { tW(n, i) = w; } } } } fclose(fp); m_status = 0; } else m_status = -1; }