/**********************************************************************************************
 *
 * ReadCharacter
 *
 * Character = #x9 | #xA | #xD | [#x20-#x7F] ;
 *
 * Advances `position` through `buffer`, refilling from `input` when the buffer is
 * exhausted, until a character valid under the production above is found. On success
 * stores it in `current` and returns true; returns false on clean end of input.
 * Any invalid byte encountered along the way makes the NEXT valid character (or the
 * next refill) throw InvalidInputException.
 *
 *********************************************************************************************/
bool XMLScanner::ReadCharacter ()
{
    bool invalid = false;       // set once an out-of-range byte has been seen
    current = '\0';
    while (true) {
        // Pre-increment first: each loop iteration consumes one byte. When the new
        // position runs past the last valid byte, refill the buffer.
        // NOTE(review): the bound uses the member `length`, but after a refill only
        // `received` bytes are valid and `length` is not updated here — TODO confirm
        // `length` is kept in sync elsewhere (otherwise a short read over-scans).
        if (&(buffer [length - 1]) < ++position) {
            if (invalid) {
                // An invalid byte was pending when the buffer ran out: report it now.
                throw InvalidInputException ();
            } else {
                long received = input->Read (buffer, STREAM_BUFFER_SIZE);
                if (received == 0) {
                    return false;   // clean end of stream, no character produced
                }
                // NOTE(review): terminator is written at buffer[length], not
                // buffer[received] — presumably length == STREAM_BUFFER_SIZE; verify.
                buffer [length] = '\0';
                position = buffer;  // restart scanning at the head of the buffer
            }
        }
        // Accept TAB, LF, CR, or printable ASCII.
        // NOTE(review): `< '\x7f'` excludes 0x7F, while the header grammar says
        // [#x20-#x7F] inclusive — confirm which is intended.
        if ((*position == '\x9' || *position == '\xA' || *position == '\xD') || ('\x20' <= *position && *position < '\x7f')) {
            if (! invalid) {
                current = *position;
                return true;
            } else {
                // A valid character terminated a run of invalid bytes: fail here.
                throw InvalidInputException ();
            }
        } else {
            // Out-of-range byte: remember it and keep scanning (error is deferred).
            invalid = true;
            continue;
        }
    }
    return false;   // unreachable: the while(true) loop only exits via return/throw
}
void Applicant::verifyApplicants(const std::vector<Applicant> &applicants) { if (applicants.size() == 0) { throw InvalidInputException("No applicants"); } const index_t count = applicants.at(0).values.size(); for (auto it = applicants.begin() + 1; it != applicants.end(); ++it) { if (it->values.size() != count) { throw InvalidInputException("Incorrect values count"); } } }
// Builds a network from a per-layer neuron-count list and hyperparameters,
// then randomly initializes the weight matrices.
// Throws InvalidInputException for fewer than two layers or any non-positive
// neuron count.
NeuralNetwork::NeuralNetwork(std::initializer_list<int> numNeuronsOnLayer, float regularizationFactor, float learningRate)
    : m_numNeuronsOnLayer(numNeuronsOnLayer),
      m_numLayers(m_numNeuronsOnLayer.size()),
      m_regFactor(regularizationFactor),
      m_learningRate(learningRate),
      m_featureNormalization(false) {
    // At minimum an input layer and an output layer are required.
    if (m_numLayers < 2) {
        throw InvalidInputException("A neural network cannot have less than 2 layers.");
    }
    // Every layer must hold at least one neuron.
    for (const int neuronCount : numNeuronsOnLayer) {
        if (neuronCount <= 0) {
            throw InvalidInputException("You can't have less than 1 neuron per layer.");
        }
    }
    randomlyInitWeights();
}
// Gradient-descent training loop.
//
// input  - sample matrix; must have exactly as many columns as the input layer
//          has neurons (assumed layout: rows = samples, columns = features —
//          TODO confirm against computeCost/backprop). Mutated in place when
//          feature normalization is enabled.
// output - expected values; columns must equal the output-layer neuron count.
// numIterations           - total backprop iterations to run.
// iterationsBetweenReport - progress-print period; 0 disables reporting.
//
// Throws InvalidInputException when the matrix widths don't match the topology.
// Stops early (with a console message) if the cost ever increases.
void NeuralNetwork::trainOn(arma::mat& input, const arma::mat& output, int numIterations, int iterationsBetweenReport) {
    if (input.n_cols != static_cast<unsigned int>(m_numNeuronsOnLayer[0]) ||
        output.n_cols != static_cast<unsigned int>(m_numNeuronsOnLayer[m_numLayers - 1]))
        // BUG FIX: the original adjacent literals concatenated to "...with thenumber..."
        // (missing space); a trailing space restores the intended message.
        throw InvalidInputException("File's input / output length doesn't match with the "
                                    "number of neurons on input / output layer.");

    if (m_featureNormalization)
        normalizeFeatures(input);   // in-place normalization of the caller's matrix

    double prevCost = computeCost(input, output, m_theta);
    double crtCost = prevCost;
    for (int iteration = 0; iteration < numIterations; ++iteration) {
        // A period of 0 means "never report" (and avoids a modulo-by-zero below).
        if (iterationsBetweenReport)
            if (iteration % iterationsBetweenReport == 0 || iteration + 1 == numIterations)
                std::cout << "Iteration: " << iteration << " | Cost: " << crtCost << std::endl;

        // Divergence guard: a rising cost means the learning rate is too large.
        if (crtCost > prevCost) {
            std::cout << "The cost is increasing. Choose a smaller learning rate." << std::endl;
            return;
        }

        backprop(input, output);
        prevCost = crtCost;
        crtCost = computeCost(input, output, m_theta);
    }
}
// Consumes the current character as a single-character operator token.
// Throws InvalidInputException when the character is not a registered operator.
Token Lexer::parseOperator() {
    // Wrap the current character in a one-character string for the lookup.
    const std::string operatorText(1, next_);
    if (validOperators.count(operatorText) == 0) {
        throw InvalidInputException("Invalid operator type: " + operatorText);
    }
    advance();
    skipSpaces();
    return Token(TokenType::OPERATOR, operatorText);
}
// Loads weight matrices from a file into m_theta and validates them against the
// network topology (one matrix per layer transition, correctly sized).
// Throws InvalidInputException when the layer count or any matrix shape mismatches.
void NeuralNetwork::loadWeights(const std::string& fileName) {
    NnIO::loadWeights(fileName, m_theta);

    // There must be exactly one weight matrix per pair of adjacent layers.
    if (m_theta.size() != m_numLayers - 1) {
        throw InvalidInputException("The weights you are trying to load don't correspond with "
                                    "the number of layers in the network.\n");
    }

    for (unsigned int layer = 0; layer < m_numLayers - 1; ++layer) {
        const auto expectedRows = static_cast<unsigned int>(m_numNeuronsOnLayer[layer + 1]);
        const auto expectedCols = static_cast<unsigned int>(m_numNeuronsOnLayer[layer]);
        // n_cols carries one extra column (presumably the bias term — confirm
        // against how m_theta is applied in the forward pass).
        const bool shapeOk = m_theta[layer].n_rows == expectedRows &&
                             m_theta[layer].n_cols - 1 == expectedCols;
        if (!shapeOk) {
            std::stringstream message;
            message << "There are too many / few weights on layer " << (layer + 1) << "\n";
            throw InvalidInputException(message.str());
        }
    }
}
// Consumes a line break: either a bare '\n' or the Windows pair "\r\n".
// Throws InvalidInputException for a '\r' that is not followed by '\n'.
Token Lexer::parseNewLine() {
    const bool sawCarriageReturn = (next_ == '\r');
    if (sawCarriageReturn) {
        advance();
        if (next_ != '\n') {
            throw InvalidInputException("Expected a \n after a \r");
        }
    }
    advance();      // consume the '\n'
    skipSpaces();
    return Token(TokenType::END_OF_LINE, "");
}
// Parses one raw record line into this Record. `valid` remains false unless
// every check passes; malformed data is reported on cerr and throws
// InvalidInputException, while empty input and non-movable types simply leave
// the record invalid.
void Record::init(string data){
    valid = false;

    // No data at all: silently invalid, nothing to report.
    if(data.empty()){
        return;
    }

    // A well-formed record contains exactly NUM_REQUIRED_DELIMS separators.
    if(count(data.begin(),data.end(),DELIM) != NUM_REQUIRED_DELIMS){
        cerr<<"Found Malformed Record:"<<data<<endl;
        throw InvalidInputException();
    }

    // Split the line into vals, then classify via the MISC attribute.
    fillAll(data);
    type = getType(vals[MISC_ATTR]);

    // Unrecognized MISC attribute is a hard error.
    if(type==None){
        cerr<<"Found Malformed MISC:"<<vals[MISC_ATTR]<<endl
            <<"\tPath:"<<vals[PATH]<<endl;
        throw InvalidInputException();
    }

    // Devices, sockets, and similar are recognized but must not be moved:
    // not exception-worthy, just not valid.
    if(type==Other){
        cerr<<"Found Device, Socket, or Other Type. Will Not Move.\t"
            <<vals[PATH]<<endl;
        return;
    }

    valid = true;
}
void Applicant::loadApplicants(const std::string &fileName, std::vector<Applicant> &applicants) { /* 1. řádek label - pamatovat 2. řádek - zadání počtů = integery další nabídky */ // cena vyšetření na přístoj = (počet vyšetření) * ((spec. cena) + (nespecif. cena)) // celková cena = součet možných přístojů + minim. z cen vyštření/přístoj pro každé vyšetření std::ifstream f; if (!f) { f.close(); throw InvalidInputException("Nepodarilo se otevrit vstupni soubor"); } f.open(fileName, std::ifstream::in); std::string line; getline(f, line); getline(f, line); while (getline(f, line)) { auto chunks = explode(line, ';'); std::vector<double> vals; for (auto it = chunks.begin() + 4; it != chunks.end(); ++it) { if (it->size() == 0) { vals.push_back(-1); } else { vals.push_back(string2double(*it)); } } applicants.push_back(Applicant(chunks.at(0), chunks.at(1), string2double(chunks.at(2)), string2double(chunks.at(3)), vals)); } f.close(); }
/// Aborts with an InvalidInputException whose message is `msg` prefixed by a
/// standard "Invalid input error: " marker. Never returns.
[[noreturn]] inline void FailInput(const std::string& msg) {
    const std::string fullMessage = "Invalid input error: " + msg;
    throw InvalidInputException(fullMessage);
}