// Writes every finite-element value in FE (nFE entries, one per line) to the
// named file, overwriting existing content.
//
// @param filename  Path of the output file.
//
// Fixes vs. original: the stream is now flushed before the explicit
// file.close() (QTextStream buffers internally, so closing the device first
// could truncate output), and close() is no longer called on a file that was
// never opened. Open failure is still silent, preserving the caller-visible
// contract.
void MFE::printFE(QString &filename) {
    QFile file(filename);
    if (!file.open(QFile::WriteOnly | QFile::Text))
        return; // nothing opened, nothing to close
    QTextStream ou(&file);
    for (int i = 0; i < nFE; i++)
        ou << FE[i] << "\n";
    ou.flush(); // push QTextStream's buffer into the device before closing
    file.close();
}
// Persists the square to the file named by MagicKey: the edge size first,
// then every cell of `mas` in row order, all space-separated.
// (NOTE(review): despite the name, this *writes* state to disk.)
void MagicSquare::loadToFile() const {
    std::ofstream ou(MagicKey);
    ou << size << ' ';
    const std::size_t cells = size * size; // hoisted loop bound
    for (std::size_t idx = 0; idx != cells; ++idx)
        ou << mas[idx] << ' ';
    ou.close();
}
void KoskoStudy::outp(const QVector<int> &nowImg) { std::ofstream ou ("output.txt", std::ios::app); //ou.seekp(std::ios_base::end); char ch; for (int i = 0; i < mHeight; i++) { for (int j = 0; j < mWidth; j++) { if (nowImg[i * mWidth + j] == 1) ch = 7; else ch = ' '; ou << ch; } ou << std::endl; } ou << std::endl; ou.close(); }
// Dumps the network output matrix to Output.txt: one row per line, values
// separated by single spaces. Logs an error to stdout and returns early if
// the file cannot be created.
//
// @param output  Matrix of per-case output activations (read-only here).
void BPNN_Recommendation::printNNOutput(vector<vector<double>>& output) {
    ofstream ou("Output.txt");
    if (ou.fail()) {
        cout << "ERROR: fail to open Output.txt..." << endl;
        return;
    }
    // Range-for removes the original's signed/unsigned comparisons
    // (`int k < output.size()` compared int against size_t).
    for (const vector<double>& row : output) {
        for (double value : row)
            ou << value << " ";
        ou << "\n";
    }
    ou.close();
}
// Writes results for a variance component with variable selection: delegates
// the variance output to the base class, then reports the inclusion
// probability (FC_delta) and its parameter omega (FC_omega) to the console,
// and dumps the posterior mean of delta to "<pathresults minus .res>_delta.res".
//
// @param out_stata    Stata script stream, forwarded to the sub-writers.
// @param out_R        R script stream, forwarded to the sub-writers.
// @param pathresults  Base result path; companion file names are derived by
//                     stripping its last 4 characters (the ".res" extension).
void FC_nonp_variance_varselection::outresults(ofstream & out_stata,ofstream & out_R, const ST::string & pathresults) {
  // NOTE(review): `isvalidfile() != 1` is used throughout this file as
  // "the path is usable" -- confirm against ST::string's definition.
  if (pathresults.isvalidfile() != 1) {
    // Derive companion result files from the base path.
    ST::string pathresults_delta = pathresults.substr(0,pathresults.length()-4) + "_delta.res";
    ST::string pathresults_omega = pathresults.substr(0,pathresults.length()-4) + "_omega.res";
    FC_nonp_variance::outresults(out_stata,out_R,pathresults); // base variance report
    FC_delta.outresults(out_stata,out_R,"");                   // console only: empty path suppresses the file
    FC_omega.outresults(out_stata,out_R,pathresults_omega);
    // Console summary of the inclusion probability (posterior mean of delta).
    optionsp->out(" Inclusion probability: " + ST::doubletostring(FC_delta.betamean(0,0),6) + "\n");
    optionsp->out("\n");
    optionsp->out(" Results for the inclusion probabilities are also stored in file\n");
    optionsp->out(" " + pathresults_delta + "\n");
    optionsp->out("\n");
    optionsp->out("\n");
    optionsp->out(" Inclusion probability parameter omega:\n");
    optionsp->out("\n");
    FC_omega.outresults_singleparam(out_stata,out_R,"");
    optionsp->out(" Results for the inclusion probability parameter omega are also stored in file\n");
    optionsp->out(" " + pathresults_omega + "\n");
    optionsp->out("\n");
    optionsp->out("\n");
    // Write the delta posterior mean to its own result file.
    ofstream ou(pathresults_delta.strtochar());
    ou << "pmean" << endl;
    ou << FC_delta.betamean(0,0) << endl;
  }
  // FC_nonp_variance::outresults(out_stata,out_R,pathresults);
}
bool SendMsg(char * szHost, int port, const char* szMsg, ostringstream& out) { bool bRet = false; SOCKET ServerSocket = INVALID_SOCKET; WSADATA wsaData; hostent *pHost; SOCKADDR_IN stAddr; do { if (szHost == nullptr) break; if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)break; ServerSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); if (ServerSocket < 0) break; pHost = gethostbyname(szHost); if (pHost == nullptr) break; stAddr.sin_family = AF_INET; stAddr.sin_addr.S_un.S_addr = *(ULONG*)pHost->h_addr; stAddr.sin_port = htons(port); if (connect(ServerSocket, (SOCKADDR*)&stAddr, sizeof(stAddr)) != 0)break; send(ServerSocket, szMsg, strlen(szMsg), 0); char Buffer[1024] = { 0 }; int len; string tmp; while (len = recv(ServerSocket, Buffer, 1024, 0)>0) { tmp += Buffer; } int b = tmp.find("{\"errNo\":"); int e = tmp.find("}}", b) + 2; if (b == string::npos || e == string::npos) { out << tmp << endl; break; } string ou(tmp.c_str() + b, tmp.c_str() + e); out << ou << endl; bRet = true; } while (false); WSACleanup(); if (ServerSocket != INVALID_SOCKET) closesocket(ServerSocket); return bRet; }
void Gate::calculate_output() { auto t = get_type(); vector<int> ou (8); for (int i = 0; i < 8; ++i) { switch (t) { case 'a': ou[i] = ((total_gates[get_first_input()].get_output()[i]) && (total_gates[get_second_input()].get_output()[i])); break; case 'o': ou[i] = (total_gates[get_first_input()].get_output()[i] || total_gates[get_second_input()].get_output()[i]); break; case 'n': ou[i] = !(total_gates[get_first_input()].get_output()[i]); break; } } set_output(ou); }
// Writes results for a variance component: a console summary (mean, std.
// dev. and the configured quantiles when posterior samples exist, or the
// smoothing parameter otherwise), plus a one-row result file when
// pathresults names a usable file.
//
// @param out_stata    Stata script stream, forwarded to FC::outresults.
// @param out_R        R script stream; receives the sample-file path.
// @param pathresults  Result file path; the last 4 characters (".res") are
//                     swapped for "_sample.raw" to name the sample file.
void FC_nonp_variance::outresults(ofstream & out_stata,ofstream & out_R, const ST::string & pathresults) {
  FC::outresults(out_stata,out_R,"");
  // Quantile levels as display strings (l*/u*) ...
  ST::string l1 = ST::doubletostring(optionsp->lower1,4);
  ST::string l2 = ST::doubletostring(optionsp->lower2,4);
  ST::string u1 = ST::doubletostring(optionsp->upper1,4);
  ST::string u2 = ST::doubletostring(optionsp->upper2,4);
  // ... and as column-name-safe strings (n*) with '.' replaced by 'p'.
  ST::string nl1 = ST::doubletostring(optionsp->lower1,4);
  ST::string nl2 = ST::doubletostring(optionsp->lower2,4);
  ST::string nu1 = ST::doubletostring(optionsp->upper1,4);
  ST::string nu2 = ST::doubletostring(optionsp->upper2,4);
  nl1 = nl1.replaceallsigns('.','p');
  nl2 = nl2.replaceallsigns('.','p');
  nu1 = nu1.replaceallsigns('.','p');
  nu2 = nu2.replaceallsigns('.','p');
  ST::string vstr;
  if (optionsp->samplesize > 1) {
    // Posterior samples available: print mean, std. dev. and quantiles,
    // padding each label to column 20 with spaces.
    vstr = " Mean: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betamean(0,0),6) + "\n");
    vstr = " Std. dev.: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(sqrt(betavar(0,0)),6) + "\n");
    vstr = " " + l1 + "% Quantile: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betaqu_l1_lower(0,0),6) + "\n");
    vstr = " " + l2 + "% Quantile: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betaqu_l2_lower(0,0),6) + "\n");
    vstr = " 50% Quantile: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betaqu50(0,0),6) + "\n");
    vstr = " " + u1 + "% Quantile: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betaqu_l2_upper(0,0),6) + "\n");
    vstr = " " + u2 + "% Quantile: ";
    optionsp->out(vstr + ST::string(' ',20-vstr.length()) + ST::doubletostring(betaqu_l1_upper(0,0),6) + "\n");
    optionsp->out("\n");
  } else {
    // No samples: betamean(0,1) holds the smoothing parameter here.
    optionsp->out(" Smoothing parameter: " + ST::doubletostring(betamean(0,1),6) + "\n");
    optionsp->out("\n");
  }
  // out_R << "term=" << title << ";" << endl;
  // NOTE(review): `isvalidfile() != 1` is used throughout as "path is
  // usable" -- confirm against ST::string.
  if (pathresults.isvalidfile() != 1) {
    // (Original string literal was split by line-wrapping; rejoined here.)
    optionsp->out(" Results for the variance component are also stored in file\n");
    optionsp->out(" " + pathresults + "\n");
    optionsp->out("\n");
    ST::string paths = pathresults.substr(0,pathresults.length()-4) + "_sample.raw";
    out_R << "pathvarsample=" << paths << endl;
    // out_R << "filetype=param; path=" << pathresults << ";" << endl;
    // Result file: header row, then one data row.
    ofstream ou(pathresults.strtochar());
    if (optionsp->samplesize > 1) {
      ou << "pmean pstd pqu" << nl1 << " pqu" << nl2 << " pmed pqu" << nu1 << " pqu" << nu2 << endl;
    } else {
      ou << "pmean" << endl;
    }
    ou << betamean(0,0) << " ";
    if (optionsp->samplesize > 1) {
      // Guard against tiny negative/zero variance from numerical noise.
      if (betavar(0,0) < 0.0000000000001) ou << 0 << " ";
      else ou << sqrt(betavar(0,0)) << " ";
      ou << betaqu_l1_lower(0,0) << " ";
      ou << betaqu_l2_lower(0,0) << " ";
      ou << betaqu50(0,0) << " ";
      ou << betaqu_l2_upper(0,0) << " ";
      ou << betaqu_l1_upper(0,0) << " " << endl;
    }
    optionsp->out("\n");
  }
}
// Program entry point: seeds the scrambler from the current clock tick, then
// runs ou() on gamdt. (mtscramble/ou/gamdt are project-defined elsewhere;
// the names suggest a Mersenne-Twister seeding step and an output routine --
// TODO confirm against their definitions.)
int main(void) { mtscramble( clock() ); ou( gamdt ); return 0; }
/**
 * Simplifies a formula.
 * Rewrites the equivalences and implications of the input formula using only
 * the AND (et), OR (ou) and NOT (non) operators, and pushes every negation
 * down to the literal level (negation normal form).
 * @param form The formula to simplify.
 * @param negation Whether the parent element was a NOT. Defaults to false.
 * @return The simplified formula.
 * @see formule
 */
formule* simplifie_formule(const formule *form, const bool negation) {
    formule *form_out = NULL;
    switch(form->op) {
        case o_variable: {
            /* Leaf: emit the literal, negated if a NOT reached this level. */
            if(negation) {
                form_out = non(var(*(form->nom)));
            } else {
                form_out = var(*(form->nom));
            }
            break;
        }
        case o_equivaut: {
            /* a <-> b  becomes  (!a | b) & (!b | a);
               !(a <-> b) (XOR)  becomes  (!a & b) | (!b & a). */
            if(negation) {
                form_out = ou( et( simplifie_formule(form->arg1, true), simplifie_formule(form->arg2)), et( simplifie_formule(form->arg2, true), simplifie_formule(form->arg1)) );
            } else {
                form_out = et( ou( simplifie_formule(form->arg1, true), simplifie_formule(form->arg2)), ou( simplifie_formule(form->arg2, true), simplifie_formule(form->arg1)) );
            }
            break;
        }
        case o_implique: {
            /* a -> b  becomes  !a | b;  !(a -> b)  becomes  a & !b. */
            if(negation) {
                form_out = et(simplifie_formule(form->arg1), simplifie_formule(form->arg2, true));
            } else {
                form_out = ou(simplifie_formule(form->arg1, true), simplifie_formule(form->arg2));
            }
            break;
        }
        case o_non: {
            /* Double negation folds into the flag instead of nesting NOTs. */
            form_out = simplifie_formule(form->arg, !negation);
            break;
        }
        case o_ou: {
            /* De Morgan: !(a | b) = !a & !b. */
            if(negation) {
                form_out = et(simplifie_formule(form->arg1, true), simplifie_formule(form->arg2, true));
            } else {
                form_out = ou(simplifie_formule(form->arg1), simplifie_formule(form->arg2));
            }
            break;
        }
        case o_et: {
            /* De Morgan: !(a & b) = !a | !b. */
            if(negation) {
                form_out = ou(simplifie_formule(form->arg1, true), simplifie_formule(form->arg2, true));
            } else {
                form_out = et(simplifie_formule(form->arg1), simplifie_formule(form->arg2));
            }
            break;
        }
    }
    return form_out;
}
// Trains the back-propagation recommendation network and saves the model.
// Stages: (1) blend each alternative's SOCIAL/PERSONAL/ITEM weights into a
// FINAL weight using the m_x/m_y mix read from xy.txt; (2) build per-case
// input vectors (normalized FINAL weights) and target vectors (1.0 where the
// alternative id appears in m_answer, else 0.0), zero-padded to NodeNum;
// (3) run BPLearning for up to TIME epochs or until the error drops below
// ACCURACY; (4) dump the trained network's state to files under NetModel//.
void BPNN_Recommendation::training() {
    ReadXYintoFile("xy.txt"); // presumably loads the m_x / m_y mixing coefficients -- TODO confirm
    // ---- Stage 1: blend the three weight channels into FINAL per user ----
    for(int i = 0; i < m_userList.size(); ++i){
        for(int k = 0; k < m_userList[i]->alternativeList.size(); k++) {
            // Convex-style blend, each channel normalized by its per-user max.
            m_userList[i]->alternativeList[k].weight[FINAL] = m_x * m_userList[i]->alternativeList[k].weight[SOCIAL] / m_userList[i]->maxWeght[SOCIAL] + m_y * m_userList[i]->alternativeList[k].weight[PERSONAL] / m_userList[i]->maxWeght[PERSONAL] + (1 - m_x - m_y) * m_userList[i]->alternativeList[k].weight[ITEM] / m_userList[i]->maxWeght[ITEM];
            // Track the running maximum of FINAL for later normalization.
            if(m_userList[i]->maxWeght[FINAL] < m_userList[i]->alternativeList[k].weight[FINAL]) m_userList[i]->maxWeght[FINAL] = m_userList[i]->alternativeList[k].weight[FINAL];
            // Guard against dividing by zero in Stage 2.
            m_userList[i]->maxWeght[FINAL] = (m_userList[i]->maxWeght[FINAL] == 0? 1.0 : m_userList[i]->maxWeght[FINAL]);
        }
    }
    vector <vector<double>> input, output;
    vector <double> curI, curO;
    // ---- Stage 2: build training pairs for the first CASENUM users ----
    for(int i = 1; i <= CASENUM; ++i){
        curI.clear(); curO.clear();
        for(int k = 0; k < m_userList[i - 1]->alternativeList.size(); k++) {
            // Input: FINAL weight normalized into [0, 1] by the user's max.
            m_userList[i - 1]->alternativeList[k].weight[FINAL] /= m_userList[i - 1]->maxWeght[FINAL];
            curI.push_back(m_userList[i - 1]->alternativeList[k].weight[FINAL]);
            // Target: 1.0 iff this alternative's id is among the user's answers.
            bool isNum1 = false;
            for(int m = 0; m < m_answer[i - 1].size(); m++) {
                if(m_userList[i - 1]->alternativeList[k].id == m_answer[i - 1][m]) {
                    curO.push_back(1.0);
                    isNum1 = true;
                    break;
                }
            }
            if(!isNum1) curO.push_back(0.0);
        }
        // Zero-pad both vectors to the network's fixed NodeNum width.
        while(curI.size() < NodeNum) curI.push_back(0.0);
        input.push_back(curI);
        while(curO.size() < NodeNum) curO.push_back(0.0);
        output.push_back(curO);
    }
    //printNNInput(input);
    //printNNOutput(output);
    //int neurons[3] = {8, 8, NodeNum};
    int neurons[3] = {20, 20, NodeNum}; // two 20-neuron hidden layers + output layer
    BPLearning NN( Network(NodeNum,3,new SigmodFunction(2),neurons), 1,0.0 );
    //NN.run(input[0],output[0]);
    // ---- Stage 3: train until TIME epochs or error < ACCURACY ----
    for(int i = 0;i < TIME; ++i) {
        double error = NN.runAll(input,output);
        printf("case %d's error is %lf\n", i, error);
        if(error < ACCURACY) break;
    }
    /********************save***********************/
    // ---- Stage 4: persist the trained model, one aspect per file. Each
    // section bails out of training() entirely if its file cannot be opened.
    ofstream ou("NetModel//netWorkOutput.txt"); // network output vector
    if(ou.fail()) { cout << "ERROR: fail to open file netWorkOutput.txt..." << endl; return; }
    for(int i = 0; i < NN.network.output.size(); i++) ou << NN.network.output[i] << endl;
    ou.close();
    ou.open("NetModel//laySum.txt"); // per-layer weighted sums
    if(ou.fail()) { cout << "ERROR: fail to open file laySum.txt..." << endl; return; }
    for(int i = 0; i < NN.network.layer.size(); i++)
        for(int k = 0; k < NN.network.layer[i].sum.size(); k++) {
            ou << NN.network.layer[i].sum[k] << endl;
        }
    ou.close();
    ou.open("NetModel//layOutput.txt"); // per-layer outputs
    if(ou.fail()) { cout << "ERROR: fail to open file layOutput.txt..." << endl; return; }
    for(int i = 0; i < NN.network.layer.size(); i++)
        for(int k = 0; k < NN.network.layer[i].output.size(); k++)
            ou << NN.network.layer[i].output[k] << endl;
    ou.close();
    ou.open("NetModel//layNeuronOutput.txt"); // per-neuron outputs
    if(ou.fail()) { cout << "ERROR: fail to open file layNeuronOutput.txt..." << endl; return; }
    for(int i = 0; i < NN.network.layer.size(); i++)
        for(int k = 0; k < NN.network.layer[i].neuron.size(); k++)
            ou << NN.network.layer[i].neuron[k].output << endl;
    ou.close();
    ou.open("NetModel//layNeuronThreshold.txt"); // per-neuron thresholds
    if(ou.fail()) { cout << "ERROR: fail to open file layNeuronThreshold.txt..." << endl; return; }
    for(int i = 0; i < NN.network.layer.size(); i++)
        for(int k = 0; k < NN.network.layer[i].neuron.size(); k++)
            ou << NN.network.layer[i].neuron[k].threshold << endl;
    ou.close();
    ou.open("NetModel//layNeuronWeight.txt"); // per-neuron weight vectors
    if(ou.fail()) { cout << "ERROR: fail to open file layNeuronWeight.txt..." << endl; return; }
    for(int i = 0; i < NN.network.layer.size(); i++)
        for(int k = 0; k < NN.network.layer[i].neuron.size(); k++)
            for(int j = 0; j < NN.network.layer[i].neuron[k].weight.size(); j++)
                ou << NN.network.layer[i].neuron[k].weight[j] << endl;
    ou.close();
    // (Original string literal was split by line-wrapping; rejoined here.)
    cout << "NN model completed! " << endl;
    /*
    //test
    vector <double> data;
    for(int i = 0; i < input[0].size(); i++) data.push_back(input[0][i]);
    vector <double> res = NN.calc(data);
    for(int i=0;i<(int)res.size();++i) cout<< res[i] << " ";
    cout << endl;
    */
}