int main(int argc, char* argv[]) {
  LogisticRegression lr;
  LogisticRegressionScoreTest lrst;
  LogisticRegressionPermutationTest lrpt;

  Vector y;
  Matrix x;
  Matrix cov;
  LoadVector("input.y", y);
  LoadMatrix("input.x", x);
  LoadMatrix("input.cov", cov);

  Matrix xall;
  xall = x;
  xall.StackRight(cov);  // 1 + x + cov

  if (lr.FitLogisticModel(xall, y, 100) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  Vector& beta = lr.GetCovEst();
  Matrix& v = lr.GetCovB();
  Vector& pWald = lr.GetAsyPvalue();

  fprintf(stdout, "wald_beta\t");
  Print(beta);
  fputc('\n', stdout);

  fprintf(stdout, "wald_vcov\t");
  Print(v);
  fputc('\n', stdout);

  fprintf(stdout, "wald_p\t");
  Print(pWald[1]);
  fputc('\n', stdout);

  if (lrpt.FitLogisticModelCov(xall, 1, y, 2000, -1) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  fprintf(stdout, "permutation_p\t");
  double permu_p = lrpt.getPvalue();
  Print(permu_p);
  fputc('\n', stdout);

  if (lrst.FitLogisticModel(xall, y, 1, 100) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  fprintf(stdout, "score_p\t");
  double score_p = lrst.GetPvalue();
  Print(score_p);
  fputc('\n', stdout);

  return 0;
}
int main(int argc, char *argv[]) {
  Vector Y;
  Matrix X;
  Matrix Cov;
  LoadVector("input.linear.mvt.y", Y);
  LoadMatrix("input.linear.mvt.x", X);
  LoadMatrix("input.linear.mvt.cov", Cov);
  {
    Matrix x;
    Vector y;
    x = X;
    y = Y;

    LinearRegressionVT linear;
    if (!linear.FitNullModel(Cov, Y)) {
      fprintf(stderr, "Fitting failed - step 1!\n");
      return -1;
    }
    if (!linear.TestCovariate(Cov, Y, X)) {
      fprintf(stderr, "Fitting failed - step 2!\n");
      return -1;
    }
    dumpToFile(linear.GetU(), stdout);
    dumpToFile(linear.GetV(), stdout);
    dumpToFile(linear.GetT(), stdout);
    dumpToFile(linear.GetCov(), stdout);
    fprintf(stdout, "%g\t0\n", linear.GetPvalue());
  }
  return 0;
}
/*
 * Draws the plant.
 */
void drawPlant(int i, float scale, float radscale, int string)
{
    char *ptr = lsystem[string];
    char ch[] = { '\0', '\0' };
    GLfloat c1[] = { 0.6549f, 0.4901f, 0.2392f };   /* light brown */
    GLfloat c2[] = { 0.3607f, 0.2510f, 0.2000f };   /* dark brown */
    GLfloat c3[] = { 0.1373f, 0.5568f, 0.1373f };   /* forest green */

    if (i == 0)
        return;

    PushMatrix();
    while (*ptr != '\0') {
        switch (*ptr) {
        case 'F':
            Rotate(tilt/10000, 0.0, 0.0, 1.0);  /* tilt very very slightly */
            LoadMatrix();
            if (do_growth && floor(growth) == i) {
                draw_branch(LENGTH*scale*(growth-1), RADIUS*radscale, BASE_TRI, c1, c2);
                Translate(0.0, LENGTH*scale*(growth-1), 0.0);
            } else {
                draw_branch(LENGTH*scale, RADIUS*radscale, BASE_TRI, c1, c2);
                Translate(0.0, LENGTH*scale, 0.0);
            }
            break;
        case '[':
            PushMatrix();
            break;
        case ']':
            PopMatrix();
            break;
        case 'L':
            if (do_growth && floor(growth) == i)
                draw_leaf(4*(growth-1)*scale, 1*(growth-1)*scale, 25, c3);
            else
                draw_leaf(4*scale, 1*scale, 25, c3);
            break;
        case 'R':
            Rotate(yrotate, 0.0, 1.0, 0.0);
            break;
        case 'T':
            Rotate(tilt, 0.0, 0.0, 1.0);
            break;
        default:
            if (isdigit(*ptr)) {
                ch[0] = *ptr;
                drawPlant(i-1, scale*SCALE, radscale*RADSCALE, atoi(ch));
            }
            break;
        }
        ptr++;
    }
    PopMatrix();
}
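/*
 * Context, not from the original source: the digit case above is what makes
 * drawPlant() a recursive L-system interpreter. A digit names another entry
 * in the (not shown) lsystem[] table and is drawn one level deeper, with the
 * size reduced by SCALE and RADSCALE. A hypothetical table in the format the
 * switch statement consumes might look like this sketch:
 */
static char *lsystem[] = {
    "F[TF1]R[TL]1",   /* entry 0: branch, tilted sub-branch and leaf, recurse */
    "F[TL]RFL",       /* entry 1: short branch ending in leaves */
};

/* Typical top-level call (depth 4, unit scale, entry 0): drawPlant(4, 1.0f, 1.0f, 0); */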
AutoEncoder::AutoEncoder(std::string add, const int rows, const int cols,
                         UINT hiddenUnits, double sparsity)
    : IO_dim(cols), hidden_units(hiddenUnits), samples(rows), st_sparsity(sparsity)
{
    /*
     * Description:
     *   A constructor to initialize the sparse autoencoder.
     *
     * @param add: path of the input data file
     * @param rows: number of samples
     * @param cols: dimension of samples
     * @param hiddenUnits: number of hidden units
     * @param sparsity: target sparsity we want to approximate
     */
    LoadMatrix(*OriginalData, add, cols, rows);
    HiddenMatrix = MatrixXf::Zero(samples, hidden_units);
    Sparsity_average = VectorXf::Zero(hidden_units);
    OutputMatrix = MatrixXf::Zero(samples, IO_dim);

    /* Initialize the encode/decode weight matrices */
    double w_init_interval = sqrt(6. / (IO_dim + hidden_units + 1));
    Weight_encode = w_init_interval * MatrixXf::Random(hidden_units, IO_dim);
    Weight_decode = w_init_interval * MatrixXf::Random(IO_dim, hidden_units);
    Bias_encode = VectorXf::Ones(hidden_units);
    Bias_decode = VectorXf::Ones(IO_dim);
}
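/*
 * Aside (a sketch, not part of the original class): w_init_interval above is
 * the Glorot/Xavier uniform heuristic r = sqrt(6 / (fan_in + fan_out)), here
 * with an extra +1 in the denominator for the bias unit. Eigen's
 * MatrixXf::Random fills with uniform values in [-1, 1], so scaling by r
 * yields weights uniform in (-r, r):
 */
#include <Eigen/Dense>
#include <cmath>

Eigen::MatrixXf glorotUniform(int fan_out, int fan_in)
{
    const double r = std::sqrt(6.0 / (fan_in + fan_out + 1));  /* +1 as in the constructor */
    return static_cast<float>(r) * Eigen::MatrixXf::Random(fan_out, fan_in);
}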
Animation* AnimationLoader::LoadAnimation(BinaryReader &br)
{
    Animation *anim = new Animation();
    bool hasOwnAnim = false;
    float angleScale;

    anim->id = br.Read<int>();
    anim->worldTMInv = LoadMatrix(br);
    anim->pos = LoadVec3Anim(br, anim->localPos);
    anim->rot = LoadQuatAnim(br, anim->localRot, hasOwnAnim, angleScale);
    anim->scale = LoadVec3Anim(br, anim->localScale);

    if (hasOwnAnim) {
        anim->hasOwnRotate = hasOwnAnim;
        anim->angleScale = angleScale;
    }

    int subAnimsCount = br.Read<int>();
    for (int i = 0; i < subAnimsCount; i++)
        anim->subAnims.push_back(LoadAnimation(br));

    return anim;
}
void DrawNode(R3Scene *scene, R3Node *node)
{
  // Push transformation onto stack
  glPushMatrix();
  LoadMatrix(&node->transformation);

  // Load material
  if (node->material) LoadMaterial(node->material);

  // Draw shape
  if (node->shape) DrawShape(node->shape);

  // Draw children nodes
  for (int i = 0; i < (int) node->children.size(); i++)
    DrawNode(scene, node->children[i]);

  // Restore previous transformation
  glPopMatrix();

  // Show bounding box
  if (show_bboxes) {
    GLboolean lighting = glIsEnabled(GL_LIGHTING);
    glDisable(GL_LIGHTING);
    node->bbox.Outline();
    if (lighting) glEnable(GL_LIGHTING);
  }
}
void nuiGLPainter::MultMatrix(const nuiMatrix& rMatrix)
{
  NUI_RETURN_IF_RENDERING_DISABLED;
  nuiPainter::MultMatrix(rMatrix);
  LoadMatrix(mMatrixStack.top());
  nuiCheckForGLErrors();
}
void nuiGLPainter::PopMatrix()
{
  NUI_RETURN_IF_RENDERING_DISABLED;
  nuiPainter::PopMatrix();
  LoadMatrix(mMatrixStack.top());
  nuiCheckForGLErrors();
}
double EvalDeterminant(char *FileName)
{
    /* sum, N, P, and Go() are file-scope globals and helpers (not shown here) */
    int i;

    LoadMatrix(FileName);
    sum = 0;
    for (i = 0; i < N; i++)
        P[i] = i;               /* start from the identity permutation */
    Go(0);
    return sum;
}
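/*
 * A guess at the shape of the pieces EvalDeterminant() relies on, for context
 * only: sum, N, P, and the matrix A are assumed to be file-scope globals
 * filled by LoadMatrix(), and Go() presumably enumerates all permutations of
 * the columns, accumulating signed products into sum (the O(N!) Leibniz
 * expansion of the determinant). A self-contained sketch of such a Go():
 */
#define MAXN 16
static double A[MAXN][MAXN];   /* filled by LoadMatrix() */
static double sum;
static int N;
static int P[MAXN];

static void Go(int k)
{
    int i, j, t;
    if (k == N) {                      /* complete permutation: add its term */
        double prod = 1.0;
        int inversions = 0;
        for (i = 0; i < N; i++)
            prod *= A[i][P[i]];
        for (i = 0; i < N; i++)        /* sign = (-1)^inversions */
            for (j = i + 1; j < N; j++)
                if (P[i] > P[j])
                    inversions++;
        sum += (inversions % 2) ? -prod : prod;
        return;
    }
    for (i = k; i < N; i++) {          /* place each remaining column at row k */
        t = P[k]; P[k] = P[i]; P[i] = t;
        Go(k + 1);
        t = P[k]; P[k] = P[i]; P[i] = t;
    }
}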
int main(int argc, char* argv[]) {
  LinearRegression lr;
  LinearRegressionScoreTest lrst;
  LinearRegressionPermutationTest lrpt;

  Vector y;
  Matrix x;
  LoadVector("input.y", y);
  LoadMatrix("input.x", x);

  if (lr.FitLinearModel(x, y) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  Vector& beta = lr.GetCovEst();
  Matrix& v = lr.GetCovB();
  Vector& pWald = lr.GetAsyPvalue();

  fprintf(stdout, "wald_beta\t");
  Print(beta);
  fputc('\n', stdout);

  fprintf(stdout, "wald_vcov\t");
  Print(v);
  fputc('\n', stdout);

  fprintf(stdout, "wald_p\t");
  Print(pWald[1]);
  fputc('\n', stdout);

  if (lrpt.FitLinearModel(x, 1, y, 200, 0.05) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  fprintf(stdout, "permutation_p\t");
  double permu_p = lrpt.getPvalue();
  Print(permu_p);
  fputc('\n', stdout);

  if (lrst.FitLinearModel(x, y, 1) == false) {
    fprintf(stderr, "Fitting failed!\n");
    return -1;
  }
  fprintf(stdout, "score_p\t");
  double score_p = lrst.GetPvalue();
  Print(score_p);
  fputc('\n', stdout);

  return 0;
}
void gatherAEff(const char *inpFile = NULL)
{
  std::string line;
  std::vector<std::string> lines;

  if (!inpFile) {
    std::cout << "\ninpFile=NULL\n";
    std::cout << "Creating sample input file\n";
    const char *sampleFName = "aeff_sample.inp";
    std::ofstream fout(sampleFName);
    fout << "../root_files/constants/DY_j22_19789pb/acceptance_constants1D.root\n";
    fout << "../root_files/constants/DY_j22_19789pb/event_efficiency_constants1D.root\n";
    fout << "../root_files/constants/DY_j22_19789pb/scale_factors_1D_Full2012_hltEffOld_PU.root\n";
    fout.close();
    std::cout << "check the file <" << sampleFName << ">\n";
    std::cout << "1D/2D in the name does not matter\n";
    return;
  }
  else { // load input
    std::ifstream finput(inpFile);
    if (!finput) {
      std::cout << "failed to open file <" << inpFile << ">\n";
      return;
    }
    getline(finput, line); lines.push_back(line);
    std::cout << "line1=<" << line << ">\n";
    getline(finput, line); lines.push_back(line);
    std::cout << "line2=<" << line << ">\n";
    getline(finput, line); lines.push_back(line);
    std::cout << "line3=<" << line << ">\n";
    finput.close();
  }
  std::cout << dashline;

  std::string fnameAcc, fnameEff, fnameRho;
  int count = 0;
  for (unsigned int i = 0; i < lines.size(); ++i) {
    line = lines[i];
    AdjustDim(line);
    if (PosOk(line, "acceptance")) { fnameAcc = line; count++; }
    else if (PosOk(line, "efficiency")) { fnameEff = line; count++; }
    else if (PosOk(line, "scale_factors")) { fnameRho = line; count++; }
    else {
      std::cout << "could not identify the file <" << line << ">\n";
      return;
    }
  }
  if (count != 3) {
    std::cout << "not all files were identified\n";
    return;
  }
  std::cout << "files were identified ok\n";
  std::cout << dashline;

  TMatrixD effM(DYTools::nMassBins, DYTools::nYBinsMax);
  TMatrixD effErrM(effM);
  TMatrixD accM(effM), accErrM(effM);
  TMatrixD rhoM(effM), rhoErrM(effM);

  if (!LoadMatrix(fnameAcc, accM, accErrM, "acceptanceMatrix", "acceptanceErrMatrix") ||
      !LoadMatrix(fnameEff, effM, effErrM, "efficiencyArray", "efficiencyErrArray") ||
      !LoadMatrix(fnameRho, rhoM, rhoErrM, "scaleFactor", "scaleFactorErr")) {
    std::cout << "failed to load field\n";
    return;
  }

  TString outFName;
  if (DYTools::study2D) {
    outFName = "dyee_aeff_2D.root";
    TFile fout(outFName, "recreate");
    accM.Write("acceptance");
    accErrM.Write("acceptanceErr");
    effM.Write("efficiency");
    effErrM.Write("efficiencyErr");
    rhoM.Write("scaleFactor");
    rhoErrM.Write("scaleFactorErr");
    unfolding::writeBinningArrays(fout);
    for (int i = 0; i < DYTools::nMassBins; ++i) {
      TString massRange = Form("_%1.0lf_%2.0lf",
                               DYTools::massBinLimits[i], DYTools::massBinLimits[i+1]);
      TString hAccName = TString("hAcc") + massRange;
      Histo_t *hAcc = extractRapidityDependence(hAccName, "", accM, accErrM, i, 0);
      TString hEffName = TString("hEff") + massRange;
      Histo_t *hEff = extractRapidityDependence(hEffName, "", effM, effErrM, i, 0);
      TString hRhoName = TString("hRho") + massRange;
      Histo_t *hRho = extractRapidityDependence(hRhoName, "", rhoM, rhoErrM, i, 0);
      if (!hAcc || !hEff || !hRho) {
        std::cout << "got unexpected null histo\n";
        break;
      }
      hAcc->Write();
      hEff->Write();
      hRho->Write();
    }
    fout.Close();
  }
  else { // 1D case
    outFName = "dyee_aeff_1D.root";
    std::cout << "accM: rows " << accM.GetNrows() << ", cols " << accM.GetNcols() << "\n";
    TVectorD eff(DYTools::nMassBins), effErr(eff);
    TVectorD acc(eff), accErr(eff);
    TVectorD rho(eff), rhoErr(eff);
    GetMassProfile1D(accM, accErrM, acc, accErr);
    GetMassProfile1D(effM, effErrM, eff, effErr);
    GetMassProfile1D(rhoM, rhoErrM, rho, rhoErr);
    Histo_t *hAcc = extractMassDependence("hAcc", "", accM, accErrM, 0, 0, 0);
    Histo_t *hEff = extractMassDependence("hEff", "", effM, effErrM, 0, 0, 0);
    Histo_t *hRho = extractMassDependence("hRho", "", rhoM, rhoErrM, 0, 0, 0);
    if (!hAcc || !hEff || !hRho) {
      std::cout << "got unexpected null histo\n";
    }
    else {
      TFile fout(outFName, "recreate");
      acc.Write("acceptance");
      accErr.Write("acceptanceErr");
      eff.Write("efficiency");
      effErr.Write("efficiencyErr");
      rho.Write("scaleFactor");
      rhoErr.Write("scaleFactorErr");
      unfolding::writeBinningArrays(fout);
      hAcc->Write();
      hEff->Write();
      hRho->Write();
      fout.Close();
    }
  }
  std::cout << "file <" << outFName << "> created\n";
  return;
}
void nuiGLPainter::ApplyTexture(const nuiRenderState& rState, bool ForceApply)
{
//  if ((rState.mTexturing && !rState.mpTexture) || (!rState.mTexturing && rState.mpTexture))
//  {
//    printf("bleh!\n");
//    char* bleh = NULL;
//    bleh[0] = 0;
//  }

  // 2D Textures:
  std::map<nuiTexture*, TextureInfo>::const_iterator it = mTextures.find(rState.mpTexture);
  bool uptodate = (it == mTextures.end()) ? false : (!it->second.mReload && it->second.mTexture >= 0);

  if (ForceApply || (mState.mpTexture != rState.mpTexture) || (mState.mpTexture && !uptodate))
  {
    GLenum intarget = 0;
    GLenum outtarget = 0;

    if (mState.mpTexture)
    {
      outtarget = GetTextureTarget(mState.mpTexture->IsPowerOfTwo());
      //mState.mpTexture->UnapplyGL(this); // TODO: unapply the texture
      nuiCheckForGLErrors();
      mState.mpTexture->Release();
      nuiCheckForGLErrors();
    }

    //NGL_OUT(_T("Change texture to 0x%x (%ls)\n"), rState.mpTexture, rState.mpTexture ? rState.mpTexture->GetSource().GetChars() : nglString::Empty.GetChars());
    mState.mpTexture = rState.mpTexture;

    if (mState.mpTexture)
    {
      intarget = GetTextureTarget(mState.mpTexture->IsPowerOfTwo());
      mState.mpTexture->Acquire();

      nuiSurface* pSurface = mState.mpTexture->GetSurface();
      if (pSurface)
      {
        std::map<nuiSurface*, FramebufferInfo>::const_iterator it = mFramebuffers.find(pSurface);
        bool create = (it == mFramebuffers.end()) ? true : false;
        if (create || pSurface->IsDirty())
        {
          PushClipping();
          nuiRenderState s(mState); // PushState();
          PushProjectionMatrix();
          PushMatrix();

#ifdef _OPENGL_ES_
          if (mpSurfaceStack.empty())
          {
//            mDefaultFramebuffer = 0;
//            mDefaultRenderbuffer = 0;
            glGetIntegerv(GL_FRAMEBUFFER_BINDING_NUI, &mDefaultFramebuffer);
            glGetIntegerv(GL_RENDERBUFFER_BINDING_NUI, (GLint *) &mDefaultRenderbuffer);
          }
#endif

          PushSurface();
          SetState(nuiRenderState());
          ResetClipRect();
          mClip.Set(0, 0, pSurface->GetWidth(), pSurface->GetHeight());
          LoadMatrix(nglMatrixf());
          NGL_ASSERT(pSurface);
          SetSurface(pSurface);
          //Set2DProjectionMatrix(nuiRect(0.0f, 0.0f, pSurface->GetWidth(), pSurface->GetHeight()));
          nuiMatrix m;
          m.Translate(-1.0f, 1.0f, 0.0f);
          m.Scale(2.0f / pSurface->GetWidth(), -2.0f / pSurface->GetHeight(), 1.0f);
          LoadProjectionMatrix(nuiRect(pSurface->GetWidth(), pSurface->GetHeight()), m);

          // clear the surface with transparent black:
//          nuiRenderState s2(mState); // PushState();
//          mState.mClearColor = nuiColor(0.0f, 0.0f, 0.0f, 0.0f);
          SetState(mState);
//          ClearColor();
//          SetState(s2);

          //////////////////////////////
          nuiDrawContext Ctx(nuiRect(pSurface->GetWidth(), pSurface->GetHeight()));
          Ctx.SetPainter(this);
          pSurface->Realize(&Ctx);
          Ctx.SetPainter(NULL);
          //////////////////////////////

          PopSurface();
          PopMatrix();
          PopProjectionMatrix();
          //PopState();
          SetState(s);
          PopClipping();
        }
      }

      UploadTexture(mState.mpTexture);
      nuiCheckForGLErrors();
    }

    //NGL_OUT(_T("Change texture type from 0x%x to 0x%x\n"), outtarget, intarget);

    mTextureTarget = intarget;
    if (intarget != outtarget)
    {
      // Texture target has changed
      if (outtarget)
      {
        glDisable(outtarget);
        nuiCheckForGLErrors();
      }
      //NGL_OUT(_T("disable outtarget\n"));
      if (intarget && mState.mTexturing && mState.mpTexture)
      {
        mState.mTexturing = rState.mTexturing;
        //NGL_OUT(_T("enable intarget\n"));
        glEnable(intarget);
        nuiCheckForGLErrors();
      }
    }
    else
    {
      // Texture target has not changed
      if (mState.mTexturing != rState.mTexturing) // Has texturing on/off changed?
      {
        // Should enable or disable texturing
        mState.mTexturing = rState.mTexturing;
        if (mState.mTexturing)
        {
          glEnable(mTextureTarget);
          nuiCheckForGLErrors();
        }
        else
        {
          glDisable(mTextureTarget);
          nuiCheckForGLErrors();
        }
      }
    }
  }

  if (ForceApply || (mState.mTexturing != rState.mTexturing))
  {
    // The texture has not changed, but texturing may have been enabled / disabled
    mState.mTexturing = rState.mTexturing;

    if (mState.mpTexture)
    {
      if (mTextureTarget && mState.mTexturing)
      {
        //NGL_OUT(_T("Enable 0x%x\n"), mTextureTarget);
        glEnable(mTextureTarget);
        nuiCheckForGLErrors();
      }
      else
      {
        //NGL_OUT(_T("Disable 0x%x\n"), mTextureTarget);
        glDisable(mTextureTarget);
        nuiCheckForGLErrors();
      }
    }
    else
    {
      if (mTextureTarget)
      {
        //NGL_OUT(_T("Disable 0x%x\n"), mTextureTarget);
        glDisable(mTextureTarget);
      }
      nuiCheckForGLErrors();
    }
  }
}
int main(int argc, char **argv)
{
    int i, j, k = 0;
    double MSE = 0, TrainingAccuracy;
    float **input, **weight, *biase, **tranpI, **tempH, **H;
    float **tranpw;
    float **train_set;
    float **T, **Y;
    float **out;

    train_set = (float **)calloc(DATASET, sizeof(float *));
    tranpI = (float **)calloc(INPUT_NEURONS, sizeof(float *));
    input = (float **)calloc(DATASET, sizeof(float *));         /* DATASET * INPUT_NEURONS */
    weight = (float **)calloc(HIDDEN_NEURONS, sizeof(float *)); /* HIDDEN_NEURONS * INPUT_NEURONS */
    biase = (float *)calloc(HIDDEN_NEURONS, sizeof(float));     /* HIDDEN_NEURONS */
    tempH = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));  /* HIDDEN_NEURONS * DATASET */
    tranpw = (float **)calloc(INPUT_NEURONS, sizeof(float *));
    H = (float **)calloc(DATASET, sizeof(float *));
    T = (float **)calloc(DATASET, sizeof(float *));
    Y = (float **)calloc(DATASET, sizeof(float *));
    out = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    for (i = 0; i < DATASET; i++) {
        train_set[i] = (float *)calloc(NUMROWS, sizeof(float));
        input[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        H[i] = (float *)calloc(HIDDEN_NEURONS, sizeof(float));
    }
    for (i = 0; i < DATASET; i++) {
        T[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        Y[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }
    for (i = 0; i < INPUT_NEURONS; i++) {
        tranpI[i] = (float *)calloc(DATASET, sizeof(float));
        tranpw[i] = (float *)calloc(HIDDEN_NEURONS, sizeof(float));
    }
    for (i = 0; i < HIDDEN_NEURONS; i++) {
        weight[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        tempH[i] = (float *)calloc(DATASET, sizeof(float));
        out[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }

    printf("begin regression test...\n");
    printf("begin to load weight and bias...\n");
    /* load the trained weights and biases (random generation commented out) */
    //RandomWeight(weight,HIDDEN_NEURONS,INPUT_NEURONS);
    //RandomBiase(biase,HIDDEN_NEURONS);
    if (LoadMatrix(tranpw, "../result/weight", INPUT_NEURONS, HIDDEN_NEURONS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }
    TranspositionMatrix(tranpw, weight, INPUT_NEURONS, HIDDEN_NEURONS);
    if (LoadMatrix_s(biase, "../result/bias", 1, HIDDEN_NEURONS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }

    /* load the dataset into memory */
    printf("begin to load input from the file...\n");
    if (LoadMatrix(train_set, "../sample/ppp", DATASET, NUMROWS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }

    /* split the dataset into inputs and targets */
    for (i = 0; i < DATASET; i++) {
        T[k++][0] = train_set[i][0];
        for (j = 1; j <= INPUT_NEURONS; j++) {
            input[i][j-1] = train_set[i][j];
        }
    }

    /* ELM */
    printf("begin to compute...\n");
    TranspositionMatrix(input, tranpI, DATASET, INPUT_NEURONS);
    printf("begin to compute step 1...\n");
    MultiplyMatrix(weight, HIDDEN_NEURONS, INPUT_NEURONS, tranpI, INPUT_NEURONS, DATASET, tempH);
    printf("begin to compute step 2...\n");
    AddMatrix_bais(tempH, biase, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 3...\n");
    SigmoidHandle(tempH, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 4...\n");
    TranspositionMatrix(tempH, H, HIDDEN_NEURONS, DATASET);
    printf("begin to load input from the file...\n");
    if (LoadMatrix(out, "../result/result", HIDDEN_NEURONS, OUTPUT_NEURONS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }

    /* check the accuracy */
    MultiplyMatrix(H, DATASET, HIDDEN_NEURONS, out, HIDDEN_NEURONS, OUTPUT_NEURONS, Y);
    for (i = 0; i < DATASET; i++) {
        MSE += (Y[i][0] - T[i][0]) * (Y[i][0] - T[i][0]);
    }
    TrainingAccuracy = sqrt(MSE / DATASET);
    printf("training accuracy :%f\n", TrainingAccuracy);
    printf("test complete...\n");
    //print(PIMatrix,DATASET,HIDDEN_NEURONS);
    return 0;
}
/**
 * Train an ELM
 * @param elm_type - 0 for regression; 1 for (both binary and multi-class) classification
 */
void ELMTrain(int elm_type)
{
    double starttime, endtime;
    double TrainingAccuracy;
    int i, j, k = 0;
    float **input, **weight, *biase, **tranpI, **tempH, **H;
    float **PIMatrix;
    float **train_set;
    float **T, **Y;
    float **out;

    train_set = (float **)calloc(DATASET, sizeof(float *));
    tranpI = (float **)calloc(INPUT_NEURONS, sizeof(float *));
    input = (float **)calloc(DATASET, sizeof(float *));         /* DATASET * INPUT_NEURONS */
    weight = (float **)calloc(HIDDEN_NEURONS, sizeof(float *)); /* HIDDEN_NEURONS * INPUT_NEURONS */
    biase = (float *)calloc(HIDDEN_NEURONS, sizeof(float));     /* HIDDEN_NEURONS */
    tempH = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));  /* HIDDEN_NEURONS * DATASET */
    PIMatrix = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    H = (float **)calloc(DATASET, sizeof(float *));
    T = (float **)calloc(DATASET, sizeof(float *));
    Y = (float **)calloc(DATASET, sizeof(float *));
    out = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    for (i = 0; i < DATASET; i++) {
        train_set[i] = (float *)calloc(NUMROWS, sizeof(float));
        input[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        H[i] = (float *)calloc(HIDDEN_NEURONS, sizeof(float));
    }
    for (i = 0; i < DATASET; i++) {
        T[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        Y[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }
    for (i = 0; i < INPUT_NEURONS; i++)
        tranpI[i] = (float *)calloc(DATASET, sizeof(float));
    for (i = 0; i < HIDDEN_NEURONS; i++) {
        weight[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        tempH[i] = (float *)calloc(DATASET, sizeof(float));
        out[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        PIMatrix[i] = (float *)calloc(DATASET, sizeof(float));
    }

    printf("begin to randomize weights and biases...\n");
    /* generate random weights and biases */
    RandomWeight(weight, HIDDEN_NEURONS, INPUT_NEURONS);
    RandomBiase(biase, HIDDEN_NEURONS);
    SaveMatrix(weight, "./result/weight", HIDDEN_NEURONS, INPUT_NEURONS);
    SaveMatrix_s(biase, "./result/biase", 1, HIDDEN_NEURONS);

    /* load the dataset into memory */
    printf("begin to load input from the file...\n");
    if (LoadMatrix(train_set, "./sample/frieman", DATASET, NUMROWS) == 0) {
        printf("load input file error!!!\n");
        return;
    }

    /* split the dataset into inputs and targets */
    if (elm_type == 0) { /* regression */
        /* the first column is the output of the dataset */
        for (i = 0; i < DATASET; i++) {
            T[k++][0] = train_set[i][0];
            for (j = 1; j <= INPUT_NEURONS; j++) {
                input[i][j-1] = train_set[i][j];
            }
        }
    } else { /* classification */
        /* the first column is the class label of the dataset */
        InitMatrix(T, DATASET, OUTPUT_NEURONS, -1);
        for (i = 0; i < DATASET; i++) {
            /* get the first column */
            T[k++][0] = train_set[i][0] - 1; /* class labels start from 0, so minus one */
            for (j = 1; j <= INPUT_NEURONS; j++) {
                input[i][j-1] = train_set[i][j];
            }
        }
        /* one-hot encode the labels into T */
        for (i = 0; i < DATASET; i++) {
            k = (int)T[i][0];
            if (k < OUTPUT_NEURONS && k >= 0) {
                T[i][k] = 1;
            }
            if (k != 0) {
                T[i][0] = -1;
            }
        }
    }

    /* ELM */
    printf("begin to compute...\n");
    starttime = omp_get_wtime();
    TranspositionMatrix(input, tranpI, DATASET, INPUT_NEURONS);
    printf("begin to compute step 1...\n");
    MultiplyMatrix(weight, HIDDEN_NEURONS, INPUT_NEURONS, tranpI, INPUT_NEURONS, DATASET, tempH);
    printf("begin to compute step 2...\n");
    AddMatrix_bais(tempH, biase, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 3...\n");
    SigmoidHandle(tempH, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 4...\n");
    TranspositionMatrix(tempH, H, HIDDEN_NEURONS, DATASET);
    PseudoInverseMatrix(H, DATASET, HIDDEN_NEURONS, PIMatrix);
    MultiplyMatrix(PIMatrix, HIDDEN_NEURONS, DATASET, T, DATASET, OUTPUT_NEURONS, out);
    //SaveMatrix(H,"./result/H",DATASET,HIDDEN_NEURONS);
    //SaveMatrix(PIMatrix,"./result/PIMatrix",HIDDEN_NEURONS,DATASET);
    printf("begin to compute step 5...\n");
    endtime = omp_get_wtime();

    /* save the output weights */
    SaveMatrix(out, "./result/result", HIDDEN_NEURONS, OUTPUT_NEURONS);
    MultiplyMatrix(H, DATASET, HIDDEN_NEURONS, out, HIDDEN_NEURONS, OUTPUT_NEURONS, Y);
    printf("use time :%f\n", endtime - starttime);
    printf("train complete...\n");

    if (elm_type == 0) {
        /* check the accuracy */
        double MSE = 0;
        for (i = 0; i < DATASET; i++) {
            MSE += (Y[i][0] - T[i][0]) * (Y[i][0] - T[i][0]);
        }
        TrainingAccuracy = sqrt(MSE / DATASET);
        printf("Regression/training accuracy :%f\n", TrainingAccuracy);
    } else {
        float MissClassificationRate_Training = 0;
        double maxtag1, maxtag2;
        int tag1 = 0, tag2 = 0;
        for (i = 0; i < DATASET; i++) {
            maxtag1 = Y[i][0]; tag1 = 0;
            maxtag2 = T[i][0]; tag2 = 0;
            for (j = 1; j < OUTPUT_NEURONS; j++) {
                if (Y[i][j] > maxtag1) { maxtag1 = Y[i][j]; tag1 = j; }
                if (T[i][j] > maxtag2) { maxtag2 = T[i][j]; tag2 = j; }
            }
            if (tag1 != tag2)
                MissClassificationRate_Training++;
        }
        TrainingAccuracy = 1 - MissClassificationRate_Training * 1.0f / DATASET;
        printf("Classification/training accuracy :%f\n", TrainingAccuracy);
    }

    FreeMatrix(train_set, DATASET);
    FreeMatrix(tranpI, INPUT_NEURONS);
    FreeMatrix(input, DATASET);
    FreeMatrix(weight, HIDDEN_NEURONS);
    free(biase);
    FreeMatrix(tempH, HIDDEN_NEURONS);
    FreeMatrix(PIMatrix, HIDDEN_NEURONS);
    FreeMatrix(H, DATASET);
    FreeMatrix(T, DATASET);
    FreeMatrix(Y, DATASET);
    FreeMatrix(out, HIDDEN_NEURONS);
}
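/*
 * Aside (an Eigen-based sketch, not the helpers used above): "step 5" is the
 * core of ELM training. The output weights are the minimum-norm least-squares
 * solution out = pinv(H) * T, where pinv is the Moore-Penrose pseudoinverse
 * of the hidden-layer output matrix H; this is what PseudoInverseMatrix()
 * followed by MultiplyMatrix() computes.
 */
#include <Eigen/Dense>

Eigen::MatrixXf elmOutputWeights(const Eigen::MatrixXf& H, const Eigen::MatrixXf& T)
{
    /* a rank-revealing decomposition gives a numerically safe pseudoinverse */
    return H.completeOrthogonalDecomposition().pseudoInverse() * T;
}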
int main(int argc, char **argv)
{
    int i, j, k = 0;
    double TrainingAccuracy;
    float **input, **weight, *biase, **tranpI, **tempH, **H;
    float **train_set;
    float **T, **Y;
    float **out;

    train_set = (float **)calloc(TESTSET, sizeof(float *));
    tranpI = (float **)calloc(INPUT_NEURONS, sizeof(float *));
    input = (float **)calloc(TESTSET, sizeof(float *));         /* TESTSET * INPUT_NEURONS */
    weight = (float **)calloc(HIDDEN_NEURONS, sizeof(float *)); /* HIDDEN_NEURONS * INPUT_NEURONS */
    biase = (float *)calloc(HIDDEN_NEURONS, sizeof(float));     /* HIDDEN_NEURONS */
    tempH = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));  /* HIDDEN_NEURONS * TESTSET */
    H = (float **)calloc(TESTSET, sizeof(float *));
    T = (float **)calloc(TESTSET, sizeof(float *));
    Y = (float **)calloc(TESTSET, sizeof(float *));
    out = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    for (i = 0; i < TESTSET; i++) {
        train_set[i] = (float *)calloc(NUMROWS, sizeof(float));
        input[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        H[i] = (float *)calloc(HIDDEN_NEURONS, sizeof(float));
    }
    for (i = 0; i < TESTSET; i++) {
        T[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        Y[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }
    for (i = 0; i < INPUT_NEURONS; i++) {
        tranpI[i] = (float *)calloc(TESTSET, sizeof(float));
    }
    for (i = 0; i < HIDDEN_NEURONS; i++) {
        weight[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        tempH[i] = (float *)calloc(TESTSET, sizeof(float));
        out[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }

    printf("begin classification test...\n");
    /* load the trained weights and biases (random generation commented out) */
    //RandomWeight(weight,HIDDEN_NEURONS,INPUT_NEURONS);
    //RandomBiase(biase,HIDDEN_NEURONS);
    printf("begin to load weight...\n");
    if (LoadMatrix(weight, "../result/weight", HIDDEN_NEURONS, INPUT_NEURONS) == 0) {
        printf("load weight file error!!!\n");
        return 0;
    }
    printf("begin to load bias...\n");
    if (LoadMatrix_s(biase, "../result/biase", 1, HIDDEN_NEURONS) == 0) {
        printf("load bias file error!!!\n");
        return 0;
    }

    /* load the dataset into memory */
    printf("begin to load test file...\n");
    if (LoadMatrix(train_set, "../sample/covtype_test", TESTSET, NUMROWS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }

    InitMatrix(T, TESTSET, OUTPUT_NEURONS, -1);
    /* split the dataset into inputs and targets */
    for (i = 0; i < TESTSET; i++) {
        /* get the first column */
        T[k++][0] = train_set[i][0] - 1; /* class labels start from 0, so minus one */
        for (j = 1; j <= INPUT_NEURONS; j++) {
            input[i][j-1] = train_set[i][j];
        }
    }
    /* one-hot encode the labels into T */
    for (i = 0; i < TESTSET; i++) {
        k = (int)T[i][0];
        if (k < OUTPUT_NEURONS && k >= 0) {
            T[i][k] = 1;
        }
        if (k != 0) {
            T[i][0] = -1;
        }
    }

    /* ELM */
    printf("begin to compute...\n");
    TranspositionMatrix(input, tranpI, TESTSET, INPUT_NEURONS);
    printf("begin to compute step 1...\n");
    MultiplyMatrix(weight, HIDDEN_NEURONS, INPUT_NEURONS, tranpI, INPUT_NEURONS, TESTSET, tempH);
    printf("begin to compute step 2...\n");
    AddMatrix_bais(tempH, biase, HIDDEN_NEURONS, TESTSET);
    printf("begin to compute step 3...\n");
    SigmoidHandle(tempH, HIDDEN_NEURONS, TESTSET);
    printf("begin to compute step 4...\n");
    TranspositionMatrix(tempH, H, HIDDEN_NEURONS, TESTSET);
    printf("begin to load hidden output matrix from the file...\n");
    if (LoadMatrix(out, "../result/result", HIDDEN_NEURONS, OUTPUT_NEURONS) == 0) {
        printf("load input file error!!!\n");
        return 0;
    }

    MultiplyMatrix(H, TESTSET, HIDDEN_NEURONS, out, HIDDEN_NEURONS, OUTPUT_NEURONS, Y);

    float MissClassificationRate_Training = 0;
    double maxtag1, maxtag2;
    int tag1 = 0, tag2 = 0;
    for (i = 0; i < TESTSET; i++) {
        maxtag1 = Y[i][0]; tag1 = 0;
        maxtag2 = T[i][0]; tag2 = 0;
        for (j = 1; j < OUTPUT_NEURONS; j++) {
            if (Y[i][j] > maxtag1) { maxtag1 = Y[i][j]; tag1 = j; }
            if (T[i][j] > maxtag2) { maxtag2 = T[i][j]; tag2 = j; }
        }
        if (tag1 != tag2)
            MissClassificationRate_Training++;
    }
    TrainingAccuracy = 1 - MissClassificationRate_Training * 1.0f / TESTSET;
    printf("training accuracy :%f\n", TrainingAccuracy);
    printf("test complete...\n");
    //print(PIMatrix,TESTSET,HIDDEN_NEURONS);
    return 0;
}
int main(int argc, char* argv[]) {
  Matrix G;
  Matrix Y;
  Matrix Cov;
  LoadMatrix("input.mt.g", G);
  LoadMatrix("input.mt.y", Y);
  LoadMatrix("input.mt.cov", Cov);
  Cov.SetColumnLabel(0, "c1");
  Cov.SetColumnLabel(1, "c2");
  Y.SetColumnLabel(0, "y1");
  Y.SetColumnLabel(1, "y2");
  Y.SetColumnLabel(2, "y3");

  FormulaVector tests;
  {
    const char* tp1[] = {"y1"};
    const char* tc1[] = {"c1"};
    std::vector<std::string> p1(tp1, tp1 + 1);
    std::vector<std::string> c1(tc1, tc1 + 1);
    tests.add(p1, c1);
  }
  {
    const char* tp1[] = {"y2"};
    const char* tc1[] = {"c2"};
    std::vector<std::string> p1(tp1, tp1 + 1);
    std::vector<std::string> c1(tc1, tc1 + 1);
    tests.add(p1, c1);
  }
  {
    const char* tp1[] = {"y2"};
    const char* tc1[] = {"c1", "c2"};
    std::vector<std::string> p1(tp1, tp1 + 1);
    std::vector<std::string> c1(tc1, tc1 + 2);
    tests.add(p1, c1);
  }
  {
    const char* tp1[] = {"y1"};
    const char* tc1[] = {"1"};
    std::vector<std::string> p1(tp1, tp1 + 1);
    std::vector<std::string> c1(tc1, tc1 + 1);
    tests.add(p1, c1);
  }

  AccurateTimer t;
  {
    FastMultipleTraitLinearRegressionScoreTest mt(1024);
    bool ret = mt.FitNullModel(Cov, Y, tests);
    if (ret == false) {
      printf("Fit null model failed!\n");
      exit(1);
    }
    ret = mt.AddGenotype(G);
    if (ret == false) {
      printf("Add genotype failed!\n");
      exit(1);
    }
    ret = mt.TestCovariateBlock();
    if (ret == false) {
      printf("Test covariate block failed!\n");
      exit(1);
    }
    const Vector& u = mt.GetU(0);
    printf("u\t");
    Print(u);
    printf("\n");
    const Vector& v = mt.GetV(0);
    printf("v\t");
    Print(v);
    printf("\n");
    const Vector& pval = mt.GetPvalue(0);
    printf("pval\t");
    Print(pval);
    printf("\n");
  }
  return 0;
}
void ShaderProgram::LoadProjectionMatrix(GLuint location)
{
    LoadMatrix(location, Camera::GetProjMatrix());
}
void Direct3D::D3DMatrixStack::LoadMatrixClass(Matrix &mMatrix)
{
    LoadMatrix((D3DXMATRIX*)(&mMatrix));
}
void nuiDrawContext::LoadIdentity()
{
  nuiMatrix m;
  LoadMatrix(m);
}
// the state is only guaranteed after a SetModelMatrix call
void SetModelMatrix(const matrix2x3& m)
{
    modelMatrix = m;
    mvpMatrix = modelMatrix * viewMatrix * projectionMatrix;
    LoadMatrix(mvpMatrix);
}
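// Aside (a self-contained sketch, not the engine's types): the order
// model * view * projection implies a row-vector convention, v' = v * M,
// so the model transform is applied to the vertex first. With column
// vectors the same composition would read projection * view * model.
struct Vec2 { float x, y; };
struct Mat2x3 { float a, b, c, d, tx, ty; };  // 2x2 linear part plus translation row

Vec2 Transform(const Vec2& v, const Mat2x3& m)
{
    // [x y 1] * [[a b] [c d] [tx ty]]
    return { v.x * m.a + v.y * m.c + m.tx,
             v.x * m.b + v.y * m.d + m.ty };
}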
/* regression */
void ELMTrain()
{
    double starttime, endtime;
    int i, j, k = 0;
    double MSE = 0, TrainingAccuracy;
    float **input, **weight, *biase, **tranpI, **tempH, **H;
    float **PIMatrix;
    float **train_set;
    float **T, **Y;
    float **out;

    train_set = (float **)calloc(DATASET, sizeof(float *));
    tranpI = (float **)calloc(INPUT_NEURONS, sizeof(float *));
    input = (float **)calloc(DATASET, sizeof(float *));         /* DATASET * INPUT_NEURONS */
    weight = (float **)calloc(HIDDEN_NEURONS, sizeof(float *)); /* HIDDEN_NEURONS * INPUT_NEURONS */
    biase = (float *)calloc(HIDDEN_NEURONS, sizeof(float));     /* HIDDEN_NEURONS */
    tempH = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));  /* HIDDEN_NEURONS * DATASET */
    PIMatrix = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    H = (float **)calloc(DATASET, sizeof(float *));
    T = (float **)calloc(DATASET, sizeof(float *));
    Y = (float **)calloc(DATASET, sizeof(float *));
    out = (float **)calloc(HIDDEN_NEURONS, sizeof(float *));
    for (i = 0; i < DATASET; i++) {
        train_set[i] = (float *)calloc(NUMROWS, sizeof(float));
        input[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        H[i] = (float *)calloc(HIDDEN_NEURONS, sizeof(float));
    }
    for (i = 0; i < DATASET; i++) {
        T[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        Y[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
    }
    for (i = 0; i < INPUT_NEURONS; i++)
        tranpI[i] = (float *)calloc(DATASET, sizeof(float));
    for (i = 0; i < HIDDEN_NEURONS; i++) {
        weight[i] = (float *)calloc(INPUT_NEURONS, sizeof(float));
        tempH[i] = (float *)calloc(DATASET, sizeof(float));
        out[i] = (float *)calloc(OUTPUT_NEURONS, sizeof(float));
        PIMatrix[i] = (float *)calloc(DATASET, sizeof(float));
    }

    printf("begin to randomize weights and biases...\n");
    /* generate random weights and biases */
    RandomWeight(weight, HIDDEN_NEURONS, INPUT_NEURONS);
    RandomBiase(biase, HIDDEN_NEURONS);

    /* load the dataset into memory */
    printf("begin to load input from the file...\n");
    if (LoadMatrix(train_set, "../TrainingDataSet/covtype", DATASET, NUMROWS, 1) == 0) {
        printf("load input file error!!!\n");
        return;
    }

    /* split the dataset into inputs and targets */
    for (i = 0; i < DATASET; i++) {
        T[k++][0] = train_set[i][0];
        for (j = 1; j <= INPUT_NEURONS; j++) {
            input[i][j-1] = train_set[i][j];
        }
    }
    SaveMatrix(input, "./result/input", DATASET, INPUT_NEURONS);

    /* ELM */
    printf("begin to compute...\n");
    starttime = omp_get_wtime();
    TranspositionMatrix(input, tranpI, DATASET, INPUT_NEURONS);
    printf("begin to compute step 1...\n");
    MultiplyMatrix(weight, HIDDEN_NEURONS, INPUT_NEURONS, tranpI, INPUT_NEURONS, DATASET, tempH);
    printf("begin to compute step 2...\n");
    AddMatrix_bais(tempH, biase, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 3...\n");
    SigmoidHandle(tempH, HIDDEN_NEURONS, DATASET);
    printf("begin to compute step 4...\n");
    TranspositionMatrix(tempH, H, HIDDEN_NEURONS, DATASET);
    PseudoInverseMatrix(H, DATASET, HIDDEN_NEURONS, PIMatrix);
    MultiplyMatrix(PIMatrix, HIDDEN_NEURONS, DATASET, T, DATASET, OUTPUT_NEURONS, out);
    //SaveMatrix(H,"./result/H",DATASET,HIDDEN_NEURONS);
    //SaveMatrix(PIMatrix,"./result/PIMatrix",HIDDEN_NEURONS,DATASET);
    printf("begin to compute step 5...\n");
    endtime = omp_get_wtime();

    /* save the output weights */
    SaveMatrix(out, "./result/result", HIDDEN_NEURONS, OUTPUT_NEURONS);

    /* check the accuracy */
    MultiplyMatrix(H, DATASET, HIDDEN_NEURONS, out, HIDDEN_NEURONS, OUTPUT_NEURONS, Y);
    for (i = 0; i < DATASET; i++) {
        MSE += (Y[i][0] - T[i][0]) * (Y[i][0] - T[i][0]);
    }
    SaveMatrix(T, "./result/t", DATASET, OUTPUT_NEURONS);
    SaveMatrix(Y, "./result/y", DATASET, OUTPUT_NEURONS);
    TrainingAccuracy = sqrt(MSE / DATASET);
    printf("use time :%f\n", endtime - starttime);
    printf("training accuracy :%f\n", TrainingAccuracy);
    printf("train complete...\n");
    //print(PIMatrix,DATASET,HIDDEN_NEURONS);
}