Example #1
/* evaluates the trained model's kernel on a single pair of examples:
   prhs = { x1, x2, model },  plhs = { k } */
void mexFunction(int nlhs, mxArray *plhs[],
		  int nrhs, const mxArray *prhs[])
{
  int rows, cols;
  double *kern;
  DOC **docsA, **docsB;
  SVECTOR *a, *b;
  MODEL *model;

  global_init( );

  /* load model parameters from the "model" parameter */
  model = restore_model((mxArray *)prhs[2]);

  rows = mxGetM(prhs[0]);
  cols = mxGetN(prhs[0]);

  /* load the testing arrays into docs */
  mexToDOC((mxArray *)prhs[0], NULL, &docsA, NULL, NULL, NULL);
  mexToDOC((mxArray *)prhs[1], NULL, &docsB, NULL, NULL, NULL);
  
  /* setup output environment */
  plhs[0] = mxCreateDoubleMatrix(1,1,mxREAL);
  kern = mxGetPr(plhs[0]);

  a = docsA[0]->fvec;
  b = docsB[0]->fvec;

  /* kernel value between the first example of each input */
  kern[0] = single_kernel(&(model->kernel_parm), a, b);


  global_destroy();
}
Example #2
/* classifies test examples with a trained model:
   prhs = { data, labels, model },  plhs = { err, predictions } */
void mexFunction(int nlhs, mxArray *plhs[],
		  int nrhs, const mxArray *prhs[])
{

  
  DOC **docs;       /* test examples */
  double *target;   /* labels */
  long rows, cols;  /* number of rows and cols in the test data */
  double dist;
  double *err, *pred;
  long correct = 0, incorrect = 0, none = 0, i;
  MODEL *model;
  checkParameters(nlhs, plhs, nrhs, prhs);

  global_init( );

  /* load model parameters from the "model" parameter */
  model = restore_model((mxArray *)prhs[2]);

  rows = mxGetM(prhs[0]);
  cols = mxGetN(prhs[0]);

  /* load the testing arrays into docs */

  mexToDOC((mxArray *)prhs[0], (mxArray *)prhs[1], &docs, &target, NULL, NULL);
  
  /* setup output environment */
  plhs[0] = mxCreateDoubleMatrix(1,1,mxREAL);
  plhs[1] = mxCreateDoubleMatrix(rows,1,mxREAL);

  err = mxGetPr(plhs[0]);
  pred = mxGetPr(plhs[1]);

  /* classify examples */
  for (i = 0; i < rows; i++) {

    dist = classify_example(model, docs[i]);
    pred[i] = dist;

    if (dist > 0) {
      if (target[i] > 0) correct++;
      else incorrect++;
    } else {
      if (target[i] < 0) correct++;
      else incorrect++;
    }

    /* labels other than +1/-1 are counted as unlabeled */
    if ((int)(0.1 + (target[i] * target[i])) != 1)
      none++;

  }

  err[0] = incorrect / (double) rows;

  
  global_destroy( );

}
Example #3
int main(int argc, char *argv[])
{
    signal(SIGUSR1, handle_signal);
    global_init();

    int listenfd = open_listenfd();

    struct epoll_event event;

    struct sockaddr_in peeraddr;
    socklen_t socklen;
    int sock;
    int nready, i;
    char buf[1024];

    epollfd = epoll_create1(EPOLL_CLOEXEC);
    handle_err(epollfd == -1, "epoll_create1");
    event.data.fd = listenfd;
    event.events = EPOLLIN | EPOLLET;
    epoll_ctl(epollfd, EPOLL_CTL_ADD, listenfd, &event);

    while (g_switch == 0) {    /* g_switch is presumably set by handle_signal on SIGUSR1 */
        nready = epoll_wait(epollfd, events, MAX_EVENTS, -1);
        if (nready == -1) {
            if (errno == EINTR)
                continue;
            handle_err(1, "epoll_wait");
        }

        for (i = 0; i < nready; ++i) {
            if (events[i].data.fd == listenfd) {
                socklen = sizeof(peeraddr);
                sock = accept4(listenfd, (struct sockaddr *) &peeraddr, &socklen, SOCK_NONBLOCK);
                handle_err(sock == -1, "accept4");

                printf("new client %s:%hu accepted\n", inet_ntoa(peeraddr.sin_addr), ntohs(peeraddr.sin_port));
                sprintf(buf, "recv from (%s:%hu)\n", inet_ntoa(peeraddr.sin_addr), ntohs(peeraddr.sin_port));
                write_log(buf);

                event.data.fd = sock;
                event.events = EPOLLIN | EPOLLET;
                epoll_ctl(epollfd, EPOLL_CTL_ADD, sock, &event);
            } else {
                /* hand the ready client fd to the thread pool as a heap-allocated int */
                int *ptr = malloc(sizeof(int));
                *ptr = events[i].data.fd;
                threadpool_add(thp, do_task, (void *)(ptr), 0);
            }
        }
    }

    global_destroy();
    return 0;
}
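
The thread pool (thp), do_task, and the signal/logging helpers are defined elsewhere in the program. Since main() hands the pool a heap-allocated copy of the ready descriptor, the worker owns that pointer and must free it. A hypothetical worker of roughly that shape, shown only to illustrate the ownership contract (echo-style handling assumed; this is not the actual do_task):

#include <stdlib.h>
#include <unistd.h>

/* Hypothetical sketch: the task argument is the malloc'ed int that main() passed
   to threadpool_add, so the worker frees it after copying out the descriptor.
   The socket was accepted non-blocking and registered edge-triggered, so the
   worker drains it until read() stops returning data. */
static void worker_sketch(void *arg)
{
    int fd = *(int *)arg;
    free(arg);                                    /* main() malloc'ed this int */

    char buf[1024];
    ssize_t n;
    while ((n = read(fd, buf, sizeof(buf))) > 0)  /* drain the non-blocking socket */
        write(fd, buf, (size_t)n);                /* echo back (illustrative only) */

    if (n == 0)                                   /* peer closed the connection */
        close(fd);
}
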
Example #4
void main_free() {

  extern struct stack stack[];

  unsigned long extent = (unsigned long)&stack[STACK_TOP];

  stack_destroy(extent);  // destroy the stack
  lval_destroy();
  traverse_fun();         // display the functions
  fun_destroy();
  global_destroy();

  if (program)
    free((void *)program);

  err(1, "Freed up the memory. Wrapping up Main:\n");
}
Example #5
/* computes the rows-by-rows kernel matrix of the trained model over the given examples:
   prhs = { data, model },  plhs = { K } */
void mexFunction(int nlhs, mxArray *plhs[],
		  int nrhs, const mxArray *prhs[])
{
  int rows, cols,i,j,offset;
  double *kern,k;
  DOC **docs;
  SVECTOR *a, *b;
  MODEL *model;

  global_init();

  /* load model parameters from the "model" parameter */
  model = restore_model((mxArray *)prhs[1]);

  rows = mxGetM(prhs[0]);
  cols = mxGetN(prhs[0]);

  /* load the testing arrays into docs */
  mexToDOC((mxArray *)prhs[0], NULL, &docs, NULL, NULL, NULL);
  
  /* setup output environment */
  plhs[0] = mxCreateDoubleMatrix(rows,rows,mxREAL);
  kern = mxGetPr(plhs[0]);

  for (i = 0; i < rows; i++) {
    a = docs[i]->fvec;
    for (j = 0; j < rows; j++) {
      b = docs[j]->fvec;
      k = single_kernel(&(model->kernel_parm), a, b);

      offset = computeOffset(rows, rows, i, j);      
      kern[offset] = k;
    }
  }

  global_destroy();
}
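
computeOffset is defined elsewhere in the wrapper. Since mxCreateDoubleMatrix allocates column-major storage, a helper with that argument list presumably maps a (row, column) pair of an m-by-n matrix to a linear index into the buffer returned by mxGetPr. A minimal sketch, assuming column-major layout (hypothetical name, not the wrapper's actual implementation):

/* Hypothetical sketch: linear index of element (i, j) in an m-by-n MATLAB matrix.
   MATLAB arrays are stored column-major, so (i, j) lives at j*m + i. */
static int offset_sketch(int m, int n, int i, int j)
{
  (void)n;            /* kept only to mirror computeOffset's argument list */
  return j * m + i;   /* walk down a column first, then move to the next column */
}

Under that assumption, kern[offset_sketch(rows, rows, i, j)] = k stores the kernel value at row i, column j of the output; for a symmetric kernel the row/column convention does not change the result.
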
Example #6
/* call as  model = mexsvmlearn(data,labels,options) */
void mexFunction(int nlhs, mxArray *plhs[],
				 int nrhs, const mxArray *prhs[])
{
	char **argv;
	int argc;
	DOC **docs;  /* training examples */
	long totwords,totdoc,i;
	double *target;
	double *alpha_in=NULL;
	KERNEL_CACHE *kernel_cache;
	LEARN_PARM learn_parm;
	KERNEL_PARM kernel_parm;
	MODEL model;

	/* check for valid calling format */
	if ((nrhs != 3)  || (nlhs != 1))
		mexErrMsgTxt(ERR001);

	if (mxGetM(prhs[0]) != mxGetM(prhs[1]))
		mexErrMsgTxt(ERR002);

	if (mxGetN(prhs[1]) != 1)
		mexErrMsgTxt(ERR003);

	/* reset static variables -- as a .DLL, static things are sticky  */
	global_init( );

	/* convert the parameters (given in prhs[2]) into an argv/argc combination */
	argv = make_argv((mxArray *)prhs[2],&argc); /* send the options */



	/* svm_light originally parsed its options from the command line.  For MATLAB
	the options string is converted into an equivalent argc/argv pair (cheesy,
	but it works) and handed to the standard svm_light option parser. */

	read_input_parameters(argc,argv,docfile,modelfile,restartfile,&verbosity, 
		&learn_parm,&kernel_parm);

	extract_user_opts((mxArray *)prhs[2], &kernel_parm);

	totdoc = mxGetM(prhs[0]);
	totwords = mxGetN(prhs[0]);

	/* prhs[0] = samples (mxn) array
	prhs[1] = labels (mx1) array */
	mexToDOC((mxArray *)prhs[0], (mxArray *)prhs[1], &docs, &target, NULL, NULL);

	/* TODO modify to accept this array 
	if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc); */

	if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */
		kernel_cache=NULL;
	}
	else {
		/* Always get a new kernel cache. It is not possible to use the
		same cache for two different training runs */
		kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size);
	}


	if(learn_parm.type == CLASSIFICATION) {
		svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
			&kernel_parm,kernel_cache,&model,alpha_in);

	}
	else if(learn_parm.type == REGRESSION) {
		svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
			&kernel_parm,&kernel_cache,&model);
	}
	else if(learn_parm.type == RANKING) {
		svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm,
			&kernel_parm,&kernel_cache,&model);
	}
	else if(learn_parm.type == OPTIMIZATION) {
		svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm,
			&kernel_parm,kernel_cache,&model,alpha_in);
	}
	else {
		mexErrMsgTxt(ERR004);
	}

	if(kernel_cache) {
		/* Free the memory used for the cache. */
		kernel_cache_cleanup(kernel_cache);
	}

	/* **********************************
	* After the training/learning portion has finished,
	* copy the model back to the output arrays for MATLAB 
	* ********************************** */
	store_model(&model, plhs);

	free_kernel();
	global_destroy( );	
}
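
make_argv is part of the wrapper and is not shown here. A minimal sketch of the kind of whitespace tokenization it presumably performs, so that read_input_parameters can keep its command-line argc/argv interface (hypothetical helper and names, assuming a writable, space-separated options string):

#include <stdlib.h>
#include <string.h>

/* Hypothetical sketch: split a writable options string into an argv-style array.
   argv[0] is a dummy program name so a command-line style parser can be reused
   unchanged; the remaining tokens point into the caller's buffer. */
static char **options_to_argv(char *opts, int *argc_out)
{
    static char *argv[64];
    int argc = 0;

    argv[argc++] = "mexsvmlearn";              /* dummy argv[0] */
    for (char *tok = strtok(opts, " \t");
         tok != NULL && argc < 63;
         tok = strtok(NULL, " \t"))
        argv[argc++] = tok;

    argv[argc] = NULL;
    *argc_out = argc;
    return argv;
}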