void mexFunction(int nlhs, mxArray * plhs[], int nrhs, const mxArray * prhs[]){ // check number of arguments if( nrhs!=2 ) mexErrMsgTxt("This function requires 2 arguments\n"); if( !mxIsNumeric(prhs[0]) ) mexErrMsgTxt("varargin{0} must be a valid kdtree pointer\n"); if( !mxIsNumeric(prhs[1]) ) mexErrMsgTxt("varargin{1} must be a query set of points\n"); // retrieve the tree pointer KDTree* tree; retrieve_tree( prhs[0], tree ); // retrieve the query data double* query_data; int npoints, ndims; retrieve_data( prhs[1], query_data, npoints, ndims ); // printf("query size: %dx%d\n", npoints, ndims); // check dimensions if( ndims != tree->ndims() ) mexErrMsgTxt("vararg{1} must be a [Nxk] matrix of N points in k dimensions\n"); // npoints x 1 indexes in output plhs[0] = mxCreateDoubleMatrix(npoints, 1, mxREAL); double* indexes = mxGetPr(plhs[0]); // cout << "nindexes: " << mxGetM(plhs[0]) << "x" << mxGetN(plhs[0]) << endl; // execute the query FOR EVERY point storing the index vector< double > query(ndims,0); for(int i=0; i<npoints; i++) for( int j=0; j<ndims; j++ ){ query[j] = query_data[ i+j*npoints ]; indexes[i] = tree->closest_point(query)+1; } }
int rename_file(char* dest, char* file, char * new_name, int client_id){ string local_old_path, server_old_path, local_new_path, server_new_path; string position, file_folder; string old_repo_path = calloc(1, MAX_PATH_LENGTH); string filename, newname, file_path, new_path; char old_file[MAX_PATH_LENGTH]; char old_new_name[MAX_PATH_LENGTH]; int version; struct dirent entry; struct dirent * result; DIR * old; local_old_path = append_to_path(dest, file); local_new_path = append_to_path(dest, new_name); server_old_path = append_to_path(REPOSITORY_PATH, file); server_new_path = append_to_path(REPOSITORY_PATH, new_name); if (!get_node_from_path(repository_tree, file)) { client_send("File not found in server", client_id); client_send(END_OF_TRANSMISSION, client_id); return NON_EXISTING_FILE; } run_command(COMMAND_MV, local_old_path, local_new_path); run_command(COMMAND_MV, server_old_path, server_new_path); file_folder = remove_last_appended(file); old_repo_path = append_to_path(OLD_REPO_PATH, file_folder); if (!(old = opendir(old_repo_path))) { client_send("File renamed", client_id); client_send(END_OF_TRANSMISSION, client_id); return SUCCESS; } filename = get_last_path(file); newname = get_last_path(new_name); do { readdir_r(old, &entry, &result); if ( strncmp(entry.d_name, ".", 1) && strncmp(entry.d_name, "..", 2)){ if (!strncmp(filename, entry.d_name, strlen(filename))) { position = strchr(entry.d_name, '-'); if (position) { sscanf(position+1, "%d", &version); sprintf(old_new_name, "%s-%d", newname, version); file_path = append_to_path(old_repo_path, entry.d_name); new_path = append_to_path(old_repo_path, old_new_name); run_command(COMMAND_MV, file_path, new_path); } } } } while ( result != NULL ); repository_tree = (fstree_t)new_fstree(); retrieve_tree(REPOSITORY_PATH, repository_tree->root); client_send("File renamed.", client_id); client_send(END_OF_TRANSMISSION, client_id); }
/* Download every URL listed in `file` — parsed as an HTML document when
   `html` is true, otherwise as a plain one-URL-per-line list.  On return,
   `*count` holds the number of URLs processed; the return value is the
   status of the last attempted retrieval (or QUOTEXC on quota overrun).
   NOTE(review): this chunk ends at the close of the per-URL loop — the
   function's trailing cleanup and return are not visible here. */
uerr_t retrieve_from_file (const char *file, bool html, int *count)
{
  uerr_t status;
  struct urlpos *url_list, *cur_url;

  /* Extract the list of URLs to fetch from the input file. */
  url_list = (html ? get_urls_html (file, NULL, NULL)
              : get_urls_file (file));
  status = RETROK;             /* Suppose everything is OK. */
  *count = 0;                  /* Reset the URL count. */

  for (cur_url = url_list; cur_url; cur_url = cur_url->next, ++*count)
    {
      char *filename = NULL, *new_file = NULL;
      int dt;   /* NOTE(review): only set by retrieve_url; the recursive
                   branch leaves it uninitialized — but `filename` stays
                   NULL there, so the block that reads `dt` is skipped. */

      if (cur_url->ignore_when_downloading)
        continue;
      /* Stop walking the list once the download quota is exceeded. */
      if (opt.quota && total_downloaded_bytes > opt.quota)
        {
          status = QUOTEXC;
          break;
        }
      if ((opt.recursive || opt.page_requisites)
          && (cur_url->url->scheme != SCHEME_FTP || getproxy (cur_url->url)))
        {
          int old_follow_ftp = opt.follow_ftp;

          /* Turn opt.follow_ftp on in case of recursive FTP retrieval */
          if (cur_url->url->scheme == SCHEME_FTP)
            opt.follow_ftp = 1;

          status = retrieve_tree (cur_url->url->url);

          /* Restore the caller's FTP-following setting. */
          opt.follow_ftp = old_follow_ftp;
        }
      else
        status = retrieve_url (cur_url->url->url, &filename, &new_file,
                               NULL, &dt, opt.recursive);

      /* Honor --delete-after: remove the file right after fetching it. */
      if (filename && opt.delete_after && file_exists_p (filename))
        {
          DEBUGP (("\
Removing file due to --delete-after in retrieve_from_file():\n"));
          logprintf (LOG_VERBOSE, _("Removing %s.\n"), filename);
          if (unlink (filename))
            logprintf (LOG_NOTQUIET, "unlink: %s\n", strerror (errno));
          dt &= ~RETROKF;
        }
      xfree_null (new_file);
      xfree_null (filename);
    }
void mexFunction(int nlhs, mxArray * plhs[], int nrhs, const mxArray * prhs[]){ // check number of arguments if( nrhs!=2 ) mexErrMsgTxt("This function requires 2 arguments\n"); if( !mxIsNumeric(prhs[0]) ) mexErrMsgTxt("varargin{0} must be a valid kdtree pointer\n"); if( !mxIsNumeric(prhs[1]) ) mexErrMsgTxt("varargin{1} must be a query set of points\n"); // retrieve the tree pointer KDTree* tree; retrieve_tree( prhs[0], tree ); // retrieve the query data vector<double> query_data(tree->ndims(), 0); retrieve_point( prhs[1], query_data ); // printf("query size: %dx%d\n", npoints, ndims); // npoints x 1 indexes in output plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL); double* kk = mxGetPr(plhs[0]); // Compute kernel density estimate: kk[0] = tree->kde(query_data); }
/* IRI-aware variant: download every URL listed in `file`.  `file` may
   itself be a URL, in which case it is fetched first and the links are
   read from the downloaded copy.  `*count` is set to the number of URLs
   processed.  NOTE(review): this chunk ends at the close of the per-URL
   loop — the function's trailing cleanup and return are not visible. */
uerr_t retrieve_from_file (const char *file, bool html, int *count)
{
  uerr_t status;
  struct urlpos *url_list, *cur_url;
  struct iri *iri = iri_new();

  char *input_file, *url_file = NULL;
  const char *url = file;

  status = RETROK;             /* Suppose everything is OK. */
  *count = 0;                  /* Reset the URL count. */

  /* sXXXav : Assume filename and links in the file are in the locale */
  set_uri_encoding (iri, opt.locale, true);
  set_content_encoding (iri, opt.locale);

  /* If the "file" argument is really a URL, fetch it first and read the
     link list from the local copy. */
  if (url_valid_scheme (url))
    {
      int dt,url_err;
      uerr_t status;   /* NOTE(review): shadows the outer `status`; the
                          early `return status` below returns this one. */
      struct url *url_parsed = url_parse (url, &url_err, iri, true);

      if (!url_parsed)
        {
          char *error = url_error (url, url_err);
          logprintf (LOG_NOTQUIET, "%s: %s.\n", url, error);
          xfree (error);
          return URLERROR;
        }

      if (!opt.base_href)
        opt.base_href = xstrdup (url);

      status = retrieve_url (url_parsed, url, &url_file, NULL, NULL, &dt,
                             false, iri, true);
      url_free (url_parsed);

      if (!url_file || (status != RETROK))
        return status;

      /* Treat the downloaded list as HTML if the server said so. */
      if (dt & TEXTHTML)
        html = true;

      /* If we have a found a content encoding, use it.
       * ( == is okay, because we're checking for identical object) */
      if (iri->content_encoding != opt.locale)
        set_uri_encoding (iri, iri->content_encoding, false);

      /* Reset UTF-8 encode status */
      iri->utf8_encode = opt.enable_iri;
      xfree_null (iri->orig_url);
      iri->orig_url = NULL;

      input_file = url_file;
    }
  else
    input_file = (char *) file;

  /* Extract the list of URLs to fetch from the (possibly downloaded) file. */
  url_list = (html ? get_urls_html (input_file, NULL, NULL, iri)
              : get_urls_file (input_file));

  xfree_null (url_file);

  for (cur_url = url_list; cur_url; cur_url = cur_url->next, ++*count)
    {
      char *filename = NULL, *new_file = NULL;
      int dt;
      /* Each URL gets its own copy of the IRI state. */
      struct iri *tmpiri = iri_dup (iri);
      struct url *parsed_url = NULL;

      if (cur_url->ignore_when_downloading)
        continue;
      /* Stop walking the list once the download quota is exceeded. */
      if (opt.quota && total_downloaded_bytes > opt.quota)
        {
          status = QUOTEXC;
          break;
        }

      /* Re-parse the URL with the per-URL IRI information attached. */
      parsed_url = url_parse (cur_url->url->url, NULL, tmpiri, true);

      if ((opt.recursive || opt.page_requisites)
          && (cur_url->url->scheme != SCHEME_FTP || getproxy (cur_url->url)))
        {
          int old_follow_ftp = opt.follow_ftp;

          /* Turn opt.follow_ftp on in case of recursive FTP retrieval */
          if (cur_url->url->scheme == SCHEME_FTP)
            opt.follow_ftp = 1;

          status = retrieve_tree (parsed_url ? parsed_url : cur_url->url,
                                  tmpiri);

          /* Restore the caller's FTP-following setting. */
          opt.follow_ftp = old_follow_ftp;
        }
      else
        status = retrieve_url (parsed_url ? parsed_url : cur_url->url,
                               cur_url->url->url, &filename, &new_file,
                               NULL, &dt, opt.recursive, tmpiri, true);

      if (parsed_url)
        url_free (parsed_url);

      /* Honor --delete-after: remove the file right after fetching it. */
      if (filename && opt.delete_after && file_exists_p (filename))
        {
          DEBUGP (("\
Removing file due to --delete-after in retrieve_from_file():\n"));
          logprintf (LOG_VERBOSE, _("Removing %s.\n"), filename);
          if (unlink (filename))
            logprintf (LOG_NOTQUIET, "unlink: %s\n", strerror (errno));
          dt &= ~RETROKF;
        }
      xfree_null (new_file);
      xfree_null (filename);
      iri_free (tmpiri);
    }