int cmd_auto_complete(const char *const prompt, char *buf, int *np, int *colp)
{
	/*
	 * Attempt TAB completion of the command line in `buf`.
	 *
	 * Returns 0 when completion is not handled (wrong prompt, or TAB on a
	 * non-command argument with no match) and 1 when it was handled here.
	 * On success the completed text is appended to `buf` and echoed, and
	 * *np / *colp are advanced by the number of characters added.
	 */
	char *argv[CONFIG_SYS_MAXARGS + 1];	/* NULL terminated	*/
	char *cmdv[20];
	int n = *np, col = *colp;
	int argc, matches, arglen;
	int addlen = 0, seplen = 0;
	const char *sep = NULL;
	char *add = NULL;
	int cnt;
	char last_char;

	/* Only complete when the normal console prompt is active. */
	if (strcmp(prompt, CONFIG_SYS_PROMPT) != 0)
		return 0;

	cnt = strlen(buf);
	last_char = (cnt >= 1) ? buf[cnt - 1] : '\0';

	/* Work on a scratch copy: make_argv() modifies its input. */
	strcpy(tmp_buf, buf);
	argc = make_argv(tmp_buf, sizeof(argv) / sizeof(argv[0]), argv);

	/* Collect the candidate completions for the last argument. */
	matches = complete_cmdv(argc, argv, last_char,
				sizeof(cmdv) / sizeof(cmdv[0]), cmdv);

	if (matches == 0) {
		if (argc > 1)	/* allow tab for non command */
			return 0;
		putc('\a');	/* nothing matched: ring the bell */
		return 1;
	}

	arglen = strlen(argv[argc - 1]);
	if (matches == 1) {
		/* Unique match: append the remainder plus a trailing space. */
		add = cmdv[0] + arglen;
		addlen = strlen(add);
		sep = " ";
		seplen = 1;
	} else {
		/* Several matches: extend by their common prefix, if any. */
		int common = find_common_prefix(cmdv);

		if (common != 0 && common - arglen > 0) {
			add = cmdv[0] + arglen;
			addlen = common - arglen;
		}
	}

	if (add != NULL) {
		int total = addlen + seplen;

		/* Refuse completions that would overflow the line buffer. */
		if (n + total >= CONFIG_SYS_CBSIZE - 2) {
			putc('\a');
			return 1;
		}

		memcpy(buf + cnt, add, addlen);
		if (sep != NULL)
			memcpy(buf + cnt + addlen, sep, seplen);
		buf[cnt + total] = '\0';
		n += total;
		col += total;
		puts(buf + cnt);	/* echo only the appended part */
		if (sep == NULL)
			putc('\a');	/* ambiguous: prefix extended only */
		*np = n;
		*colp = col;
	} else {
		/* No unambiguous extension: list candidates, redraw line. */
		print_argv(NULL, "  ", " ", 78, cmdv);

		puts(prompt);
		puts(buf);
	}
	return 1;
}
/* ---- Example #2 ---- */
void build_embedfront() {
	/*
	 * Build the "embedfront" dictionary file: pack front-coded terms and
	 * posting pointers into fixed-size nodes, then write a header table
	 * (first term + node offset per node) followed by the nodes to
	 * DICT_EMBEDFRONT_FILENAME.
	 *
	 * NOTE(review): relies on file-scope state (block_size,
	 * hd_sector_size, num_of_sectors, term, term_count, node_count,
	 * node_length, suffix_count, common_prefix_count, headers_list,
	 * nodes_list, vocab_file, dict_file) — confirm types against the
	 * declarations elsewhere in this file.
	 */
	char one_byte = '\0';
	char previous_term[MAX_WORD_LENGTH+1];
	long long i;
	dict_embedfront_t *one_header;
	char *node_buffer = NULL, *postings_buffer = NULL, *term_buffer = NULL;
	char *node_ptr = NULL, *postings_ptr = NULL, *term_ptr = NULL;
	long long node_max_size;
	long long needed_size, used_size;

	if (block_size <= 0) {
		fprintf(stderr, "ERROR: block size cannot be zero or less\n");
		exit(2);
	}

	//
	// (1) find the maximum size for the node and create temporary buffers
	//
	node_max_size = hd_sector_size * num_of_sectors;
	printf("    node_max_size: %lld\n", node_max_size);

	postings_buffer = (char *)malloc(node_max_size);
	term_buffer = (char *)malloc(node_max_size);
	if (postings_buffer == NULL || term_buffer == NULL) {
		// FIX: allocations were previously unchecked
		fprintf(stderr, "ERROR: out of memory\n");
		exit(2);
	}
	memset(postings_buffer, 0, node_max_size);

	printf("building embedfront......\n");

	//
	// (2) create lists for headers and nodes
	//
	headers_list = new Linked_list<dict_embedfront_t *>();
	nodes_list = new Linked_list<char *>();

	//
	// (3) read term_count
	//
	vocab_file = fopen(VOCAB_FILENAME, "rb");
	dict_file = fopen(DICT_EMBEDFRONT_FILENAME, "wb");
	if (vocab_file == NULL || dict_file == NULL) {
		// FIX: fopen() results were previously unchecked
		fprintf(stderr, "ERROR: cannot open vocabulary or dictionary file\n");
		exit(2);
	}
	if (fread(&term_count, sizeof(term_count), 1, vocab_file) != 1) {
		fprintf(stderr, "ERROR: cannot read term count\n");
		exit(2);
	}
	dbg_printf("term_count: %lld\n", term_count);

	//
	// (4) read all terms, build up the headers and nodes
	//
	i = 0;
	char filled_previously = TRUE;
	char new_node = TRUE;
	char node_is_full = FALSE, the_end = FALSE;
	node_count = 0;
	long long total_wasted = 0;
	term[0] = previous_term[0] = '\0';
	while (true) {
		//
		// (4-1) make sure the previous term is filled in properly before read the next term from file
		//
		if (filled_previously) {
			strcpy(previous_term, term);
			if (fread(term, MAX_WORD_LENGTH + 1, 1, vocab_file) != 1) {
				// FIX: a truncated vocabulary file previously went undetected
				fprintf(stderr, "ERROR: cannot read term %lld\n", i);
				exit(2);
			}
			i++;
			if (i == term_count) {
				the_end = TRUE;
			}
		}

		//
		// (4-2) create a new header and the new associated node
		//
		if (new_node) {
			one_header = new dict_embedfront_t;
			// copies strlen(term)+1 bytes, i.e. including the NUL
			strncpy(one_header->term, term, strlen(term) + 1);
			headers_list->append(one_header);
			node_count++;

			// FIX: zero-initialize so the unused tail of the node is
			// deterministic instead of uninitialized memory on disk
			node_buffer = new char[node_max_size]();
			nodes_list->append(node_buffer);
			node_ptr = node_buffer;
			postings_ptr = postings_buffer;
			term_ptr = term_buffer;
			node_length = 0;

			new_node = FALSE;
			node_is_full = FALSE;
			previous_term[0] = '\0';
		}

		//
		// (4-3) fill as many terms as possible in the node
		//
		// only need to store the prefix_count for the first term in the node
		if (previous_term[0] == '\0') {
			suffix_count = strlen(term);
			needed_size = (long long)POSTING_PTR_SIZE + sizeof(suffix_count) + (sizeof(char) * suffix_count);
		} else {
			find_common_prefix(previous_term, term, &common_prefix_count);
			suffix_count = strlen(term) - common_prefix_count;
			needed_size = (long long)POSTING_PTR_SIZE + sizeof(common_prefix_count) + sizeof(suffix_count) + (sizeof(char) * suffix_count);
		}
		// the node length is stored at the last, so need to consider there is enough space left for it
		needed_size += sizeof(node_length_t);
		// find out how much space is already used in the node
		used_size = (postings_ptr - postings_buffer) + (term_ptr - term_buffer);
		if ((used_size + needed_size) < node_max_size) {
			// fill the postings buffer (pointer value patched in later)
			postings_ptr += POSTING_PTR_SIZE;

			// fill the term buffer: first term is stored whole, later
			// terms as (common_prefix_count, suffix_count, suffix)
			if (previous_term[0] == '\0') {
				*term_ptr = suffix_count;
				term_ptr++;
				memcpy(term_ptr, term, suffix_count);
				term_ptr += suffix_count;
			} else {
				*term_ptr = common_prefix_count;
				term_ptr++;
				*term_ptr = suffix_count;
				term_ptr++;
				memcpy(term_ptr, &term[common_prefix_count], suffix_count);
				term_ptr += suffix_count;
			}

			node_length++;
			filled_previously = TRUE;
		} else {
			if (node_length == 0) {
				// FIX: a term too large for even an empty node previously
				// caused an infinite loop of empty node allocations
				fprintf(stderr, "ERROR: term does not fit in an empty node\n");
				exit(2);
			}
			node_is_full = TRUE;
			filled_previously = FALSE;
			// the current block is full, need to create a new one
			new_node = TRUE;
		}

		//
		// (4-4) the current node is full or no more term to be processed
		//
		// if node_is_full and the_end happen at the same time, then the last block will not be handled.
		// need to take one more iteration to create a new node and full the buffer and then exit.
		if (node_is_full || the_end) {

			// store the postings in the node
			memcpy(node_ptr, postings_buffer, postings_ptr - postings_buffer);
			node_ptr += postings_ptr - postings_buffer;

			// store the terms in the node
			memcpy(node_ptr, term_buffer, term_ptr - term_buffer);
			node_ptr += term_ptr - term_buffer;

			// FIX: accumulate the waste of every node; this used to be a
			// plain assignment, so only the last node was counted
			total_wasted += node_max_size - (node_ptr - node_buffer) - sizeof(node_length_t);

			// store node length at the very end of the node
			node_ptr = node_buffer + node_max_size - sizeof(node_length_t);
			memcpy(node_ptr, &node_length, sizeof(node_length_t));
		}

		if ((the_end) && (filled_previously)) {
			break;
		}

	} //end of while (true)

	//
	// (5) write node_count and headers to disk (node pointers are
	//     placeholders here; they are rewritten with real offsets in (8))
	//
	dbg_printf("node_count: %u\n", node_count);
	dict_embedfront_t *h = NULL;
	fwrite(&node_count, sizeof(node_count), 1, dict_file);
	for (h = headers_list->first(); h != NULL; h = headers_list->next()) {
		fwrite(h->term, strlen(h->term)+1, 1, dict_file);
		fwrite(&(h->node_ptr), sizeof(h->node_ptr), 1, dict_file);
	}

	//
	// (6) fill in the gap to make sure nodes starting at the beginning of a sector
	//
	// FIX: the extra "% node_max_size" avoids writing a whole empty block
	// when the header section already ends on a node boundary
	long long padding = (node_max_size - ftell(dict_file) % node_max_size) % node_max_size;
	total_wasted += padding;
	for (i = 0; i < padding; i++) {
		fwrite(&one_byte, 1, 1, dict_file);
	}

	//
	// (7) write nodes to disk and update node pointer in headers
	//
	char *n = NULL;
	for (n = nodes_list->first(), h = headers_list->first(); n != NULL; n = nodes_list->next(), h = headers_list->next()) {
		h->node_ptr = ftell(dict_file);
		fwrite(n, node_max_size, 1, dict_file);
	}

	//
	// (8) re-write node_count and headers to disk, now with real offsets;
	//     record sizes are identical to pass (5), so the layout matches
	//
	fseek(dict_file, 0, SEEK_SET);
	fwrite(&node_count, sizeof(node_count), 1, dict_file);
	for (h = headers_list->first(); h != NULL; h = headers_list->next()) {
		fwrite(h->term, strlen(h->term) + 1, 1, dict_file);
		fwrite(&(h->node_ptr), sizeof(h->node_ptr), 1, dict_file);
	}

	//
	// (9) finished — report, close files and release all memory
	//
	printf("     total_wasted: %lld bytes\n", total_wasted);
	fseek(dict_file, 0, SEEK_END);
	printf("  total file size: %ld\n", ftell(dict_file));
	fclose(dict_file);
	fclose(vocab_file);	// FIX: vocab_file was never closed
	free(postings_buffer);
	free(term_buffer);
	// FIX: list elements were leaked; deleting the list frees only the list
	for (n = nodes_list->first(); n != NULL; n = nodes_list->next())
		delete [] n;
	for (h = headers_list->first(); h != NULL; h = headers_list->next())
		delete h;
	delete headers_list;
	delete nodes_list;
	printf("FINISHED\n\n");
}