Example #1
0
/* Feed `in` through the JS normalizer and compare the written output
 * file against `expected`.
 *
 * in       - JavaScript source to tokenize (uses the file-scope `state`)
 * expected - expected normalized output bytes
 * split    - non-zero: feed `in` in two halves to exercise the
 *            cross-buffer tokenizer state machine
 *
 * On failure to open the output file the fixture is torn down and the
 * test is failed via fail_fmt(). */
static void tokenizer_test(const char *in, const char *expected, int split)
{
    char filename[1024];
    int fd;
    ssize_t len = strlen(expected);
    size_t inlen = strlen(in);

    if(split) {
        /* split mid-buffer so tokens spanning the boundary are tested */
        cli_js_process_buffer(state, in, inlen/2);
        cli_js_process_buffer(state, in + inlen/2, inlen - inlen/2);
    } else {
        cli_js_process_buffer(state, in, inlen);
    }

    cli_js_parse_done(state);
    cli_js_output(state, tmpdir);
    /* bound the write by the actual buffer size instead of the magic
     * constant 1023; snprintf always NUL-terminates within the bound */
    snprintf(filename, sizeof(filename), "%s/javascript", tmpdir);

    fd = open(filename, O_RDONLY);
    if(fd < 0) {
        jstest_teardown();
        fail_fmt("failed to open output file: %s", filename);
    }

    diff_file_mem(fd, expected, len);
}
Example #2
0
END_TEST

START_TEST (test_init_parse_destroy)
{
    /* smoke test: init the parser, push some source through it twice,
     * finish the parse and destroy the state without crashing */
    const char buf[] = "function (p) { return \"anonymous\";}";
    struct parser_state *state = cli_js_init();
    size_t buflen;

    fail_unless(!!state, "cli_js_init()");
    buflen = strlen(buf);
    cli_js_process_buffer(state, buf, buflen);
    cli_js_process_buffer(state, buf, buflen);
    cli_js_parse_done(state);
    cli_js_destroy(state);
}
Example #3
0
/* Finish parsing: close any unterminated string literal and balance
 * unmatched '(' tokens so the token stream is well-formed, then run the
 * folders/decoders and tear down the lexer.
 *
 * state - parser state previously fed via cli_js_process_buffer();
 *         state->scanner is destroyed and set to NULL on return. */
void cli_js_parse_done(struct parser_state* state)
{
	struct tokens * tokens = &state->tokens;
	size_t par_balance = 0, i;
	char end = '\0';
	YYSTYPE val;

	cli_dbgmsg(MODULE "in cli_js_parse_done()\n");
	/* close unfinished token: if the scanner stopped inside a quoted
	 * string, pick the matching quote character to inject */
	switch (state->scanner->state) {
		case DoubleQString:
			end = '"';
			break;
		case SingleQString:
			end = '\'';
			break;
		default: /* make gcc happy */
			break;
	}
	if (end != '\0')
		/* feed the closing quote through the normal scanner path so
		 * the string token is emitted like any other */
		cli_js_process_buffer(state, &end, 1);
	/* close remaining parenthesis: count how many '(' are unmatched */
	for (i=0;i<tokens->cnt;i++) {
		if (tokens->data[i].type == TOK_PAR_OPEN)
			par_balance++;
		else if (tokens->data[i].type == TOK_PAR_CLOSE && par_balance > 0)
			par_balance--;
	}
	if (par_balance > 0) {
		/* append one synthetic ')' token per unmatched '(' */
		memset(&val, 0, sizeof(val));
		val.type = TOK_PAR_CLOSE;
		TOKEN_SET(&val, cstring, ")");
		while (par_balance-- > 0) {
			add_token(state, &val);
		}
	}

	/* we had to close unfinished strings, parenthesis,
	 * so that the folders/decoders can run properly */
	run_folders(&state->tokens);
	run_decoders(state);

	yylex_destroy(state->scanner);
	state->scanner = NULL;
}
Example #4
0
/* Walk the token stream looking for known obfuscation constructs
 * (packer-style function declarations, dF(...), eval(...)). When one
 * is found, the matching handler fills `res` with the decoded text and
 * the token range it replaces; that text is then re-tokenized in a
 * nested context (bounded recursion) and spliced back into the parent
 * token stream. */
static void run_decoders(struct parser_state *state)
{
  size_t i;
  const char* name;
  struct tokens *tokens = &state->tokens;

  for(i = 0; i < tokens->cnt; i++) {
	  const char *cstring = TOKEN_GET(&tokens->data[i], cstring);
	  struct decode_result res;
	  res.pos_begin = res.pos_end = 0;
	  res.append = 0;
	  /* i+13: minimum number of tokens a matchable function
	   * declaration needs ahead of the 'function' keyword */
	  if(tokens->data[i].type == TOK_FUNCTION && i+13 < tokens->cnt) {
		  name = NULL;
		  ++i;
		  /* optional function name before the parameter list */
		  if(tokens->data[i].type == TOK_IDENTIFIER_NAME) {
			  cstring = TOKEN_GET(&tokens->data[i], cstring);
			  name = cstring;
			  ++i;
		  }
		  if(match_parameters(&tokens->data[i], de_packer_3, sizeof(de_packer_3)/sizeof(de_packer_3[0])) != -1
		     || match_parameters(&tokens->data[i], de_packer_2, sizeof(de_packer_2)/sizeof(de_packer_2[0])) != -1)  {
			  /* find function decl. end */
			  handle_de(tokens->data, i, tokens->cnt, name, &res);
		  }
	  } else if(i+2 < tokens->cnt && tokens->data[i].type == TOK_IDENTIFIER_NAME &&
		    cstring &&
		    !strcmp("dF", cstring) && tokens->data[i+1].type == TOK_PAR_OPEN) {
		  /* TODO: also match signature of dF function (possibly
		   * declared using unescape */

		  handle_df(tokens->data, i+2, &res);
	  } else if(i+2 < tokens->cnt && tokens->data[i].type == TOK_IDENTIFIER_NAME &&
			  cstring &&
			  !strcmp("eval", cstring) && tokens->data[i+1].type == TOK_PAR_OPEN) {
		  handle_eval(tokens, i+2, &res);
	  }
	/* a handler produced decoded text covering [pos_begin, pos_end) */
	if(res.pos_end > res.pos_begin) {
		struct tokens parent_tokens;
		/* swallow a trailing ';' along with the replaced range */
		if(res.pos_end < tokens->cnt && tokens->data[res.pos_end].type == TOK_SEMICOLON)
			res.pos_end++;
		parent_tokens = state->tokens;/* save current tokens */
		/* initialize embedded context */
		memset(&state->tokens, 0, sizeof(state->tokens));
		if(++state->rec > 16)
			cli_dbgmsg(MODULE "recursion limit reached\n");
		else {
			/* re-tokenize the decoded text; its tokens accumulate in
			 * the (now empty) state->tokens */
			cli_js_process_buffer(state, res.txtbuf.data, res.txtbuf.pos);
			--state->rec;
		}
		free(res.txtbuf.data);
		/* state->tokens still refers to the embedded/nested context
		 * here */
		if(!res.append) {
			replace_token_range(&parent_tokens, res.pos_begin, res.pos_end, &state->tokens);
		} else {
			/* delete tokens */
			replace_token_range(&parent_tokens, res.pos_begin, res.pos_end, NULL);
			append_tokens(&parent_tokens, &state->tokens);
		}
		/* end of embedded context, restore tokens state */
		free(state->tokens.data);
		state->tokens = parent_tokens;
	}
	  /* NOTE(review): the splice above can change tokens->cnt; this
	   * assumes i still indexes a valid token afterwards — verify */
	  state_update_scope(state, &state->tokens.data[i]);
  }
}