Example #1
0
static void findProtobufTags (void)
{
	/* Top-level driver: scan the whole input and emit tags for every
	 * recognized protobuf construct.  Statement keywords are mapped to
	 * their parser kinds through a small dispatch table. */
	static const struct { int keyword; int kind; } dispatch [] = {
		{ KEYWORD_PACKAGE,  PK_PACKAGE },
		{ KEYWORD_MESSAGE,  PK_MESSAGE },
		{ KEYWORD_ENUM,     PK_ENUM },
		{ KEYWORD_REPEATED, PK_FIELD },
		{ KEYWORD_OPTIONAL, PK_FIELD },
		{ KEYWORD_REQUIRED, PK_FIELD },
		{ KEYWORD_SERVICE,  PK_SERVICE },
		{ KEYWORD_RPC,      PK_RPC },
	};

	cppInit (false, false, false, false,
			 KIND_GHOST_INDEX, 0, KIND_GHOST_INDEX, 0, 0);
	token.value = vStringNew ();

	nextToken ();

	while (token.type != TOKEN_EOF)
	{
		size_t i;

		for (i = 0; i < sizeof dispatch / sizeof dispatch [0]; i++)
		{
			if (tokenIsKeyword (dispatch [i].keyword))
			{
				parseStatement (dispatch [i].kind);
				break;
			}
		}

		/* Resynchronize at the next statement/block boundary. */
		skipUntil (";{}");
		nextToken ();
	}

	vStringDelete (token.value);
	cppTerminate ();
}
Example #2
0
/**
 * DeclBasic = [ "const" ] <Ident> | StructUnion | Enum
 */
/**
 * DeclBasic = [ "const" ] <Ident> | StructUnion | Enum
 *
 * Parses the base type of a declaration: an optional "const" qualifier
 * followed by a struct/union, an enum, or a (previously declared) type
 * name.  Returns an Invalid node after reporting an error if no type
 * name is found.
 */
static ast* parserDeclBasic (parserCtx* ctx) {
    debugEnter("DeclBasic");

    ast* result;

    /* Remember where "const" appeared so the Const node gets its location. */
    tokenLocation constLoc = ctx->location;
    bool qualifiedConst = tokenTryMatchKeyword(ctx, keywordConst);

    if (tokenIsKeyword(ctx, keywordStruct) || tokenIsKeyword(ctx, keywordUnion)) {
        result = parserStructOrUnion(ctx);

    } else if (tokenIsKeyword(ctx, keywordEnum)) {
        result = parserEnum(ctx);

    } else {
        tokenLocation identLoc = ctx->location;
        sym* found = symFind(ctx->scope, ctx->lexer->buffer);

        if (!found) {
            /* An identifier that isn't a known type vs. no identifier
             * at all get different diagnostics. */
            if (tokenIsIdent(ctx)) {
                errorUndefType(ctx);
                tokenNext(ctx);

            } else
                errorExpected(ctx, "type name");

            result = astCreateInvalid(identLoc);

        } else {
            result = astCreateLiteralIdent(identLoc, tokenDupMatch(ctx));
            result->symbol = found;
        }
    }

    /* Wrap the base type and propagate its symbol through the Const node. */
    if (qualifiedConst) {
        result = astCreateConst(constLoc, result);
        result->symbol = result->r->symbol;
    }

    debugLeave();

    return result;
}
Example #3
0
/**
 * Storage = [ "auto" | "static" | "extern" | "typedef" ]
 */
/**
 * Storage = [ "auto" | "static" | "extern" | "typedef" ]
 *
 * Parses an optional storage-class specifier.  "typedef" is reported
 * through *tag; the other three produce a marker node.  Returns 0 when
 * no storage class (or only "typedef") is present.
 */
static ast* parserStorage (parserCtx* ctx, symTag* tag) {
    debugEnter("Storage");

    ast* result = 0;

    /* NOTE(review): assumes markerUndefined is the zero/sentinel value,
     * as the original truthiness test already required. */
    markerTag storageMarker = markerUndefined;

    if (tokenIsKeyword(ctx, keywordAuto))
        storageMarker = markerAuto;
    else if (tokenIsKeyword(ctx, keywordStatic))
        storageMarker = markerStatic;
    else if (tokenIsKeyword(ctx, keywordExtern))
        storageMarker = markerExtern;

    if (tokenTryMatchKeyword(ctx, keywordTypedef))
        *tag = symTypedef;

    else if (storageMarker != markerUndefined) {
        result = astCreateMarker(ctx->location, storageMarker);
        tokenMatch(ctx);
    }

    debugLeave();

    return result;
}
Example #4
0
/* Parse the body of a protobuf "enum { ... }" block and create an
 * enumerator tag for each "NAME = value" entry.  Expects the current
 * token to be the opening '{'; returns immediately otherwise.
 * Identifiers matching the "option" keyword are skipped. */
static void parseEnumConstants (void)
{
	if (token.type != '{')
		return;
	nextToken ();

	while (token.type != TOKEN_EOF && token.type != '}')
	{
		if (token.type == TOKEN_ID && !tokenIsKeyword (KEYWORD_OPTION))
		{
			/* Relies on nextToken() leaving token.value untouched when
			 * the new token is punctuation: after advancing onto '=',
			 * token.value still holds the enumerator's name. */
			nextToken ();  /* doesn't clear token.value if it's punctuation */
			if (token.type == '=')
				createProtobufTag (token.value, PK_ENUMERATOR);
		}

		/* Resynchronize at the end of this entry or of the enum body. */
		skipUntil (";}");

		if (token.type == ';')
			nextToken ();
	}
}
Example #5
0
/* Builds the character-code -> Unicode mapping tables from a PDF
 * ToUnicode CMap stream.  fCMapEncoding[i] maps code i to its Unicode
 * value (identity by default); fCMapEncodingFlag[i] records whether i
 * was covered by a codespace range (bit 1) or given an explicit mapping
 * (bit 2).  All indices come from skstoi() on untrusted stream data, so
 * every table access is bounds-checked against the 256*256 table size
 * to prevent out-of-bounds writes on malformed PDFs. */
SkPdfToUnicode::SkPdfToUnicode(SkPdfNativeDoc* parsed, SkPdfStream* stream) {
    fCMapEncoding = NULL;
    fCMapEncodingFlag = NULL;

    if (stream) {
        // Table covers all 2-byte codes.
        const int kMapSize = 256 * 256;

        // Since font will be cached, the font has to sit in the per doc allocator, not to be
        // freed after the page is done drawing.
        SkPdfNativeTokenizer tokenizer(stream, parsed->allocator(), parsed);
        PdfToken token;

        fCMapEncoding = new unsigned short[kMapSize];
        fCMapEncodingFlag = new unsigned char[kMapSize];
        for (int i = 0 ; i < kMapSize; i++) {
            fCMapEncoding[i] = i;
            fCMapEncodingFlag[i] = 0;
        }

        // TODO(edisonn): deal with multibyte character, or longer strings.
        // Right now we deal with up 2 characters, e.g. <0020> but not longer like <00660066006C>
        //2 beginbfrange
        //<0000> <005E> <0020>
        //<005F> <0061> [<00660066> <00660069> <00660066006C>]

        while (tokenizer.readToken(&token)) {

            if (tokenIsKeyword(token, "begincodespacerange")) {
                while (tokenizer.readToken(&token) &&
                       !tokenIsKeyword(token, "endcodespacerange")) {
                    // TODO(edisonn): check token type! ignore/report errors.
                    int start = skstoi(token.fObject);
                    tokenizer.readToken(&token);
                    int end = skstoi(token.fObject);
                    // Clamp the untrusted range to the table bounds.
                    if (start < 0) {
                        start = 0;
                    }
                    if (end >= kMapSize) {
                        end = kMapSize - 1;
                    }
                    for (int i = start; i <= end; i++) {
                        fCMapEncodingFlag[i] |= 1;
                    }
                }
            }

            if (tokenIsKeyword(token, "beginbfchar")) {
                while (tokenizer.readToken(&token) && !tokenIsKeyword(token, "endbfchar")) {
                    int from = skstoi(token.fObject);
                    tokenizer.readToken(&token);
                    int to = skstoi(token.fObject);

                    // Ignore entries whose source code is outside the table.
                    if (from >= 0 && from < kMapSize) {
                        fCMapEncodingFlag[from] |= 2;
                        fCMapEncoding[from] = to;
                    }
                }
            }

            if (tokenIsKeyword(token, "beginbfrange")) {
                while (tokenizer.readToken(&token) && !tokenIsKeyword(token, "endbfrange")) {
                    int start = skstoi(token.fObject);
                    tokenizer.readToken(&token);
                    int end = skstoi(token.fObject);

                    tokenizer.readToken(&token); // [ or just an array directly?
                    // do not putback, we will reuse the read.

                    // TODO(edisonn): read spec: any string or only hex string?
                    if (token.fType == kObject_TokenType && token.fObject->isAnyString()) {
                        int value = skstoi(token.fObject);

                        for (int i = start; i <= end; i++) {
                            // Skip out-of-table codes from a malformed range.
                            if (i >= 0 && i < kMapSize) {
                                fCMapEncodingFlag[i] |= 2;
                                fCMapEncoding[i] = value;
                            }
                            value++;
                            // if i != end, verify last byte id not if, ignore/report error
                        }

                        // read one string
                    } else if (token.fType == kObject_TokenType && token.fObject->isArray()) {
                        // read array: element i maps code start+i.
                        for (unsigned int i = 0; i < token.fObject->size(); i++) {
                            int code = start + (int) i;
                            if (code >= 0 && code < kMapSize) {
                                fCMapEncodingFlag[code] |= 2;
                                fCMapEncoding[code] = skstoi((*token.fObject)[i]);
                            }
                        }
                    } else {
                        tokenizer.PutBack(token);
                    }
                }
            }
        }
    }
}