/* register a native library: parse each C prototype in FuncList and bind
 * the resulting function definition to its native implementation */
void LibraryAdd(Picoc *pc, struct Table *GlobalTable, const char *LibraryName, struct LibraryFunction *FuncList)
{
    struct ParseState Parser;
    struct LibraryFunction *Entry;
    char *IntrinsicName = TableStrRegister(pc, "c library");

    /* walk the NULL-terminated prototype list */
    for (Entry = FuncList; Entry->Prototype != NULL; Entry++)
    {
        char *Identifier;
        struct ValueType *ReturnType;
        struct Value *NewValue;
        void *Tokens = LexAnalyse(pc, IntrinsicName, Entry->Prototype, strlen((char *)Entry->Prototype), NULL);

        /* parse the prototype into a function definition value */
        LexInitParser(&Parser, pc, Entry->Prototype, Tokens, IntrinsicName, TRUE, FALSE);
        TypeParse(&Parser, &ReturnType, &Identifier, NULL);
        NewValue = ParseFunctionDefinition(&Parser, ReturnType, Identifier);

        /* attach the native (intrinsic) implementation */
        NewValue->Val->FuncDef.Intrinsic = Entry->Func;
        HeapFreeMem(pc, Tokens);
    }
}
/* lex and parse a source buffer. Statements are executed as they are parsed
 * when RunIt is set. Token cleanup happens immediately (CleanupNow) or is
 * deferred onto pc->CleanupTokenList; CleanupSource additionally records the
 * source text for later freeing. */
void PicocParse(Picoc *pc, const char *FileName, const char *Source, int SourceLen, int RunIt, int CleanupNow, int CleanupSource, int EnableDebugger)
{
    struct ParseState Parser;
    enum ParseResult Ok;
    char *RegFileName = TableStrRegister(pc, FileName);
    void *Tokens = LexAnalyse(pc, RegFileName, Source, SourceLen, NULL);

    if (!CleanupNow)
    {
        /* deferred cleanup: push a node recording the token buffer (and
         * optionally the source text) onto the interpreter's cleanup list */
        struct CleanupTokenNode *NewCleanupNode = (struct CleanupTokenNode *)HeapCallocMem(pc, sizeof(struct CleanupTokenNode));
        if (NewCleanupNode == NULL)
            ProgramFailNoParser(pc, "out of memory");

        NewCleanupNode->Tokens = Tokens;
        NewCleanupNode->SourceText = CleanupSource ? Source : NULL;
        NewCleanupNode->Next = pc->CleanupTokenList;
        pc->CleanupTokenList = NewCleanupNode;
    }

    /* parse statement by statement until the token stream is exhausted */
    LexInitParser(&Parser, pc, Source, Tokens, RegFileName, RunIt, EnableDebugger);
    do {
        Ok = ParseStatement(&Parser, TRUE);
    } while (Ok == ParseResultOk);

    if (Ok == ParseResultError)
        ProgramFail(&Parser, "parse error");

    if (CleanupNow)
        HeapFreeMem(pc, Tokens);
}
/* quick scan a source file for definitions */ void PicocParse(const char *FileName, const char *Source, int SourceLen, int RunIt, int CleanupNow, int CleanupSource) { // printf("in PicocParse function after IncludeFile %s\n",FileName); struct ParseState Parser; enum ParseResult Ok; struct CleanupTokenNode *NewCleanupNode; void *Tokens = LexAnalyse(FileName, Source, SourceLen, NULL); /* allocate a cleanup node so we can clean up the tokens later */ if (!CleanupNow) { NewCleanupNode = HeapAllocMem(sizeof(struct CleanupTokenNode)); if (NewCleanupNode == NULL) ProgramFail(NULL, "out of memory"); NewCleanupNode->Tokens = Tokens; if (CleanupSource) NewCleanupNode->SourceText = Source; else NewCleanupNode->SourceText = NULL; NewCleanupNode->Next = CleanupTokenList; CleanupTokenList = NewCleanupNode; } /* do the parsing */ LexInitParser(&Parser, Source, Tokens, FileName, RunIt); do { Ok = ParseStatement(&Parser, TRUE); } while (Ok == ParseResultOk); if (Ok == ParseResultError) ProgramFail(&Parser, "parse error"); /* clean up */ if (CleanupNow) HeapFreeMem(Tokens); }
/* get the next token from the pre-lexed token stream, without pre-processing.
 * Parser - the parse position; Parser->Pos walks a buffer of token bytes laid
 *          out as [token byte][char-pos byte][optional value payload]
 *          (TOKEN_DATA_OFFSET apart - see LexTokenSize for payload sizes).
 * Value  - if non-NULL, receives a pointer to the shared static LexValue
 *          holding the token's payload (valid only until the next call).
 * IncPos - advance Parser->Pos past the token when TRUE; peek when FALSE.
 * In interactive mode (FileName == StrEmpty) this also prompts for and lexes
 * new input lines, appending them to the Interactive* global line list. */
enum LexToken LexGetRawToken(struct ParseState *Parser, struct Value **Value, int IncPos)
{
    enum LexToken Token = TokenNone;
    int ValueSize;
    char *Prompt = NULL;

    do
    {
        /* get the next token */
        if (Parser->Pos == NULL && InteractiveHead != NULL)
            Parser->Pos = InteractiveHead->Tokens;

        if (Parser->FileName != StrEmpty || InteractiveHead != NULL)
        {
            /* skip leading newlines */
            while ((Token = (enum LexToken)*(unsigned char *)Parser->Pos) == TokenEndOfLine)
            {
                Parser->Line++;
                Parser->Pos += TOKEN_DATA_OFFSET;
            }
        }

        if (Parser->FileName == StrEmpty && (InteractiveHead == NULL || Token == TokenEOF))
        {
            /* we're at the end of an interactive input token list */
            char LineBuffer[LINEBUFFER_MAX];
            void *LineTokens;
            int LineBytes;
            struct TokenLine *LineNode;

            /* at the very end of the stored lines? then read fresh input */
            if (InteractiveHead == NULL || (unsigned char *)Parser->Pos == &InteractiveTail->Tokens[InteractiveTail->NumBytes-TOKEN_DATA_OFFSET])
            {
                /* get interactive input - statement prompt is one-shot */
                if (LexUseStatementPrompt)
                {
                    Prompt = INTERACTIVE_PROMPT_STATEMENT;
                    LexUseStatementPrompt = FALSE;
                }
                else
                    Prompt = INTERACTIVE_PROMPT_LINE;

                if (PlatformGetLine(&LineBuffer[0], LINEBUFFER_MAX, Prompt) == NULL)
                    return TokenEOF;

                /* put the new line at the end of the linked list of interactive lines */
                LineTokens = LexAnalyse(StrEmpty, &LineBuffer[0], strlen(LineBuffer), &LineBytes);
                LineNode = VariableAlloc(Parser, sizeof(struct TokenLine), TRUE);
                LineNode->Tokens = LineTokens;
                LineNode->NumBytes = LineBytes;
                if (InteractiveHead == NULL)
                {
                    /* start a new list */
                    InteractiveHead = LineNode;
                    Parser->Line = 1;
                    Parser->CharacterPos = 0;
                }
                else
                    InteractiveTail->Next = LineNode;

                InteractiveTail = LineNode;
                InteractiveCurrentLine = LineNode;
                Parser->Pos = LineTokens;
            }
            else
            {
                /* go to the next token line */
                if (Parser->Pos != &InteractiveCurrentLine->Tokens[InteractiveCurrentLine->NumBytes-TOKEN_DATA_OFFSET])
                {
                    /* scan for the line - the cached InteractiveCurrentLine
                     * doesn't match this parser's position, so re-find it */
                    for (InteractiveCurrentLine = InteractiveHead; Parser->Pos != &InteractiveCurrentLine->Tokens[InteractiveCurrentLine->NumBytes-TOKEN_DATA_OFFSET]; InteractiveCurrentLine = InteractiveCurrentLine->Next)
                    {
                        assert(InteractiveCurrentLine->Next != NULL);
                    }
                }

                assert(InteractiveCurrentLine != NULL);
                InteractiveCurrentLine = InteractiveCurrentLine->Next;
                assert(InteractiveCurrentLine != NULL);
                Parser->Pos = InteractiveCurrentLine->Tokens;
            }

            Token = (enum LexToken)*(unsigned char *)Parser->Pos;
        }
    } while ((Parser->FileName == StrEmpty && Token == TokenEOF) || Token == TokenEndOfLine);

    /* byte after the token byte is the recorded column position */
    Parser->CharacterPos = *((unsigned char *)Parser->Pos + 1);
    ValueSize = LexTokenSize(Token);
    if (ValueSize > 0)
    {
        /* this token requires a value - unpack it into the shared LexValue */
        if (Value != NULL)
        {
            switch (Token)
            {
                case TokenStringConstant:       LexValue.Typ = CharPtrType; break;
                case TokenIdentifier:           LexValue.Typ = NULL; break;
                case TokenIntegerConstant:      LexValue.Typ = &IntType; break;
                case TokenCharacterConstant:    LexValue.Typ = &CharType; break;
#ifndef NO_FP
                case TokenFPConstant:           LexValue.Typ = &FPType; break;
#endif
                default:                        break;
            }

            /* copy the payload bytes that follow the token header */
            memcpy((void *)LexValue.Val, (void *)((char *)Parser->Pos + TOKEN_DATA_OFFSET), ValueSize);
            LexValue.ValOnHeap = FALSE;
            LexValue.ValOnStack = FALSE;
            LexValue.IsLValue = FALSE;
            LexValue.LValueFrom = NULL;
            *Value = &LexValue;
        }

        if (IncPos)
            Parser->Pos += ValueSize + TOKEN_DATA_OFFSET;
    }
    else
    {
        /* valueless token - never advance past EOF so repeated reads stay at EOF */
        if (IncPos && Token != TokenEOF)
            Parser->Pos += TOKEN_DATA_OFFSET;
    }

#ifdef DEBUG_LEXER
    printf("Got token=%02x inc=%d pos=%d\n", Token, IncPos, Parser->CharacterPos);
#endif
    assert(Token >= TokenNone && Token <= TokenEndOfFunction);
    return Token;
}