/* register a new built-in include file */
void IncludeRegister(Picoc *pc, const char *IncludeName,
    void (*SetupFunction)(Picoc *pc), struct LibraryFunction *FuncList,
    const char *SetupCSource)
{
    struct IncludeLibrary *NewLib = HeapAllocMem(pc, sizeof(struct IncludeLibrary));
    /* fail cleanly on OOM instead of dereferencing NULL below -
     * matches the allocation-check convention used elsewhere in this file */
    if (NewLib == NULL)
        ProgramFailNoParser(pc, "out of memory");

    /* intern the name so lookups can compare pointers, not strings */
    NewLib->IncludeName = TableStrRegister(pc, IncludeName);
    NewLib->SetupFunction = SetupFunction;
    NewLib->FuncList = FuncList;
    NewLib->SetupCSource = SetupCSource;

    /* push onto the head of the interpreter's include-library list */
    NewLib->NextLib = pc->IncludeLibList;
    pc->IncludeLibList = NewLib;
}
/* set an identifier and return the identifier. share if possible */
char *TableSetIdentifier(Picoc *pc, struct Table *Tbl, const char *Ident, int IdentLen)
{
    int AddAt;
    struct TableEntry *Entry = TableSearchIdentifier(Tbl, Ident, IdentLen, &AddAt);

    if (Entry != NULL)
        return &Entry->p.Key[0];  /* already in the table - share the stored copy */

    /* not found: allocate a trimmed entry - header plus just enough
     * room for the identifier string, rather than the full payload union */
    Entry = HeapAllocMem(pc, sizeof(struct TableEntry) - sizeof(union TableEntryPayload) + IdentLen + 1);
    if (Entry == NULL)
        ProgramFailNoParser(pc, "out of memory");

    /* copy the identifier and NUL-terminate it explicitly
     * (Ident need not be terminated at IdentLen) */
    strncpy((char *)&Entry->p.Key[0], (char *)Ident, IdentLen);
    Entry->p.Key[IdentLen] = '\0';

    /* link at the head of its hash chain */
    Entry->Next = Tbl->HashTable[AddAt];
    Tbl->HashTable[AddAt] = Entry;

    return &Entry->p.Key[0];
}
/* allocate some memory, either on the heap or the stack and check if we've run out */
void *VariableAlloc(struct ParseState *Parser, int Size, int OnHeap)
{
    /* pick the allocator by lifetime: heap for long-lived values,
     * interpreter stack for scoped ones */
    void *Mem = OnHeap ? HeapAllocMem(Size) : HeapAllocStack(Size);

    if (Mem == NULL)
        ProgramFail(Parser, "out of memory");

#ifdef DEBUG_HEAP
    if (!OnHeap)
        printf("pushing %d at 0x%lx\n", Size, (unsigned long)Mem);
#endif

    return Mem;
}
/* set a breakpoint in the table */ void DebugSetBreakpoint(struct ParseState *Parser) { int AddAt; struct TableEntry *FoundEntry = DebugTableSearchBreakpoint(Parser, &AddAt); if (FoundEntry == NULL) { /* add it to the table */ struct TableEntry *NewEntry = HeapAllocMem(sizeof(struct TableEntry)); if (NewEntry == NULL) ProgramFail(NULL, "out of memory"); NewEntry->p.b.FileName = Parser->FileName; NewEntry->p.b.Line = Parser->Line; NewEntry->p.b.CharacterPos = Parser->CharacterPos; NewEntry->Next = BreakpointHashTable[AddAt]; BreakpointHashTable[AddAt] = NewEntry; BreakpointCount++; } }
/* quick scan a source file for definitions */
void PicocParse(Picoc *pc, const char *FileName, const char *Source,
    int SourceLen, int RunIt, int CleanupNow, int CleanupSource,
    int EnableDebugger)
{
    struct ParseState Parser;
    enum ParseResult Status;
    struct CleanupTokenNode *CleanupNode;
    char *RegFileName = TableStrRegister(pc, FileName);

    /* tokenise the whole source up front */
    void *Tokens = LexAnalyse(pc, RegFileName, Source, SourceLen, NULL);

    if (!CleanupNow) {
        /* defer cleanup: remember the token buffer (and optionally the
         * source text) on the interpreter's cleanup list */
        CleanupNode = HeapAllocMem(pc, sizeof(struct CleanupTokenNode));
        if (CleanupNode == NULL)
            ProgramFailNoParser(pc, "out of memory");

        CleanupNode->Tokens = Tokens;
        CleanupNode->SourceText = CleanupSource ? Source : NULL;
        CleanupNode->Next = pc->CleanupTokenList;
        pc->CleanupTokenList = CleanupNode;
    }

    /* parse (and optionally run) every statement in the token stream */
    LexInitParser(&Parser, pc, Source, Tokens, RegFileName, RunIt, EnableDebugger);
    for (Status = ParseStatement(&Parser, TRUE); Status == ParseResultOk;
            Status = ParseStatement(&Parser, TRUE))
        ;

    if (Status == ParseResultError)
        ProgramFail(&Parser, "parse error");

    /* immediate-cleanup mode: the tokens are no longer needed */
    if (CleanupNow)
        HeapFreeMem(pc, Tokens);
}
/* quick scan a source file for definitions */ void PicocParse(const char *FileName, const char *Source, int SourceLen, int RunIt, int CleanupNow, int CleanupSource) { // printf("in PicocParse function after IncludeFile %s\n",FileName); struct ParseState Parser; enum ParseResult Ok; struct CleanupTokenNode *NewCleanupNode; void *Tokens = LexAnalyse(FileName, Source, SourceLen, NULL); /* allocate a cleanup node so we can clean up the tokens later */ if (!CleanupNow) { NewCleanupNode = HeapAllocMem(sizeof(struct CleanupTokenNode)); if (NewCleanupNode == NULL) ProgramFail(NULL, "out of memory"); NewCleanupNode->Tokens = Tokens; if (CleanupSource) NewCleanupNode->SourceText = Source; else NewCleanupNode->SourceText = NULL; NewCleanupNode->Next = CleanupTokenList; CleanupTokenList = NewCleanupNode; } /* do the parsing */ LexInitParser(&Parser, Source, Tokens, FileName, RunIt); do { Ok = ParseStatement(&Parser, TRUE); } while (Ok == ParseResultOk); if (Ok == ParseResultError) ProgramFail(&Parser, "parse error"); /* clean up */ if (CleanupNow) HeapFreeMem(Tokens); }
/* produce tokens from the lexer and return a heap buffer with the result - used for scanning */
void *LexTokenise(struct LexState *Lexer, int *TokenLen)
{
    enum LexToken Token;
    void *HeapMem;
    struct Value *GotValue;
    int MemUsed = 0;
    int ValueSize;
    /* worst-case staging estimate: 4 bytes per remaining source character
     * plus slack - checked against actual usage by the assert below */
    int ReserveSpace = (Lexer->End - Lexer->Pos) * 4 + 16;
    void *TokenSpace = HeapAllocStack(ReserveSpace);
    char *TokenPos = (char *)TokenSpace;
    /* character position of the PREVIOUS token - lags by one iteration,
     * so each record stores where the token before it ended */
    int LastCharacterPos = 0;

    if (TokenSpace == NULL)
        LexFail(Lexer, "out of memory");

    do {
        /* store the token at the end of the stack area */
        Token = LexScanGetToken(Lexer, &GotValue);

#ifdef DEBUG_LEXER
        printf("Token: %02x\n", Token);
#endif

        /* each token record: 1 byte token id, 1 byte character position,
         * then an optional inline value of LexTokenSize(Token) bytes */
        *(unsigned char *)TokenPos = Token;
        TokenPos++;
        MemUsed++;

        *(unsigned char *)TokenPos = (unsigned char)LastCharacterPos;
        TokenPos++;
        MemUsed++;

        ValueSize = LexTokenSize(Token);
        if (ValueSize > 0) {
            /* store a value as well */
            memcpy((void *)TokenPos, (void *)GotValue->Val, ValueSize);
            TokenPos += ValueSize;
            MemUsed += ValueSize;
        }

        LastCharacterPos = Lexer->CharacterPos;

    } while (Token != TokenEOF);  /* EOF token is included in the buffer */

    /* copy the staged tokens into an exactly-sized heap buffer,
     * then release the oversized stack staging area */
    HeapMem = HeapAllocMem(MemUsed);
    if (HeapMem == NULL)
        LexFail(Lexer, "out of memory");

    assert(ReserveSpace >= MemUsed);
    memcpy(HeapMem, TokenSpace, MemUsed);
    HeapPopStack(TokenSpace, ReserveSpace);

#ifdef DEBUG_LEXER
    {
        int Count;
        printf("Tokens: ");
        for (Count = 0; Count < MemUsed; Count++)
            printf("%02x ", *((unsigned char *)HeapMem+Count));
        printf("\n");
    }
#endif

    /* optionally report the buffer length; caller owns and must free HeapMem */
    if (TokenLen)
        *TokenLen = MemUsed;

    return HeapMem;
}