diff --git a/intrinsic.c b/intrinsic.c
index 74aa4ff..d83471c 100644
--- a/intrinsic.c
+++ b/intrinsic.c
@@ -35,10 +35,13 @@ void IntrinsicInit(struct Table *GlobalTable)
     const char *Identifier;
     struct ValueType *ReturnType;
     struct Value *NewValue;
+    void *Tokens;
+    const char *IntrinsicName = StrRegister("intrinsic");
 
     for (Count = 0; Count < sizeof(Intrinsics) / sizeof(struct IntrinsicFunction); Count++)
     {
-        LexInit(&Parser, Intrinsics[Count].Prototype, strlen(Intrinsics[Count].Prototype), StrEmpty, Count+1);
+        Tokens = LexAnalyse(IntrinsicName, Intrinsics[Count].Prototype, strlen(Intrinsics[Count].Prototype));
+        LexInitParser(&Parser, Tokens, IntrinsicName, Count+1);
         TypeParse(&Parser, &ReturnType, &Identifier);
         NewValue = ParseFunctionDefinition(&Parser, ReturnType, Identifier, TRUE);
         NewValue->Val->FuncDef.Intrinsic = Intrinsics[Count].Func;
diff --git a/lex.c b/lex.c
index f61d4dc..dedd7db 100644
--- a/lex.c
+++ b/lex.c
@@ -74,14 +74,6 @@ void LexInit()
         ReservedWords[Count].SharedWord = StrRegister(ReservedWords[Count].Word);
 }
 
-/* prepare to parse a pre-tokenised buffer */
-void LexInitParser(struct ParseState *Parser, void *TokenSource, int TokenSourceLen, const char *FileName, int Line)
-{
-    Parser->Pos = TokenSource;
-    Parser->Line = Line;
-    Parser->FileName = FileName;
-}
-
 /* exit with a message */
 void LexFail(struct LexState *Lexer, const char *Message, ...)
 {
@@ -282,7 +274,7 @@ enum LexToken LexScanGetToken(struct LexState *Lexer, struct Value **Value)
 }
 
 /* produce tokens from the lexer and return a heap buffer with the result - used for scanning */
-void *LexTokeniseToHeap(struct LexState *Lexer)
+void *LexTokenise(struct LexState *Lexer)
 {
     enum LexToken Token;
     void *HeapMem;
@@ -318,10 +310,30 @@ void *LexTokeniseToHeap(struct LexState *Lexer)
         LexFail(Lexer, "out of memory while lexing");
 
     HeapMem = HeapAlloc(MemUsed);
-    memcpy(HeapMem, HeapStackGetFreeSpace(&MemAvailable), MemUsed);
+    memcpy(HeapMem, HeapStackGetFreeSpace(&MemAvailable), MemUsed);
     return HeapMem;
 }
 
+/* lexically analyse some source text */
+void *LexAnalyse(const char *FileName, const char *Source, int SourceLen)
+{
+    struct LexState Lexer;
+
+    Lexer.Pos = Source;
+    Lexer.End = Source + SourceLen;
+    Lexer.Line = 1;
+    Lexer.FileName = FileName;
+    return LexTokenise(&Lexer);
+}
+
+/* prepare to parse a pre-tokenised buffer */
+void LexInitParser(struct ParseState *Parser, void *TokenSource, const char *FileName, int Line)
+{
+    Parser->Pos = TokenSource;
+    Parser->Line = Line;
+    Parser->FileName = FileName;
+}
+
 /* get the next token given a parser state */
 enum LexToken LexGetToken(struct ParseState *Parser, struct Value **Value, int IncPos)
 {
diff --git a/parse.c b/parse.c
index 23e73ab..f5a18eb 100644
--- a/parse.c
+++ b/parse.c
@@ -578,8 +578,9 @@ void Parse(const char *FileName, const char *Source, int SourceLen, int RunIt)
 {
     struct ParseState Parser;
 
-    LexInit(&Parser, Source, SourceLen, FileName, 1);
-
+    void *Tokens = LexAnalyse(FileName, Source, SourceLen); // XXX - some better way of storing tokenised input?
+    LexInitParser(&Parser, Tokens, FileName, 1);
+
     while (ParseStatement(&Parser, RunIt))
     {}
 
diff --git a/picoc.h b/picoc.h
index fd4bc77..21115c0 100644
--- a/picoc.h
+++ b/picoc.h
@@ -214,10 +214,10 @@ int TableGet(struct Table *Tbl, const char *Key, struct Value **Val);
 const char *TableSetIdentifier(struct Table *Tbl, const char *Ident, int IdentLen);
 
 /* lex.c */
-void LexInit();
-void LexInitParser(struct ParseState *Parser, void *TokenSource, int TokenSourceLen, const char *FileName, int Line);
+void LexInit(void);
+void *LexAnalyse(const char *FileName, const char *Source, int SourceLen);
+void LexInitParser(struct ParseState *Parser, void *TokenSource, const char *FileName, int Line);
 enum LexToken LexGetToken(struct ParseState *Parser, struct Value **Value, int IncPos);
-void LexToEndOfLine(struct ParseState *Parser);
 
 /* parse.c */
 void ParseInit(void);
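
For reference, a minimal sketch (not part of the diff above) of how a caller might drive the split tokenise/parse API this change introduces, mirroring the new flow in Parse() and IntrinsicInit(). Only LexAnalyse(), LexInitParser(), LexGetToken(), and struct ParseState come from the declarations in picoc.h; the helper name, file name, and source string below are hypothetical.

/* Hypothetical usage sketch, not part of the commit: tokenise a source
   string once with LexAnalyse(), then attach a parser to the resulting
   heap token buffer with LexInitParser(). */
#include <string.h>
#include "picoc.h"

void TokeniseAndParseExample(void)      /* hypothetical helper name */
{
    const char *FileName = "example";   /* hypothetical file name */
    const char *Source = "int x = 1;";  /* hypothetical source text */
    struct ParseState Parser;
    struct Value *Val;

    /* step 1: lex the whole source into a heap-allocated token buffer */
    void *Tokens = LexAnalyse(FileName, Source, strlen(Source));

    /* step 2: point a parser at the pre-tokenised buffer */
    LexInitParser(&Parser, Tokens, FileName, 1);

    /* step 3: the parser now consumes tokens from the buffer */
    LexGetToken(&Parser, &Val, TRUE);
}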