feat(parser): use token-based parser

This commit is contained in:
Khaïs COLIN 2025-05-04 14:22:19 +02:00
parent d568653a17
commit a0869b1b66
3 changed files with 37 additions and 39 deletions

View file

@@ -206,13 +206,13 @@ impl Tokenizer {
c.is_alphanumeric() || c == '_'
}
fn scan_token(&mut self) -> Result<Token, ScanError> {
fn scan_token(&mut self) -> Result<Option<Token>, ScanError> {
loop {
if let Some(c) = self.peek() {
if Self::ident_or_keyword_start(c) {
return self.scan_identifier_or_keyword();
return self.scan_identifier_or_keyword().map(Some);
} else if c == '.' {
return self.scan_meta_command();
return self.scan_meta_command().map(Some);
} else if c.is_whitespace() {
self.advance();
} else {
@@ -223,10 +223,7 @@ impl Tokenizer {
});
}
} else {
return Err(ScanError {
location: self.current_location(0),
kind: ScanErrorKind::UnexpectedEndOfInput,
});
return Ok(None);
}
}
}
@@ -244,8 +241,10 @@ pub fn tokenize(input: String, file: String) -> Result<Vec<Token>, Vec<ScanError>>
let mut tokenizer = Tokenizer::new(input, file);
let mut errors = Vec::new();
while !tokenizer.is_at_end() {
match tokenizer.scan_token() {
Ok(token) => tokenizer.tokens.push(token),
let token = tokenizer.scan_token();
match token {
Ok(Some(token)) => tokenizer.tokens.push(token),
Ok(None) => break,
Err(err) => errors.push(err),
}
}