feat(tokenizer): recognize meta-commands

This commit is contained in:
Khaïs COLIN 2025-05-04 12:06:47 +02:00
parent 825511a515
commit cbc4a4755c
5 changed files with 221 additions and 62 deletions

View file

@@ -1,3 +1,5 @@
use crate::meta_commands::MetaCommand;
#[derive(Debug, Eq, PartialEq)]
pub enum TokenData {
/// INSERT
@@ -8,6 +10,7 @@ pub enum TokenData {
Integer(i64),
/// Hello World!
String(String),
MetaCommand(MetaCommand),
/// No file O.O?
EndOfFile,
}
@@ -61,6 +64,7 @@ pub enum ScanErrorKind {
UnexpectedChar(char),
UnexpectedEndOfInput,
UnknownKeyword(String),
UnknownMetaCommand(String),
}
#[derive(Debug, Eq, PartialEq)]
@@ -105,6 +109,44 @@ impl Tokenizer {
}
}
/// Maps a scanned word to a known meta-command token, case-insensitively.
///
/// Returns `None` when `word` is not a recognized meta-command; the caller
/// surfaces that as `ScanErrorKind::UnknownMetaCommand`.
fn recognize_metacommand(word: &str) -> Option<TokenData> {
    // `eq_ignore_ascii_case` compares in place, avoiding the `String`
    // allocation `word.to_lowercase()` made on every call. All meta-command
    // names are ASCII, so the matching behavior is unchanged.
    if word.eq_ignore_ascii_case(".exit") {
        Some(TokenData::MetaCommand(MetaCommand::Exit))
    } else {
        None
    }
}
/// Scans a meta-command starting at the current position: a leading `.`
/// followed by alphabetic characters or underscores.
///
/// Produces a `MetaCommand` token for a recognized command, or a
/// `ScanError` with `UnknownMetaCommand` carrying the full lexeme otherwise.
fn scan_meta_command(&mut self) -> Result<Token, ScanError> {
    let start_offset = self.offset;
    let mut lexeme = String::new();

    // Consume the leading '.' that dispatched us here.
    if let Some(dot) = self.advance() {
        lexeme.push(dot);
    }

    // Accumulate the command name: letters and underscores only.
    while let Some(next) = self.peek() {
        if !(next.is_alphabetic() || next == '_') {
            break;
        }
        lexeme.push(next);
        self.advance();
    }

    // Every consumed character was pushed onto `lexeme`, so the span length
    // is exactly its character count.
    let location = Location::new(self.file.clone(), start_offset, lexeme.chars().count());

    match Self::recognize_metacommand(&lexeme) {
        Some(data) => Ok(Token {
            location,
            data,
            lexeme,
        }),
        None => Err(ScanError {
            location,
            kind: ScanErrorKind::UnknownMetaCommand(lexeme),
        }),
    }
}
fn scan_identifier_or_keyword(&mut self) -> Result<Token, ScanError> {
let start_offset = self.offset;
let mut word = String::new();
@@ -149,6 +191,8 @@ impl Tokenizer {
if let Some(c) = self.peek() {
if Self::ident_or_keyword_start(c) {
return self.scan_identifier_or_keyword();
} else if c == '.' {
return self.scan_meta_command();
} else if c.is_whitespace() {
self.advance();
} else {
@@ -193,66 +237,82 @@ pub fn tokenize(input: String, file: String) -> Result<Vec<Token>, Vec<ScanError
}
}
#[test]
fn test_tokenizer() {
    // Tokenize a two-keyword input and compare the full token stream against
    // the expected sequence in one shot: keyword recognition is
    // case-insensitive, but lexemes keep their original casing, and the
    // stream is terminated by a zero-length EndOfFile token.
    let tokens =
        tokenize("INSERT Select".to_string(), "src/statement.sql".to_string()).unwrap();
    let expected = vec![
        Token {
            location: Location::new(String::from("src/statement.sql"), 0, 6),
            data: TokenData::Insert,
            lexeme: String::from("INSERT"),
        },
        Token {
            location: Location::new(String::from("src/statement.sql"), 7, 6),
            data: TokenData::Select,
            lexeme: String::from("Select"),
        },
        Token {
            location: Location::new(String::from("src/statement.sql"), 13, 0),
            data: TokenData::EndOfFile,
            lexeme: String::from(""),
        },
    ];
    assert_eq!(tokens, expected);
}
#[cfg(test)]
mod tests {
use super::*;
// `insta` snapshot assertions: expected Debug output lives in checked-in
// snapshot files rather than inline expected values.
use insta::assert_debug_snapshot;
// NOTE(review): this first `test_tokenizer_errors` is missing its closing
// `}` and a complete copy of it appears again at the bottom of this module —
// this looks like leftover pre-image context from a diff rendering; confirm
// against the repository and drop the duplicate.
#[test]
fn test_tokenizer_errors() {
// Two independent scan errors from one input: an unknown keyword and an
// unexpected character. `tokenize` collects all errors rather than
// stopping at the first.
let mut scanerrors = tokenize("salact +".to_string(), "src/statement.sql".to_string())
.err()
.unwrap();
// Reverse so `pop()` yields errors in source order.
scanerrors.reverse();
assert_eq!(
scanerrors.pop(),
Some(ScanError {
location: Location {
file: "src/statement.sql".to_string(),
offset: 0,
length: 6,
},
kind: ScanErrorKind::UnknownKeyword("salact".to_string()),
})
);
assert_eq!(
scanerrors.pop(),
Some(ScanError {
location: Location {
file: "src/statement.sql".to_string(),
offset: 8,
length: 1,
},
kind: ScanErrorKind::UnexpectedChar('+'),
})
);
assert!(scanerrors.is_empty());
// A known meta-command tokenizes to an Ok result; snapshot pins the shape.
#[test]
fn test_tokenize_meta_command() {
assert_debug_snapshot!(tokenize(".exit".to_string(), "<stdin>".to_string()));
}
// An unrecognized meta-command produces an UnknownMetaCommand scan error;
// snapshot pins the error payload.
#[test]
fn test_tokenize_unknown_meta_command() {
assert_debug_snapshot!(tokenize(".halp".to_string(), "<stdin>".to_string()));
}
// Happy path: keywords are recognized case-insensitively, lexemes keep
// their original casing, and the stream ends with a zero-length EndOfFile.
#[test]
fn test_tokenizer() {
let mut scanresult =
tokenize("INSERT Select".to_string(), "src/statement.sql".to_string()).unwrap();
// Reverse so `pop()` yields tokens in source order.
scanresult.reverse();
assert_eq!(
scanresult.pop(),
Some(Token {
location: Location::new(String::from("src/statement.sql"), 0, 6),
data: TokenData::Insert,
lexeme: String::from("INSERT"),
})
);
assert_eq!(
scanresult.pop(),
Some(Token {
location: Location::new(String::from("src/statement.sql"), 7, 6),
data: TokenData::Select,
lexeme: String::from("Select"),
})
);
assert_eq!(
scanresult.pop(),
Some(Token {
location: Location::new(String::from("src/statement.sql"), 13, 0),
data: TokenData::EndOfFile,
lexeme: String::from(""),
})
);
assert_eq!(scanresult.pop(), None);
assert!(scanresult.is_empty());
}
// NOTE(review): apparent duplicate of the truncated copy above — likely the
// post-image side of the diff; keep this complete version.
#[test]
fn test_tokenizer_errors() {
let mut scanerrors = tokenize("salact +".to_string(), "src/statement.sql".to_string())
.err()
.unwrap();
scanerrors.reverse();
assert_eq!(
scanerrors.pop(),
Some(ScanError {
location: Location {
file: "src/statement.sql".to_string(),
offset: 0,
length: 6,
},
kind: ScanErrorKind::UnknownKeyword("salact".to_string()),
})
);
assert_eq!(
scanerrors.pop(),
Some(ScanError {
location: Location {
file: "src/statement.sql".to_string(),
offset: 8,
length: 1,
},
kind: ScanErrorKind::UnexpectedChar('+'),
})
);
assert!(scanerrors.is_empty());
}
}