refactor tokenizers away from one giant switch statement

This commit is contained in:
Sebastian Hugentobler 2025-02-07 09:21:23 +01:00
parent 3cf8ef02d1
commit a257beb170
14 changed files with 366 additions and 8 deletions

View file

@@ -3,6 +3,7 @@ use lazy_static::lazy_static;
use std::collections::HashMap;
lazy_static! {
/// Mapping of reserved keywords to their respective TokenType.
pub static ref KEYWORDS: HashMap<std::string::String, TokenType> = {
let mut m = HashMap::new();
m.insert("and".into(), And);