refactor tokenizers away from one giant switch statement

Sebastian Hugentobler 2025-02-07 09:21:23 +01:00
parent 3cf8ef02d1
commit a257beb170
14 changed files with 366 additions and 8 deletions
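The hunks below only touch the crate root, but the commit message describes the shape of the refactor: instead of one giant switch statement (a single match over every possible character), the scanner delegates to one module per token class, of which tokenizer::whitespace is the only one visible in this file. The sketch below is an assumption of what that pattern can look like; the Tokenizer trait, the Token struct, and the tokenize signature are illustrative names, not the commit's actual API.

// Sketch only: names other than tokenizer::whitespace are assumptions,
// not taken from the diff.

/// A token produced by the scanner (simplified placeholder type).
#[derive(Debug)]
pub struct Token {
    pub lexeme: String,
}

/// One tokenizer per token class; each gets a chance to consume input.
pub trait Tokenizer {
    /// Try to read a token from the start of source. Returns the token
    /// (or None for skipped input such as whitespace) plus the number of
    /// bytes consumed, or None if this tokenizer does not match here.
    fn consume(&self, source: &str) -> Option<(Option<Token>, usize)>;
}

/// Example: a whitespace tokenizer skips blanks and produces no token.
pub struct Whitespace;

impl Tokenizer for Whitespace {
    fn consume(&self, source: &str) -> Option<(Option<Token>, usize)> {
        let skipped = source.len() - source.trim_start().len();
        if skipped > 0 {
            Some((None, skipped))
        } else {
            None
        }
    }
}

/// The scanner asks each registered tokenizer in turn, replacing the old
/// single match over every character.
pub fn tokenize(input: &str, tokenizers: &[Box<dyn Tokenizer>]) -> Vec<Token> {
    let mut tokens = Vec::new();
    let mut rest = input;
    while !rest.is_empty() {
        let mut advanced = false;
        for tokenizer in tokenizers {
            if let Some((token, consumed)) = tokenizer.consume(rest) {
                if let Some(token) = token {
                    tokens.push(token);
                }
                rest = &rest[consumed..];
                advanced = true;
                break;
            }
        }
        if !advanced {
            // No tokenizer matched: skip one character so the loop terminates.
            let width = rest.chars().next().map_or(1, char::len_utf8);
            rest = &rest[width..];
        }
    }
    tokens
}

With this shape, adding a new token class means adding a module and registering one more tokenizer instead of growing a central match arm by arm.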

@@ -1,3 +1,6 @@
//! Interpret the Lox language. Either compile (interpret for now though) some source code or run a
//! REPL.
use std::{
    fs::{self},
    io::{self, Write},
@@ -22,6 +25,7 @@ pub mod tokenizer {
    pub mod whitespace;
}
/// Read the source code in a file and scan it to tokens.
pub fn compile(source: &Path) -> Result<(), io::Error> {
    let input = fs::read_to_string(source)?;
    let _tokens = scanner::tokenize(&input);
@@ -29,6 +33,7 @@ pub fn compile(source: &Path) -> Result<(), io::Error> {
    Ok(())
}
/// Run a Lox REPL until SIGINT.
pub fn repl() {
    loop {
        print!("> ");
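The REPL hunk is cut off right after the prompt, so the rest of the loop is not part of this view. As a hedged sketch only (the real body is not shown), a loop consistent with the visible print!("> "), the io::{self, Write} import, and the doc comment about running until SIGINT could look like this; scanner_tokenize_stub is a hypothetical stand-in for the crate's scanner::tokenize.

use std::io::{self, Write};

fn repl_sketch() {
    loop {
        print!("> ");
        // print! does not flush, so flush before blocking on stdin.
        io::stdout().flush().expect("failed to flush stdout");

        let mut line = String::new();
        match io::stdin().read_line(&mut line) {
            // 0 bytes read means EOF (Ctrl-D); SIGINT simply ends the
            // process, since no handler is installed.
            Ok(0) => break,
            Ok(_) => {
                // Hypothetical stand-in; the real code presumably calls
                // scanner::tokenize on the line it just read.
                let tokens = scanner_tokenize_stub(&line);
                println!("{tokens:?}");
            }
            Err(e) => eprintln!("read error: {e}"),
        }
    }
}

/// Hypothetical stand-in for the crate's scanner::tokenize.
fn scanner_tokenize_stub(input: &str) -> Vec<String> {
    input.split_whitespace().map(|s| s.to_string()).collect()
}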