From 711968ffb31ee06630ed43740fd2538becc2765b Mon Sep 17 00:00:00 2001
From: Gustav Sörnäs
Date: Mon, 11 Jan 2021 16:03:58 +0100
Subject: compile from strings

---
 src/main.rs      | 39 +++++++++++++++++++++++++++------------
 src/tokenizer.rs |  7 +++++--
 2 files changed, 32 insertions(+), 14 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 26b0d02..a2b5dae 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -6,6 +6,7 @@ mod vm;
 mod compiler;
 
 use error::Error;
+use tokenizer::TokenStream;
 
 fn main() {
     let file = file_from_args().unwrap_or_else(|| Path::new("tests/simple.tdy").to_owned());
@@ -22,7 +23,14 @@ fn file_from_args() -> Option<PathBuf> {
 }
 
 fn run_file(path: &Path) -> Result<(), Vec<Error>> {
-    let tokens = tokenizer::file_to_tokens(path);
+    run(tokenizer::file_to_tokens(path), path)
+}
+
+fn run_string(s: &str) -> Result<(), Vec<Error>> {
+    run(tokenizer::string_to_tokens(s), Path::new("builtin"))
+}
+
+fn run(tokens: TokenStream, path: &Path) -> Result<(), Vec<Error>> {
     match compiler::compile("main", path, tokens) {
         Ok(block) => vm::run_block(block).or_else(|e| Err(vec![e])),
         Err(errors) => Err(errors),
@@ -31,22 +39,29 @@ fn run_file(path: &Path) -> Result<(), Vec<Error>> {
 
 #[cfg(test)]
 mod tests {
-    use super::run_file;
+    use super::{run_file, run_string};
     use crate::error::{Error, ErrorKind};
     use std::path::Path;
 
+    macro_rules! assert_errs {
+        ($result:expr, [ $( $kind:pat ),* ]) => {
+            println!("{} => {:?}", stringify!($result), $result);
+            assert!(matches!(
+                $result.unwrap_err().as_slice(),
+                &[$(Error {
+                    kind: $kind,
+                    file: _,
+                    line: _,
+                    message: _,
+                },
+                )*]
+            ))
+        };
+    }
+
     #[test]
     fn unreachable_token() {
-        let file = Path::new("tests/unreachable.tdy");
-        assert!(matches!(
-            run_file(&file).unwrap_err().as_slice(),
-            &[Error {
-                kind: ErrorKind::Unreachable,
-                file: _,
-                line: _,
-                message: _,
-            }]
-        ));
+        assert_errs!(run_string("\n"), [ErrorKind::Unreachable]);
     }
 
     macro_rules! test_file {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 1da9835..e52410c 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -125,8 +125,7 @@ pub enum Token {
 pub type PlacedToken = (Token, usize);
 pub type TokenStream = Vec<PlacedToken>;
 
-pub fn file_to_tokens(file: &Path) -> TokenStream {
-    let content = fs::read_to_string(file).unwrap();
+pub fn string_to_tokens(content: &str) -> TokenStream {
     let lexer = Token::lexer(&content);
     let mut placed_tokens = lexer.spanned().peekable();
 
@@ -152,6 +151,10 @@
     lined_tokens
 }
 
+pub fn file_to_tokens(file: &Path) -> TokenStream {
+    string_to_tokens(&fs::read_to_string(file).unwrap())
+}
+
 #[cfg(test)]
 mod tests {
     use super::Token;
-- 
cgit v1.2.1
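
For reference, here is roughly what the new assert_errs! macro expands to for the
rewritten unreachable_token test. This sketch is not part of the commit; the names
(run_string, Error, ErrorKind) come from the diff above, and the expansion is
written out by hand:

#[test]
fn unreachable_token_expanded() {
    // stringify!($result) becomes the literal expression text; note that the
    // macro evaluates $result twice (once for the println!, once for the assert).
    println!("{} => {:?}", "run_string(\"\\n\")", run_string("\n"));
    assert!(matches!(
        run_string("\n").unwrap_err().as_slice(),
        // One Error pattern per kind listed in the macro invocation; the
        // repetition leaves a trailing comma, which slice patterns accept.
        &[Error {
            kind: ErrorKind::Unreachable,
            file: _,
            line: _,
            message: _,
        },]
    ));
}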