about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorGustav Sörnäs <gustav@sornas.net>2021-01-11 16:03:58 +0100
committerGustav Sörnäs <gustav@sornas.net>2021-01-11 16:05:10 +0100
commit711968ffb31ee06630ed43740fd2538becc2765b (patch)
tree7d4277bb060ce8e3f919cfb205d99a39f3aa99fc
parentd7d18a80bb70238dec107f6724938964d157a287 (diff)
downloadsylt-711968ffb31ee06630ed43740fd2538becc2765b.tar.gz
compile from strings
-rw-r--r--src/main.rs39
-rw-r--r--src/tokenizer.rs7
2 files changed, 32 insertions(+), 14 deletions(-)
diff --git a/src/main.rs b/src/main.rs
index 26b0d02..a2b5dae 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -6,6 +6,7 @@ mod vm;
mod compiler;
use error::Error;
+use tokenizer::TokenStream;
fn main() {
let file = file_from_args().unwrap_or_else(|| Path::new("tests/simple.tdy").to_owned());
@@ -22,7 +23,14 @@ fn file_from_args() -> Option<PathBuf> {
}
fn run_file(path: &Path) -> Result<(), Vec<Error>> {
- let tokens = tokenizer::file_to_tokens(path);
+ run(tokenizer::file_to_tokens(path), path)
+}
+
+fn run_string(s: &str) -> Result<(), Vec<Error>> {
+ run(tokenizer::string_to_tokens(s), Path::new("builtin"))
+}
+
+fn run(tokens: TokenStream, path: &Path) -> Result<(), Vec<Error>> {
match compiler::compile("main", path, tokens) {
Ok(block) => vm::run_block(block).or_else(|e| Err(vec![e])),
Err(errors) => Err(errors),
@@ -31,22 +39,29 @@ fn run_file(path: &Path) -> Result<(), Vec<Error>> {
#[cfg(test)]
mod tests {
- use super::run_file;
+ use super::{run_file, run_string};
use crate::error::{Error, ErrorKind};
use std::path::Path;
+ macro_rules! assert_errs {
+ ($result:expr, [ $( $kind:pat ),* ]) => {
+ println!("{} => {:?}", stringify!($result), $result);
+ assert!(matches!(
+ $result.unwrap_err().as_slice(),
+ &[$(Error {
+ kind: $kind,
+ file: _,
+ line: _,
+ message: _,
+ },
+ )*]
+ ))
+ };
+ }
+
#[test]
fn unreachable_token() {
- let file = Path::new("tests/unreachable.tdy");
- assert!(matches!(
- run_file(&file).unwrap_err().as_slice(),
- &[Error {
- kind: ErrorKind::Unreachable,
- file: _,
- line: _,
- message: _,
- }]
- ));
+ assert_errs!(run_string("<!>\n"), [ErrorKind::Unreachable]);
}
macro_rules! test_file {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 1da9835..e52410c 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -125,8 +125,7 @@ pub enum Token {
pub type PlacedToken = (Token, usize);
pub type TokenStream = Vec<PlacedToken>;
-pub fn file_to_tokens(file: &Path) -> TokenStream {
- let content = fs::read_to_string(file).unwrap();
+pub fn string_to_tokens(content: &str) -> TokenStream {
let lexer = Token::lexer(&content);
let mut placed_tokens = lexer.spanned().peekable();
@@ -152,6 +151,10 @@ pub fn file_to_tokens(file: &Path) -> TokenStream {
lined_tokens
}
+pub fn file_to_tokens(file: &Path) -> TokenStream {
+ string_to_tokens(&fs::read_to_string(file).unwrap())
+}
+
#[cfg(test)]
mod tests {
use super::Token;