diff options
| author | Gustav Sörnäs <gustav@sornas.net> | 2021-01-09 21:25:39 +0100 |
|---|---|---|
| committer | Gustav Sörnäs <gustav@sornas.net> | 2021-01-09 21:25:39 +0100 |
| commit | 3ede943556bebb85427415f21ec231973da9e080 (patch) | |
| tree | 4750e3123303799d08407776f3ed820801f8a067 /src | |
| parent | 3971c2a09ab6fedc6bdcf5b7597fbf3d0bafeb0d (diff) | |
| download | sylt-3ede943556bebb85427415f21ec231973da9e080.tar.gz | |
wip line numbers and errors
Diffstat (limited to 'src')
| -rw-r--r-- | src/compiler.rs | 8 | ||||
| -rw-r--r-- | src/tokenizer.rs | 6 | ||||
| -rw-r--r-- | src/vm.rs | 40 |
3 files changed, 44 insertions, 10 deletions
diff --git a/src/compiler.rs b/src/compiler.rs
index 9ab997c..c34063d 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -183,8 +183,8 @@ impl Compiler {
         }
     }
 
-    pub fn compile(&mut self, name: &str) -> Block {
-        let mut block = Block::new(name);
+    pub fn compile(&mut self, name: &str, filename: &str) -> Block {
+        let mut block = Block::new(name, filename);
 
         loop {
             if self.peek() == Token::EOF {
@@ -204,6 +204,6 @@ impl Compiler {
     }
 }
 
-pub fn compile(name: &str, tokens: TokenStream) -> Block {
-    Compiler::new(tokens).compile(name)
+pub fn compile(name: &str, filename: &str, tokens: TokenStream) -> Block {
+    Compiler::new(tokens).compile(name, filename)
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index a53015d..acebcca 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -119,6 +119,12 @@ pub type TokenStream = Vec<PlacedToken>;
 
 pub fn file_to_tokens(filename: &str) -> TokenStream {
     let content = fs::read_to_string(filename).unwrap();
     let lexer = Token::lexer(&content);
+    let mut line = 1;
+    for (c_idx, c) in content.chars().enumerate() {
+        if c == '\n' {
+            line += 1;
+        }
+    }
     lexer.spanned().collect()
 }
diff --git a/src/vm.rs b/src/vm.rs
@@ -1,3 +1,7 @@
+use std::collections::HashMap;
+
+use crate::tokenizer::PlacedToken;
+
 #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
 pub enum Value {
@@ -34,27 +38,38 @@ pub enum Op {
 #[derive(Debug)]
 pub struct Block {
     name: String,
+    filename: String,
     ops: Vec<Op>,
+    last_line_offset: Option<usize>,
+    line_offsets: HashMap<usize, usize>,
 }
 
 impl Block {
-    pub fn new(name: &str) -> Self {
+    pub fn new(name: &str, filename: &str) -> Self {
         Self {
             name: String::from(name),
+            filename: String::from(filename),
             ops: Vec::new(),
+            last_line_offset: None,
+            line_offsets: HashMap::new(),
         }
     }
 
-    pub fn add(&mut self, op: Op) -> usize {
+    pub fn add(&mut self, op: Op, token_position: Option<usize>) -> usize {
         let len = self.ops.len();
+        if token_position != self.last_line_offset {
+            if let Some(token_position) = token_position {
+                self.line_offsets.insert(len, token_position);
+            }
+        }
         self.ops.push(op);
         len
     }
 
-    pub fn add_from(&mut self, ops: &[Op]) -> usize {
+    pub fn add_from(&mut self, ops: &[Op], token_position: Option<usize>) -> usize {
         let len = self.ops.len();
         for op in ops {
-            self.add(*op);
+            self.add(*op, token_position);
         }
         len
     }
@@ -68,6 +83,19 @@ pub struct VM {
     ip: usize,
 }
 
+#[derive(Debug)]
+pub enum VMErrorKind {
+    TypeError(Value, Value),
+    AssertFailed(Value, Value),
+}
+
+#[derive(Debug)]
+pub struct VMError {
+    kind: VMErrorKind,
+    token: PlacedToken,
+    message: String,
+}
+
 pub fn run_block(block: Block) {
     let mut vm = VM {
         stack: Vec::new(),
@@ -89,7 +117,7 @@ impl VM {
         self.stack.get(self.stack.len() - amount)
     }
 
-    pub fn run(&mut self) {
+    pub fn run(&mut self) -> Result<(), VMError>{
         const PRINT_WHILE_RUNNING: bool = true;
         const PRINT_BLOCK: bool = true;
@@ -211,7 +239,7 @@ impl VM {
                 }
                 Op::Return => {
-                    return;
+                    return Ok(());
                 }
             }
             self.ip += 1;
