-rw-r--r--   src/compiler.rs    8
-rw-r--r--   src/tokenizer.rs   6
-rw-r--r--   src/vm.rs         40
3 files changed, 44 insertions(+), 10 deletions(-)
diff --git a/src/compiler.rs b/src/compiler.rs
index 9ab997c..c34063d 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -183,8 +183,8 @@ impl Compiler {
}
}
- pub fn compile(&mut self, name: &str) -> Block {
- let mut block = Block::new(name);
+ pub fn compile(&mut self, name: &str, filename: &str) -> Block {
+ let mut block = Block::new(name, filename);
loop {
if self.peek() == Token::EOF {
@@ -204,6 +204,6 @@ impl Compiler {
}
}
-pub fn compile(name: &str, tokens: TokenStream) -> Block {
- Compiler::new(tokens).compile(name)
+pub fn compile(name: &str, filename: &str, tokens: TokenStream) -> Block {
+ Compiler::new(tokens).compile(name, filename)
}
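
The compiler half of this change just threads the source filename down into `Block::new`, so a compiled block knows which file it came from. A minimal sketch of the call-site wiring this implies (a sketch only; `run_file` and the module paths are illustrative and not part of this commit):

    // Hypothetical wiring, not from this commit: the filename used to read
    // the source is the same one threaded through compile() into the Block.
    fn run_file(filename: &str) {
        let tokens = tokenizer::file_to_tokens(filename);
        let block = compiler::compile("main", filename, tokens);
        vm::run_block(block);
    }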
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index a53015d..acebcca 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -119,6 +119,12 @@ pub type TokenStream = Vec<PlacedToken>;
pub fn file_to_tokens(filename: &str) -> TokenStream {
let content = fs::read_to_string(filename).unwrap();
let lexer = Token::lexer(&content);
+ let mut line = 1;
+ for (c_idx, c) in content.chars().enumerate() {
+ if c == '\n' {
+ line += 1;
+ }
+ }
lexer.spanned().collect()
}
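
The loop added to `file_to_tokens` counts newlines in the source, but as of this hunk neither `line` nor `c_idx` is used; it reads as groundwork for attaching line numbers to tokens. One way that could look, assuming a logos-style lexer whose `spanned()` iterator yields `(token, byte_range)` pairs (the crate's actual `PlacedToken` definition is not shown in this diff, so the return type below is a placeholder):

    // Sketch only: pair each token with the 1-based line of its first byte.
    pub fn file_to_tokens_with_lines(filename: &str) -> Vec<(Token, usize)> {
        let content = fs::read_to_string(filename).unwrap();
        Token::lexer(&content)
            .spanned()
            .map(|(token, span)| {
                // Count the newlines that precede the token's start offset.
                let line = 1 + content[..span.start].matches('\n').count();
                (token, line)
            })
            .collect()
    }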
diff --git a/src/vm.rs b/src/vm.rs
index 633b920..d45e4d6 100644
--- a/src/vm.rs
+++ b/src/vm.rs
@@ -1,3 +1,7 @@
+use std::collections::HashMap;
+
+use crate::tokenizer::PlacedToken;
+
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub enum Value {
@@ -34,27 +38,38 @@ pub enum Op {
#[derive(Debug)]
pub struct Block {
name: String,
+ filename: String,
ops: Vec<Op>,
+ last_line_offset: Option<usize>,
+ line_offsets: HashMap<usize, usize>,
}
impl Block {
- pub fn new(name: &str) -> Self {
+ pub fn new(name: &str, filename: &str) -> Self {
Self {
name: String::from(name),
+ filename: String::from(filename),
ops: Vec::new(),
+ last_line_offset: None,
+ line_offsets: HashMap::new(),
}
}
- pub fn add(&mut self, op: Op) -> usize {
+ pub fn add(&mut self, op: Op, token_position: Option<usize>) -> usize {
let len = self.ops.len();
+ if token_position != self.last_line_offset {
+ if let Some(token_position) = token_position {
+ self.line_offsets.insert(len, token_position);
+ }
+ }
self.ops.push(op);
len
}
- pub fn add_from(&mut self, ops: &[Op]) -> usize {
+ pub fn add_from(&mut self, ops: &[Op], token_position: Option<usize>) -> usize {
let len = self.ops.len();
for op in ops {
- self.add(*op);
+ self.add(*op, token_position);
}
len
}
@@ -68,6 +83,19 @@ pub struct VM {
ip: usize,
}
+#[derive(Debug)]
+pub enum VMErrorKind {
+ TypeError(Value, Value),
+ AssertFailed(Value, Value),
+}
+
+#[derive(Debug)]
+pub struct VMError {
+ kind: VMErrorKind,
+ token: PlacedToken,
+ message: String,
+}
+
pub fn run_block(block: Block) {
let mut vm = VM {
stack: Vec::new(),
@@ -89,7 +117,7 @@ impl VM {
self.stack.get(self.stack.len() - amount)
}
- pub fn run(&mut self) {
+ pub fn run(&mut self) -> Result<(), VMError> {
const PRINT_WHILE_RUNNING: bool = true;
const PRINT_BLOCK: bool = true;
@@ -211,7 +239,7 @@ impl VM {
}
Op::Return => {
- return;
+ return Ok(());
}
}
self.ip += 1;
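
`Block::add` now takes an optional token position and stores it in `line_offsets` keyed by the index of the op being added, so a position is recorded only where it changes instead of once per op. A standalone sketch of that bookkeeping with stand-in types; it also updates `last_line_offset` after recording, which is an assumption about the intent, since the hunk above compares against that field:

    // Standalone sketch of the op-index -> source-position bookkeeping;
    // `Op` and the position type are stand-ins, not the crate's own.
    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy)]
    enum Op { Constant(usize), Add, Return }

    #[derive(Debug, Default)]
    struct Block {
        ops: Vec<Op>,
        last_line_offset: Option<usize>,
        line_offsets: HashMap<usize, usize>,
    }

    impl Block {
        fn add(&mut self, op: Op, token_position: Option<usize>) -> usize {
            let len = self.ops.len();
            // Record a position only when it differs from the previous one,
            // so long runs of ops from the same line cost a single entry.
            if token_position != self.last_line_offset {
                if let Some(pos) = token_position {
                    self.line_offsets.insert(len, pos);
                }
                self.last_line_offset = token_position;
            }
            self.ops.push(op);
            len
        }

        // Walk back from an op index to the nearest recorded position.
        fn position_of(&self, ip: usize) -> Option<usize> {
            (0..=ip).rev().find_map(|i| self.line_offsets.get(&i).copied())
        }
    }

    fn main() {
        let mut block = Block::default();
        block.add(Op::Constant(0), Some(1));
        block.add(Op::Constant(1), Some(1)); // same position: no new entry
        block.add(Op::Add, Some(2));
        block.add(Op::Return, None);
        assert_eq!(block.position_of(1), Some(1));
        assert_eq!(block.position_of(3), Some(2));
        println!("{:?}", block.line_offsets);
    }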
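
`run` now returns `Result<(), VMError>`, and `VMErrorKind` carries the two values involved, but no error is constructed or reported in this commit yet. A standalone sketch of what reporting could look like; every type below is a stand-in mirroring the shapes added here, and a plain line number stands in for the `PlacedToken` field since that type is not shown in this diff:

    // Standalone sketch; all types are stand-ins, not the crate's own.
    #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
    enum Value {
        Float(f64),
        Bool(bool),
    }

    #[derive(Debug)]
    enum VMErrorKind {
        TypeError(Value, Value),
        AssertFailed(Value, Value),
    }

    #[derive(Debug)]
    struct VMError {
        kind: VMErrorKind,
        line: usize, // stand-in for the PlacedToken field in the diff
        message: String,
    }

    // Format a runtime error against the file the Block recorded.
    fn report(filename: &str, err: &VMError) {
        match &err.kind {
            VMErrorKind::TypeError(a, b) => {
                eprintln!("{}:{}: type error: {:?} vs {:?} ({})",
                          filename, err.line, a, b, err.message);
            }
            VMErrorKind::AssertFailed(a, b) => {
                eprintln!("{}:{}: assertion failed: {:?} != {:?} ({})",
                          filename, err.line, a, b, err.message);
            }
        }
    }

    fn main() {
        report("example.src", &VMError {
            kind: VMErrorKind::TypeError(Value::Float(1.0), Value::Bool(true)),
            line: 3,
            message: String::from("operands must have the same type"),
        });
    }

Since `Block` records both a filename and per-op line offsets, mapping `self.ip` back to a source location at error time is presumably where these pieces meet.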