From 3b480795fd82b5fd66b2b6263a2cac3335717202 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edvard=20Th=C3=B6rnros?= Date: Sat, 9 Jan 2021 16:03:41 +0100 Subject: Super simple VM --- src/tokenizer.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) (limited to 'src/tokenizer.rs') diff --git a/src/tokenizer.rs b/src/tokenizer.rs index f1f0658..efea700 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -1,5 +1,5 @@ use std::fs; -use logos::Logos; +use logos::{Logos, Span}; #[derive(Logos, Debug, PartialEq)] pub enum Token { @@ -26,6 +26,10 @@ pub enum Token { #[token("loop")] Loop, + // TODO(ed): Remove + #[token("print")] + Print, + #[token("+")] Plus, #[token("++")] @@ -94,8 +98,8 @@ pub enum Token { Error, } -pub fn file_to_tokens(filename: &str) -> Vec<Token> { +pub fn file_to_tokens(filename: &str) -> Vec<(Token, Span)> { let content = fs::read_to_string(filename).unwrap(); let lexer = Token::lexer(&content); - lexer.collect() + lexer.spanned().collect() } -- cgit v1.2.1