| | | |
|---|---|---|
| author | Edvard Thörnros <edvard.thornros@gmail.com> | 2021-01-09 17:30:09 +0100 |
| committer | Edvard Thörnros <edvard.thornros@gmail.com> | 2021-01-09 17:30:09 +0100 |
| commit | efaab433309170e8330a7722e90c26a93dbec252 | |
| tree | 86794243d8351f5761cfac6ddb9cce67a2ee6359 /src/tokenizer.rs | |
| parent | 158c2c284fa61be972dd868e2f262f9e04a0edb4 | |
| download | sylt-efaab433309170e8330a7722e90c26a93dbec252.tar.gz | |
Start on compiler
Diffstat (limited to 'src/tokenizer.rs')
| | | |
|---|---|---|
| -rw-r--r-- | src/tokenizer.rs | 8 |

1 file changed, 6 insertions, 2 deletions
```diff
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index efea700..cb2d7b8 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1,7 +1,7 @@
 use std::fs;
 use logos::{Logos, Span};
 
-#[derive(Logos, Debug, PartialEq)]
+#[derive(Logos, Debug, PartialEq, Clone)]
 pub enum Token {
     #[regex(r"[[:alpha:]][[:alnum:]]*", |lex| lex.slice().to_string())]
     Identifier(String),
@@ -94,11 +94,15 @@ pub enum Token {
     #[regex(r"[ \t\r]", logos::skip)]
     Whitespace,
 
+    EOF,
+
     #[error]
     Error,
 }
 
-pub fn file_to_tokens(filename: &str) -> Vec<(Token, Span)> {
+pub type PlacedToken = (Token, Span);
+pub type TokenStream = Vec<PlacedToken>;
+pub fn file_to_tokens(filename: &str) -> TokenStream {
     let content = fs::read_to_string(filename).unwrap();
     let lexer = Token::lexer(&content);
     lexer.spanned().collect()
```
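The parts of this diff that matter for the "Start on compiler" work are the `Clone` derive on `Token`, the new synthetic `EOF` variant, and the `PlacedToken`/`TokenStream` aliases naming the tokenizer's output. The sketch below is not from this commit; it is a hypothetical illustration of how a compiler cursor might consume a `TokenStream`, with `Compiler`, `peek`, `eat`, `main`, and the stubbed-down `Token` enum all invented for the example (the real `Token` is the logos-generated enum in `src/tokenizer.rs`).

```rust
use std::ops::Range;

// Stand-ins for the real definitions in src/tokenizer.rs.
type Span = Range<usize>; // logos::Span is Range<usize>
#[derive(Debug, PartialEq, Clone)]
enum Token {
    Identifier(String),
    EOF,
    Error,
}

type PlacedToken = (Token, Span);
type TokenStream = Vec<PlacedToken>;

/// Hypothetical compiler cursor over a token stream.
struct Compiler {
    tokens: TokenStream,
    current: usize,
}

impl Compiler {
    fn new(mut tokens: TokenStream) -> Self {
        // Terminate the stream with EOF so `peek` never runs off the end.
        let end = tokens.last().map(|(_, span)| span.end).unwrap_or(0);
        tokens.push((Token::EOF, end..end));
        Compiler { tokens, current: 0 }
    }

    /// Look at the current token without consuming it.
    fn peek(&self) -> &Token {
        &self.tokens[self.current].0
    }

    /// Consume and return the current token; `Clone` on `Token` makes
    /// handing out an owned copy possible without borrowing the stream.
    fn eat(&mut self) -> PlacedToken {
        let token = self.tokens[self.current].clone();
        if self.current + 1 < self.tokens.len() {
            self.current += 1;
        }
        token
    }
}

fn main() {
    let stream: TokenStream = vec![(Token::Identifier("x".into()), 0..1)];
    let mut compiler = Compiler::new(stream);
    assert_eq!(compiler.eat().0, Token::Identifier("x".into()));
    assert_eq!(*compiler.peek(), Token::EOF);
}
```

Under these assumptions, the trailing `EOF` keeps `peek` total (no bounds checks at every call site), which would explain adding the variant here even though the lexer itself never emits it.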
