From 19b1c0aafc465f35042c0de39c2b8a843b76142d Mon Sep 17 00:00:00 2001
From: Edvard Thörnros
Date: Fri, 5 Feb 2021 19:49:51 +0100
Subject: don't eat newlines in comments

---
 src/tokenizer.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 28172a3..778c5a0 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -123,7 +123,7 @@ pub enum Token {
     #[token("\n")]
     Newline,
 
-    #[regex(r"//[^\n]*\n", logos::skip)]
+    #[regex(r"//[^\n]*", logos::skip)]
     Comment,
 
     #[regex(r"[ \t\r]", logos::skip)]
--
cgit v1.2.1

From 5a6f41f02897eb0591470670789d22902b3f8742 Mon Sep 17 00:00:00 2001
From: Edvard Thörnros
Date: Fri, 5 Feb 2021 19:51:41 +0100
Subject: update comment tests

---
 src/tokenizer.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 778c5a0..9574af1 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -229,8 +229,8 @@ mod tests {
 
     #[test]
     fn comment() {
-        lex_once("// a\n1");
-        assert_eq!(lex("1// a\n2").len(), 2);
-        assert_eq!(lex("1\n// a\n2").len(), 3); // newline is also a token
+        assert_eq!(lex("// a\n1").len(), 2);
+        assert_eq!(lex("1// a\n2").len(), 3);
+        assert_eq!(lex("1\n// a\n2").len(), 4); // newline is also a token
     }
 }
--
cgit v1.2.1
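
Editor's note: the self-contained sketch below shows roughly what the tokenizer looks like after these two commits and why the updated token counts hold. It is a minimal, hypothetical reconstruction, not the project's actual src/tokenizer.rs: the Number variant, the lex helper, and the surrounding enum shape are assumed from the hunk context, and it targets logos 0.12-era APIs, where an #[error] variant is required and the lexer yields tokens directly. The point of the change is that the Comment regex no longer consumes the trailing '\n', so the Newline rule emits its own token.

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("\n")]
    Newline,

    // The fixed rule: match only the comment text, leaving the
    // trailing '\n' for the Newline rule above. The old regex
    // r"//[^\n]*\n" skipped the newline along with the comment.
    #[regex(r"//[^\n]*", logos::skip)]
    Comment,

    #[regex(r"[ \t\r]", logos::skip)]
    Whitespace,

    // Assumed stand-in for the grammar's real tokens.
    #[regex(r"[0-9]+")]
    Number,

    #[error]
    Error,
}

// Assumed to match the lex helper used by the tests in the patch.
fn lex(source: &str) -> Vec<Token> {
    Token::lexer(source).collect()
}

fn main() {
    // Mirrors the updated tests: the newline after a comment now
    // survives as its own token.
    assert_eq!(lex("// a\n1").len(), 2);    // Newline, Number
    assert_eq!(lex("1// a\n2").len(), 3);   // Number, Newline, Number
    assert_eq!(lex("1\n// a\n2").len(), 4); // Number, Newline, Newline, Number
}

A plausible side benefit of the new regex, beyond keeping Newline tokens intact: a comment on the last line of a file with no trailing newline can now match at all, whereas r"//[^\n]*\n" required a newline to complete the match.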