 src/lib.rs       | 6 ++++++
 src/tokenizer.rs | 8 ++++----
 2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1000,4 +1000,10 @@ a.a <=> 1
 a.a -= 1
 a.a <=> 0"
     );
+
+    test_multiple!(
+        newline_regression,
+        simple: "a := 1 // blargh \na += 1 // blargh \n a <=> 2 // HARGH",
+        expressions: "1 + 1 // blargh \n 2 // blargh \n // HARGH \n",
+    );
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 28172a3..9574af1 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -123,7 +123,7 @@ pub enum Token {
     #[token("\n")]
     Newline,
 
-    #[regex(r"//[^\n]*\n", logos::skip)]
+    #[regex(r"//[^\n]*", logos::skip)]
     Comment,
 
     #[regex(r"[ \t\r]", logos::skip)]
@@ -229,8 +229,8 @@ mod tests {
 
     #[test]
     fn comment() {
-        lex_once("// a\n1");
-        assert_eq!(lex("1// a\n2").len(), 2);
-        assert_eq!(lex("1\n// a\n2").len(), 3); // newline is also a token
+        assert_eq!(lex("// a\n1").len(), 2);
+        assert_eq!(lex("1// a\n2").len(), 3);
+        assert_eq!(lex("1\n// a\n2").len(), 4); // newline is also a token
     }
 }
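The fix works because the comment pattern now stops before the newline, so the "\n" that terminates a comment is still matched by the Newline rule instead of being swallowed by the skipped comment. Below is a minimal, self-contained sketch of that behavior, not the repo's actual Token enum: it assumes logos 0.12 (where skipped patterns use the logos::skip callback and an #[error] variant is required) and adds a hypothetical Number token so the test inputs lex to something countable.

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("\n")]
    Newline,

    // The old pattern `//[^\n]*\n` consumed the terminating newline as
    // part of the skipped comment, silently dropping a Newline token.
    // Stopping at `[^\n]*` leaves the `\n` for the rule above.
    #[regex(r"//[^\n]*", logos::skip)]
    Comment,

    // Hypothetical token, not part of the diff; added for the demo.
    #[regex(r"[0-9]+")]
    Number,

    #[error]
    #[regex(r"[ \t\r]+", logos::skip)]
    Error,
}

fn main() {
    // Same input as the updated test: number, newline, comment, newline, number.
    let tokens: Vec<Token> = Token::lexer("1\n// a\n2").collect();
    assert_eq!(
        tokens,
        [Token::Number, Token::Newline, Token::Newline, Token::Number]
    );
    println!("{tokens:?}"); // 4 tokens, matching the new `len() == 4` assertion
}

On newer logos (0.13+) the #[error] variant is gone and the lexer yields Result<Token, _>, so the collect line would need adjusting, but the regex change demonstrates the same point either way.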
