Tokenizing tests

Greg Shuflin 2021-10-14 03:12:05 -07:00
parent fcbf2d959b
commit 075e323239
1 changed file with 7 additions and 1 deletion

@@ -217,7 +217,7 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix
}
},
Some(c) => buf.push(c),
-None => return TokenKind::Error(format!("Unclosed string")),
+None => return TokenKind::Error("Unclosed string".to_string()),
}
}
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
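
For context on the single-line change above: the error message has nothing to interpolate, so building the String directly avoids the formatting machinery that format! would invoke for a fixed literal (this is the pattern clippy's useless_format lint points out). A minimal illustration, independent of this codebase:

    let a: String = format!("Unclosed string");    // formats a literal with no arguments
    let b: String = "Unclosed string".to_string(); // direct conversion, same result
    assert_eq!(a, b);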
@@ -329,6 +329,9 @@ mod schala_tokenizer_tests {
fn comments() {
let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ */ 2").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("1"), op!("+"), digit!("2")]);
let token_kinds: Vec<TokenKind> = tokenize("1 + /* hella /* bro */ 2").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);
}
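
The first assertion in comments() exercises nested block comments, and the new second assertion checks that an unterminated comment comes back as an Error token instead of panicking. The comment-scanning code itself is not part of this diff; a minimal sketch of the usual depth-counting approach (function name and signature are assumptions, not the project's actual implementation):

    // Called after the opening "/*" has already been consumed; depth tracks nesting.
    fn skip_block_comment(chars: &mut std::iter::Peekable<std::str::Chars<'_>>) -> Result<(), String> {
        let mut depth = 1;
        while depth > 0 {
            match chars.next() {
                Some('/') if chars.peek() == Some(&'*') => { chars.next(); depth += 1; }
                Some('*') if chars.peek() == Some(&'/') => { chars.next(); depth -= 1; }
                Some(_) => {}
                None => return Err("Unclosed comment".to_string()),
            }
        }
        Ok(())
    }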
#[test]
@@ -344,5 +347,8 @@ mod schala_tokenizer_tests {
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
let token_kinds: Vec<TokenKind> = tokenize(r#""Do \n \" escapes work\t""#).into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]);
}
}
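
A note on the last two assertions: the test input is a raw string, so the tokenizer sees literal backslash sequences, while the expected StrLiteral value is an ordinary Rust string whose escapes have already been processed. The assertion therefore only passes if the tokenizer itself turns \n, \", and \t into the corresponding characters. The distinction in miniature:

    let raw = r#"\n"#;  // two characters, '\\' and 'n' -- what the tokenizer reads
    let escaped = "\n"; // one character, a newline -- what the resulting token should hold
    assert_eq!(raw.len(), 2);
    assert_eq!(escaped.len(), 1);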