From c4ab1ed1056de4a658cf91730d51bf7d6841174f Mon Sep 17 00:00:00 2001
From: greg
Date: Sat, 16 Jan 2016 03:12:06 -0800
Subject: [PATCH] Fix tokenizer tests

---
 src/tokenizer.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 9d5668b..84b98db 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -162,16 +162,16 @@ mod tests {
     #[test]
     fn tokeniziation_tests() {
         tokentest!("let a = 3\n",
-                   "[Identifier(\"let\"), Identifier(\"a\"), Identifier(\"=\"), NumLiteral(3), Newline, EOF]");
+                   "[Keyword(Let), Identifier(\"a\"), Operator(Op { repr: \"=\" }), NumLiteral(3), Newline]");
 
         tokentest!("2+1",
-                   "[NumLiteral(2), Identifier(\"+\"), NumLiteral(1), EOF]");
+                   "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
 
         tokentest!("2 + 1",
-                   "[NumLiteral(2), Identifier(\"+\"), NumLiteral(1), EOF]");
+                   "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
 
         tokentest!("2.3*49.2",
-                   "[NumLiteral(2.3), Identifier(\"*\"), NumLiteral(49.2), EOF]");
+                   "[NumLiteral(2.3), Operator(Op { repr: \"*\" }), NumLiteral(49.2)]");
 
         assert_eq!(tokenize("2.4.5"), None);
     }
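
Note: the updated expected strings imply roughly the token shapes and test harness sketched below. This is a minimal sketch inferred from the Debug output in the tests, not the actual definitions in src/tokenizer.rs; the `Kw` enum name, the payload types, and the `tokenize` signature are assumptions.

    // Minimal sketch, inferred from the tests' expected Debug strings; the
    // real definitions live in src/tokenizer.rs and may differ.

    #[derive(Debug, PartialEq)]
    pub enum Kw {
        Let, // "Keyword(Let)" suggests a nested enum whose Debug prints "Let"
    }

    #[derive(Debug, PartialEq)]
    pub struct Op {
        pub repr: String, // Debug prints as `Op { repr: "=" }`, matching the tests
    }

    #[derive(Debug, PartialEq)]
    pub enum Token {
        Keyword(Kw),
        Identifier(String),
        // "NumLiteral(3)" is consistent with f64, whose Debug in early-2016
        // Rust printed without a trailing ".0" (assumption about the payload type)
        NumLiteral(f64),
        Operator(Op),
        Newline,
    }

    // Signature inferred from `assert_eq!(tokenize("2.4.5"), None)`; the real
    // implementation is elsewhere in src/tokenizer.rs.
    fn tokenize(_input: &str) -> Option<Vec<Token>> {
        unimplemented!()
    }

    // A tokentest! along these lines would match the call sites: tokenize the
    // input and compare the whole token vector's Debug formatting to a string.
    macro_rules! tokentest {
        ($input:expr, $expected:expr) => {
            assert_eq!(format!("{:?}", tokenize($input).unwrap()), $expected);
        };
    }

Dropping EOF from every expected output suggests the tokenizer no longer emits an explicit end-of-input token, while the Keyword and Operator variants replace the old catch-all Identifier tokens for "let" and the arithmetic operators.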