Fix newline tokenizing bug

Still need to fix <statements> parsing because of the final newline
greg 2015-08-14 23:19:17 -07:00
parent caa331ecdc
commit bb349cda5f
1 changed file with 2 additions and 1 deletion


@@ -42,7 +42,8 @@ pub fn tokenize(input: &str) -> Vec<Token> {
     }
     while let Some(c) = iterator.next() {
-        if char::is_whitespace(c) {
+        if char::is_whitespace(c) && c != '\n' {
             continue;
         } else if c == '"' {