Fix tokenization bug

This commit is contained in:
greg 2016-01-16 10:40:17 -08:00
parent c4ab1ed105
commit 032d01c9f5
1 changed file with 1 addition and 1 deletion

View File

@ -119,7 +119,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
let mut buffer = String::with_capacity(20);
buffer.push(c);
loop {
if iter.peek().map_or(false, |x| ends_identifier(x)) {
if iter.peek().map_or(true, |x| ends_identifier(x)) {
break;
} else {
buffer.push(iter.next().unwrap());