Fix interspersing of newlines in tokenizer infra

This commit is contained in:
greg 2018-03-24 18:38:28 -07:00
parent 78f12c8f1d
commit ecebbb2eae
3 changed files with 4 additions and 3 deletions

View File

@@ -82,7 +82,6 @@ impl ProgrammingLanguageInterface for Schala {
}
};
//symbol table
match self.type_context.add_top_level_types(&ast) {
Ok(()) => (),

View File

@@ -111,9 +111,11 @@ pub fn tokenize(input: &str) -> Vec<Token> {
let mut tokens: Vec<Token> = Vec::new();
let mut input = input.lines().enumerate()
  .intersperse((0, "\n"))
  .flat_map(|(line_idx, ref line)| {
    line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch))
  })
  .peekable();
while let Some((line_idx, ch_idx, c)) = input.next() {
let cur_tok_type = match c {

View File

@@ -4,7 +4,7 @@ fn main() {
a + b
}
//foo
print(main())