Make TokenHandler use an array and index

Instead of a peekable iterator, so I can implement LL(k) parsing
greg 2019-06-14 00:44:54 -07:00
parent 66f71606ef
commit 3a207cf7a7
1 changed file with 6 additions and 13 deletions

@@ -143,8 +143,6 @@
 //!
 use std::rc::Rc;
-use std::iter::Peekable;
-use std::vec::IntoIter;
 use crate::tokenizing::*;
 use crate::tokenizing::Kw::*;
@@ -190,8 +188,7 @@ struct ParserRestrictions {
 }
 struct TokenHandler {
-  tokens: Peekable<IntoIter<Token>>,
-  token_array: Vec<Token>,
+  tokens: Vec<Token>,
   idx: usize,
   end_of_file: (usize, usize),
 }
@@ -202,22 +199,18 @@ impl TokenHandler {
       None => (0, 0),
       Some(t) => (t.line_num, t.char_num)
     };
-    let token_array = tokens.clone();
-    let tokens = tokens.into_iter().peekable();
-    TokenHandler { idx: 0, token_array, tokens, end_of_file }
+    TokenHandler { idx: 0, tokens, end_of_file }
   }
   fn peek_kind(&mut self) -> TokenKind {
-    let old = self.tokens.peek().map(|ref t| { t.kind.clone() }).unwrap_or(TokenKind::EOF);
-    old
+    self.peek().kind
   }
   fn peek(&mut self) -> Token {
-    let old = self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1});
-    old
+    self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
   }
   fn next(&mut self) -> Token {
-    let old = self.tokens.next().unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1});
-    old
+    self.idx += 1;
+    self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
   }
 }
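
The point of the array-and-index layout is that lookahead past a single token becomes a bounds-checked read; the peekable iterator could only ever expose the next token without consuming it. A minimal sketch of the kind of LL(k) helper this enables (peek_n_kind, the trimmed-down TokenHandler, and the simplified Token/TokenKind below are hypothetical illustrations, not part of this commit or of the real crate::tokenizing types):

  // Sketch only, not from this commit: a hypothetical k-token lookahead on
  // top of the same Vec-plus-index representation.
  #[derive(Clone, Debug, PartialEq)]
  enum TokenKind { Identifier(String), Comma, EOF }

  #[derive(Clone, Debug)]
  #[allow(dead_code)]
  struct Token { kind: TokenKind, line_num: usize, char_num: usize }

  struct TokenHandler {
    tokens: Vec<Token>,
    idx: usize,
  }

  impl TokenHandler {
    // Look k tokens past the current position without advancing idx;
    // k == 0 is the token peek() would return. Reads past the end yield EOF.
    fn peek_n_kind(&self, k: usize) -> TokenKind {
      self.tokens.get(self.idx + k)
        .map(|t| t.kind.clone())
        .unwrap_or(TokenKind::EOF)
    }
  }

  fn main() {
    let handler = TokenHandler {
      tokens: vec![
        Token { kind: TokenKind::Identifier("a".into()), line_num: 0, char_num: 0 },
        Token { kind: TokenKind::Comma, line_num: 0, char_num: 1 },
      ],
      idx: 0,
    };
    // An LL(2) decision: inspect the token after the identifier without consuming it.
    assert_eq!(handler.peek_n_kind(1), TokenKind::Comma);
    // Looking past the end of the token array falls back to EOF instead of panicking.
    assert_eq!(handler.peek_n_kind(5), TokenKind::EOF);
  }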