From 9b760244d5173bf53a96c4adabef55cdb4d31bfd Mon Sep 17 00:00:00 2001
From: greg
Date: Fri, 2 Mar 2018 15:21:48 -0800
Subject: [PATCH] Include line count in token debug

---
 src/schala_lang/mod.rs        | 2 +-
 src/schala_lang/parsing.rs    | 2 +-
 src/schala_lang/tokenizing.rs | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/schala_lang/mod.rs b/src/schala_lang/mod.rs
index 87161bb..1b29e33 100644
--- a/src/schala_lang/mod.rs
+++ b/src/schala_lang/mod.rs
@@ -42,7 +42,7 @@ impl ProgrammingLanguageInterface for Schala {
     let mut output = ReplOutput::default();
     let tokens = tokenizing::tokenize(input);
     if options.debug_tokens {
-      let token_string = tokens.iter().map(|t| format!("{:?}<{}>", t.token_type, t.offset)).join(", ");
+      let token_string = tokens.iter().map(|t| format!("{:?}<{},{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
       output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
     }

diff --git a/src/schala_lang/parsing.rs b/src/schala_lang/parsing.rs
index 304d8d8..434b702 100644
--- a/src/schala_lang/parsing.rs
+++ b/src/schala_lang/parsing.rs
@@ -135,7 +135,7 @@ impl Parser {
     self.tokens.peek().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
   }
   fn peek_with_token_offset(&mut self) -> Token {
-    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: 0})
+    self.tokens.peek().map(|t: &Token| { t.clone()}).unwrap_or(Token { token_type: TokenType::EOF, offset: (0,0)})
   }
   fn next(&mut self) -> TokenType {
     self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
   }
diff --git a/src/schala_lang/tokenizing.rs b/src/schala_lang/tokenizing.rs
index a5125e1..0c2a6df 100644
--- a/src/schala_lang/tokenizing.rs
+++ b/src/schala_lang/tokenizing.rs
@@ -70,7 +70,7 @@ lazy_static! {
 #[derive(Debug, Clone)]
 pub struct Token {
   pub token_type: TokenType,
-  pub offset: usize,
+  pub offset: (usize, usize),
 }

 impl Token {
@@ -104,7 +104,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {

   //let mut input: CharIter = input.chars().enumerate().peekable();

-  while let Some((_, idx, c)) = input.next() {
+  while let Some((line_idx, ch_idx, c)) = input.next() {
     let cur_tok_type = match c {
       '#' => {
         if let Some(&(_, _, '{')) = input.peek() {
@@ -129,7 +129,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
       c if is_operator(&c) => handle_operator(c, &mut input),
       unknown => Error(format!("Unexpected character: {}", unknown)),
     };
-    tokens.push(Token { token_type: cur_tok_type, offset: idx });
+    tokens.push(Token { token_type: cur_tok_type, offset: (line_idx, ch_idx) });
   }
   tokens
 }
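
Note on the iterator shape assumed by the final two hunks: after this patch, input.next() yields (line, column, char) triples instead of a flat character index, and the commented-out CharIter line suggests the iterator is built from the source string up front. The sketch below shows one way such a triple-producing iterator could be written in plain Rust. The helper name line_column_chars and the main driver are hypothetical and not part of this patch, and the sketch deliberately skips the newline characters themselves, which the real tokenizer would still need to see if it emits newline tokens.

    /// Illustrative sketch only: yield (line, column, char) triples from the
    /// source text, so each Token can carry a (usize, usize) offset as in the
    /// patch above. Column numbering restarts at 0 on every line.
    fn line_column_chars(input: &str) -> impl Iterator<Item = (usize, usize, char)> + '_ {
        input.lines().enumerate().flat_map(|(line_idx, line)| {
            line.chars()
                .enumerate()
                .map(move |(ch_idx, c)| (line_idx, ch_idx, c))
        })
    }

    fn main() {
        // A tokenizer would call .peekable() on this iterator and drive it with
        // a `while let Some((line_idx, ch_idx, c)) = input.next()` loop, as the
        // final hunk above does.
        for (line_idx, ch_idx, c) in line_column_chars("let a = 1\nprint(a)") {
            println!("{}:{} {:?}", line_idx, ch_idx, c);
        }
    }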