From 9ab1ca28f8e3a39f91f8a957825984e24e27788d Mon Sep 17 00:00:00 2001
From: greg
Date: Fri, 2 Mar 2018 22:11:25 -0800
Subject: [PATCH] Improve tokenizer debug output

---
 src/schala_lang/parsing.rs      |  7 +++----
 src/schala_lang/tokenizing.rs   | 22 ++++++++++++++++++++++
 src/schala_lang/typechecking.rs |  2 +-
 3 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/src/schala_lang/parsing.rs b/src/schala_lang/parsing.rs
index 434b702..b59b0c0 100644
--- a/src/schala_lang/parsing.rs
+++ b/src/schala_lang/parsing.rs
@@ -264,7 +264,7 @@ macro_rules! parse_method {
     let next_token = $self.peek_with_token_offset();
     let record = ParseRecord {
       production_name: stringify!($name).to_string(),
-      next_token: format!("{:?}", next_token),
+      next_token: format!("{}", next_token.to_string_with_metadata()),
       level: $self.parse_level,
     };
     $self.parse_level += 1;
@@ -525,10 +525,9 @@ impl Parser {
   // this implements Pratt parsing, see http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
   fn precedence_expr(&mut self, precedence: i32) -> ParseResult<Expression> {
-    let next_token = self.peek();
     let record = ParseRecord {
       production_name: "precedence_expr".to_string(),
-      next_token: format!("{:?}", next_token),
+      next_token: format!("{}", self.peek_with_token_offset().to_string_with_metadata()),
       level: self.parse_level,
     };
     self.parse_level += 1;
@@ -830,7 +829,7 @@ pub fn parse(input: Vec<Token>) -> (Result<AST, ParseError>, Vec<String>) {
     for _ in 0..r.level {
       indent.push(' ');
     }
-    format!("{}Production `{}`, token: {:?}", indent, r.production_name, r.next_token)
+    format!("{}Production `{}`, token: {}", indent, r.production_name, r.next_token)
   }).collect();
   (ast, trace)
 }
diff --git a/src/schala_lang/tokenizing.rs b/src/schala_lang/tokenizing.rs
index 0c02ae2..dd62519 100644
--- a/src/schala_lang/tokenizing.rs
+++ b/src/schala_lang/tokenizing.rs
@@ -3,6 +3,8 @@
 use std::collections::HashMap;
 use std::rc::Rc;
 use std::iter::{Iterator, Enumerate, Peekable, FlatMap};
 use std::str::{Lines, Chars};
+use std::fmt;
+use std::fmt::Write;
 #[derive(Debug, PartialEq, Clone)]
 pub enum TokenType {
@@ -28,6 +30,20 @@
 }
 use self::TokenType::*;
 
+impl fmt::Display for TokenType {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    match self {
+      &Operator(ref s) => write!(f, "Operator({})", **s),
+      &DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
+      &HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
+      &StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
+      &Identifier(ref s) => write!(f, "Identifier({})", s),
+      &Error(ref s) => write!(f, "Error({})", s),
+      other => write!(f, "{:?}", other),
+    }
+  }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum Kw {
   If, Else,
@@ -80,6 +96,12 @@ impl Token {
       _ => None,
     }
   }
+  pub fn to_string(&self) -> String {
+    format!("{}", self.token_type)
+  }
+  pub fn to_string_with_metadata(&self) -> String {
+    format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
+  }
 }
 
 const OPERATOR_CHARS: [char; 19] = ['!', '$', '%', '&', '*', '+', '-', '.', '/', ':', '<', '>', '=', '?', '@', '^', '|', '~', '`'];
diff --git a/src/schala_lang/typechecking.rs b/src/schala_lang/typechecking.rs
index 3f1853b..44cbd11 100644
--- a/src/schala_lang/typechecking.rs
+++ b/src/schala_lang/typechecking.rs
@@ -118,7 +118,7 @@ impl TypeContext {
   pub fn debug_symbol_table(&self) -> String {
     let mut output = format!("Symbols\n");
     for (sym, ty) in &self.bindings {
-      write!(output, "{} : {}\n", sym, ty);
+      write!(output, "{} : {}\n", sym, ty).unwrap();
     }
     output
   }
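
Note (illustration only, not part of the patch): the standalone sketch below mimics the new Display impl and to_string_with_metadata() on a stripped-down Token/TokenType pair, to show the shape of the debug strings the parse trace now records. The two-variant enum, the (line, column) reading of the offset field, and the sample token values are assumptions made for this sketch; the real definitions live in src/schala_lang/tokenizing.rs.

use std::fmt;

// Simplified stand-in for the real TokenType enum.
#[derive(Debug, PartialEq, Clone)]
enum TokenType {
  LParen,
  Identifier(String),
}
use TokenType::*;

impl fmt::Display for TokenType {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      // Data-carrying variants print their payload; the rest fall back to Debug.
      &Identifier(ref s) => write!(f, "Identifier({})", s),
      other => write!(f, "{:?}", other),
    }
  }
}

// Simplified stand-in for the real Token struct.
struct Token {
  token_type: TokenType,
  offset: (usize, usize), // assumed (line, column), matching offset.0 / offset.1 in the patch
}

impl Token {
  fn to_string_with_metadata(&self) -> String {
    format!("{}(L:{},c:{})", self.token_type, self.offset.0, self.offset.1)
  }
}

fn main() {
  let ident = Token { token_type: Identifier("foo".to_string()), offset: (0, 4) };
  let paren = Token { token_type: LParen, offset: (0, 3) };
  println!("{}", ident.to_string_with_metadata()); // Identifier(foo)(L:0,c:4)
  println!("{}", paren.to_string_with_metadata()); // LParen(L:0,c:3)
}

Compared with the old format!("{:?}", next_token), each "Production `...`, token: ..." line in the parse trace now carries a compact token description plus its line/column position instead of the full Debug dump of the token.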