Cleaning up some types

This commit is contained in:
greg 2017-01-23 19:45:26 -08:00
parent fd4610e175
commit 178434171e
5 changed files with 17 additions and 21 deletions

View File

@@ -4,6 +4,12 @@ pub struct TokenError {
pub msg: String, pub msg: String,
} }
impl TokenError {
// Convenience constructor: copies the borrowed message into an owned
// String so the error can outlive the caller's buffer. Mirrors the
// TokenizeError::new the rest of this commit removes.
pub fn new(msg: &str) -> TokenError {
TokenError { msg: msg.to_string() }
}
}
pub struct ParseError { pub struct ParseError {
pub msg: String, pub msg: String,
} }

View File

@@ -12,7 +12,7 @@ mod schala_lang;
use schala_lang::eval::Evaluator; use schala_lang::eval::Evaluator;
use schala_lang::Schala; use schala_lang::Schala;
use language::{ProgrammingLanguage, ParseError, TokenError, LLVMCodeString}; use language::{ProgrammingLanguage, LLVMCodeString};
mod language; mod language;
mod llvm_wrap; mod llvm_wrap;

View File

@@ -1,7 +1,6 @@
extern crate llvm_sys; extern crate llvm_sys;
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::File;
use self::llvm_sys::prelude::*; use self::llvm_sys::prelude::*;
use self::llvm_sys::{LLVMIntPredicate, LLVMRealPredicate}; use self::llvm_sys::{LLVMIntPredicate, LLVMRealPredicate};

View File

@@ -12,7 +12,7 @@ impl<'a> ProgrammingLanguage<eval::Evaluator<'a>> for Schala {
type AST = parser::AST; type AST = parser::AST;
fn tokenize(input: &str) -> Result<Vec<Self::Token>, TokenError> { fn tokenize(input: &str) -> Result<Vec<Self::Token>, TokenError> {
tokenizer::tokenize(input).map_err(|x| TokenError { msg: x.msg }) tokenizer::tokenize(input)
} }
fn parse(input: Vec<Self::Token>) -> Result<Self::AST, ParseError> { fn parse(input: Vec<Self::Token>) -> Result<Self::AST, ParseError> {

View File

@@ -5,6 +5,8 @@ use std::str::Chars;
use self::itertools::Itertools; use self::itertools::Itertools;
use std::rc::Rc; use std::rc::Rc;
use language::TokenError;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum Token { pub enum Token {
Newline, Newline,
@@ -38,18 +40,7 @@ pub enum Kw {
Null, Null,
} }
pub type TokenizeResult = Result<Vec<Token>, TokenizeError>; pub type TokenizeResult = Result<Vec<Token>, TokenError>;
#[derive(Debug)]
pub struct TokenizeError {
pub msg: String,
}
impl TokenizeError {
fn new(msg: &str) -> TokenizeError {
TokenizeError { msg: msg.to_string() }
}
}
fn is_digit(c: &char) -> bool { fn is_digit(c: &char) -> bool {
c.is_digit(10) c.is_digit(10)
@@ -89,27 +80,27 @@ pub fn tokenize(input: &str) -> TokenizeResult {
Ok(tokens) Ok(tokens)
} }
fn tokenize_str(iter: &mut Peekable<Chars>) -> Result<Token, TokenizeError> { fn tokenize_str(iter: &mut Peekable<Chars>) -> Result<Token, TokenError> {
let mut buffer = String::new(); let mut buffer = String::new();
loop { loop {
// TODO handle string escapes, interpolation // TODO handle string escapes, interpolation
match iter.next() { match iter.next() {
Some(x) if x == '"' => break, Some(x) if x == '"' => break,
Some(x) => buffer.push(x), Some(x) => buffer.push(x),
None => return Err(TokenizeError::new("Unclosed quote")), None => return Err(TokenError::new("Unclosed quote")),
} }
} }
Ok(Token::StrLiteral(Rc::new(buffer))) Ok(Token::StrLiteral(Rc::new(buffer)))
} }
fn tokenize_operator(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenizeError> { fn tokenize_operator(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenError> {
let mut buffer = String::new(); let mut buffer = String::new();
buffer.push(c); buffer.push(c);
buffer.extend(iter.peeking_take_while(|x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x))); buffer.extend(iter.peeking_take_while(|x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x)));
Ok(Token::Operator(OpTok(Rc::new(buffer)))) Ok(Token::Operator(OpTok(Rc::new(buffer))))
} }
fn tokenize_number_or_period(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenizeError> { fn tokenize_number_or_period(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenError> {
if c == '.' && !iter.peek().map_or(false, is_digit) { if c == '.' && !iter.peek().map_or(false, is_digit) {
return Ok(Token::Period); return Ok(Token::Period);
} }
@@ -120,11 +111,11 @@ fn tokenize_number_or_period(c: char, iter: &mut Peekable<Chars>) -> Result<Toke
match buffer.parse::<f64>() { match buffer.parse::<f64>() {
Ok(f) => Ok(Token::NumLiteral(f)), Ok(f) => Ok(Token::NumLiteral(f)),
Err(_) => Err(TokenizeError::new("Failed to parse digit")), Err(_) => Err(TokenError::new("Failed to parse digit")),
} }
} }
fn tokenize_identifier(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenizeError> { fn tokenize_identifier(c: char, iter: &mut Peekable<Chars>) -> Result<Token, TokenError> {
fn ends_identifier(c: &char) -> bool { fn ends_identifier(c: &char) -> bool {
let c = *c; let c = *c;
char::is_whitespace(c) || is_digit(&c) || c == ';' || c == '(' || c == ')' || char::is_whitespace(c) || is_digit(&c) || c == ';' || c == '(' || c == ')' ||