Transition to edition 2018

greg 2019-01-07 13:00:37 -08:00
parent 801c90aaa7
commit 30676722a3
9 changed files with 36 additions and 35 deletions
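The change is mechanical: the 2018 edition alters how use paths are resolved, and the standard migration for crate-local imports is to anchor them explicitly with crate:: (or self::/super::). That is what every hunk below does, along with adding the edition key to Cargo.toml. A minimal sketch of the pattern, borrowing the tokenizing module name from the diffs; the module body and its Identifier variant are placeholders, not the real schala-lang code:

// Build with edition = "2018" set in Cargo.toml.
mod tokenizing {
    #[derive(Debug)]
    pub enum TokenKind {
        Identifier, // placeholder variant, for illustration only
    }
}

// Rust 2015 style (what this commit removes): use tokenizing::TokenKind;
// Rust 2018 style (what this commit adds): the path is anchored at the crate root.
use crate::tokenizing::TokenKind;

fn main() {
    // The import resolves through the explicit crate:: anchor.
    println!("{:?}", TokenKind::Identifier);
}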

View File

@@ -2,6 +2,7 @@
name = "schala-lang"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
+edition = "2018"
[dependencies]
itertools = "0.5.8"

View File

@@ -1,7 +1,7 @@
use std::rc::Rc;
use std::convert::From;
-use builtin::{BinOp, PrefixOp};
+use crate::builtin::{BinOp, PrefixOp};
#[derive(Clone, Debug, PartialEq)]
pub struct Node<T> {

View File

@@ -2,7 +2,7 @@ use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
-use tokenizing::TokenKind;
+use crate::tokenizing::TokenKind;
use self::BuiltinTypeSpecifier::*;
use self::BuiltinTConst::*;

View File

@@ -6,9 +6,9 @@ use std::io;
use itertools::Itertools;
-use util::ScopeStack;
-use reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
-use symbol_table::{SymbolSpec, Symbol, SymbolTable};
+use crate::util::ScopeStack;
+use crate::reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
+use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable};
pub struct State<'a> {
values: ScopeStack<'a, Rc<String>, ValueEntry>,
@@ -86,7 +86,7 @@ impl Node {
}
fn is_true(&self) -> bool {
match self {
-Node::Expr(Expr::Lit(::reduced_ast::Lit::Bool(true))) => true,
+Node::Expr(Expr::Lit(crate::reduced_ast::Lit::Bool(true))) => true,
_ => false,
}
}
@@ -497,14 +497,14 @@ mod eval_tests {
use std::cell::RefCell;
use std::rc::Rc;
-use tokenizing::{Token, tokenize};
-use ::parsing::ParseResult;
-use ::ast::AST;
-use symbol_table::SymbolTable;
-use eval::State;
+use crate::tokenizing::{Token, tokenize};
+use crate::parsing::ParseResult;
+use crate::ast::AST;
+use crate::symbol_table::SymbolTable;
+use crate::eval::State;
fn parse(tokens: Vec<Token>) -> ParseResult<AST> {
-let mut parser = ::parsing::Parser::new(tokens);
+let mut parser = crate::parsing::Parser::new(tokens);
parser.parse()
}

View File

@@ -110,7 +110,7 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedCom
}
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
-use parsing::Parser;
+use crate::parsing::Parser;
let mut parser = match handle.active_parser.take() {
None => Parser::new(input),

View File

@@ -2,13 +2,13 @@ use std::rc::Rc;
use std::iter::Peekable;
use std::vec::IntoIter;
-use tokenizing::*;
-use tokenizing::Kw::*;
-use tokenizing::TokenKind::*;
+use crate::tokenizing::*;
+use crate::tokenizing::Kw::*;
+use crate::tokenizing::TokenKind::*;
-use ast::*;
+use crate::ast::*;
-use builtin::{BinOp, PrefixOp};
+use crate::builtin::{BinOp, PrefixOp};
#[derive(Debug)]
pub struct ParseError {
@@ -1126,8 +1126,8 @@ mod parse_tests {
use ::std::rc::Rc;
use super::tokenize;
use super::ParseResult;
-use builtin::{PrefixOp, BinOp};
-use ast::{AST, Node, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
+use crate::builtin::{PrefixOp, BinOp};
+use crate::ast::{AST, Node, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
use super::Statement::*;
use super::Declaration::*;
use super::Signature;
@@ -1138,7 +1138,7 @@ mod parse_tests {
use super::ForBody::*;
fn parse(input: &str) -> ParseResult<AST> {
-let tokens: Vec<::tokenizing::Token> = tokenize(input);
+let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
let mut parser = super::Parser::new(tokens);
parser.parse()
}
@@ -1169,7 +1169,7 @@
($expr_type:expr) => { Expression($expr_type, None) };
(s $expr_text:expr) => {
{
-let tokens: Vec<::tokenizing::Token> = tokenize($expr_text);
+let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
let mut parser = super::Parser::new(tokens);
parser.expression().unwrap()
}
@@ -1188,7 +1188,7 @@
($op:expr, $lhs:expr, $rhs:expr) => { Node::new(Statement::ExpressionStatement(ex!(binexp!($op, $lhs, $rhs)))) };
(s $statement_text:expr) => {
{
-let tokens: Vec<::tokenizing::Token> = tokenize($statement_text);
+let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
let mut parser = super::Parser::new(tokens);
Node::new(parser.statement().unwrap())
}

View File

@@ -1,8 +1,8 @@
use std::rc::Rc;
-use ast::*;
-use symbol_table::{Symbol, SymbolSpec, SymbolTable};
-use builtin::{BinOp, PrefixOp};
+use crate::ast::*;
+use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable};
+use crate::builtin::{BinOp, PrefixOp};
#[derive(Debug)]
pub struct ReducedAST(pub Vec<Stmt>);
@@ -106,7 +106,7 @@ impl AST {
impl Statement {
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
-use ast::Statement::*;
+use crate::ast::Statement::*;
match self {
ExpressionStatement(expr) => Stmt::Expr(expr.node().reduce(symbol_table)),
Declaration(decl) => decl.reduce(symbol_table),
@@ -120,7 +120,7 @@ fn reduce_block(block: &Block, symbol_table: &SymbolTable) -> Vec<Stmt> {
impl Expression {
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
-use ast::ExpressionType::*;
+use crate::ast::ExpressionType::*;
let ref input = self.0;
match input {
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
@@ -356,7 +356,7 @@ impl PatternLiteral {
impl Declaration {
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
use self::Declaration::*;
-use ::ast::Signature;
+use crate::ast::Signature;
match self {
Binding {name, constant, expr } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.reduce(symbol_table) },
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {

View File

@@ -3,8 +3,8 @@ use std::rc::Rc;
use std::fmt;
use std::fmt::Write;
-use ast;
-use typechecking::TypeName;
+use crate::ast;
+use crate::typechecking::TypeName;
//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {

View File

@@ -1,7 +1,7 @@
use std::rc::Rc;
-use ast::*;
-use util::ScopeStack;
+use crate::ast::*;
+use crate::util::ScopeStack;
pub type TypeName = Rc<String>;
@@ -229,8 +229,8 @@ mod tests {
use super::*;
fn parse(input: &str) -> AST {
-let tokens: Vec<::tokenizing::Token> = ::tokenizing::tokenize(input);
-let mut parser = ::parsing::Parser::new(tokens);
+let tokens: Vec<crate::tokenizing::Token> = crate::tokenizing::tokenize(input);
+let mut parser = crate::parsing::Parser::new(tokens);
parser.parse().unwrap()
}