Implement Access AST node

For name.value lookups
Greg Shuflin 2021-10-30 21:22:15 -07:00
parent 1c6545fb74
commit f0e4b50c99
6 changed files with 1144 additions and 1078 deletions
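Note: the diff below adds an Access { name, expr } variant to ExpressionKind and teaches extended_expr to build it whenever a '.' followed by an identifier appears after an expression. As a rough standalone sketch of the resulting shape (simplified Expr type for illustration only, not the project's actual AST, which also carries node IDs and a QualifiedName), a chained lookup like a.b.c parses into left-nested Access nodes, matching the assertions in the test file at the bottom of this diff:

// Minimal sketch: left-nested Access nodes for `a.b.c`
use std::rc::Rc;

#[derive(Debug)]
enum Expr {
    Value(Rc<String>),
    Access { name: Rc<String>, expr: Box<Expr> },
}

fn main() {
    let a = Expr::Value(Rc::new("a".to_string()));
    let a_b = Expr::Access { name: Rc::new("b".to_string()), expr: Box::new(a) };
    let a_b_c = Expr::Access { name: Rc::new("c".to_string()), expr: Box::new(a_b) };
    // Prints Access { name: "c", expr: Access { name: "b", expr: Value("a") } }
    println!("{:?}", a_b_c);
}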

View File

@ -196,6 +196,7 @@ pub enum ExpressionKind {
WhileExpression { condition: Option<Box<Expression>>, body: Block },
ForExpression { enumerators: Vec<Enumerator>, body: Box<ForBody> },
Lambda { params: Vec<FormalParam>, type_anno: Option<TypeIdentifier>, body: Block },
Access { name: Rc<String>, expr: Box<Expression> },
ListLiteral(Vec<Expression>),
}

View File

@ -128,6 +128,9 @@ pub fn walk_expression<V: ASTVisitor>(v: &mut V, expr: &Expression) {
Lambda { params: _, type_anno: _, body } => {
walk_block(v, body);
}
Access { name: _, expr } => {
walk_expression(v, expr);
}
ListLiteral(exprs) =>
for expr in exprs {
walk_expression(v, expr);

View File

@ -120,6 +120,7 @@ fn render_expression(expr: &Expression, indent: usize, buf: &mut String) {
newline(buf);
do_indent(indent, buf);
}
Access { .. } => buf.push_str("<access-expr>"),
ListLiteral(..) => buf.push_str("<list-literal>"),
}
buf.push(')');

View File

@ -166,29 +166,28 @@
//!
//! module := 'module' IDENTIFIER '{' statement* '}'
//! ```
mod test;
mod new_tests;
mod test;
use std::rc::Rc;
use crate::tokenizing::*;
use crate::tokenizing::Kw::*;
use crate::tokenizing::TokenKind::*;
use crate::tokenizing::Location;
use crate::ast::*;
use crate::identifier::IdStore;
use crate::{
ast::*,
identifier::IdStore,
tokenizing::{Kw::*, Location, TokenKind::*, *},
};
/// Represents a parsing error
#[derive(Debug)]
pub struct ParseError {
pub production_name: Option<String>,
pub msg: String,
pub token: Token
pub token: Token,
}
impl ParseError {
fn new_with_token<T, M>(msg: M, token: Token) -> ParseResult<T> where M: Into<String> {
fn new_with_token<T, M>(msg: M, token: Token) -> ParseResult<T>
where M: Into<String> {
Err(ParseError { msg: msg.into(), token, production_name: None })
}
}
@ -212,15 +211,14 @@ pub struct Parser {
id_store: IdStore<ASTItem>,
}
struct ParserRestrictions {
no_struct_literal: bool
no_struct_literal: bool,
}
struct TokenHandler {
tokens: Vec<Token>,
idx: usize,
end_of_file: Location
end_of_file: Location,
}
impl TokenHandler {
@ -240,15 +238,24 @@ impl TokenHandler {
self.peek_n(n).kind
}
fn peek(&mut self) -> Token {
self.tokens.get(self.idx).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
self.tokens
.get(self.idx)
.cloned()
.unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
/// calling peek_n(0) is the same thing as peek()
fn peek_n(&mut self, n: usize) -> Token {
self.tokens.get(self.idx + n).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
self.tokens
.get(self.idx + n)
.cloned()
.unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
fn next(&mut self) -> Token {
self.idx += 1;
self.tokens.get(self.idx - 1).cloned().unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
self.tokens
.get(self.idx - 1)
.cloned()
.unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
}
@ -296,7 +303,9 @@ impl Parser {
}
macro_rules! print_token_pattern {
($tokenpattern:pat) => { stringify!($tokenpattern) }
($tokenpattern:pat) => {
stringify!($tokenpattern)
};
}
macro_rules! expect {
@ -322,8 +331,7 @@ macro_rules! delimited {
($self:expr, $start:pat, $parse_fn:ident, $delim:pat, $end:pat) => {
delimited!($self, $start, $parse_fn, $delim, $end, true)
};
($self:expr, $start:pat, $parse_fn:ident, $delim:pat, $end:pat, $strictness:expr) => {
{
($self:expr, $start:pat, $parse_fn:ident, $delim:pat, $end:pat, $strictness:expr) => {{
expect!($self, $start);
let mut acc = vec![];
loop {
@ -331,29 +339,34 @@ macro_rules! delimited {
match peek.get_kind() {
$end | EOF => break,
Newline | Semicolon => {
$self.token_handler.next(); continue;
},
$self.token_handler.next();
continue;
}
_ => (),
}
if !$strictness {
match peek.get_kind() {
$delim => { $self.token_handler.next(); continue },
_ => ()
$delim => {
$self.token_handler.next();
continue;
}
_ => (),
}
}
acc.push($self.$parse_fn()?);
match $self.token_handler.peek().get_kind() {
$delim => { $self.token_handler.next(); continue },
$delim => {
$self.token_handler.next();
continue;
}
_ if $strictness => break,
_ => continue,
};
}
expect!($self, $end);
acc
}};
}
};
}
impl Parser {
/// `program := (statement delimiter)* EOF`
@ -367,10 +380,8 @@ impl Parser {
Newline | Semicolon => {
self.token_handler.next();
continue;
},
_ => statements.push(
self.statement()?
),
}
_ => statements.push(self.statement()?),
}
}
Ok(AST { id: self.id_store.fresh(), statements: statements.into() })
@ -383,20 +394,19 @@ impl Parser {
let tok = self.token_handler.peek();
let kind = match tok.get_kind() {
AtSign => self.annotation().map(StatementKind::Declaration),
Keyword(Type) => self.type_declaration().map(|decl| { StatementKind::Declaration(decl) }),
Keyword(Func)=> self.func_declaration().map(|func| { StatementKind::Declaration(func) }),
Keyword(Type) => self.type_declaration().map(|decl| StatementKind::Declaration(decl)),
Keyword(Func) => self.func_declaration().map(|func| StatementKind::Declaration(func)),
Keyword(Let) => self.binding_declaration().map(StatementKind::Declaration),
Keyword(Interface) => self.interface_declaration().map(StatementKind::Declaration),
Keyword(Impl) => self.impl_declaration().map(StatementKind::Declaration),
Keyword(Import) => self.import_declaration().map(StatementKind::Import),
Keyword(Module) => self.module_declaration().map(StatementKind::Module),
_ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
_ => self.expression().map(|expr| StatementKind::Expression(expr)),
}?;
let id = self.id_store.fresh();
Ok(Statement { kind, id, location: tok.location })
}
#[recursive_descent_method]
fn annotation(&mut self) -> ParseResult<Declaration> {
expect!(self, AtSign);
@ -407,10 +417,7 @@ impl Parser {
vec![]
};
Ok(Declaration::Annotation {
name,
arguments,
})
Ok(Declaration::Annotation { name, arguments })
}
#[recursive_descent_method]
@ -463,19 +470,16 @@ impl Parser {
LParen => {
let tuple_members = delimited!(self, LParen, type_name, Comma, RParen);
VariantKind::TupleStruct(tuple_members)
},
}
LCurlyBrace => {
println!("ARAH");
let typed_identifier_list = delimited!(self, LCurlyBrace, typed_identifier, Comma, RCurlyBrace);
let typed_identifier_list =
delimited!(self, LCurlyBrace, typed_identifier, Comma, RCurlyBrace);
VariantKind::Record(typed_identifier_list)
},
_ => VariantKind::UnitStruct
}
_ => VariantKind::UnitStruct,
};
Ok(Variant {
id: self.id_store.fresh(),
name,
kind
})
Ok(Variant { id: self.id_store.fresh(), name, kind })
}
#[recursive_descent_method]
@ -505,8 +509,8 @@ impl Parser {
let name = s;
self.token_handler.next();
(name, true)
},
_ => (self.identifier()?, false)
}
_ => (self.identifier()?, false),
};
let params = self.formal_param_list()?;
let type_anno = match self.token_handler.peek_kind() {
@ -518,7 +522,8 @@ impl Parser {
#[recursive_descent_method]
fn nonempty_func_body(&mut self) -> ParseResult<Vec<Statement>> {
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
let statements =
delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
Ok(statements)
}
@ -532,14 +537,14 @@ impl Parser {
let name = self.identifier()?;
let anno = match self.token_handler.peek_kind() {
Colon => Some(self.type_anno()?),
_ => None
_ => None,
};
let default = match self.token_handler.peek_kind() {
Equals => {
self.token_handler.next();
Some(self.expression()?)
},
_ => None
}
_ => None,
};
Ok(FormalParam { name, anno, default })
}
@ -552,14 +557,11 @@ impl Parser {
self.token_handler.next();
false
}
_ => true
_ => true,
};
let name = self.identifier()?;
let type_anno = if let Colon = self.token_handler.peek_kind() {
Some(self.type_anno()?)
} else {
None
};
let type_anno =
if let Colon = self.token_handler.peek_kind() { Some(self.type_anno()?) } else { None };
expect!(self, Equals);
let expr = self.expression()?;
@ -595,8 +597,11 @@ impl Parser {
Ok(match (first, second) {
(interface_name, Some(type_name)) =>
Declaration::Impl { type_name, interface_name: Some(interface_name), block },
(type_singleton_name, None) =>
Declaration::Impl { type_name: TypeIdentifier::Singleton(type_singleton_name), interface_name: None, block }
(type_singleton_name, None) => Declaration::Impl {
type_name: TypeIdentifier::Singleton(type_singleton_name),
interface_name: None,
block,
},
})
}
@ -610,7 +615,7 @@ impl Parser {
let mut expr_body = self.precedence_expr(BinOp::min_precedence())?;
let type_anno = match self.token_handler.peek_kind() {
Colon => Some(self.type_anno()?),
_ => None
_ => None,
};
if expr_body.type_anno.is_some() {
return ParseError::new_with_token("Bad parse state encountered", self.token_handler.peek());
@ -641,7 +646,7 @@ impl Parser {
params: match self.token_handler.peek_kind() {
LAngleBracket => delimited!(self, LAngleBracket, type_name, Comma, RAngleBracket),
_ => vec![],
}
},
})
}
@ -670,10 +675,13 @@ impl Parser {
let operation = match BinOp::from_sigil_token(&next_tok.kind) {
Some(sigil) => sigil,
//TODO I think I can fix this unreachable
None => unreachable!()
None => unreachable!(),
};
let rhs = self.precedence_expr(new_precedence)?;
lhs = Expression::new(self.id_store.fresh(), ExpressionKind::BinExp(operation, Box::new(lhs), Box::new(rhs)));
lhs = Expression::new(
self.id_store.fresh(),
ExpressionKind::BinExp(operation, Box::new(lhs), Box::new(rhs)),
);
}
self.parse_level -= 1;
Ok(lhs)
@ -691,10 +699,10 @@ impl Parser {
let prefix_op = PrefixOp::from_sigil(sigil.as_str());
Ok(Expression::new(
self.id_store.fresh(),
ExpressionKind::PrefixExp(prefix_op, Box::new(expr))
ExpressionKind::PrefixExp(prefix_op, Box::new(expr)),
))
},
_ => self.extended_expr()
}
_ => self.extended_expr(),
}
}
@ -705,24 +713,34 @@ impl Parser {
//TODO need a next non whitespace
let next = self.token_handler.peek_kind();
match next {
Period => unimplemented!(),
Period => {
self.token_handler.next();
let name = self.identifier()?;
expression = Expression::new(
self.id_store.fresh(),
ExpressionKind::Access { name, expr: Box::new(expression) },
);
}
LSquareBracket => {
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
if indexers.is_empty() {
return ParseError::new_with_token("Empty index expressions are not allowed", self.token_handler.peek());
return ParseError::new_with_token(
"Empty index expressions are not allowed",
self.token_handler.peek(),
);
}
expression = Expression::new(
self.id_store.fresh(),
ExpressionKind::Index { indexee: Box::new(expression), indexers },
);
}
expression = Expression::new(self.id_store.fresh(), ExpressionKind::Index {
indexee: Box::new(expression),
indexers,
});
},
LParen => {
let arguments = delimited!(self, LParen, invocation_argument, Comma, RParen);
expression = Expression::new(self.id_store.fresh(), ExpressionKind::Call {
f: Box::new(expression),
arguments,
});
},
expression = Expression::new(
self.id_store.fresh(),
ExpressionKind::Call { f: Box::new(expression), arguments },
);
}
_ => break,
}
}
@ -735,22 +753,20 @@ impl Parser {
Underscore => {
self.token_handler.next();
InvocationArgument::Ignored
},
Identifier(s) => {
match self.token_handler.peek_kind_n(1) {
}
Identifier(s) => match self.token_handler.peek_kind_n(1) {
Equals => {
self.token_handler.next();
self.token_handler.next();
let expr = self.expression()?;
InvocationArgument::Keyword { name: s, expr }
},
}
_ => {
let expr = self.expression()?;
InvocationArgument::Positional(expr)
}
}
},
_ => InvocationArgument::Positional(self.expression()?)
_ => InvocationArgument::Positional(self.expression()?),
})
}
@ -760,12 +776,18 @@ impl Parser {
Ok(if let LSquareBracket = self.token_handler.peek_kind() {
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
if indexers.is_empty() {
return ParseError::new_with_token("Empty index expressions are not allowed", self.token_handler.peek());
return ParseError::new_with_token(
"Empty index expressions are not allowed",
self.token_handler.peek(),
);
}
Expression::new(self.id_store.fresh(), ExpressionKind::Index {
Expression::new(
self.id_store.fresh(),
ExpressionKind::Index {
indexee: Box::new(Expression::new(self.id_store.fresh(), primary.kind)),
indexers,
})
},
)
} else {
primary
})
@ -806,7 +828,8 @@ impl Parser {
_ => None,
};
let body = self.nonempty_func_body()?.into();
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::Lambda { params, type_anno, body })) //TODO need to handle types somehow
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::Lambda { params, type_anno, body }))
//TODO need to handle types somehow
}
#[recursive_descent_method]
@ -829,7 +852,7 @@ impl Parser {
match inner.len() {
0 => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(vec![]))),
1 => Ok(inner.pop().unwrap()),
_ => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(inner)))
_ => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(inner))),
}
};
self.restrictions.no_struct_literal = old_struct_value;
@ -845,8 +868,8 @@ impl Parser {
LCurlyBrace if !self.restrictions.no_struct_literal => {
let fields = self.record_block()?;
Expression::new(self.id_store.fresh(), NamedStruct { name: qualified_identifier, fields })
},
_ => Expression::new(self.id_store.fresh(), Value(qualified_identifier))
}
_ => Expression::new(self.id_store.fresh(), Value(qualified_identifier)),
})
}
@ -934,11 +957,17 @@ impl Parser {
loop {
match self.token_handler.peek_kind() {
RCurlyBrace | EOF => break,
Semicolon | Newline => { self.token_handler.next(); continue},
Semicolon | Newline => {
self.token_handler.next();
continue;
}
_ => {
cond_arms.push(self.cond_arm()?);
match self.token_handler.peek_kind() {
Comma | Semicolon | Newline => { self.token_handler.next(); continue; },
Comma | Semicolon | Newline => {
self.token_handler.next();
continue;
}
_ => break,
}
}
@ -969,15 +998,13 @@ impl Parser {
Keyword(Kw::Is) => {
self.token_handler.next();
Condition::Pattern(self.pattern()?)
},
}
ref tok if BinOp::from_sigil_token(tok).is_some() => {
let op = BinOp::from_sigil_token(&self.token_handler.next().kind).unwrap();
let expr = self.expression()?;
Condition::TruncatedOp(op, expr)
},
_ => {
Condition::Expression(self.expression()?)
},
}
_ => Condition::Expression(self.expression()?),
})
}
@ -987,8 +1014,8 @@ impl Parser {
Keyword(Kw::If) => {
self.token_handler.next();
Some(self.expression()?)
},
_ => None
}
_ => None,
})
}
@ -1012,17 +1039,15 @@ impl Parser {
LCurlyBrace => {
let members = delimited!(self, LCurlyBrace, record_pattern_entry, Comma, RCurlyBrace);
Pattern::Record(qualified_identifier, members)
},
}
LParen => {
let members = delimited!(self, LParen, pattern, Comma, RParen);
Pattern::TupleStruct(qualified_identifier, members)
},
_ => {
Pattern::VarOrName(qualified_identifier)
},
}
},
_ => self.pattern_literal()?
_ => Pattern::VarOrName(qualified_identifier),
}
}
_ => self.pattern_literal()?,
})
}
@ -1033,22 +1058,22 @@ impl Parser {
Keyword(Kw::True) => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::BoolPattern(true))
},
}
Keyword(Kw::False) => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::BoolPattern(false))
},
}
StrLiteral { s, .. } => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::StringPattern(s))
},
}
DigitGroup(_) | HexLiteral(_) | BinNumberSigil | Period => self.signed_number_literal()?,
Operator(ref op) if **op == "-" => self.signed_number_literal()?,
Underscore => {
self.token_handler.next();
Pattern::Ignored
},
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok)
}
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok),
})
}
@ -1058,8 +1083,8 @@ impl Parser {
Operator(ref op) if **op == "-" => {
self.token_handler.next();
true
},
_ => false
}
_ => false,
};
let Expression { kind, .. } = self.number_literal()?;
Ok(Pattern::Literal(PatternLiteral::NumPattern { neg, num: kind }))
@ -1073,8 +1098,8 @@ impl Parser {
expect!(self, Colon);
let pat = self.pattern()?;
(name, pat)
},
_ => (name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name)))
}
_ => (name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name))),
})
}
@ -1091,7 +1116,11 @@ impl Parser {
LCurlyBrace => self.block(),
_ => {
let expr = self.expression()?;
let s = Statement { id: self.id_store.fresh(), location: tok.location, kind: StatementKind::Expression(expr) };
let s = Statement {
id: self.id_store.fresh(),
location: tok.location,
kind: StatementKind::Expression(expr),
};
Ok(s.into())
}
}
@ -1151,13 +1180,14 @@ impl Parser {
let tok = self.token_handler.peek();
Ok(match tok.get_kind() {
LCurlyBrace => {
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
let statements =
delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
StatementBlock(statements.into())
},
}
Keyword(Kw::Return) => {
self.token_handler.next();
MonadicReturn(self.expression()?)
},
}
_ => return ParseError::new_with_token("for expressions must end in a block or 'return'", tok),
})
}
@ -1182,12 +1212,12 @@ impl Parser {
self.token_handler.next();
let id = self.id_store.fresh();
Ok(Expression::new(id, BoolLiteral(true)))
},
}
Keyword(Kw::False) => {
self.token_handler.next();
let id = self.id_store.fresh();
Ok(Expression::new(id, BoolLiteral(false)))
},
}
StrLiteral { s, .. } => {
self.token_handler.next();
let id = self.id_store.fresh();
@ -1214,12 +1244,12 @@ impl Parser {
let digits = self.digits()?;
let n = parse_binary(digits, tok)?;
Ok(Expression::new(self.id_store.fresh(), NatLiteral(n)))
},
}
HexLiteral(text) => {
let digits: String = text.chars().filter(|c| c.is_digit(16)).collect();
let n = parse_hex(digits, tok)?;
Ok(Expression::new(self.id_store.fresh(), NatLiteral(n)))
},
}
_ => return ParseError::new_with_token("Expected '0x' or '0b'", tok),
}
}
@ -1236,12 +1266,12 @@ impl Parser {
match digits.parse::<f64>() {
Ok(f) => Ok(Expression::new(self.id_store.fresh(), FloatLiteral(f))),
Err(e) => ParseError::new_with_token(format!("Float failed to parse with error: {}", e), tok),
}
} else {
match digits.parse::<u64>() {
Ok(d) => Ok(Expression::new(self.id_store.fresh(), NatLiteral(d))),
Err(e) => ParseError::new_with_token(format!("Integer failed to parse with error: {}", e), tok),
Err(e) =>
ParseError::new_with_token(format!("Integer failed to parse with error: {}", e), tok),
}
}
}
@ -1251,8 +1281,14 @@ impl Parser {
let mut ds = String::new();
loop {
match self.token_handler.peek_kind() {
Underscore => { self.token_handler.next(); continue; },
DigitGroup(ref s) => { self.token_handler.next(); ds.push_str(s)},
Underscore => {
self.token_handler.next();
continue;
}
DigitGroup(ref s) => {
self.token_handler.next();
ds.push_str(s)
}
_ => break,
}
}
@ -1278,19 +1314,15 @@ impl Parser {
LCurlyBrace => {
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
ImportedNames::List(names)
},
}
Operator(ref s) if **s == "*" => {
self.token_handler.next();
ImportedNames::All
},
_ => ImportedNames::LastOfPath
}
_ => ImportedNames::LastOfPath,
};
Ok(ImportSpecifier {
id: self.id_store.fresh(),
path_components,
imported_names
})
Ok(ImportSpecifier { id: self.id_store.fresh(), path_components, imported_names })
}
#[recursive_descent_method]
@ -1299,11 +1331,11 @@ impl Parser {
Operator(ref s) if **s == "*" => {
self.token_handler.next();
ImportedNames::All
},
}
LCurlyBrace => {
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
ImportedNames::List(names)
},
}
_ => return ParseError::new_with_token("Expected '{{' or '*'", self.token_handler.peek()),
})
}
@ -1324,11 +1356,15 @@ fn parse_binary(digits: String, tok: Token) -> ParseResult<u64> {
match d {
'1' => result += multiplier,
'0' => (),
_ => return ParseError::new_with_token("Encountered a character not '1' or '0 while parsing a binary literal", tok),
_ =>
return ParseError::new_with_token(
"Encountered a character not '1' or '0 while parsing a binary literal",
tok,
),
}
multiplier = match multiplier.checked_mul(2) {
Some(m) => m,
None => return ParseError::new_with_token("This binary expression will overflow", tok)
None => return ParseError::new_with_token("This binary expression will overflow", tok),
}
}
Ok(result)
@ -1344,9 +1380,8 @@ fn parse_hex(digits: String, tok: Token) -> ParseResult<u64> {
}
multiplier = match multiplier.checked_mul(16) {
Some(m) => m,
None => return ParseError::new_with_token("This hex expression will overflow", tok)
None => return ParseError::new_with_token("This hex expression will overflow", tok),
}
}
Ok(result)
}

View File

@ -203,11 +203,36 @@ fn operators() {
#[test]
fn accessors() {
/*
assert_expr!("a.b");
assert_expr!("a.b.c.d()");
assert_expr!("a.b().c.d()");
*/
use ExpressionKind::*;
assert_expr!("a.b", expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }));
assert_expr!(
"a.b.c",
expr(Access {
name: rc("c"),
expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }))
})
);
assert_expr!(
"a.b.c(3)",
expr(Call {
f: bx(expr(Access {
name: rc("c"),
expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }))
})),
arguments: vec![InvocationArgument::Positional(expr(NatLiteral(3)))],
})
);
assert_expr!(
"a.b().c",
expr(Access {
name: rc("c"),
expr: bx(expr(Call {
f: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) })),
arguments: vec![]
}))
})
);
}
#[test]

View File

@ -179,6 +179,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
WhileExpression { .. } => Expression::ReductionError("While expr not implemented".to_string()),
ForExpression { .. } => Expression::ReductionError("For expr not implemented".to_string()),
ListLiteral { .. } => Expression::ReductionError("ListLiteral expr not implemented".to_string()),
Access { .. } => unimplemented!(),
}
}