Rename Scanner to Lexer (#276)
parent c5eeb8944e
commit 66391de3f8
@@ -19,7 +19,7 @@ fn mixed_whitespace(text: &str) -> bool {
   !(text.chars().all(|c| c == ' ') || text.chars().all(|c| c == '\t'))
 }
 
-pub struct Scanner<'a> {
+pub struct Lexer<'a> {
   tokens: Vec<Token<'a>>,
   text: &'a str,
   rest: &'a str,
@@ -37,9 +37,9 @@ enum State<'a> {
   Interpolation,
 }
 
-impl<'a> Scanner<'a> {
-  pub fn scan(text: &'a str) -> CompilationResult<Vec<Token<'a>>> {
-    let scanner = Scanner{
+impl<'a> Lexer<'a> {
+  pub fn lex(text: &'a str) -> CompilationResult<Vec<Token<'a>>> {
+    let lexer = Lexer{
       tokens: vec![],
       text: text,
       rest: text,
@@ -49,7 +49,7 @@ impl<'a> Scanner<'a> {
       state: vec![State::Start],
     };
 
-    scanner.inner()
+    lexer.inner()
   }
 
   fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
@@ -75,7 +75,7 @@ impl<'a> Scanner<'a> {
     }
   }
 
-  fn scan_indent(&mut self) -> CompilationResult<'a, Option<Token<'a>>> {
+  fn lex_indent(&mut self) -> CompilationResult<'a, Option<Token<'a>>> {
     lazy_static! {
       static ref INDENT: Regex = re(r"^([ \t]*)[^ \t\n\r]");
     }
@@ -150,7 +150,7 @@ impl<'a> Scanner<'a> {
     }
 
     loop {
-      if let Some(token) = self.scan_indent()? {
+      if let Some(token) = self.lex_indent()? {
         self.tokens.push(token);
       }
 
@@ -306,7 +306,7 @@ mod test {
     fn $name() {
       let input = $input;
       let expected = $expected;
-      let tokens = ::Scanner::scan(input).unwrap();
+      let tokens = ::Lexer::lex(input).unwrap();
       let roundtrip = tokens.iter().map(|t| {
         let mut s = String::new();
         s += t.prefix;
@@ -369,7 +369,7 @@ mod test {
       kind: $kind,
     };
 
-    if let Err(error) = Scanner::scan(input) {
+    if let Err(error) = Lexer::lex(input) {
       assert_eq!(error.text, expected.text);
       assert_eq!(error.index, expected.index);
       assert_eq!(error.line, expected.line);
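
The hunks above are a pure rename: Scanner, scan, and scan_indent become Lexer, lex, and lex_indent, with no change in behavior. As a rough illustration of the renamed entry-point shape only, here is a minimal standalone sketch; the field and method names mirror the diff, but the body, token type, and error type are simplified stand-ins rather than the crate's real CompilationResult, Token, and lexing state machine:

struct Lexer<'a> {
  text: &'a str,          // full input, kept for error reporting in the real lexer
  rest: &'a str,          // unconsumed remainder of the input
  tokens: Vec<&'a str>,   // stand-in for Vec<Token<'a>>
}

impl<'a> Lexer<'a> {
  // Formerly Scanner::scan: construct the lexer, then run it to completion.
  fn lex(text: &'a str) -> Result<Vec<&'a str>, String> {
    let lexer = Lexer { text, rest: text, tokens: vec![] };
    lexer.inner()
  }

  fn inner(mut self) -> Result<Vec<&'a str>, String> {
    // Trivial whitespace splitting stands in for the real state-machine loop.
    for word in self.rest.split_whitespace() {
      self.tokens.push(word);
    }
    let _ = self.text;
    Ok(self.tokens)
  }
}

fn main() {
  let tokens = Lexer::lex("build:\n\techo hi").unwrap();
  assert_eq!(tokens, ["build:", "echo", "hi"]);
}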
@@ -24,6 +24,7 @@ mod cooked_string;
 mod expression;
 mod fragment;
 mod justfile;
+mod lexer;
 mod misc;
 mod parameter;
 mod parser;
@@ -33,7 +34,6 @@ mod recipe;
 mod recipe_resolver;
 mod run;
 mod runtime_error;
-mod scanner;
 mod shebang;
 mod token;
 
@@ -68,7 +68,7 @@ mod common {
   pub use recipe::Recipe;
   pub use recipe_resolver::RecipeResolver;
   pub use runtime_error::{RuntimeError, RunResult};
-  pub use scanner::Scanner;
+  pub use lexer::Lexer;
   pub use shebang::Shebang;
   pub use token::{Token, TokenKind};
 }
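
The module list and the re-exports in mod common change in step: mod scanner / pub use scanner::Scanner become mod lexer / pub use lexer::Lexer. A hypothetical single-file sketch of the same declare-and-re-export pattern (module contents here are illustrative stand-ins, not the crate's actual code):

mod lexer {
  pub struct Lexer;

  impl Lexer {
    // Stand-in for the real Lexer::lex, which returns CompilationResult<Vec<Token>>.
    pub fn lex(text: &str) -> Vec<&str> {
      text.split_whitespace().collect()
    }
  }
}

// Callers keep importing from common, so only the re-export line changes.
mod common {
  pub use super::lexer::Lexer;
}

fn main() {
  assert_eq!(common::Lexer::lex("echo hi"), ["echo", "hi"]);
}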
@@ -15,7 +15,7 @@ pub struct Parser<'a> {
 
 impl<'a> Parser<'a> {
   pub fn parse(text: &'a str) -> CompilationResult<'a, Justfile> {
-    let tokens = Scanner::scan(text)?;
+    let tokens = Lexer::lex(text)?;
     let parser = Parser::new(text, tokens);
     parser.justfile()
   }
@@ -30,7 +30,7 @@ macro_rules! compilation_error_test {
     kind: $kind,
   };
 
-  let tokens = ::Scanner::scan(input).unwrap();
+  let tokens = ::Lexer::lex(input).unwrap();
   let parser = ::Parser::new(input, tokens);
 
   if let Err(error) = parser.justfile() {
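
After the rename, Parser::parse still drives the same two-stage pipeline: lex the source text into tokens, then hand the text plus tokens to the parser. A simplified sketch of that pipeline shape under stand-in types (the real code returns CompilationResult<'a, Justfile> and operates on Token values):

struct Lexer;

impl Lexer {
  fn lex(text: &str) -> Result<Vec<&str>, String> {
    Ok(text.split_whitespace().collect())
  }
}

struct Parser<'a> {
  text: &'a str,
  tokens: Vec<&'a str>,
}

impl<'a> Parser<'a> {
  fn new(text: &'a str, tokens: Vec<&'a str>) -> Parser<'a> {
    Parser { text, tokens }
  }

  // Mirrors the diff: lex first (formerly Scanner::scan), then parse the tokens.
  fn parse(text: &'a str) -> Result<usize, String> {
    let tokens = Lexer::lex(text)?;
    let parser = Parser::new(text, tokens);
    parser.justfile()
  }

  fn justfile(self) -> Result<usize, String> {
    // Stand-in for real parsing: report how many tokens were produced.
    let _ = self.text;
    Ok(self.tokens.len())
  }
}

fn main() {
  assert_eq!(Parser::parse("build:\n\techo hi").unwrap(), 3);
}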