Comment out everything to rewrite parser

parent fdf73945d7
commit 3b92e00ee7

2 changed files:
  justfile   (3 changes)
  src/lib.rs (32 changes)
diff --git a/justfile b/justfile
@@ -1,6 +1,7 @@
 test:
 	cargo test --lib
-#cargo run -- quine clean > /dev/null 2> /dev/null
+	# cargo run -- quine clean > /dev/null 2> /dev/null
+
 
 backtrace:
 	RUST_BACKTRACE=1 cargo test --lib
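For context, a justfile is make-like: a recipe name, optional dependencies after the colon, and indented body lines. Dependencies end up in the `Recipe::dependencies` field kept alive in the diff below. An illustrative (hypothetical) justfile, not part of this commit:

    # running `just b` executes recipe `a` first, then `b`
    a:
    	echo hello

    b: a
    	echo goodbye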
diff --git a/src/lib.rs b/src/lib.rs
@@ -7,7 +7,7 @@ extern crate tempdir;
 use std::io::prelude::*;
 
 use std::{fs, fmt, process, io};
-use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::collections::{BTreeMap, HashSet};
 use std::fmt::Display;
 use regex::Regex;
 
@@ -50,17 +50,19 @@ pub struct Recipe<'a> {
   name: &'a str,
   leading_whitespace: &'a str,
   lines: Vec<&'a str>,
-  fragments: Vec<Vec<Fragment<'a>>>,
-  variables: BTreeSet<&'a str>,
+  // fragments: Vec<Vec<Fragment<'a>>>,
+  // variables: BTreeSet<&'a str>,
   dependencies: Vec<&'a str>,
-  arguments: Vec<&'a str>,
+  // arguments: Vec<&'a str>,
   shebang: bool,
 }
 
+/*
 enum Fragment<'a> {
   Text{text: &'a str},
   Variable{name: &'a str},
 }
+*/
 
 impl<'a> Display for Recipe<'a> {
   fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
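The `Fragment` enum being fenced off with `/* ... */` modeled a recipe line as literal text interleaved with `{{variable}}` interpolations. A self-contained sketch of that idea, assuming just's `{{...}}` interpolation syntax; this is an illustration, not the crate's parser:

    // Sketch: split a recipe line into literal text and `{{variable}}`
    // interpolations. Variant names mirror the commented-out enum.
    #[derive(Debug, PartialEq)]
    enum Fragment<'a> {
        Text { text: &'a str },
        Variable { name: &'a str },
    }

    fn fragments(line: &str) -> Vec<Fragment> {
        let mut result = vec![];
        let mut rest = line;
        while let Some(start) = rest.find("{{") {
            if start > 0 {
                result.push(Fragment::Text { text: &rest[..start] });
            }
            let after = &rest[start + 2..];
            match after.find("}}") {
                Some(end) => {
                    result.push(Fragment::Variable { name: &after[..end] });
                    rest = &after[end + 2..];
                }
                // unterminated interpolation: real code would report an error
                None => { rest = after; break; }
            }
        }
        if !rest.is_empty() {
            result.push(Fragment::Text { text: rest });
        }
        result
    }

    fn main() {
        assert_eq!(
            fragments("echo {{greeting}} world"),
            vec![
                Fragment::Text { text: "echo " },
                Fragment::Variable { name: "greeting" },
                Fragment::Text { text: " world" },
            ]
        );
    }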
@@ -452,9 +454,9 @@ impl<'a> Display for RunError<'a> {
 }
 
 struct Token<'a> {
-  index: usize,
+  // index: usize,
   line: usize,
-  col: usize,
+  // col: usize,
   prefix: &'a str,
   lexeme: &'a str,
   class: TokenClass,
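In this `Token` struct, `prefix` carries the otherwise-ignored text (typically whitespace) between the previous token and this one, so concatenating `prefix` then `lexeme` across all tokens rebuilds the source exactly; that is the roundtrip property `tokenize_success` asserts further down. A reduced, runnable sketch of the property, with field meanings inferred from the tests:

    // Sketch: why Token keeps both `prefix` and `lexeme`.
    struct Token<'a> {
        prefix: &'a str, // text between the previous token and this one
        lexeme: &'a str, // the token's own text
    }

    fn roundtrip(tokens: &[Token]) -> String {
        tokens.iter().map(|t| [t.prefix, t.lexeme].concat()).collect()
    }

    fn main() {
        let tokens = vec![
            Token { prefix: "",  lexeme: "test" },
            Token { prefix: "",  lexeme: ":" },
            Token { prefix: " ", lexeme: "dep" },
        ];
        // prefix + lexeme over all tokens reproduces the input exactly
        assert_eq!(roundtrip(&tokens), "test: dep");
    }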
@@ -521,7 +523,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
 
   let mut tokens = vec![];
   let mut rest = text;
-  let mut index = 0;
+  // let mut index = 0;
   let mut line = 0;
   let mut col = 0;
   let mut indent: Option<&str> = None;
@@ -561,9 +563,9 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
       }
     } {
       tokens.push(Token {
-        index: index,
+        // index: index,
         line: line,
-        col: col,
+        // col: col,
         prefix: "",
         lexeme: "",
         class: class,
@@ -722,9 +724,9 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
     let len = prefix.len() + lexeme.len();
 
     tokens.push(Token {
-      index: index,
+      // index: index,
      line: line,
-      col: col,
+      // col: col,
      prefix: prefix,
      lexeme: lexeme,
      class: class,
@@ -744,7 +746,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
     }
 
     rest = &rest[len..];
-    index += len;
+    // index += len;
   }
 
   Ok(tokens)
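Taken together, these tokenize hunks outline the lexer loop: match a token at the front of `rest`, push it, then slice the `len` consumed bytes off `rest` and advance the position counters. `index`, the running byte offset, is what this commit comments out; `line` and `col` survive. A simplified runnable sketch of that bookkeeping, with a stand-in for the real token matching:

    fn main() {
        let text = "test:\n cargo test --lib\n";
        let mut rest = text;
        // let mut index = 0;   // the counter this commit comments out
        let mut line = 0;
        let mut col = 0;
        while !rest.is_empty() {
            // stand-in for real token matching: consume through the next
            // newline, or the remainder of the input
            let len = rest.find('\n').map(|i| i + 1).unwrap_or(rest.len());
            let consumed = &rest[..len];
            // update counters the way a lexer would
            for c in consumed.chars() {
                if c == '\n' { line += 1; col = 0; } else { col += 1; }
            }
            rest = &rest[len..];
            // index += len;    // likewise commented out in this commit
        }
        assert_eq!((line, col), (2, 0));
    }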
@@ -1044,9 +1046,9 @@ pub fn parse<'a>(text: &'a str) -> Result<Justfile, Error> {
       name: name,
       leading_whitespace: "",
       lines: vec![],
-      fragments: vec![],
-      variables: BTreeSet::new(),
-      arguments: vec![],
+      // fragments: vec![],
+      // variables: BTreeSet::new(),
+      // arguments: vec![],
       dependencies: dependencies,
       shebang: false,
     });
[tests module; file path not shown in this capture]
@@ -1,3 +1,4 @@
+/*
 extern crate tempdir;
 
 use super::{ErrorKind, Justfile};
@@ -209,6 +210,9 @@ a:
   }
 }
 
+
+*/
+
 fn tokenize_success(text: &str, expected_summary: &str) {
   let tokens = super::tokenize(text).unwrap();
   let roundtrip = tokens.iter().map(|t| {
@@ -218,10 +222,10 @@ fn tokenize_success(text: &str, expected_summary: &str) {
     s
   }).collect::<Vec<_>>().join("");
   assert_eq!(text, roundtrip);
-  assert_eq!(token_summary(tokens), expected_summary);
+  assert_eq!(token_summary(&tokens), expected_summary);
 }
 
-fn token_summary(tokens: Vec<super::Token>) -> String {
+fn token_summary(tokens: &[super::Token]) -> String {
   tokens.iter().map(|t| {
     match t.class {
       super::TokenClass::Line{..} => "*",
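The final change turns `token_summary` from consuming `Vec<super::Token>` into borrowing `&[super::Token]`, with the call site updated to pass `&tokens`: the vector is no longer moved, so the test keeps using it, and `&Vec<T>` deref-coerces to `&[T]`. The same pattern in miniature, with stand-in types rather than the crate's:

    // Taking `&[T]` instead of `Vec<T>` borrows rather than consumes.
    fn summary(items: &[u32]) -> String {
        items.iter().map(|n| n.to_string()).collect::<Vec<_>>().join(",")
    }

    fn main() {
        let items = vec![1, 2, 3];
        let s = summary(&items); // &Vec<u32> coerces to &[u32]
        assert_eq!(s, "1,2,3");
        assert_eq!(items.len(), 3); // still usable: it was not moved
    }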