From 3b92e00ee7c23855c0f6ee459f725273acc7b70c Mon Sep 17 00:00:00 2001
From: Casey Rodarmor <casey@rodarmor.com>
Date: Sat, 22 Oct 2016 19:36:54 -0700
Subject: [PATCH] Comment out everything to rewrite parser

---
 justfile     |  3 ++-
 src/lib.rs   | 32 +++++++++++++++++---------------
 src/tests.rs |  8 ++++++--
 3 files changed, 25 insertions(+), 18 deletions(-)

diff --git a/justfile b/justfile
index 2a27c8f7..d2ff8e69 100644
--- a/justfile
+++ b/justfile
@@ -1,6 +1,7 @@
 test:
   cargo test --lib
-  #cargo run -- quine clean > /dev/null 2> /dev/null
+
+# cargo run -- quine clean > /dev/null 2> /dev/null
 
 backtrace:
   RUST_BACKTRACE=1 cargo test --lib
diff --git a/src/lib.rs b/src/lib.rs
index f5c4c16f..1ed4baf9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,7 +7,7 @@ extern crate tempdir;
 
 use std::io::prelude::*;
 use std::{fs, fmt, process, io};
-use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::collections::{BTreeMap, HashSet};
 use std::fmt::Display;
 use regex::Regex;
 
@@ -50,17 +50,19 @@ pub struct Recipe<'a> {
   name:               &'a str,
   leading_whitespace: &'a str,
   lines:              Vec<&'a str>,
-  fragments:          Vec<Vec<Fragment<'a>>>,
-  variables:          BTreeSet<&'a str>,
+  // fragments:          Vec<Vec<Fragment<'a>>>,
+  // variables:          BTreeSet<&'a str>,
   dependencies:       Vec<&'a str>,
-  arguments:          Vec<&'a str>,
+  // arguments:          Vec<&'a str>,
   shebang:            bool,
 }
 
+/*
 enum Fragment<'a> {
   Text{text: &'a str},
   Variable{name: &'a str},
 }
+*/
 
 impl<'a> Display for Recipe<'a> {
   fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
@@ -452,9 +454,9 @@ impl<'a> Display for RunError<'a> {
 }
 
 struct Token<'a> {
-  index:  usize,
+  // index:  usize,
   line:   usize,
-  col:    usize,
+  // col:    usize,
   prefix: &'a str,
   lexeme: &'a str,
   class:  TokenClass,
@@ -521,7 +523,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
 
   let mut tokens = vec![];
   let mut rest   = text;
-  let mut index  = 0;
+  // let mut index  = 0;
   let mut line   = 0;
   let mut col    = 0;
   let mut indent: Option<&str> = None;
@@ -561,9 +563,9 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
       }
     } {
       tokens.push(Token {
-        index:  index,
+        // index:  index,
         line:   line,
-        col:    col,
+        // col:    col,
         prefix: "",
         lexeme: "",
         class:  class,
@@ -722,9 +724,9 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
     let len = prefix.len() + lexeme.len();
 
     tokens.push(Token {
-      index:  index,
+      // index:  index,
       line:   line,
-      col:    col,
+      // col:    col,
       prefix: prefix,
       lexeme: lexeme,
       class:  class,
@@ -744,7 +746,7 @@ fn tokenize(text: &str) -> Result<Vec<Token>, Error> {
     }
 
     rest = &rest[len..];
-    index += len;
+    // index += len;
   }
 
   Ok(tokens)
@@ -1044,9 +1046,9 @@ pub fn parse<'a>(text: &'a str) -> Result<Justfile, Error> {
         name:               name,
         leading_whitespace: "",
         lines:              vec![],
-        fragments:          vec![],
-        variables:          BTreeSet::new(),
-        arguments:          vec![],
+        // fragments:          vec![],
+        // variables:          BTreeSet::new(),
+        // arguments:          vec![],
         dependencies:       dependencies,
         shebang:            false,
       });
diff --git a/src/tests.rs b/src/tests.rs
index 7a45bf2e..e61d5576 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -1,3 +1,4 @@
+/*
 extern crate tempdir;
 
 use super::{ErrorKind, Justfile};
@@ -209,6 +210,9 @@ a:
   }
 }
 
+
+*/
+
 fn tokenize_success(text: &str, expected_summary: &str) {
   let tokens = super::tokenize(text).unwrap();
   let roundtrip = tokens.iter().map(|t| {
@@ -218,10 +222,10 @@ fn tokenize_success(text: &str, expected_summary: &str) {
     s
   }).collect::<Vec<_>>().join("");
   assert_eq!(text, roundtrip);
-  assert_eq!(token_summary(tokens), expected_summary);
+  assert_eq!(token_summary(&tokens), expected_summary);
 }
 
-fn token_summary(tokens: Vec<super::Token>) -> String {
+fn token_summary(tokens: &[super::Token]) -> String {
   tokens.iter().map(|t| {
     match t.class {
       super::TokenClass::Line{..} => "*",