diff --git a/src/cell.rs b/src/cell.rs
index 28858ac..68d2ef6 100644
--- a/src/cell.rs
+++ b/src/cell.rs
@@ -20,7 +20,6 @@
 use std::boxed::Box;
 
 // Container
-#[derive(Debug)]
 pub enum Ctr {
     SYMBOL(String),
     STRING(String),
@@ -35,7 +34,6 @@ pub enum Ctr {
  * Holds two Containers.
  * Basic building block for more complex data structures.
  */
-#[derive(Debug)]
 pub struct Cell {
     /* "Cell Address Register"
      * Historical way of referring to the first value in a cell.
diff --git a/src/lex.rs b/src/lex.rs
index 4f3e1b2..ccf00c0 100644
--- a/src/lex.rs
+++ b/src/lex.rs
@@ -85,7 +85,8 @@ fn process(document: String) -> Result<Box<Cell>, String> {
             needs_alloc = true;
             // reset comment line status
             if delim == '\n' {
-                ign = false
+                ign = false;
+                continue;
             }
 
             // catch too many list end
diff --git a/tests/test_lex.rs b/tests/test_lex.rs
index e240fda..fc3e8fa 100644
--- a/tests/test_lex.rs
+++ b/tests/test_lex.rs
@@ -45,4 +45,109 @@ mod lex_tests {
             }
         }
     }
+
+    #[test]
+    fn test_bad_symbol() {
+        let document: &str = "(as;dd)";
+        let output: &str = "Problem lexing document: \"Unparsable token:as;dd\"";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                print!("Bad token yielded: {}\n", *box_cell);
+                assert!(false);
+            },
+            Err(s) => {
+                assert_eq!(s, output.to_string());
+            }
+        }
+    }
+
+    #[test]
+    fn test_unmatched_list_delim_flat() {
+        let document: &str = "(one two";
+        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                print!("Bad token yielded: {}\n", *box_cell);
+                assert!(false);
+            },
+            Err(s) => {
+                assert_eq!(s, output.to_string());
+            }
+        }
+    }
+
+    #[test]
+    fn test_unmatched_list_delim_complex() {
+        let document: &str = "(one two (three)";
+        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                print!("Bad token yielded: {}\n", *box_cell);
+                assert!(false);
+            },
+            Err(s) => {
+                assert_eq!(s, output.to_string());
+            }
+        }
+    }
+
+    #[test]
+    fn test_comment() {
+        let document: &str = "#!/bin/relish\n(one two)";
+        let output: &str = "(one two nil)";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                assert_eq!(format!("{}", *box_cell), output.to_string());
+            },
+            Err(s) => {
+                print!("{}\n", s);
+                assert!(false);
+            }
+        }
+    }
+
+    #[test]
+    fn test_postline_comment() {
+        let document: &str = "#!/bin/relish\n((one two)# another doc comment\n(three four))";
+        let output: &str = "(one two nil)";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                assert_eq!(format!("{}", *box_cell), output.to_string());
+            },
+            Err(s) => {
+                print!("{}\n", s);
+                assert!(false);
+            }
+        }
+    }
+
+    #[test]
+    fn test_inline_comment() {
+        let document: &str = "#!/bin/relish\n((one two)\n# another doc comment\nthree)";
+        let output: &str = "(one two nil)";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                assert_eq!(format!("{}", *box_cell), output.to_string());
+            },
+            Err(s) => {
+                print!("{}\n", s);
+                assert!(false);
+            }
+        }
+    }
+
+    #[test]
+    fn test_bad_token_list() {
+        let document: &str = "(one t(wo)";
+        let output: &str = "Problem lexing document: \"list started in middle of another token\"";
+        match lex(document.to_string()) {
+            Ok(box_cell) => {
+                print!("Bad token yielded: {}\n", *box_cell);
+                assert!(false);
+            },
+            Err(s) => {
+                assert_eq!(s, output.to_string());
+            }
+        }
+    }
 }