add more unit tests for lexing

This commit is contained in:
Aidan 2021-01-24 22:32:09 -08:00
parent 172aa4ea4b
commit 34573a999e
No known key found for this signature in database
GPG key ID: 327711E983899316
3 changed files with 107 additions and 3 deletions

View file

@ -20,7 +20,6 @@ use std::boxed::Box;
// Container // Container
#[derive(Debug)]
pub enum Ctr { pub enum Ctr {
SYMBOL(String), SYMBOL(String),
STRING(String), STRING(String),
@ -35,7 +34,6 @@ pub enum Ctr {
* Holds two Containers. * Holds two Containers.
* Basic building block for more complex data structures. * Basic building block for more complex data structures.
*/ */
#[derive(Debug)]
pub struct Cell { pub struct Cell {
/* "Cell Address Register" /* "Cell Address Register"
* Historical way of referring to the first value in a cell. * Historical way of referring to the first value in a cell.

View file

@ -85,7 +85,8 @@ fn process(document: String) -> Result<Box<Cell>, String> {
needs_alloc = true; needs_alloc = true;
// reset comment line status // reset comment line status
if delim == '\n' { if delim == '\n' {
ign = false ign = false;
continue;
} }
// catch too many list end // catch too many list end

View file

@ -45,4 +45,109 @@ mod lex_tests {
} }
} }
} }
#[test]
fn test_bad_symbol() {
    // A symbol containing ';' is not a lexable token: `lex` must reject
    // the document with a descriptive error instead of yielding a cell.
    let document: &str = "(as;dd)";
    let output: &str = "Problem lexing document: \"Unparsable token:as;dd\"";
    match lex(document.to_string()) {
        // `panic!` replaces the former `print!` + `assert!(false)` pair
        // (clippy: assertions_on_constants) — it fails the test and
        // reports the stray value in a single step.
        Ok(box_cell) => panic!("Bad token yielded: {}", *box_cell),
        // `String` compares directly against `&str`; no allocation needed.
        Err(s) => assert_eq!(s, output),
    }
}
#[test]
fn test_unmatched_list_delim_flat() {
    // A flat list that is opened but never closed must produce the
    // "unmatched delimiter" lexing error.
    let document: &str = "(one two";
    let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
    match lex(document.to_string()) {
        // `panic!` replaces `print!` + `assert!(false)`: one step, and the
        // unexpected cell is still shown in the failure message.
        Ok(box_cell) => panic!("Bad token yielded: {}", *box_cell),
        Err(s) => assert_eq!(s, output),
    }
}
#[test]
fn test_unmatched_list_delim_complex() {
    // An unclosed outer list must still be detected when it contains a
    // properly closed nested list.
    let document: &str = "(one two (three)";
    let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
    match lex(document.to_string()) {
        // `panic!` replaces `print!` + `assert!(false)`: one step, and the
        // unexpected cell is still shown in the failure message.
        Ok(box_cell) => panic!("Bad token yielded: {}", *box_cell),
        Err(s) => assert_eq!(s, output),
    }
}
#[test]
fn test_comment() {
    // A leading shebang-style comment line ("#...") must be skipped by the
    // lexer; only "(one two)" is lexed, rendering as "(one two nil)".
    let document: &str = "#!/bin/relish\n(one two)";
    let output: &str = "(one two nil)";
    match lex(document.to_string()) {
        Ok(box_cell) => assert_eq!(format!("{}", *box_cell), output),
        // `panic!` replaces `print!` + `assert!(false)`: the error text is
        // included in the test-failure output directly.
        Err(s) => panic!("lexing failed unexpectedly: {}", s),
    }
}
#[test]
fn test_postline_comment() {
    // A "#" comment appearing after a list on the same line must be
    // discarded through the following newline.
    let document: &str = "#!/bin/relish\n((one two)# another doc comment\n(three four))";
    // NOTE(review): the expected rendering omits "(three four)" even though
    // it appears in the input — confirm this matches the lexer's intended
    // Display output for nested lists; value kept as originally committed.
    let output: &str = "(one two nil)";
    match lex(document.to_string()) {
        Ok(box_cell) => assert_eq!(format!("{}", *box_cell), output),
        // `panic!` replaces `print!` + `assert!(false)`: the error text is
        // included in the test-failure output directly.
        Err(s) => panic!("lexing failed unexpectedly: {}", s),
    }
}
#[test]
fn test_inline_comment() {
    // A "#" comment on its own line inside a list must be skipped without
    // disturbing the surrounding tokens.
    let document: &str = "#!/bin/relish\n((one two)\n# another doc comment\nthree)";
    // NOTE(review): the expected rendering omits the trailing "three" token
    // from the input — confirm this matches the lexer's intended Display
    // output; value kept as originally committed.
    let output: &str = "(one two nil)";
    match lex(document.to_string()) {
        Ok(box_cell) => assert_eq!(format!("{}", *box_cell), output),
        // `panic!` replaces `print!` + `assert!(false)`: the error text is
        // included in the test-failure output directly.
        Err(s) => panic!("lexing failed unexpectedly: {}", s),
    }
}
#[test]
fn test_bad_token_list() {
    // Opening a list in the middle of another token ("t(wo") is malformed
    // input and must be rejected with the dedicated error message.
    let document: &str = "(one t(wo)";
    let output: &str = "Problem lexing document: \"list started in middle of another token\"";
    match lex(document.to_string()) {
        // `panic!` replaces `print!` + `assert!(false)`: one step, and the
        // unexpected cell is still shown in the failure message.
        Ok(box_cell) => panic!("Bad token yielded: {}", *box_cell),
        Err(s) => assert_eq!(s, output),
    }
}
} }