add more unit tests for lexing

Aidan 2021-01-24 22:32:09 -08:00
parent 172aa4ea4b
commit 34573a999e
No known key found for this signature in database
GPG key ID: 327711E983899316
3 changed files with 107 additions and 3 deletions

@@ -45,4 +45,109 @@ mod lex_tests {
            }
        }
    }
    #[test]
    fn test_bad_symbol() {
        // A symbol containing `;` should be rejected as unparsable.
        let document: &str = "(as;dd)";
        let output: &str = "Problem lexing document: \"Unparsable token:as;dd\"";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                print!("Bad token yielded: {}\n", *box_cell);
                assert!(false);
            },
            Err(s) => {
                assert_eq!(s, output.to_string());
            }
        }
    }
    #[test]
    fn test_unmatched_list_delim_flat() {
        // A top-level list that is never closed should be reported.
        let document: &str = "(one two";
        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                print!("Bad token yielded: {}\n", *box_cell);
                assert!(false);
            },
            Err(s) => {
                assert_eq!(s, output.to_string());
            }
        }
    }
    #[test]
    fn test_unmatched_list_delim_complex() {
        // The outer list is unclosed even though the inner list is balanced.
        let document: &str = "(one two (three)";
        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                print!("Bad token yielded: {}\n", *box_cell);
                assert!(false);
            },
            Err(s) => {
                assert_eq!(s, output.to_string());
            }
        }
    }
    #[test]
    fn test_comment() {
        // A leading `#!` shebang line should be skipped as a comment.
        let document: &str = "#!/bin/relish\n(one two)";
        let output: &str = "(one two nil)";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                assert_eq!(format!("{}", *box_cell), output.to_string());
            },
            Err(s) => {
                print!("{}\n", s);
                assert!(false);
            }
        }
    }
    #[test]
    fn test_postline_comment() {
        // A comment following a closing paren on the same line should be skipped.
        let document: &str = "#!/bin/relish\n((one two)# another doc comment\n(three four))";
        let output: &str = "(one two nil)";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                assert_eq!(format!("{}", *box_cell), output.to_string());
            },
            Err(s) => {
                print!("{}\n", s);
                assert!(false);
            }
        }
    }
    #[test]
    fn test_inline_comment() {
        // A comment on its own line inside a list should be skipped.
        let document: &str = "#!/bin/relish\n((one two)\n# another doc comment\nthree)";
        let output: &str = "(one two nil)";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                assert_eq!(format!("{}", *box_cell), output.to_string());
            },
            Err(s) => {
                print!("{}\n", s);
                assert!(false);
            }
        }
    }
    #[test]
    fn test_bad_token_list() {
        // An opening paren in the middle of another token should be rejected.
        let document: &str = "(one t(wo)";
        let output: &str = "Problem lexing document: \"list started in middle of another token\"";
        match lex(document.to_string()) {
            Ok(box_cell) => {
                print!("Bad token yielded: {}\n", *box_cell);
                assert!(false);
            },
            Err(s) => {
                assert_eq!(s, output.to_string());
            }
        }
    }
}
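
The expected strings above, such as "(one two nil)", only make sense if lex returns a cons-cell list whose Display impl prints the terminating nil explicitly. The sketch below shows one representation that would print that way; it is an illustration under that assumption, not the crate's actual implementation, and the names Cell, Symbol, Cons, and Nil are made up for the example. Running it prints (one two nil) for the list built from "one" and "two", the shape the tests compare against.

// Minimal sketch (assumed types, not the crate's real ones) of a cons-cell
// list whose Display output includes the terminating nil.
use std::fmt;

enum Cell {
    Symbol(String),
    Cons(Box<Cell>, Box<Cell>),
    Nil,
}

impl fmt::Display for Cell {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Cell::Symbol(s) => write!(f, "{}", s),
            Cell::Nil => write!(f, "nil"),
            Cell::Cons(head, tail) => {
                // Walk the chain, printing every element and the terminator.
                write!(f, "({}", head)?;
                let mut cur = tail;
                loop {
                    match &**cur {
                        Cell::Cons(h, t) => {
                            write!(f, " {}", h)?;
                            cur = t;
                        }
                        other => {
                            write!(f, " {})", other)?;
                            break;
                        }
                    }
                }
                Ok(())
            }
        }
    }
}

fn main() {
    // "(one two)" as a cons list: Cons(one, Cons(two, Nil)).
    let list = Cell::Cons(
        Box::new(Cell::Symbol("one".into())),
        Box::new(Cell::Cons(
            Box::new(Cell::Symbol("two".into())),
            Box::new(Cell::Nil),
        )),
    );
    assert_eq!(format!("{}", list), "(one two nil)");
    println!("{}", list);
}

If the real Cell type is laid out differently, only the Display walk changes; the point is that a printed list terminator accounts for the trailing nil in every expected string above.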