* clean up all tests
* bugfix for zero value functions, and test
* removed expand function, put in snippets
* added doc strings to Symbol type
* added doc strings to symbol declarations
* implemented display for Args type
* wrote a help function
* wrote docstrings for all builtins and config vars
120 lines
3.1 KiB
Rust
mod lex_tests {
|
|
use relish::ast::lex;
|
|
|
|
#[test]
|
|
fn test_lex_basic_pair() {
|
|
let document = String::from("(hello 'world')");
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
document
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_lex_basic_list() {
|
|
let document = String::from("(hello 'world' 1 2 3)");
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
document
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_lex_complex_list() {
|
|
let document = String::from("(hello 'world' (1 2 (1 2 3)) 1 2 3)");
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
document
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_bad_symbol() {
|
|
let document = String::from("(as;dd)");
|
|
let output: &str = "Problem lexing document: \"Unparsable token: as;dd\"";
|
|
assert_eq!(
|
|
lex(&document).err().unwrap(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_list_delim_in_str() {
|
|
let document = String::from("('(')");
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
document
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_empty_string() {
|
|
let document = String::from("('')");
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
document
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_unmatched_list_delim_flat() {
|
|
let document = String::from("(one two");
|
|
let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
|
|
assert_eq!(
|
|
lex(&document).err().unwrap(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_unmatched_list_delim_complex() {
|
|
let document = String::from("(one two (three)");
|
|
let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
|
|
assert_eq!(
|
|
lex(&document).err().unwrap(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_comment() {
|
|
let document = String::from("#!/bin/relish\n(one two)");
|
|
let output: &str = "(one two)";
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_postline_comment() {
|
|
let document =
|
|
String::from("#!/bin/relish\n((one two)# another doc comment\n(three four))");
|
|
let output: &str = "((one two) (three four))";
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_inline_comment() {
|
|
let document = String::from("#!/bin/relish\n((one two)\n# another doc comment\nthree)");
|
|
let output: &str = "((one two) three)";
|
|
assert_eq!(
|
|
lex(&document).unwrap().to_string(),
|
|
output.to_string(),
|
|
);
|
|
|
|
}
|
|
|
|
#[test]
|
|
fn test_bad_token_list() {
|
|
let document = String::from("(one t(wo)");
|
|
let output: &str = "Problem lexing document: \"list started in middle of another token\"";
|
|
assert_eq!(
|
|
lex(&document).err().unwrap(),
|
|
output.to_string(),
|
|
);
|
|
}
|
|
}
|