Test cleanup, zero-value function bugfix, and documentation for builtins (details below)

* clean up all tests
* bugfix for zero value functions, and test
* removed expand function, put in snippets
* added doc strings to Symbol type
* added doc strings to symbol declarations
* implemented display for Args type
* wrote a help function
* wrote docstrings for all builtins and config vars
This commit is contained in:
Ava Hahn 2023-03-05 22:18:49 -08:00
parent 4b587f11ab
commit dc6342bc74
Signed by untrusted user who does not match committer: affine
GPG key ID: 3A4645B8CF806069
16 changed files with 575 additions and 677 deletions

View file

@ -4,131 +4,86 @@ mod lex_tests {
/// A well-formed two-element pair must lex successfully and
/// round-trip back to its exact source text via `to_string`.
#[test]
fn test_lex_basic_pair() {
    let document = String::from("(hello 'world')");
    // One lex call, one assertion — the old match-based check duplicated
    // this and used the `assert!(false)` anti-pattern.
    assert_eq!(lex(&document).unwrap().to_string(), document);
}
/// A flat list mixing a symbol, a string, and numbers must lex and
/// round-trip back to its exact source text.
#[test]
fn test_lex_basic_list() {
    let document = String::from("(hello 'world' 1 2 3)");
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), document);
}
/// Nested lists several levels deep must lex and round-trip verbatim.
#[test]
fn test_lex_complex_list() {
    let document = String::from("(hello 'world' (1 2 (1 2 3)) 1 2 3)");
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), document);
}
/// A token containing an illegal character (`;`) must be rejected
/// with the exact "Unparsable token" error string.
#[test]
fn test_bad_symbol() {
    let document = String::from("(as;dd)");
    let expected: &str = "Problem lexing document: \"Unparsable token: as;dd\"";
    // `.err().unwrap()` panics if lexing unexpectedly succeeds, replacing
    // the old match arm that printed and then hit `assert!(false)`.
    assert_eq!(lex(&document).err().unwrap(), expected.to_string());
}
/// A list delimiter appearing inside a string literal must be treated
/// as string content, and the document must round-trip verbatim.
#[test]
fn test_list_delim_in_str() {
    let document = String::from("('(')");
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), document);
}
/// An empty string literal inside a list must lex and round-trip verbatim.
#[test]
fn test_empty_string() {
    let document = String::from("('')");
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), document);
}
/// A flat document whose opening `(` is never closed must fail with
/// the exact "Unmatched list delimiter" error string.
#[test]
fn test_unmatched_list_delim_flat() {
    let document = String::from("(one two");
    let expected: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
    // `.err().unwrap()` panics if lexing unexpectedly succeeds.
    assert_eq!(lex(&document).err().unwrap(), expected.to_string());
}
/// An unclosed outer list must still be rejected even when a nested
/// inner list is properly balanced.
#[test]
fn test_unmatched_list_delim_complex() {
    let document = String::from("(one two (three)");
    let expected: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
    // `.err().unwrap()` panics if lexing unexpectedly succeeds.
    assert_eq!(lex(&document).err().unwrap(), expected.to_string());
}
/// A leading shebang-style `#` comment line must be stripped by the
/// lexer; only the list after the newline survives.
#[test]
fn test_comment() {
    let document = String::from("#!/bin/relish\n(one two)");
    let expected: &str = "(one two)";
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), expected.to_string());
}
#[test]
@ -136,44 +91,30 @@ mod lex_tests {
let document =
String::from("#!/bin/relish\n((one two)# another doc comment\n(three four))");
let output: &str = "((one two) (three four))";
match lex(&document) {
Ok(tree) => {
assert_eq!(tree.to_string(), output.to_string());
}
Err(s) => {
print!("{}\n", s);
assert!(false);
}
}
assert_eq!(
lex(&document).unwrap().to_string(),
output.to_string(),
);
}
/// A `#` comment on its own line in the middle of a list must be
/// dropped, leaving the surrounding tokens joined in one list.
#[test]
fn test_inline_comment() {
    let document = String::from("#!/bin/relish\n((one two)\n# another doc comment\nthree)");
    let expected: &str = "((one two) three)";
    // Collapsed from a redundant match-plus-assert pair to a single check.
    assert_eq!(lex(&document).unwrap().to_string(), expected.to_string());
}
/// Opening a list in the middle of another token (`t(wo`) must fail
/// with the exact "list started in middle of another token" error.
#[test]
fn test_bad_token_list() {
    let document = String::from("(one t(wo)");
    let expected: &str = "Problem lexing document: \"list started in middle of another token\"";
    // `.err().unwrap()` panics if lexing unexpectedly succeeds.
    assert_eq!(lex(&document).err().unwrap(), expected.to_string());
}
}