* split into multi member workspace in preparation for a no_std core * env and posix stuff neatly crammed into a separate shell project * some pokes at interactive-devel.f * updated ci * removed 'l' shortcut for 'load' and update docs * remove out of date readme content * updated tests * more sensible cond implementation and extra tests * substr stdlib function with tests Signed-off-by: Ava Affine <ava@sunnypup.io>
111 lines
3.8 KiB
Rust
111 lines
3.8 KiB
Rust
mod lex_tests {
|
|
use flesh::ast::lex;
|
|
|
|
#[test]
|
|
fn test_lex_basic_pair() {
|
|
let document = String::from("(hello \"world\")");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_lex_basic_list() {
|
|
let document = String::from("(hello \"world\" 1 2 3)");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_bad_symbol() {
|
|
let document = String::from("(as@dd)");
|
|
let output: &str = "Problem lexing document: \"Unparsable token \\\"as@dd\\\" at char 7\"";
|
|
assert_eq!(lex(&document).err().unwrap().0.first().unwrap().message, output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_lex_complex_list() {
|
|
let document = String::from("(hello \"world\" (1 2 (1 2 3)) 1 2 3)");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_list_delim_in_str() {
|
|
let document = String::from("(\"(\")");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_comment_delim_in_str() {
|
|
let document = String::from("(\"#\")");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_empty_string() {
|
|
let document = String::from("(\"\")");
|
|
assert_eq!(lex(&document).unwrap().to_string(), document);
|
|
}
|
|
|
|
#[test]
|
|
fn test_unmatched_list_delim_flat() {
|
|
let document = String::from("(one two");
|
|
let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
|
|
assert_eq!(lex(&document).err().unwrap().0.first().unwrap().message, output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_unmatched_list_delim_complex() {
|
|
let document = String::from("(one two (three)");
|
|
let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
|
|
assert_eq!(lex(&document).err().unwrap().0.first().unwrap().message, output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_comment_1() {
|
|
let document = String::from("#!/bin/flesh\n(one two)");
|
|
let output: &str = "(one two)";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_comment_2() {
|
|
let document = String::from(";; big doc string\n(one two)");
|
|
let output: &str = "(one two)";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_postline_comment_1() {
|
|
let document =
|
|
String::from("#!/bin/flesh\n((one two)# another doc comment\n(\"three\" four))");
|
|
let output: &str = "((one two) (\"three\" four))";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_postline_comment_2() {
|
|
let document =
|
|
String::from("#!/bin/flesh\n((one two);;another doc comment\n(three four))");
|
|
let output: &str = "((one two) (three four))";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_inline_comment_1() {
|
|
let document = String::from("#!/flesh/flesh\n((one two)\n# another comment\nthree)");
|
|
let output: &str = "((one two) three)";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_inline_comment_2() {
|
|
let document = String::from("# head\n((one two)\n;; another comment\nthree)");
|
|
let output: &str = "((one two) three)";
|
|
assert_eq!(lex(&document).unwrap().to_string(), output.to_string(),);
|
|
}
|
|
|
|
#[test]
|
|
fn test_bad_token_list() {
|
|
let document = String::from("(one t(wo)");
|
|
let output: &str = "Problem lexing document: \"list started in middle of another token: t\"";
|
|
assert_eq!(lex(&document).err().unwrap().0.first().unwrap().message, output.to_string(),);
|
|
}
|
|
}
|