big temp status
Signed-off-by: Ava Hahn <ava@aidanis.online>
parent 45453f819f
commit 5261efbc65
12 changed files with 960 additions and 224 deletions
@@ -1,12 +1,12 @@
 mod lex_tests {
-    use relish::ast::{ast_to_string, lex};
+    use relish::ast::lex;
 
     #[test]
     fn test_lex_basic_pair() {
         let document: &str = "(hello 'world')";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), document);
+                assert_eq!(tree, document);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -18,9 +18,9 @@ mod lex_tests {
     #[test]
     fn test_lex_basic_list() {
         let document: &str = "(hello 'world' 1 2 3)";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), document);
+                assert_eq!(tree, document);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -32,9 +32,9 @@ mod lex_tests {
     #[test]
     fn test_lex_complex_list() {
         let document: &str = "(hello 'world' (1 2 (1 2 3)) 1 2 3)";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), document);
+                assert_eq!(tree, document);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -47,13 +47,13 @@ mod lex_tests {
     fn test_bad_symbol() {
         let document: &str = "(as;dd)";
         let output: &str = "Problem lexing document: \"Unparsable token: as;dd\"";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                print!("Bad token yielded: {}\n", ast_to_string(tree));
+                print!("Bad token yielded: {}\n", tree);
                 assert!(false);
             }
             Err(s) => {
-                assert_eq!(s, output.to_string());
+                assert_eq!(s, output);
             }
         }
     }
@@ -61,9 +61,9 @@ mod lex_tests {
     #[test]
     fn test_list_delim_in_str() {
         let document: &str = "('(')";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), document);
+                assert_eq!(tree, document);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -75,9 +75,9 @@ mod lex_tests {
     #[test]
     fn test_empty_string() {
         let document: &str = "('')";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), document);
+                assert_eq!(tree, document);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -90,13 +90,13 @@ mod lex_tests {
     fn test_unmatched_list_delim_flat() {
         let document: &str = "(one two";
         let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                print!("Bad token yielded: {}\n", ast_to_string(tree));
+                print!("Bad token yielded: {}\n", tree);
                 assert!(false);
             }
             Err(s) => {
-                assert_eq!(s, output.to_string());
+                assert_eq!(s, output);
             }
         }
     }
@@ -105,13 +105,13 @@ mod lex_tests {
     fn test_unmatched_list_delim_complex() {
         let document: &str = "(one two (three)";
         let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                print!("Bad token yielded: {}\n", ast_to_string(tree));
+                print!("Bad token yielded: {}\n", tree);
                 assert!(false);
             }
             Err(s) => {
-                assert_eq!(s, output.to_string());
+                assert_eq!(s, output);
             }
         }
     }
@@ -120,9 +120,9 @@ mod lex_tests {
     fn test_comment() {
         let document: &str = "#!/bin/relish\n(one two)";
         let output: &str = "(one two)";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), output.to_string());
+                assert_eq!(tree, output);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -135,9 +135,9 @@ mod lex_tests {
     fn test_postline_comment() {
         let document: &str = "#!/bin/relish\n((one two)# another doc comment\n(three four))";
         let output: &str = "((one two) (three four))";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), output.to_string());
+                assert_eq!(tree, output.to_string());
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -150,9 +150,9 @@ mod lex_tests {
     fn test_inline_comment() {
         let document: &str = "#!/bin/relish\n((one two)\n# another doc comment\nthree)";
         let output: &str = "((one two) three)";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                assert_eq!(ast_to_string(tree), output.to_string());
+                assert_eq!(tree, output);
             }
             Err(s) => {
                 print!("{}\n", s);
@@ -165,13 +165,13 @@ mod lex_tests {
     fn test_bad_token_list() {
         let document: &str = "(one t(wo)";
         let output: &str = "Problem lexing document: \"list started in middle of another token\"";
-        match lex(document.to_string()) {
+        match lex(document) {
             Ok(tree) => {
-                print!("Bad token yielded: {}\n", ast_to_string(tree));
+                print!("Bad token yielded: {}\n", tree);
                 assert!(false);
            }
             Err(s) => {
-                assert_eq!(s, output.to_string());
+                assert_eq!(s, output);
             }
         }
     }
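
Read together, the test changes describe a new contract for the lexer: lex now borrows the input as a &str instead of consuming an owned String, and both its Ok and Err values can be compared against string slices with assert_eq!, so the tests no longer round-trip through ast_to_string. The sketch below shows only that shape; the Result<String, String> signature and the body are assumptions for illustration, not the actual relish::ast API, which is not part of this commit.

// Hypothetical sketch only -- the real relish::ast types are not shown in this commit.
// The updated tests require only that:
//   * lex accepts a borrowed &str,
//   * the Ok value compares against a &str with assert_eq!,
//   * the Err value compares against a &str with assert_eq!.
pub fn lex(document: &str) -> Result<String, String> {
    // A real lexer would build an AST from `document`; this stub only checks
    // parenthesis balance and echoes the input to illustrate the contract.
    if document.matches('(').count() != document.matches(')').count() {
        return Err("Problem lexing document: \"Unmatched list delimiter in input\"".to_string());
    }
    Ok(document.to_string())
}

If the real lex instead returns a dedicated AST type, the same asserts would still compile as long as that type implements PartialEq<&str> (or an equivalent comparison), which is what dropping ast_to_string from the imports suggests.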