mod lex_tests {
    use relish::ast::{ast_to_string, lex};

    // A lexed document should round-trip through ast_to_string unchanged.
    #[test]
    fn test_lex_basic_pair() {
        let document: &str = "(hello 'world')";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), document),
            Err(s) => panic!("{}", s),
        }
    }

    #[test]
    fn test_lex_basic_list() {
        let document: &str = "(hello 'world' 1 2 3)";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), document),
            Err(s) => panic!("{}", s),
        }
    }

    #[test]
    fn test_lex_complex_list() {
        let document: &str = "(hello 'world' (1 2 (1 2 3)) 1 2 3)";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), document),
            Err(s) => panic!("{}", s),
        }
    }

    // A symbol containing an illegal character should be rejected with a lexing error.
    #[test]
    fn test_bad_symbol() {
        let document: &str = "(as;dd)";
        let output: &str = "Problem lexing document: \"Unparsable token: as;dd\"";
        match lex(document.to_string()) {
            Ok(tree) => panic!("Bad token yielded: {}", ast_to_string(tree)),
            Err(s) => assert_eq!(s, output),
        }
    }

    // An unterminated list should report an unmatched delimiter.
    #[test]
    fn test_unmatched_list_delim_flat() {
        let document: &str = "(one two";
        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
        match lex(document.to_string()) {
            Ok(tree) => panic!("Bad token yielded: {}", ast_to_string(tree)),
            Err(s) => assert_eq!(s, output),
        }
    }

    // The same applies when the unterminated list contains nested, well-formed lists.
    #[test]
    fn test_unmatched_list_delim_complex() {
        let document: &str = "(one two (three)";
        let output: &str = "Problem lexing document: \"Unmatched list delimiter in input\"";
        match lex(document.to_string()) {
            Ok(tree) => panic!("Bad token yielded: {}", ast_to_string(tree)),
            Err(s) => assert_eq!(s, output),
        }
    }

    // Comments ('#' to end of line) should be dropped from the lexed output.
    #[test]
    fn test_comment() {
        let document: &str = "#!/bin/relish\n(one two)";
        let output: &str = "(one two)";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), output),
            Err(s) => panic!("{}", s),
        }
    }

    #[test]
    fn test_postline_comment() {
        let document: &str = "#!/bin/relish\n((one two)# another doc comment\n(three four))";
        let output: &str = "((one two) (three four))";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), output),
            Err(s) => panic!("{}", s),
        }
    }

    #[test]
    fn test_inline_comment() {
        let document: &str = "#!/bin/relish\n((one two)\n# another doc comment\nthree)";
        let output: &str = "((one two) three)";
        match lex(document.to_string()) {
            Ok(tree) => assert_eq!(ast_to_string(tree), output),
            Err(s) => panic!("{}", s),
        }
    }

    // A list opened in the middle of another token should be rejected.
    #[test]
    fn test_bad_token_list() {
        let document: &str = "(one t(wo)";
        let output: &str = "Problem lexing document: \"list started in middle of another token\"";
        match lex(document.to_string()) {
            Ok(tree) => panic!("Bad token yielded: {}", ast_to_string(tree)),
            Err(s) => assert_eq!(s, output),
        }
    }
}