significant refactor and simplification
parent ca4c557d95
commit 7555a90328

8 changed files with 501 additions and 422 deletions
src/lex.rs: 30 changed lines (15 additions, 15 deletions)
```diff
@@ -23,7 +23,7 @@ const UNMATCHED_LIST_DELIM: &str = "Unmatched list delimiter in input";
 /* takes a line of user input
  * returns an unsimplified tree of tokens.
  */
-pub fn lex<'a>(document: &'a String) -> Result<Box<Seg>, String> {
+pub fn lex(document: &String) -> Result<Box<Seg>, String> {
     if !document.is_ascii() {
         return Err("document may only contain ascii characters".to_string());
     }
@@ -42,7 +42,7 @@ pub fn lex<'a>(document: &'a String) -> Result<Box<Seg>, String> {
  * Returns Ok(Rc<Seg>) if lexing passes
  * Returns Err(String) if an error occurs
  */
-fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
+fn process(document: &String) -> Result<Box<Seg>, String> {
     let doc_len = document.len();
 
     if doc_len == 0 {
```
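The two signature changes above drop lifetime parameters that Rust's elision rules already cover: each function takes a single reference and returns no borrowed data, so the compiler infers the same lifetime either way. A minimal sketch of the equivalence, with a hypothetical `Seg` stand-in just so the signatures compile:

```rust
// Hypothetical stand-in for the lexer's Seg token-tree type.
struct Seg;

// Before this commit: the explicit 'a ties the borrow of `document` to
// nothing in the return type, so it adds noise without adding meaning.
fn lex_explicit<'a>(document: &'a String) -> Result<Box<Seg>, String> {
    let _ = document;
    Ok(Box::new(Seg))
}

// After this commit: elision assigns exactly the same lifetime.
fn lex_elided(document: &String) -> Result<Box<Seg>, String> {
    let _ = document;
    Ok(Box::new(Seg))
}

fn main() {
    let doc = String::from("(a b c)");
    assert!(lex_explicit(&doc).is_ok());
    assert!(lex_elided(&doc).is_ok());
}
```

(`&str` would be the more idiomatic parameter type than `&String`; the `tok_is_symbol` hunk at the end of this diff makes that change for the helper, while `lex` and `process` keep `&String` here.)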
```diff
@@ -87,7 +87,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
         // set alloc_list
         if delim == ')' {
             alloc_list = true;
-            if ref_stack.len() < 1 {
+            if ref_stack.is_empty() {
                 return Err("too many end parens".to_string());
             }
         }
@@ -101,7 +101,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
         // try to generalize all whitespace
         if !needs_alloc && char::is_whitespace(c) && !is_str {
             // dont make empty tokens just because the document has consecutive whitespace
-            if token.len() == 0 {
+            if token.is_empty() {
                 continue;
             }
             needs_alloc = true;
@@ -116,7 +116,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
                 continue;
             }
 
-            if token != "" {
+            if !token.is_empty() {
                 return Err("list started in middle of another token".to_string());
             }
 
@@ -147,7 +147,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
          * 2. Handle expansion of current list ref
          */
         } else {
-            if token.len() == 0 && !is_str && !alloc_list {
+            if token.is_empty() && !is_str && !alloc_list {
                 return Err("Empty token".to_string());
             }
 
@@ -158,7 +158,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
                 is_str = false;
                 token = String::new();
                 current_seg.append(obj);
-            } else if token.len() > 0 {
+            } else if !token.is_empty() {
                 if token == "true" {
                     obj = Box::from(Ctr::Bool(true));
                 } else if token == "false" {
@@ -179,7 +179,7 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
 
         if alloc_list {
             // return if we have finished the document
-            if ref_stack.len() == 0 {
+            if ref_stack.is_empty() {
                 return Ok(Box::new(current_seg));
             }
 
@@ -199,9 +199,10 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
     }
 
     if is_str {
-        return Err(UNMATCHED_STR_DELIM.to_string());
+        Err(UNMATCHED_STR_DELIM.to_string())
+    } else {
+        Err(UNMATCHED_LIST_DELIM.to_string())
     }
-    return Err(UNMATCHED_LIST_DELIM.to_string());
 }
 
 /* Returns true if token
```
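Two idioms recur through these hunks: every `len() == 0`, `len() < 1`, and `!= ""` comparison becomes `is_empty()`, and the pair of trailing `return Err(...)` statements collapses into one `if`/`else` used as the function's tail expression. A minimal sketch of that last pattern in a hypothetical `leftover_delim_error` wrapper; the `UNMATCHED_STR_DELIM` message is assumed, only `UNMATCHED_LIST_DELIM`'s value appears in the first hunk header:

```rust
const UNMATCHED_STR_DELIM: &str = "Unmatched string delimiter in input"; // assumed value
const UNMATCHED_LIST_DELIM: &str = "Unmatched list delimiter in input";

// Reaching here means some delimiter was left open. The if/else is an
// expression, so each branch is the return value without a `return` keyword.
fn leftover_delim_error(is_str: bool) -> Result<(), String> {
    if is_str {
        Err(UNMATCHED_STR_DELIM.to_string())
    } else {
        Err(UNMATCHED_LIST_DELIM.to_string())
    }
}

fn main() {
    assert_eq!(leftover_delim_error(true), Err(UNMATCHED_STR_DELIM.to_string()));
    assert_eq!(leftover_delim_error(false), Err(UNMATCHED_LIST_DELIM.to_string()));
}
```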
```diff
@@ -209,13 +210,12 @@ fn process<'a>(document: &'a String) -> Result<Box<Seg>, String> {
  *
  * else returns false
  */
-fn tok_is_symbol(token: &String) -> Option<String> {
-    let tok = token.as_str();
-    for t in tok.chars() {
-        if !t.is_alphabetic() && !t.is_digit(10) && !(t == '-') && !(t == '_') {
+fn tok_is_symbol(token: &str) -> Option<String> {
+    for t in token.chars() {
+        if !t.is_alphanumeric() && t != '-' && t != '_' {
             return None;
         }
     }
 
-    return Some(String::from(tok));
+    Some(String::from(token))
 }
```
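The `tok_is_symbol` rewrite borrows `&str` instead of `&String` (dropping the `as_str()` conversion) and folds `is_alphabetic() || is_digit(10)` into `is_alphanumeric()`. Over arbitrary Unicode those predicates differ, since `is_alphanumeric` also accepts non-ASCII letters and numbers, but `lex` rejects non-ASCII documents up front, and on ASCII input the old and new checks accept exactly the same characters. A sketch of the refactored helper plus a hypothetical check of that equivalence:

```rust
// The helper as it reads after this commit.
fn tok_is_symbol(token: &str) -> Option<String> {
    for t in token.chars() {
        if !t.is_alphanumeric() && t != '-' && t != '_' {
            return None;
        }
    }
    Some(String::from(token))
}

// Hypothetical check: over every ASCII character, the pre-commit and
// post-commit per-character predicates agree.
fn main() {
    for c in (0u8..128).map(char::from) {
        let old = c.is_alphabetic() || c.is_digit(10) || c == '-' || c == '_';
        let new = c.is_alphanumeric() || c == '-' || c == '_';
        assert_eq!(old, new);
    }
    assert_eq!(tok_is_symbol("foo-bar_9"), Some("foo-bar_9".to_string()));
    assert_eq!(tok_is_symbol("foo!"), None);
}
```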