diff --git a/pkg/shsh/token.go b/pkg/shsh/token.go
index 54ddd83..cb71c6a 100644
--- a/pkg/shsh/token.go
+++ b/pkg/shsh/token.go
@@ -2,7 +2,6 @@ package token;
 
 import (
 	"strings"
-	"bytes"
 )
 
 type operation func(Token) Token
@@ -13,46 +12,56 @@ type operation func(Token) Token
  * OPERATOR: an entry in a symtable
  * OPERATION: a list starting with an operator
  */
-type parse_tag enum {
-	LIST_T iota
-	OPERAND_T iota
-	OPERATOR_T iota
-	OPERATION_T iota
-}
+// parse_tag classifies a parsed Token; Go has no enum, so use typed iota consts.
+type parse_tag int
+
+const (
+	LIST_T parse_tag = iota
+	OPERAND_T
+	OPERATOR_T
+	OPERATION_T
+)
 
 type Token struct {
-	Token *next
-	interface{} _inner
+	next   *Token
+	tag    parse_tag
+	_inner interface{}
 }
 
-func lex(string input) Token {
+// lex splits input into a linked list of Tokens.  A quote or backquote
+// switches the active delimiter to the matching quote, '(' switches it
+// to ')', and otherwise tokens end at a space.
+func lex(input string) Token {
 	ret := new(Token)
 	iter := &ret
+	delim := ' '
 	var tok strings.Builder
 	iter_alloced := false
-	for pos, char := range input {
+	for _, char := range input {
 		switch char {
-		case '\'', '\"', '`':
-			// parse new token
-			fallthrough
-		case ' ', '\t', '\n', '\f', '\r':
-			*iter = new(Token)
-			(*iter)->_inner = tok.String()
-			iter_alloced = true
-		case '(':
-			// match paren
-			*iter = new(Token)
-			(*iter)->_inner = lex(tok.String())
-			iter_alloced = true
-		case ')':
-			// SHOULDNT HAPPEN
-			tok.WriteRune(char)
-			buf_is_dirty = true
+		case '\'', '"', '`':
+			delim = char
+		case '(':
+			delim = ')'
+		case delim:
+			*iter = new(Token)
+			// (*iter)._inner, not *iter._inner: a selector binds
+			// tighter than unary '*'.
+			(*iter)._inner = tok.String()
+			iter_alloced = true
+			delim = ' '
+		default:
+			tok.WriteRune(char)
 		}
 		if iter_alloced {
-			iter = &(*iter)->next
+			iter = &(*iter).next
 			iter_alloced = false
-			// clear buffer
+			tok.Reset()
 		}
 	}
+	// Flush the trailing token: input need not end in a delimiter.
+	if tok.Len() > 0 {
+		*iter = new(Token)
+		(*iter)._inner = tok.String()
+	}
+	return *ret
 }
@@ -66,9 +65,12 @@ type Token struct {
-func parse(Token *arg) {
+// parse classifies each Token in the list (stub).
+func parse(arg *Token) {
 	// Determine if a list is an operation or a list
 }
 
-func eval(Token *tree) Token* {
+// eval simplifies operations deepest-first; stub returns its input.
+func eval(tree *Token) *Token {
 	// Find operations
 	// Simplify operations deepest first
 	// return tree of final Tokens
+	return tree
 }