package token

import (
	"strings"
)

// operation transforms one Token into another.
type operation func(Token) Token

// parse_tag classifies a parsed Token:
//
//	LIST_T:      a list of elements
//	OPERAND_T:   a string or number
//	OPERATOR_T:  an entry in a symtable
//	OPERATION_T: a list starting with an operator
//
// Go has no `enum` keyword; a typed int with iota constants is the idiom.
type parse_tag int

const (
	LIST_T parse_tag = iota
	OPERAND_T
	OPERATOR_T
	OPERATION_T
)

// Token is one node of a singly linked list of lexed tokens.
// inner holds the token's payload (currently the lexeme string).
type Token struct {
	next  *Token
	inner interface{}
}

// lex splits input into a linked list of tokens and returns the head of
// the list, or nil when input contains no tokens.
//
// Whitespace and quote characters terminate the current token; '(' and
// ')' terminate the current token and are additionally emitted as
// one-character tokens of their own. Quoted strings and nested lists
// are not yet parsed structurally — TODO per the original sketch.
func lex(input string) *Token {
	head := &Token{}        // sentinel; head.next is the real list
	iter := head            // tail of the list under construction
	var buf strings.Builder // characters of the token in progress

	// flush appends the buffered lexeme (if any) as a new token.
	flush := func() {
		if buf.Len() == 0 {
			return
		}
		iter.next = &Token{inner: buf.String()}
		iter = iter.next
		buf.Reset()
	}

	for _, char := range input {
		switch char {
		case '\'', '"', '`',
			' ', '\t', '\n', '\f', '\r':
			// Token boundary: emit whatever has accumulated.
			flush()
		case '(', ')':
			// Parens are both a boundary and a token themselves.
			flush()
			iter.next = &Token{inner: string(char)}
			iter = iter.next
		default:
			buf.WriteRune(char)
		}
	}
	flush() // emit a trailing token not followed by a delimiter
	return head.next
}

// parse classifies each token in the list (operand vs. operator via the
// symbol table; list vs. operation). Not yet implemented.
func parse(arg *Token) {
	// TODO: if operand, determine whether it is an operator
	// TODO: determine operator presence in symbol table
	// TODO: determine whether a list is an operation or a plain list
}

// eval reduces a parsed tree to a tree of final Tokens by simplifying
// the deepest operations first. Not yet implemented; currently returns
// its input unchanged.
func eval(tree *Token) *Token {
	// TODO: find operations
	// TODO: simplify operations, deepest first
	return tree
}