// Package token provides a minimal lexer for a Lisp-like language:
// whitespace-separated runs of characters become atom tokens, while
// quote characters and parentheses are emitted as single-rune tokens.
package token

import "strings"

// operation transforms one Token into another; reserved for the
// evaluator. TODO confirm intended signature once eval is implemented.
type operation func(Token) Token

// Token is a node in a singly linked list of lexed tokens.
type Token struct {
	next  *Token // following token, nil at end of list
	inner any    // token payload; the lexer stores the token text as a string
}

// lex splits input into a linked list of tokens and returns the list
// head, or nil when input contains no tokens. Whitespace ends the
// current atom; quotes and parentheses become their own tokens.
// NOTE(review): quoted strings are not yet parsed as single tokens —
// the original marked these cases "parse new token" but left them empty.
func lex(input string) *Token {
	head := &Token{} // sentinel; the real list starts at head.next
	tail := head
	var buf strings.Builder // accumulates the current atom

	// flush appends the buffered atom (if any) as a new token and
	// clears the buffer — replaces the original's dirty-flag dance.
	flush := func() {
		if buf.Len() > 0 {
			tail.next = &Token{inner: buf.String()}
			tail = tail.next
			buf.Reset()
		}
	}
	// emit appends a single structural token.
	emit := func(s string) {
		tail.next = &Token{inner: s}
		tail = tail.next
	}

	for _, ch := range input {
		switch ch {
		case ' ', '\t', '\n', '\f', '\r':
			// whitespace terminates the current atom
			flush()
		case '\'', '"', '`', '(', ')':
			// structural characters end the current atom and
			// stand alone as one-rune tokens
			flush()
			emit(string(ch))
		default:
			// anything else extends the current atom
			buf.WriteRune(ch)
		}
	}
	flush() // trailing atom, if any
	return head.next
}

// parse is a placeholder for the list/tree builder.
// TODO: build an AST from the token stream.
func parse(arg *Token) {}

// eval is a placeholder evaluator; it currently returns its input
// unchanged so the package compiles. TODO: implement evaluation.
func eval(tree *Token) *Token { return tree }