package token
import (
	"bytes"
	// "strings" // TODO: not referenced yet; uncomment when it is actually needed
)
// operation transforms one Token into another.
type operation func(Token) Token
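
// identity is a hypothetical example of an operation value (nothing calls it
// yet): it returns its argument unchanged.
var identity operation = func(t Token) Token { return t }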
// Token is a node in a singly linked list of lexed tokens.
type Token struct {
	next   *Token      // next token in the list; nil at the tail
	_inner interface{} // the token's underlying value
}
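
// walk is an illustrative helper (hypothetical; nothing calls it yet): it
// assumes lex links tokens through next and applies op to each node in order,
// storing the result back in place.
func walk(head *Token, op operation) {
	for t := head; t != nil; t = t.next {
		*t = op(*t)
	}
}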
// lex scans input one rune at a time and builds the token list it returns.
func lex(input string) *Token {
	ret := new(Token)
	iter := ret
	_ = iter // placeholder: iter will walk the list once tokens are actually emitted

	// character buffer for the token currently being read (a bytes.Buffer, so no new/make needed)
	var buf bytes.Buffer
	bufIsDirty := false

	for _, char := range input {
		switch char {
		case ' ', '\t', '\n', '\f', '\r':
			// whitespace ends the current token: parse new token
		case '\'', '"', '`':
			// quote begins a literal: parse new token
		case '(':
			// match paren
			// parse list
		case ')':
			// SHOULDN'T HAPPEN: ')' is consumed while its matching '(' is parsed
		default:
			// add to buffer
			buf.WriteRune(char)
			bufIsDirty = true
		}

		if bufIsDirty {
			// clear buffer before the next token
			buf.Reset()
			bufIsDirty = false
		}
	}

	return ret
}
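
// Hypothetical end-to-end flow once lex, parse, and eval are filled in
// (the input and the in-place parse are assumptions about the intended design):
//
//	toks := lex("(add 1 2)")
//	parse(toks)          // assumed to build the tree in place
//	result := eval(toks) // evaluate the resulting tree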
// parse consumes the token list produced by lex; not implemented yet.
func parse(arg *Token) {
}
// eval evaluates a parsed Token tree; not implemented yet.
func eval(tree *Token) *Token {
	return nil // TODO: placeholder until eval is implemented
}