chore: add mercury

This commit is contained in:
xuu
2024-01-22 16:00:58 -07:00
parent eb63312542
commit d4e021386b
47 changed files with 6456 additions and 152 deletions

255
rsql/ast.go Normal file
View File

@@ -0,0 +1,255 @@
package rsql
import (
"bytes"
"strings"
)
// Node is the smallest unit of ast. TokenLiteral reports the source
// token's text; String renders the node back into query syntax.
type Node interface {
	TokenLiteral() string
	String() string
}

// Statement is an executable tree; statementNode is an unexported
// marker restricting implementations to this package.
type Statement interface {
	Node
	statementNode()
}

// Expression is a portion of tree; expressionNode is an unexported
// marker restricting implementations to this package.
type Expression interface {
	Node
	expressionNode()
}
// Identifier is a variable name
type Identifier struct {
	Token Token
	Value string
}

func (id *Identifier) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (id *Identifier) TokenLiteral() string { return id.Token.Literal }

// String renders the identifier as its bare name.
func (id *Identifier) String() string { return id.Value }

// Integer is a numeric value
type Integer struct {
	Token Token
	Value int64
}

func (n *Integer) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (n *Integer) TokenLiteral() string { return n.Token.Literal }

// String renders the literal exactly as it was lexed.
func (n *Integer) String() string { return n.Token.Literal }

// Float is a floating point value
type Float struct {
	Token Token
	Value float64
}

func (f *Float) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (f *Float) TokenLiteral() string { return f.Token.Literal }

// String renders the literal exactly as it was lexed.
func (f *Float) String() string { return f.Token.Literal }

// Bool is a boolean value
type Bool struct {
	Token Token
	Value bool
}

func (b *Bool) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (b *Bool) TokenLiteral() string { return b.Token.Literal }

// String renders the literal exactly as it was lexed.
func (b *Bool) String() string { return b.Token.Literal }

// Null is an empty value
type Null struct {
	Token Token
}

func (n *Null) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (n *Null) TokenLiteral() string { return n.Token.Literal }

// String renders the literal exactly as it was lexed.
func (n *Null) String() string { return n.Token.Literal }
// String is an array of codepoints
type String struct {
	Token Token
	Value string
}

func (s *String) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (s *String) TokenLiteral() string { return s.Token.Literal }

// String renders the value wrapped in double quotes.
func (s *String) String() string { return `"` + s.Value + `"` }
// Array is an array of tokens
type Array struct {
	Token    Token
	Elements []Expression
}

func (a *Array) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (a *Array) TokenLiteral() string { return a.Token.Literal }

// String renders the elements as a parenthesized, comma-joined list:
// "(e1,e2,...)".
func (a *Array) String() string {
	parts := make([]string, 0, len(a.Elements))
	for _, el := range a.Elements {
		parts = append(parts, el.String())
	}
	return "(" + strings.Join(parts, ",") + ")"
}
// Program is a collection of statements
type Program struct {
	Statements []Statement
	Args       map[string]string
}

func (p *Program) expressionNode() {}

// TokenLiteral returns the literal of the first statement, or "" for an
// empty program.
func (p *Program) TokenLiteral() string {
	if len(p.Statements) == 0 {
		return ""
	}
	return p.Statements[0].TokenLiteral()
}

// String renders every statement in order, then any arguments as a
// "| key:value ..." suffix. Note: Args is a map, so argument order is
// whatever map iteration yields.
func (p *Program) String() string {
	var b strings.Builder
	for _, stmt := range p.Statements {
		b.WriteString(stmt.String())
	}
	if len(p.Args) > 0 {
		b.WriteRune('|')
		for key, value := range p.Args {
			b.WriteRune(' ')
			b.WriteString(key)
			b.WriteRune(':')
			b.WriteString(value)
		}
	}
	return b.String()
}
// ExpressionStatement is a collection of expressions
type ExpressionStatement struct {
	Token      Token
	Expression Expression
}

func (ExpressionStatement) statementNode() {}

// TokenLiteral returns the literal value of a token
func (e ExpressionStatement) TokenLiteral() string { return e.Token.Literal }

// String renders the wrapped expression, or "" when there is none.
func (e ExpressionStatement) String() string {
	if e.Expression == nil {
		return ""
	}
	return e.Expression.String()
}
// PrefixExpression is an expression with a preceding operator
type PrefixExpression struct {
	Token    Token
	Operator string
	Right    Expression
}

func (pe *PrefixExpression) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (pe *PrefixExpression) TokenLiteral() string { return pe.Token.Literal }

// String renders "(<operator><right>)".
func (pe *PrefixExpression) String() string {
	return "(" + pe.Operator + pe.Right.String() + ")"
}
// InfixExpression is two expressions with a infix operator
type InfixExpression struct {
	Token    Token
	Left     Expression
	Operator string
	Right    Expression
}

func (ie *InfixExpression) expressionNode() {}

// TokenLiteral returns the literal value of a token
func (ie *InfixExpression) TokenLiteral() string { return ie.Token.Literal }

// String renders "(<left><operator><right>)", substituting the text
// "nil" for either side that is absent.
func (ie *InfixExpression) String() string {
	side := func(e Expression) string {
		if e == nil {
			return "nil"
		}
		return e.String()
	}
	return "(" + side(ie.Left) + ie.Operator + side(ie.Right) + ")"
}

21
rsql/ast_test.go Normal file
View File

@@ -0,0 +1,21 @@
package rsql
import "testing"
// TestString verifies the canonical rendering of a hand-built program.
// Previously this test only t.Log'ed the output and asserted nothing.
func TestString(t *testing.T) {
	program := &Program{
		Statements: []Statement{
			ExpressionStatement{
				Token: Token{TokEQ, "=="},
				Expression: &InfixExpression{
					Token:    Token{TokEQ, "=="},
					Left:     &Identifier{Token{TokIdent, "foo"}, "foo"},
					Operator: "==",
					Right:    &Integer{Token{TokInteger, "5"}, 5},
				},
			},
		},
	}
	if got, want := program.String(), "(foo==5)"; got != want {
		t.Errorf("program.String() = %q, want %q", got, want)
	}
}

90
rsql/dbcolumns.go Normal file
View File

@@ -0,0 +1,90 @@
package rsql
import (
"fmt"
"reflect"
"strings"
"unicode"
)
// DbColumns database model metadata
type DbColumns struct {
	Cols  []string       // mapped column names, in struct-field order
	index map[string]int // alias (field name or tag) -> offset into Cols
	Table string         // table name, filled in by the caller
	View  string         // view name, filled in by the caller
}

// Col returns the mapped column name for any known alias (field name or
// db/json/graphql tag); it errors when the alias is unknown.
func (d *DbColumns) Col(column string) (s string, err error) {
	idx, ok := d.Index(column)
	if !ok {
		err = fmt.Errorf("column not found on table: %v", column)
		return
	}
	return d.Cols[idx], err
}

// Index returns the column number for an alias.
func (d *DbColumns) Index(column string) (idx int, ok bool) {
	idx, ok = d.index[column]
	return
}

// GetDbColumns builds a metadata struct from the struct type of o.
// Pointer arguments are dereferenced, so both T and *T may be passed
// (previously *T panicked on reflect NumField). The column name is the
// first non-empty of the db, json, graphql tags, falling back to the
// field name; a "-" db tag skips the field. Every alias (field name
// plus each tag) indexes the same column.
// NOTE(review): json/graphql tag values keep any ",opt" suffix verbatim
// — confirm intended.
func GetDbColumns(o interface{}) *DbColumns {
	d := DbColumns{index: make(map[string]int)}
	t := reflect.TypeOf(o)
	for t.Kind() == reflect.Ptr {
		t = t.Elem()
	}
	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)
		// db tags may carry options ("name,opt"); appending "" keeps
		// sp[0] safe when the tag is absent.
		sp := append(strings.Split(field.Tag.Get("db"), ","), "")
		tag := sp[0]
		json := field.Tag.Get("json")
		if tag == "" {
			tag = json
		}
		graphql := field.Tag.Get("graphql")
		if tag == "" {
			tag = graphql
		}
		if tag == "-" {
			continue
		}
		if tag == "" {
			tag = field.Name
		}
		d.index[field.Name] = len(d.Cols)
		if _, ok := d.index[tag]; !ok && tag != "" {
			d.index[tag] = len(d.Cols)
		}
		if _, ok := d.index[json]; !ok && json != "" {
			d.index[json] = len(d.Cols)
		}
		if _, ok := d.index[graphql]; !ok && graphql != "" {
			d.index[graphql] = len(d.Cols)
		} else if !ok && graphql == "" {
			// No graphql tag: derive one by lower-casing the leading
			// run of upper-case runes (Foo -> foo, BarBaz -> barBaz).
			a := []rune(field.Name)
			for i := 0; i < len(a); i++ {
				if unicode.IsLower(a[i]) {
					break
				}
				a[i] = unicode.ToLower(a[i])
			}
			graphql = string(a)
			d.index[graphql] = len(d.Cols)
		}
		d.Cols = append(d.Cols, tag)
	}
	return &d
}

258
rsql/lexer.go Normal file
View File

@@ -0,0 +1,258 @@
package rsql
import (
"unicode"
"unicode/utf8"
)
// Lexer produces tokens from an RSQL input string, one rune at a time.
type Lexer struct {
	input        string // source text being scanned
	position     int    // byte offset of the current rune
	readPosition int    // byte offset of the next rune to read
	rune         rune   // current rune; 0 at end of input
}

// NewLexer returns a new lexing generator
func NewLexer(in string) *Lexer {
	l := &Lexer{input: in}
	// Prime the first rune so NextToken can start immediately.
	l.readRune()
	return l
}
// NextToken returns the next token from lexer. Two-rune operators
// ("==", "!=", "!~", "<=", ">=") are recognized by peeking one rune
// ahead; FIQL operators ("=eq=" etc.) are delegated to readFIQL.
func (l *Lexer) NextToken() Token {
	var tok Token
	l.skipSpace()
	switch l.rune {
	case '-':
		// Leading minus: a negative number, or a "-name" identifier
		// (e.g. descending sort); anything else is illegal.
		l.readRune()
		if isNumber(l.rune) {
			var isFloat bool
			tok.Literal, isFloat = l.readNumber()
			if isFloat {
				tok.Type = TokFloat
			} else {
				tok.Type = TokInteger
			}
		} else if isLetter(l.rune) {
			tok.Literal = l.readIdentifier()
			tok.Type = lookupIdent(tok.Literal)
		} else {
			tok = newToken(TokIllegal, l.rune)
			return tok
		}
		tok.Literal = "-" + tok.Literal
		return tok
	case '=':
		// "==" equality, or a FIQL operator when a letter follows.
		r := l.peekRune()
		if r == '=' {
			r := l.rune
			l.readRune()
			tok.Type, tok.Literal = TokEQ, string(r)+string(l.rune)
		} else if isLetter(r) {
			tok = l.readFIQL()
			return tok
		} else {
			tok = newToken(TokIllegal, l.rune)
		}
	case ';':
		tok = newToken(TokAND, l.rune)
	case ',':
		tok = newToken(TokOR, l.rune)
	case ')':
		tok = newToken(TokRParen, l.rune)
	case '(':
		tok = newToken(TokLParen, l.rune)
	case ']':
		tok = newToken(TokRBracket, l.rune)
	case '[':
		tok = newToken(TokLBracket, l.rune)
	case '~':
		tok = newToken(TokLIKE, l.rune)
	case '!':
		// "!=" or "!~"; a bare '!' is illegal.
		if l.peekRune() == '=' {
			r := l.rune
			l.readRune()
			tok.Type, tok.Literal = TokNEQ, string(r)+string(l.rune)
		} else if l.peekRune() == '~' {
			r := l.rune
			l.readRune()
			tok.Type, tok.Literal = TokNLIKE, string(r)+string(l.rune)
		} else {
			tok = newToken(TokIllegal, l.rune)
			return tok
		}
	case '<':
		if l.peekRune() == '=' {
			r := l.rune
			l.readRune()
			tok.Type, tok.Literal = TokLE, string(r)+string(l.rune)
		} else {
			tok = newToken(TokLT, l.rune)
		}
	case '>':
		if l.peekRune() == '=' {
			r := l.rune
			l.readRune()
			tok.Type, tok.Literal = TokGE, string(r)+string(l.rune)
		} else {
			tok = newToken(TokGT, l.rune)
		}
	case '"', '\'':
		// Either quote style delimits a string literal.
		tok.Type = TokString
		tok.Literal = l.readString(l.rune)
	case 0:
		tok.Type, tok.Literal = TokEOF, ""
	default:
		// Numbers, identifiers/keywords, or an illegal rune. These
		// return directly because the read helpers already consumed
		// past the literal.
		if isNumber(l.rune) {
			var isFloat bool
			tok.Literal, isFloat = l.readNumber()
			if isFloat {
				tok.Type = TokFloat
			} else {
				tok.Type = TokInteger
			}
		} else if isLetter(l.rune) {
			tok.Literal = l.readIdentifier()
			tok.Type = lookupIdent(tok.Literal)
		} else {
			tok = newToken(TokIllegal, l.rune)
			return tok
		}
		return tok
	}
	l.readRune()
	return tok
}
// readRune advances the lexer by one rune; at end of input l.rune
// becomes 0 and the positions stop moving.
func (l *Lexer) readRune() {
	var size int
	l.rune = 0
	if l.readPosition < len(l.input) {
		l.rune, size = utf8.DecodeRuneInString(l.input[l.readPosition:])
	}
	l.position = l.readPosition
	l.readPosition += size
}

// peekRune reports the upcoming rune without consuming it (0 at end of
// input).
func (l *Lexer) peekRune() rune {
	if l.readPosition >= len(l.input) {
		return 0
	}
	next, _ := utf8.DecodeRuneInString(l.input[l.readPosition:])
	return next
}

// skipSpace consumes a run of Unicode whitespace.
func (l *Lexer) skipSpace() {
	for unicode.IsSpace(l.rune) {
		l.readRune()
	}
}
// readIdentifier consumes an identifier (a letter followed by letters
// or digits) and returns the consumed slice of the input.
func (l *Lexer) readIdentifier() string {
	start := l.position
	if isLetter(l.rune) {
		l.readRune()
	}
	for isLetter(l.rune) || isNumber(l.rune) {
		l.readRune()
	}
	return l.input[start:l.position]
}

// readNumber consumes a run of digits and '.' runes, reporting whether
// a radix point was seen (i.e. the literal is a float).
func (l *Lexer) readNumber() (string, bool) {
	start := l.position
	isFloat := false
	for isNumber(l.rune) {
		isFloat = isFloat || l.rune == '.'
		l.readRune()
	}
	return l.input[start:l.position], isFloat
}
// readString consumes a quoted literal terminated by st and returns its
// raw contents (escape sequences are kept verbatim; the parser rewrites
// them later). A backslash escapes the following rune, so an escaped
// quote does not end the string; an unterminated literal stops at end
// of input.
func (l *Lexer) readString(st rune) string {
	position := l.position + 1
	escape := false
	for {
		l.readRune()
		// Consume an escaped rune blindly. This must be checked BEFORE
		// looking for a new backslash: previously `\\` re-armed the
		// escape flag on the second backslash and swallowed a following
		// closing quote.
		if escape {
			escape = false
			continue
		}
		if l.rune == '\\' {
			escape = true
			continue
		}
		if l.rune == st || l.rune == 0 {
			break
		}
	}
	return l.input[position:l.position]
}
// readFIQL scans a FIQL operator of the form "=name=". A missing
// trailing '=' yields TokIllegal; names without a dedicated token type
// become TokExtend for downstream handling.
func (l *Lexer) readFIQL() Token {
	l.readRune()
	name := l.readIdentifier()
	if l.rune != '=' {
		return Token{TokIllegal, "=" + name}
	}
	l.readRune()
	typ := TokExtend
	switch name {
	case "eq":
		typ = TokEQ
	case "neq":
		typ = TokNEQ
	case "gt":
		typ = TokGT
	case "ge":
		typ = TokGE
	case "lt":
		typ = TokLT
	case "le":
		typ = TokLE
	}
	return Token{typ, "=" + name + "="}
}
// isLetter reports whether r may appear in an identifier: any printable
// rune except whitespace, RSQL punctuation, digits, and '.'.
func isLetter(r rune) bool {
	if unicode.IsSpace(r) {
		return false
	}
	switch r {
	case '"', '\'', '(', ')', ';', ',', '=', '!', '~', '<', '>', '[', ']':
		return false
	}
	// Was `'0' < r && r < '9'`, which wrongly classified '0' and '9'
	// themselves as letters; digits belong to isNumber.
	if '0' <= r && r <= '9' || r == '.' {
		return false
	}
	return unicode.IsPrint(r)
}
// isNumber reports whether r is a decimal digit or the radix point.
func isNumber(r rune) bool {
	switch {
	case r == '.':
		return true
	case r >= '0' && r <= '9':
		return true
	default:
		return false
	}
}

105
rsql/lexer_test.go Normal file
View File

@@ -0,0 +1,105 @@
package rsql
import (
"testing"
"github.com/matryer/is"
)
// TestReservedToken checks that each reserved operator rune (and
// two-rune pair) lexes to the expected token type and literal, in
// order.
func TestReservedToken(t *testing.T) {
	input := `( ) ; , == != ~ < > <= >= [ ]`
	tests := []struct {
		expectedType    TokenType
		expectedLiteral string
	}{
		{TokLParen, "("},
		{TokRParen, ")"},
		{TokAND, ";"},
		{TokOR, ","},
		{TokEQ, "=="},
		{TokNEQ, "!="},
		{TokLIKE, "~"},
		{TokLT, "<"},
		{TokGT, ">"},
		{TokLE, "<="},
		{TokGE, ">="},
		{TokLBracket, "["},
		{TokRBracket, "]"},
	}
	t.Run("Reserved Tokens", func(t *testing.T) {
		is := is.New(t)
		l := NewLexer(input)
		for _, tt := range tests {
			tok := l.NextToken()
			is.Equal(tt.expectedType, tok.Type)
			is.Equal(tt.expectedLiteral, tok.Literal)
		}
	})
}
// TestNextToken lexes a realistic compound query covering strings with
// escapes, FIQL operators, negative floats, grouping, and extension
// operators ("=in="), asserting the full token stream in order.
func TestNextToken(t *testing.T) {
	input := `director=='name\'s';actor=eq="name's";Year=le=2000,Year>=2010;(one <= -1.0, two != true),three=in=(1,2,3);c4==5`
	tests := []struct {
		expectedType    TokenType
		expectedLiteral string
	}{
		{TokIdent, `director`},
		{TokEQ, `==`},
		{TokString, `name\'s`}, // escape kept verbatim by the lexer
		{TokAND, `;`},
		{TokIdent, `actor`},
		{TokEQ, `=eq=`},
		{TokString, `name's`},
		{TokAND, `;`},
		{TokIdent, "Year"},
		{TokLE, "=le="},
		{TokInteger, "2000"},
		{TokOR, ","},
		{TokIdent, "Year"},
		{TokGE, ">="},
		{TokInteger, "2010"},
		{TokAND, ";"},
		{TokLParen, "("},
		{TokIdent, "one"},
		{TokLE, "<="},
		{TokFloat, "-1.0"},
		{TokOR, ","},
		{TokIdent, "two"},
		{TokNEQ, "!="},
		{TokTRUE, "true"},
		{TokRParen, ")"},
		{TokOR, ","},
		{TokIdent, "three"},
		{TokExtend, "=in="}, // unrecognized FIQL name -> extension token
		{TokLParen, "("},
		{TokInteger, "1"},
		{TokOR, ","},
		{TokInteger, "2"},
		{TokOR, ","},
		{TokInteger, "3"},
		{TokRParen, ")"},
		{TokAND, ";"},
		{TokIdent, "c4"},
		{TokEQ, "=="},
		{TokInteger, "5"},
	}
	t.Run("Next Token Parsing", func(t *testing.T) {
		is := is.New(t)
		l := NewLexer(input)
		c := 0
		for _, tt := range tests {
			c++
			tok := l.NextToken()
			is.Equal(tt.expectedType, tok.Type)
			is.Equal(tt.expectedLiteral, tok.Literal)
		}
		is.Equal(c, len(tests))
	})
}

285
rsql/parser.go Normal file
View File

@@ -0,0 +1,285 @@
package rsql
import (
"fmt"
"strconv"
"strings"
)
// Precedence enumerations, lowest binding first.
const (
	_ = iota
	PrecedenceLowest
	PrecedenceAND
	PrecedenceOR
	PrecedenceCompare
	PrecedenceHighest
)

// precidences maps token types to their binding precedence: comparisons
// bind tighter than OR, which binds tighter than AND.
// NOTE(review): identifier is misspelled ("precedences"); renaming
// would touch other declarations in this file.
var precidences = map[TokenType]int{
	TokEQ:   PrecedenceCompare,
	TokNEQ:  PrecedenceCompare,
	TokLT:   PrecedenceCompare,
	TokLE:   PrecedenceCompare,
	TokGT:   PrecedenceCompare,
	TokGE:   PrecedenceCompare,
	TokLIKE: PrecedenceCompare,
	TokOR:   PrecedenceOR,
	TokAND:  PrecedenceAND,
}

type (
	// prefixParseFn parses an expression beginning at the current token.
	prefixParseFn func() Expression
	// infixParseFn parses an infix expression given its left operand.
	infixParseFn func(expression Expression) Expression
)
// Parser reads lexed values and builds an AST using Pratt-style
// precedence climbing with a two-token lookahead window.
type Parser struct {
	l      *Lexer
	errors []string // accumulated parse errors; see Errors()

	curToken  Token // token under consideration
	peekToken Token // one-token lookahead

	prefixParseFns map[TokenType]prefixParseFn
	infixParseFns  map[TokenType]infixParseFn
}
// NewParser returns a parser for a given lexer with all literal and
// operator parse functions registered.
func NewParser(l *Lexer) *Parser {
	p := &Parser{l: l}

	// Prefix positions: literals, grouped expressions, and arrays.
	p.prefixParseFns = make(map[TokenType]prefixParseFn)
	p.registerPrefix(TokIdent, p.parseIdentifier)
	p.registerPrefix(TokInteger, p.parseInteger)
	p.registerPrefix(TokFloat, p.parseFloat)
	p.registerPrefix(TokTRUE, p.parseBool)
	p.registerPrefix(TokFALSE, p.parseBool)
	p.registerPrefix(TokNULL, p.parseNull)
	p.registerPrefix(TokString, p.parseString)
	p.registerPrefix(TokLParen, p.parseGroupedExpression)
	p.registerPrefix(TokLBracket, p.parseArray)

	// Infix positions: comparisons and the logical connectives.
	p.infixParseFns = make(map[TokenType]infixParseFn)
	p.registerInfix(TokEQ, p.parseInfixExpression)
	p.registerInfix(TokNEQ, p.parseInfixExpression)
	p.registerInfix(TokLT, p.parseInfixExpression)
	p.registerInfix(TokLE, p.parseInfixExpression)
	p.registerInfix(TokGT, p.parseInfixExpression)
	p.registerInfix(TokGE, p.parseInfixExpression)
	p.registerInfix(TokLIKE, p.parseInfixExpression)
	p.registerInfix(TokAND, p.parseInfixExpression)
	p.registerInfix(TokOR, p.parseInfixExpression)

	// Advance twice to populate both curToken and peekToken.
	p.nextToken()
	p.nextToken()
	return p
}
// DefaultParse sets up a default lex/parse and returns the program
// Text after a '|' separator is treated as whitespace-separated
// "key:value" arguments rather than query syntax.
func DefaultParse(in string) *Program {
	query, argstr, found := strings.Cut(in, "|")
	args := make(map[string]string)
	if found {
		for _, field := range strings.Fields(argstr) {
			key, value, _ := strings.Cut(field, ":")
			args[key] = value
		}
	}
	return NewParser(NewLexer(query)).ParseProgram(args)
}
// registerPrefix wires fn as the prefix-position parser for tokenType.
func (p *Parser) registerPrefix(tokenType TokenType, fn prefixParseFn) {
	p.prefixParseFns[tokenType] = fn
}

// registerInfix wires fn as the infix-position parser for tokenType.
func (p *Parser) registerInfix(tokenType TokenType, fn infixParseFn) {
	p.infixParseFns[tokenType] = fn
}

// Errors returns a list of errors while parsing
func (p *Parser) Errors() []string {
	return p.errors
}

// peekError records an unexpected-lookahead-token error.
func (p *Parser) peekError(t TokenType) {
	// Message typo fixed: "instad" -> "instead".
	msg := fmt.Sprintf("expected next token to be %s, got %s instead",
		t, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}
// nextToken slides the two-token lookahead window forward by one.
func (p *Parser) nextToken() {
	p.curToken, p.peekToken = p.peekToken, p.l.NextToken()
}

// curTokenIs reports whether the current token has type t.
func (p *Parser) curTokenIs(t TokenType) bool { return t == p.curToken.Type }

// peekTokenIs reports whether the lookahead token has type t.
func (p *Parser) peekTokenIs(t TokenType) bool { return t == p.peekToken.Type }

// expectPeek advances only when the lookahead token matches t;
// otherwise it records a peek error and stays put.
func (p *Parser) expectPeek(t TokenType) bool {
	if !p.peekTokenIs(t) {
		p.peekError(t)
		return false
	}
	p.nextToken()
	return true
}
// peekPrecedence returns the binding precedence of the lookahead token,
// or PrecedenceLowest when it has none registered. The lookup variable
// no longer shadows the receiver p (previous code declared `p, ok :=`).
func (p *Parser) peekPrecedence() int {
	if prec, ok := precidences[p.peekToken.Type]; ok {
		return prec
	}
	return PrecedenceLowest
}

// curPrecedence returns the binding precedence of the current token,
// or PrecedenceLowest when it has none registered.
func (p *Parser) curPrecedence() int {
	if prec, ok := precidences[p.curToken.Type]; ok {
		return prec
	}
	return PrecedenceLowest
}
// ParseProgram builds a program AST from lexer
// The supplied args map is attached to the returned Program unchanged.
func (p *Parser) ParseProgram(args map[string]string) *Program {
	prog := &Program{Args: args, Statements: []Statement{}}
	for !p.curTokenIs(TokEOF) {
		if stmt := p.parseStatement(); stmt != nil {
			prog.Statements = append(prog.Statements, stmt)
		}
		p.nextToken()
	}
	return prog
}
// parseStatement dispatches on the current token type; today every
// token begins an expression statement, so the switch has only a
// default arm (kept as an extension point).
func (p *Parser) parseStatement() Statement {
	switch p.curToken.Type {
	default:
		return p.parseExpressionStatement()
	}
}

// parseExpressionStatement wraps a full expression parse in a Statement
// node anchored at the current token.
func (p *Parser) parseExpressionStatement() *ExpressionStatement {
	stmt := &ExpressionStatement{Token: p.curToken}
	stmt.Expression = p.parseExpression(PrecedenceLowest)
	return stmt
}
// parseExpression implements Pratt-style precedence climbing: parse a
// prefix for the current token, then repeatedly fold infix operators
// while the lookahead operator binds tighter than precedence.
func (p *Parser) parseExpression(precedence int) Expression {
	prefix := p.prefixParseFns[p.curToken.Type]
	if prefix == nil {
		msg := fmt.Sprintf("no prefix parse function for %s found", p.curToken.Type)
		p.errors = append(p.errors, msg)
		return nil
	}
	leftExp := prefix()
	for !p.peekTokenIs(TokEOF) && precedence < p.peekPrecedence() {
		infix := p.infixParseFns[p.peekToken.Type]
		if infix == nil {
			// No infix handler: the expression ends here.
			return leftExp
		}
		p.nextToken()
		leftExp = infix(leftExp)
	}
	return leftExp
}
// parseIdentifier wraps the current token as an Identifier node.
func (p *Parser) parseIdentifier() Expression {
	return &Identifier{Token: p.curToken, Value: p.curToken.Literal}
}

// parseInteger parses the current token as a base-detected 64-bit
// integer, recording a parse error and returning nil on failure.
func (p *Parser) parseInteger() Expression {
	value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
	if err != nil {
		p.errors = append(p.errors,
			fmt.Sprintf("could not parse %q as integer", p.curToken.Literal))
		return nil
	}
	return &Integer{Token: p.curToken, Value: value}
}
// parseFloat parses the current token as a 64-bit float, recording a
// parse error and returning nil on failure.
func (p *Parser) parseFloat() Expression {
	lit := &Float{Token: p.curToken}
	value, err := strconv.ParseFloat(p.curToken.Literal, 64)
	if err != nil {
		msg := fmt.Sprintf("could not parse %q as float", p.curToken.Literal)
		p.errors = append(p.errors, msg)
		return nil
	}
	lit.Value = value
	return lit
}

// parseBool derives the boolean value from the token type (TokTRUE vs
// TokFALSE).
func (p *Parser) parseBool() Expression {
	return &Bool{Token: p.curToken, Value: p.curTokenIs(TokTRUE)}
}

// parseString collapses escaped quotes in the lexed literal.
// Idiom: strings.ReplaceAll replaces the Replace(..., -1) form.
func (p *Parser) parseString() Expression {
	s := p.curToken.Literal
	s = strings.ReplaceAll(s, `\'`, `'`)
	s = strings.ReplaceAll(s, `\"`, `"`)
	return &String{Token: p.curToken, Value: s}
}

// parseNull produces the Null sentinel expression.
func (p *Parser) parseNull() Expression {
	return &Null{Token: p.curToken}
}
// parseArray parses a bracketed value list into an Array node.
func (p *Parser) parseArray() Expression {
	return &Array{
		Token:    p.curToken,
		Elements: p.parseExpressionList(TokRBracket),
	}
}
// parseExpressionList gathers comma-separated expressions until the end
// token. Elements parse at PrecedenceHighest so they never fold into a
// surrounding infix chain; note ',' lexes as TokOR, hence the TokOR
// separator loop. Returns nil when the closing token is missing.
func (p *Parser) parseExpressionList(end TokenType) []Expression {
	var list []Expression
	if p.peekTokenIs(end) {
		// Empty list: consume the end token and return.
		p.nextToken()
		return list
	}
	p.nextToken()
	list = append(list, p.parseExpression(PrecedenceHighest))
	for p.peekTokenIs(TokOR) {
		p.nextToken()
		p.nextToken()
		list = append(list, p.parseExpression(PrecedenceHighest))
	}
	if !p.expectPeek(end) {
		return nil
	}
	return list
}
// parseInfixExpression builds an InfixExpression from the already
// parsed left operand and the operator at the current token, parsing
// the right side at the operator's own precedence.
func (p *Parser) parseInfixExpression(left Expression) Expression {
	exp := &InfixExpression{
		Token:    p.curToken,
		Left:     left,
		Operator: p.curToken.Literal,
	}
	prec := p.curPrecedence()
	p.nextToken()
	exp.Right = p.parseExpression(prec)
	return exp
}

// parseGroupedExpression parses "( expr )", returning nil when the
// closing parenthesis is missing.
func (p *Parser) parseGroupedExpression() Expression {
	p.nextToken()
	exp := p.parseExpression(PrecedenceLowest)
	if p.expectPeek(TokRParen) {
		return exp
	}
	return nil
}

309
rsql/parser_test.go Normal file
View File

@@ -0,0 +1,309 @@
package rsql
import (
"fmt"
"strings"
"testing"
"github.com/matryer/is"
)
// typeOf formats the dynamic type of t (e.g. "*rsql.Integer") for
// type-equality assertions in these tests.
func typeOf(t any) string { return fmt.Sprintf("%T", t) }
// TestIdentifierExpression parses a bare identifier and verifies both
// statement shape and the identifier itself. Previously this test only
// counted statements and never inspected the parsed expression.
func TestIdentifierExpression(t *testing.T) {
	input := `foobar`
	t.Run("Identifier Expressions", func(t *testing.T) {
		is := is.New(t)
		l := NewLexer(input)
		p := NewParser(l)
		program := p.ParseProgram(nil)
		checkParserErrors(t, p)
		is.Equal(len(program.Statements), 1)
		stmt, ok := program.Statements[0].(*ExpressionStatement)
		is.True(ok)
		testIdentifier(t, stmt.Expression, "foobar")
	})
}
// TestIntegerExpression verifies a bare integer parses to an Integer
// literal with the expected value and token text.
func TestIntegerExpression(t *testing.T) {
	t.Run("IntegerExpression", func(t *testing.T) {
		is := is.New(t)
		p := NewParser(NewLexer(`5`))
		program := p.ParseProgram(nil)
		checkParserErrors(t, p)
		is.Equal(len(program.Statements), 1)

		stmt, ok := program.Statements[0].(*ExpressionStatement)
		is.Equal(typeOf(program.Statements[0]), typeOf(&ExpressionStatement{}))
		is.True(ok)

		lit, ok := stmt.Expression.(*Integer)
		is.Equal(typeOf(lit), typeOf(&Integer{}))
		is.True(ok)

		is.Equal(lit.Value, int64(5))
		is.Equal(lit.TokenLiteral(), "5")
	})
}
// TestInfixExpression checks that each "ident op literal" input parses
// to a single InfixExpression with the expected left, operator, and
// right parts.
func TestInfixExpression(t *testing.T) {
	tests := []struct {
		input    string
		left     string
		operator string
		right    int64
	}{
		{"foo == 1", "foo", "==", 1},
		{"bar > 2", "bar", ">", 2},
		{"bin < 3", "bin", "<", 3},
		{"baz != 4", "baz", "!=", 4},
		{"buf >= 5", "buf", ">=", 5},
		{"goz <= 6", "goz", "<=", 6},
	}
	t.Run("Infix Expressions", func(t *testing.T) {
		is := is.New(t)
		for _, tt := range tests {
			l := NewLexer(tt.input)
			p := NewParser(l)
			program := p.ParseProgram(nil)
			checkParserErrors(t, p)
			is.Equal(len(program.Statements), 1)
			stmt, ok := program.Statements[0].(*ExpressionStatement)
			is.Equal(typeOf(stmt), typeOf(&ExpressionStatement{}))
			is.True(ok)
			exp, ok := stmt.Expression.(*InfixExpression)
			is.Equal(typeOf(exp), typeOf(&InfixExpression{}))
			is.True(ok)
			if !testIdentifier(t, exp.Left, tt.left) {
				return
			}
			is.Equal(exp.Operator, tt.operator)
			// The `!` was missing here, so the first PASSING row
			// returned early and rows 2-6 were never exercised.
			if !testInteger(t, exp.Right, tt.right) {
				return
			}
		}
	})
}
// TestOperatorPrecedenceParsing compares the canonical String rendering
// of parsed programs against fully parenthesized expectations.
func TestOperatorPrecedenceParsing(t *testing.T) {
	tests := []struct {
		input  string
		expect string
	}{
		{
			"foo == 1; bar == 2.0",
			"((foo==1);(bar==2.0))",
		},
		{
			`director=='name\'s';actor=eq="name\'s";Year=le=2000,Year>=2010;one <= -1.0, two != true`,
			`((((director=="name's");(actor=eq="name's"));((Year=le=2000),(Year>=2010)));((one<=-1.0),(two!=true)))`,
		},
	}
	t.Run("Operator Precidence Parsing", func(t *testing.T) {
		is := is.New(t)
		for _, tc := range tests {
			p := NewParser(NewLexer(tc.input))
			program := p.ParseProgram(nil)
			checkParserErrors(t, p)
			is.Equal(program.String(), tc.expect)
		}
	})
}
// TestParsingArray checks that a bracketed list parses into an Array
// node with one element per literal, covering int, float, bool, and
// null element types.
func TestParsingArray(t *testing.T) {
	input := "[1, 2.1, true, null]"
	l := NewLexer(input)
	p := NewParser(l)
	program := p.ParseProgram(nil)
	checkParserErrors(t, p)
	if len(program.Statements) != 1 {
		t.Fatalf("program has not enough statements. got=%d", len(program.Statements))
	}
	stmt, ok := program.Statements[0].(*ExpressionStatement)
	if !ok {
		t.Fatalf("program.Statements[0] is not ExpressionStatement got=%T",
			program.Statements[0])
	}
	array, ok := stmt.Expression.(*Array)
	if !ok {
		t.Fatalf("stmt.Expression is not Array got=%T",
			stmt.Expression)
	}
	if len(array.Elements) != 4 {
		t.Fatalf("len(array.Elements) not 4. got=%v", len(array.Elements))
	}
	// Each element keeps its literal type.
	testInteger(t, array.Elements[0], 1)
	testFloat(t, array.Elements[1], 2.1)
	testBool(t, array.Elements[2], true)
	testNull(t, array.Elements[3])
}
// checkParserErrors aborts the test immediately when the parser has
// accumulated any errors, reporting each one first.
func checkParserErrors(t *testing.T, p *Parser) {
	errs := p.Errors()
	if len(errs) == 0 {
		return
	}
	t.Errorf("parser has %d errors", len(errs))
	for _, msg := range errs {
		t.Errorf("parser error: %q", msg)
	}
	t.FailNow()
}
// testInteger asserts e is an *Integer carrying value; it flags the
// test and reports false on any mismatch.
func testInteger(t *testing.T, e Expression, value int64) bool {
	lit, ok := e.(*Integer)
	switch {
	case !ok:
		t.Errorf("stmt.Expression is not Integer got=%T", e)
	case lit.Value != value:
		t.Errorf("literal.Value not %d. got=%d", value, lit.Value)
	case lit.TokenLiteral() != fmt.Sprintf("%v", value):
		t.Errorf("literal.TokenLiteral not %v. got=%v", value, lit.TokenLiteral())
	default:
		return true
	}
	return false
}

// testFloat asserts e is a *Float carrying value; it flags the test and
// reports false on any mismatch.
func testFloat(t *testing.T, e Expression, value float64) bool {
	lit, ok := e.(*Float)
	switch {
	case !ok:
		t.Errorf("stmt.Expression is not Float got=%T", e)
	case lit.Value != value:
		t.Errorf("literal.Value not %f. got=%f", value, lit.Value)
	case lit.TokenLiteral() != fmt.Sprintf("%v", value):
		t.Errorf("literal.TokenLiteral not %q. got=%q", fmt.Sprintf("%v", value), lit.TokenLiteral())
	default:
		return true
	}
	return false
}
// testBool asserts e is a *Bool carrying value.
func testBool(t *testing.T, e Expression, value bool) bool {
	literal, ok := e.(*Bool)
	if !ok {
		// Message fixed: previously said "is not Float" (copy/paste).
		t.Errorf("stmt.Expression is not Bool got=%T", e)
		return false
	}
	if literal.Value != value {
		t.Errorf("literal.Value not %t. got=%t", value, literal.Value)
		return false
	}
	if literal.TokenLiteral() != fmt.Sprintf("%v", value) {
		t.Errorf("literal.TokenLiteral not %v. got=%v", value, literal.TokenLiteral())
		return false
	}
	return true
}

// testNull asserts e is a *Null whose token type is TokNULL.
func testNull(t *testing.T, e Expression) bool {
	literal, ok := e.(*Null)
	if !ok {
		t.Errorf("stmt.Expression is not Null got=%T", e)
		return false
	}
	if literal.Token.Type != TokNULL {
		// Message fixed: "liternal" typo, and report the actual token
		// type rather than the expression's Go type.
		t.Errorf("literal.Token is not TokNULL got=%v", literal.Token.Type)
		return false
	}
	return true
}

// testIdentifier asserts e is an *Identifier carrying value.
func testIdentifier(t *testing.T, e Expression, value string) bool {
	literal, ok := e.(*Identifier)
	if !ok {
		// Message fixed: previously said "is not Integer" (copy/paste).
		t.Errorf("stmt.Expression is not Identifier got=%T", e)
		return false
	}
	if literal.Value != value {
		t.Errorf("literal.Value not %s. got=%s", value, literal.Value)
		return false
	}
	if literal.TokenLiteral() != value {
		t.Errorf("literal.TokenLiteral not %v. got=%v", value, literal.TokenLiteral())
		return false
	}
	return true
}
// TestReplace documents the escape rewriting performed by parseString:
// a backslash-quote sequence collapses to a bare quote.
func TestReplace(t *testing.T) {
	is := is.New(t)
	is.Equal(strings.Replace(`name\'s`, `\'`, `'`, -1), `name's`)
}

300
rsql/squirrel/sqlizer.go Normal file
View File

@@ -0,0 +1,300 @@
package squirrel
import (
	"fmt"
	"sort"
	"strconv"
	"strings"

	"github.com/Masterminds/squirrel"

	"go.sour.is/pkg/rsql"
)
// dbInfo resolves a user-supplied field alias to a database column
// name, erroring for unknown aliases.
type dbInfo interface {
	Col(string) (string, error)
}

// args carries paging ("limit"/"offset") and ordering key-value
// arguments parsed from the query's "|" suffix.
type args map[string]string
// mkArgs maps user-facing argument keys onto database column names.
// The paging keys pass through untouched; keys the table does not know
// are silently dropped.
func (d *decoder) mkArgs(a args) args {
	out := make(args, len(a))
	for key, val := range a {
		if key == "limit" || key == "offset" {
			out[key] = val
			continue
		}
		col, err := d.dbInfo.Col(key)
		if err != nil {
			continue // unknown column
		}
		out[col] = val
	}
	return out
}
// uintArg parses the named argument as an unsigned integer, reporting
// false when the key is absent or unparsable. (Shared by Limit and
// Offset, which previously duplicated this logic.)
func (a args) uintArg(key string) (uint64, bool) {
	if a == nil {
		return 0, false
	}
	v, ok := a[key]
	if !ok {
		return 0, false
	}
	i, err := strconv.ParseUint(v, 10, 64)
	return i, err == nil
}

// Limit returns the requested row limit, if present and valid.
func (a args) Limit() (uint64, bool) { return a.uintArg("limit") }

// Offset returns the requested row offset, if present and valid.
func (a args) Offset() (uint64, bool) { return a.uintArg("offset") }
// Order returns "column direction" terms for every non-paging argument.
// Keys are sorted so the generated ORDER BY clause is deterministic;
// previously raw map iteration made the SQL differ between runs.
func (a args) Order() []string {
	keys := make([]string, 0, len(a))
	for k := range a {
		if k == "limit" || k == "offset" {
			continue
		}
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var lis []string
	for _, k := range keys {
		lis = append(lis, k+" "+a[k])
	}
	return lis
}
// Query parses the RSQL input and lowers it to a squirrel predicate,
// returning the column-mapped paging/order arguments alongside.
func Query(in string, db dbInfo) (squirrel.Sqlizer, args, error) {
	d := decoder{dbInfo: db}
	program := rsql.DefaultParse(in)
	sql, err := d.decode(program)
	return sql, d.mkArgs(program.Args), err
}

// decoder translates rsql AST nodes using table metadata from dbInfo.
type decoder struct {
	dbInfo dbInfo
}
// decode lowers a whole program: zero statements yield nil, one is
// decoded directly, and several are AND-ed together.
func (db *decoder) decode(in *rsql.Program) (squirrel.Sqlizer, error) {
	switch len(in.Statements) {
	case 0:
		return nil, nil
	case 1:
		return db.decodeStatement(in.Statements[0])
	default:
		a := squirrel.And{}
		for _, stmt := range in.Statements {
			d, err := db.decodeStatement(stmt)
			// Check the error, not the value: the old `if d == nil`
			// test swallowed real errors and aborted the whole AND when
			// a statement harmlessly decoded to nothing.
			if err != nil {
				return nil, err
			}
			if d != nil {
				a = append(a, d)
			}
		}
		return a, nil
	}
}
// decodeStatement lowers one statement; only expression statements
// produce SQL, anything else yields (nil, nil).
func (db *decoder) decodeStatement(in rsql.Statement) (squirrel.Sqlizer, error) {
	switch s := in.(type) {
	case *rsql.ExpressionStatement:
		return db.decodeExpression(s.Expression)
	}
	return nil, nil
}

// decodeExpression lowers one expression; only infix expressions
// produce SQL, anything else yields (nil, nil).
func (db *decoder) decodeExpression(in rsql.Expression) (squirrel.Sqlizer, error) {
	switch e := in.(type) {
	case *rsql.InfixExpression:
		return db.decodeInfix(e)
	}
	return nil, nil
}
// decodeInfix converts one infix AST node into a squirrel predicate.
// Logical nodes (AND/OR) recurse on both sides; comparison nodes look
// up the column for the left identifier and decode the right-hand
// value. The previous version repeated the column/value boilerplate in
// all seven comparison arms; it is now factored into colValue, and the
// AND-flattening into appendFlat.
func (db *decoder) decodeInfix(in *rsql.InfixExpression) (squirrel.Sqlizer, error) {
	switch in.Token.Type {
	case rsql.TokAND:
		a := squirrel.And{}
		left, err := db.decodeExpression(in.Left)
		if err != nil {
			return nil, err
		}
		a = appendFlat(a, left)
		right, err := db.decodeExpression(in.Right)
		if err != nil {
			return nil, err
		}
		return appendFlat(a, right), nil
	case rsql.TokOR:
		left, err := db.decodeExpression(in.Left)
		if err != nil {
			return nil, err
		}
		right, err := db.decodeExpression(in.Right)
		if err != nil {
			return nil, err
		}
		return squirrel.Or{left, right}, nil
	case rsql.TokLIKE:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("LIKE requires a string value")
		}
		// RSQL uses '*' as the wildcard; SQL LIKE uses '%'.
		return Like{col, strings.ReplaceAll(s, "*", "%")}, nil
	case rsql.TokEQ:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.Eq{col: v}, nil
	case rsql.TokNEQ:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.NotEq{col: v}, nil
	case rsql.TokGT:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.Gt{col: v}, nil
	case rsql.TokGE:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.GtOrEq{col: v}, nil
	case rsql.TokLT:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.Lt{col: v}, nil
	case rsql.TokLE:
		col, v, err := db.colValue(in)
		if err != nil {
			return nil, err
		}
		return squirrel.LtOrEq{col: v}, nil
	default:
		return nil, nil
	}
}

// colValue resolves the column name for the left side of in and decodes
// the right side into a plain Go value.
func (db *decoder) colValue(in *rsql.InfixExpression) (string, interface{}, error) {
	col, err := db.dbInfo.Col(in.Left.String())
	if err != nil {
		return "", nil, err
	}
	v, err := db.decodeValue(in.Right)
	return col, v, err
}

// appendFlat appends s to a, splicing nested squirrel.And lists and
// dropping nil entries so AND chains stay flat.
func appendFlat(a squirrel.And, s squirrel.Sqlizer) squirrel.And {
	if and, ok := s.(squirrel.And); ok {
		for _, el := range and {
			if el != nil {
				a = append(a, el)
			}
		}
		return a
	}
	if s != nil {
		a = append(a, s)
	}
	return a
}
// decodeValue lowers a right-hand-side AST node to a plain Go value:
// arrays become []interface{}, literals unwrap to their Go values, Null
// becomes nil, and nested infix expressions are decoded recursively.
// Unknown node types also yield (nil, nil).
func (db *decoder) decodeValue(in rsql.Expression) (interface{}, error) {
	switch v := in.(type) {
	case *rsql.Array:
		var values []interface{}
		for _, el := range v.Elements {
			v, err := db.decodeValue(el)
			if err != nil {
				return nil, err
			}
			values = append(values, v)
		}
		return values, nil
	case *rsql.InfixExpression:
		return db.decodeInfix(v)
	case *rsql.Identifier:
		return v.Value, nil
	case *rsql.Integer:
		return v.Value, nil
	case *rsql.Float:
		return v.Value, nil
	case *rsql.String:
		return v.Value, nil
	case *rsql.Bool:
		return v.Value, nil
	case *rsql.Null:
		return nil, nil
	}
	return nil, nil
}
// Like is a squirrel.Sqlizer implementing a SQL LIKE predicate on a
// single column with one bound argument.
type Like struct {
	column string
	value  string
}

// ToSql renders the predicate as "<column> LIKE(?)" with the stored
// value as the sole placeholder argument. It never returns an error.
func (l Like) ToSql() (string, []interface{}, error) {
	return fmt.Sprintf("%s LIKE(?)", l.column), []interface{}{l.value}, nil
}

View File

@@ -0,0 +1,141 @@
package squirrel
import (
"fmt"
"testing"
"github.com/Masterminds/squirrel"
"github.com/matryer/is"
"go.sour.is/pkg/rsql"
)
// testTable is a reflection fixture: its json tags define the set of
// column names rsql.GetDbColumns exposes to the query decoder in
// TestQuery below (e.g. Family is addressable as "family_name").
type testTable struct {
	Foo string `json:"foo"`
	Bar string `json:"bar"`
	Baz string `json:"baz"`
	Director string `json:"director"`
	Actor string `json:"actor"`
	Year string `json:"year"`
	Genres string `json:"genres"`
	One string `json:"one"`
	Two string `json:"two"`
	Family string `json:"family_name"`
}
// TestQuery table-drives the RSQL -> squirrel translation: each case
// feeds an RSQL string through Query and compares the resulting
// Sqlizer (via %#v), plus any limit/offset/order extras, against the
// expected values. Cases flagged fail must produce an error.
func TestQuery(t *testing.T) {
	d := rsql.GetDbColumns(testTable{})
	is := is.New(t)

	tests := []struct {
		input        string
		expect       squirrel.Sqlizer
		expectLimit  *uint64
		expectOffset *uint64
		expectOrder  []string
		fail         bool
	}{
		{input: "foo==[1, 2, 3]", expect: squirrel.Eq{"foo": []interface{}{1, 2, 3}}},
		{input: "foo==1,(bar==2;baz==3)", expect: squirrel.Or{squirrel.Eq{"foo": 1}, squirrel.And{squirrel.Eq{"bar": 2}, squirrel.Eq{"baz": 3}}}},
		{input: "foo==1", expect: squirrel.Eq{"foo": 1}},
		{input: "foo!=1.1", expect: squirrel.NotEq{"foo": 1.1}},
		{input: "foo==true", expect: squirrel.Eq{"foo": true}},
		{input: "foo==null", expect: squirrel.Eq{"foo": nil}},
		{input: "foo>2", expect: squirrel.Gt{"foo": 2}},
		{input: "foo>=2.1", expect: squirrel.GtOrEq{"foo": 2.1}},
		{input: "foo<3", expect: squirrel.Lt{"foo": 3}},
		{input: "foo<=3.1", expect: squirrel.LtOrEq{"foo": 3.1}},
		{input: "foo=eq=1", expect: squirrel.Eq{"foo": 1}},
		{input: "foo=neq=1.1", expect: squirrel.NotEq{"foo": 1.1}},
		{input: "foo=gt=2", expect: squirrel.Gt{"foo": 2}},
		{input: "foo=ge=2.1", expect: squirrel.GtOrEq{"foo": 2.1}},
		{input: "foo=lt=3", expect: squirrel.Lt{"foo": 3}},
		{input: "foo=le=3.1", expect: squirrel.LtOrEq{"foo": 3.1}},
		{input: "foo==1;bar==2", expect: squirrel.And{squirrel.Eq{"foo": 1}, squirrel.Eq{"bar": 2}}},
		{input: "foo==1,bar==2", expect: squirrel.Or{squirrel.Eq{"foo": 1}, squirrel.Eq{"bar": 2}}},
		{input: "foo==1,bar==2;baz=3", expect: nil},
		{
			input: `director=='name\'s';actor=eq="name\'s";Year=le=2000,Year>=2010;one <= -1.0, two != true`,
			expect: squirrel.And{
				squirrel.Eq{"director": "name's"},
				squirrel.Eq{"actor": "name's"},
				squirrel.Or{
					squirrel.LtOrEq{"year": 2000},
					squirrel.GtOrEq{"year": 2010},
				},
				squirrel.Or{
					squirrel.LtOrEq{"one": -1.0},
					squirrel.NotEq{"two": true},
				},
			},
		},
		{
			input: `genres==[sci-fi,action] ; genres==[romance,animated,horror] , director~Que*Tarantino`,
			expect: squirrel.And{
				squirrel.Eq{"genres": []interface{}{"sci-fi", "action"}},
				squirrel.Or{
					squirrel.Eq{"genres": []interface{}{"romance", "animated", "horror"}},
					Like{"director", "Que%Tarantino"},
				},
			},
		},
		{input: "", expect: nil},
		{input: "family_name==LUNDY", expect: squirrel.Eq{"family_name": "LUNDY"}},
		{input: "family_name==[1,2,null]", expect: squirrel.Eq{"family_name": []interface{}{1, 2, nil}}},
		{input: "family_name=LUNDY", expect: nil},
		{input: "family_name==LUNDY and family_name==SMITH", expect: squirrel.And{squirrel.Eq{"family_name": "LUNDY"}, squirrel.Eq{"family_name": "SMITH"}}},
		{input: "family_name==LUNDY or family_name==SMITH", expect: squirrel.Or{squirrel.Eq{"family_name": "LUNDY"}, squirrel.Eq{"family_name": "SMITH"}}},
		{input: "foo==1,family_name=LUNDY;baz==2", expect: nil},
		{input: "foo ~ bar*", expect: Like{"foo", "bar%"}},
		{input: "foo ~ [bar*,bin*]", expect: nil, fail: true},
		{input: "foo==1|limit:10", expect: squirrel.Eq{"foo": 1}, expectLimit: ptr(uint64(10))},
		{input: "foo==1|offset:2", expect: squirrel.Eq{"foo": 1}, expectOffset: ptr(uint64(2))},
		{
			input:        "foo>=1|limit:10 offset:2 foo:desc",
			expect:       squirrel.GtOrEq{"foo": 1},
			expectLimit:  ptr(uint64(10)),
			expectOffset: ptr(uint64(2)),
			expectOrder:  []string{"foo desc"},
		},
	}

	for i, tt := range tests {
		q, a, err := Query(tt.input, d)
		if !tt.fail && err != nil {
			t.Error(err)
		}
		// Previously a fail case that silently succeeded went unnoticed;
		// assert that expected failures actually error.
		if tt.fail && err == nil {
			t.Errorf("test[%d]: %v\n\texpected an error but got none", i, tt.input)
		}
		if q != nil {
			t.Log(q.ToSql())
		}

		// Compare via Go-syntax representation so nested Sqlizer trees
		// (including nil) can be checked with a single string equality.
		actual := fmt.Sprintf("%#v", q)
		expect := fmt.Sprintf("%#v", tt.expect)
		if expect != actual {
			t.Errorf("test[%d]: %v\n\tinput and expected are not the same. wanted=%s got=%s", i, tt.input, expect, actual)
		}

		if limit, ok := a.Limit(); tt.expectLimit != nil {
			is.True(ok)
			is.Equal(limit, *tt.expectLimit)
		} else {
			is.True(!ok)
		}
		if offset, ok := a.Offset(); tt.expectOffset != nil {
			is.True(ok)
			is.Equal(offset, *tt.expectOffset)
		} else {
			is.True(!ok)
		}
		if order := a.Order(); tt.expectOrder != nil {
			is.Equal(order, tt.expectOrder)
		} else {
			is.Equal(len(order), 0)
		}
	}
}
func ptr[T any](t T) *T { return &t }

62
rsql/token.go Normal file
View File

@@ -0,0 +1,62 @@
package rsql
// Tokens for RSQL FIQL. Values double as the canonical spelling of the
// operator (or a descriptive name for non-operator token classes).
const (
	TokIllegal = "TokIllegal"
	TokEOF     = "TokEOF"
	TokIdent   = "TokIdent"
	TokInteger = "TokInteger"
	TokString  = "TokString"
	TokFloat   = "TokFloat"
	TokExtend  = "TokExtend"

	TokLParen   = "("
	TokRParen   = ")"
	TokLBracket = "["
	TokRBracket = "]"

	TokLIKE  = "~"
	TokNLIKE = "!~" // was `TokNLIKE= "!~"`: missing space, not gofmt-clean
	TokNOT   = "!"
	TokLT    = "<"
	TokGT    = ">"
	TokLE    = "<="
	TokGE    = ">="
	TokEQ    = "=="
	TokNEQ   = "!="
	TokAND   = ";"
	TokOR    = ","

	TokTRUE  = "true"
	TokFALSE = "false"
	TokNULL  = "null"
)
// keywords maps bare identifiers that are reserved words (boolean and
// null literals, word-form logical operators) to their token types;
// consulted by lookupIdent. gofmt fix: no space before the brace.
var keywords = map[string]TokenType{
	"true":  TokTRUE,
	"false": TokFALSE,
	"null":  TokNULL,
	"and":   TokAND,
	"or":    TokOR,
}
// TokenType is a token enumeration; values are the string constants
// declared in the Tokens block above.
type TokenType string
// Token is a type and literal pair: the lexical class plus the exact
// text consumed from the input.
type Token struct {
Type TokenType
Literal string
}
// newToken builds a Token for a single-rune lexeme of the given type.
func newToken(t TokenType, ch rune) Token {
	tok := Token{Type: t, Literal: string(ch)}
	return tok
}
// lookupIdent classifies an identifier: reserved words resolve to their
// keyword token type, everything else is a plain TokIdent.
func lookupIdent(ident string) TokenType {
	tok, ok := keywords[ident]
	if !ok {
		return TokIdent
	}
	return tok
}