partial cleanup of the macro pkg and move it from /core/router to the root, because it may now be used by end-developers to amend the available macros per application
Former-commit-id: 951a5e7a401af25ecaa904ff6463b0def2c87afb
132
macro/interpreter/ast/ast.go
Normal file
@@ -0,0 +1,132 @@
package ast

type (
    // ParamType holds the necessary information about a parameter type for the parser to look up for.
    ParamType interface {
        // Indent returns the name of the parameter type;
        // it should contain the characters the parser looks for.
        Indent() string
    }

    // MasterParamType, if implemented and its `Master()` returns true, means that an empty type param will be translated to this param type.
    // Its functions will also be available to the rest of the macro param types' funcs.
    //
    // Only one Master is allowed.
    MasterParamType interface {
        ParamType
        Master() bool
    }

    // TrailingParamType, if implemented and its `Trailing()` returns true,
    // should be declared at the end of a route path and can accept any trailing path segment as one parameter.
    TrailingParamType interface {
        ParamType
        Trailing() bool
    }

    // AliasParamType, if implemented and its `Alias()` returns a non-empty string,
    // means that the param type can be written with that string literal too.
    AliasParamType interface {
        ParamType
        Alias() string
    }
)

// IsMaster returns true if the "pt" param type is a master one.
func IsMaster(pt ParamType) bool {
    p, ok := pt.(MasterParamType)
    return ok && p.Master()
}

// IsTrailing returns true if the "pt" param type is marked as trailing,
// which should accept more than one path segment when in the end.
func IsTrailing(pt ParamType) bool {
    p, ok := pt.(TrailingParamType)
    return ok && p.Trailing()
}

// HasAlias returns any alias of the "pt" param type.
// If the alias is empty or not found then it returns false as its second output argument.
func HasAlias(pt ParamType) (string, bool) {
    if p, ok := pt.(AliasParamType); ok {
        alias := p.Alias()
        return alias, len(alias) > 0
    }

    return "", false
}

// GetMasterParamType accepts a list of ParamType and returns its master.
// If no `Master` is specified:
// if len(paramTypes) > 0 then it returns the first one,
// otherwise it returns nil.
func GetMasterParamType(paramTypes ...ParamType) ParamType {
    for _, pt := range paramTypes {
        if IsMaster(pt) {
            return pt
        }
    }

    if len(paramTypes) > 0 {
        return paramTypes[0]
    }

    return nil
}

// LookupParamType accepts the string
// representation of a parameter type.
// Example:
// "string"
// "number" or "int"
// "long" or "int64"
// "uint8"
// "uint64"
// "boolean" or "bool"
// "alphabetical"
// "file"
// "path"
func LookupParamType(indentOrAlias string, paramTypes ...ParamType) (ParamType, bool) {
    for _, pt := range paramTypes {
        if pt.Indent() == indentOrAlias {
            return pt, true
        }

        if alias, has := HasAlias(pt); has {
            if alias == indentOrAlias {
                return pt, true
            }
        }
    }

    return nil, false
}

// ParamStatement is a struct
// which holds all the necessary information about a macro parameter.
// It holds its type (string, int, alphabetical, file, path),
// its source ({param:type}),
// its name ("param"),
// its attached functions by the user (min, max...)
// and the http error code if that parameter
// failed to be evaluated.
type ParamStatement struct {
    Src       string      // the original unparsed source, i.e: {id:int range(1,5) else 404}
    Name      string      // id
    Type      ParamType   // int
    Funcs     []ParamFunc // range
    ErrorCode int         // 404
}

// ParamFunc holds the name of a parameter's function
// and its arguments (values).
// A param func is declared with:
// {param:int range(1,5)},
// "range" is the param function name,
// "1" and "5" are the two param function arguments:
// range(1,5).
type ParamFunc struct {
    Name string   // range
    Args []string // ["1","5"]
}
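For illustration only, a minimal sketch of how an end-developer might satisfy these interfaces and resolve a type by its indent or alias. The `customParamType` name and the "uuid"/"guid" literals are hypothetical; only `ast.LookupParamType` and the interfaces above come from the file itself.

package main

import (
    "fmt"

    "github.com/kataras/iris/macro/interpreter/ast"
)

// customParamType is a hypothetical param type used only for this sketch.
type customParamType string

// Indent returns the literal the parser looks for, e.g. "uuid" in {id:uuid}.
func (pt customParamType) Indent() string { return string(pt) }

// Alias lets the same type be written as "guid" too.
func (pt customParamType) Alias() string { return "guid" }

func main() {
    types := []ast.ParamType{customParamType("uuid")}

    // Lookup by indent or by alias; both resolve to the same type.
    if pt, ok := ast.LookupParamType("guid", types...); ok {
        fmt.Println("resolved:", pt.Indent()) // "uuid"
    }
}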
202
macro/interpreter/lexer/lexer.go
Normal file
@@ -0,0 +1,202 @@
package lexer

import (
    "github.com/kataras/iris/macro/interpreter/token"
)

// Lexer helps us to read/scan characters of a source and resolve their token types.
type Lexer struct {
    input   string
    pos     int  // current pos in input, current char
    readPos int  // current reading pos in input, after current char
    ch      byte // current char under examination
}

// New takes a source, a series of chars, and returns
// a new lexer, ready to read from the first letter.
func New(src string) *Lexer {
    l := &Lexer{
        input: src,
    }
    // step to the first character in order to be ready
    l.readChar()
    return l
}

func (l *Lexer) readChar() {
    if l.readPos >= len(l.input) {
        l.ch = 0
    } else {
        l.ch = l.input[l.readPos]
    }
    l.pos = l.readPos
    l.readPos++
}

const (
    // Begin is the symbol which the lexer should scan forward to.
    Begin = '{' // token.LBRACE
    // End is the symbol at which the lexer should stop scanning.
    End = '}' // token.RBRACE
)

func resolveTokenType(ch byte) token.Type {
    switch ch {
    case Begin:
        return token.LBRACE
    case End:
        return token.RBRACE
    // Let's keep it simple, no evaluation for logical operators; we are not making a new programming language.
    // ||
    // case '|':
    // 	if l.peekChar() == '|' {
    // 		ch := l.ch
    // 		l.readChar()
    // 		t = token.Token{Type: token.OR, Literal: string(ch) + string(l.ch)}
    // 	}
    // ==
    case ':':
        return token.COLON
    case '(':
        return token.LPAREN
    case ')':
        return token.RPAREN
    case ',':
        return token.COMMA
    // literals
    case 0:
        return token.EOF
    default:
        return token.IDENT
    }
}

// NextToken returns the next token in the series of characters.
// It can be a single symbol, a token type or a literal.
// It's able to return an EOF token too.
//
// It moves the cursor forward.
func (l *Lexer) NextToken() (t token.Token) {
    l.skipWhitespace()
    typ := resolveTokenType(l.ch)
    t.Type = typ
    switch typ {
    case token.EOF:
        t.Literal = ""
    case token.IDENT:
        if isLetter(l.ch) {
            // letters
            lit := l.readIdentifier()
            typ := token.LookupIdent(lit)
            t = l.newToken(typ, lit)
            return
        }
        if isDigit(l.ch) {
            // numbers
            lit := l.readNumber()
            t = l.newToken(token.INT, lit)
            return
        }

        t = l.newTokenRune(token.ILLEGAL, l.ch)
    default:
        t = l.newTokenRune(typ, l.ch)
    }
    l.readChar() // set the pos to the next
    return
}

// NextDynamicToken doesn't care about the grammar.
// It reads numbers or any unknown symbol.
// It's being used by the parser to skip all characters
// between a parameter function's arguments inside parenthesis,
// in order to allow custom regexp on the end-language too.
//
// It moves the cursor forward.
func (l *Lexer) NextDynamicToken() (t token.Token) {
    // calculate anything, even spaces.

    // numbers
    lit := l.readNumber()
    if lit != "" {
        return l.newToken(token.INT, lit)
    }

    lit = l.readIdentifierFuncArgument()
    return l.newToken(token.IDENT, lit)
}

// readIdentifierFuncArgument is used to skip any illegal token inside parenthesis,
// so that a custom regexp can be set inside a func.
func (l *Lexer) readIdentifierFuncArgument() string {
    pos := l.pos
    for resolveTokenType(l.ch) != token.RPAREN {
        l.readChar()
    }

    return l.input[pos:l.pos]
}

// PeekNextTokenType returns only the token type
// of the next character; it does not move the cursor forward.
// It's being used by the parser to recognise empty functions, i.e `even()`,
// as valid functions with zero input arguments.
func (l *Lexer) PeekNextTokenType() token.Type {
    if len(l.input)-1 > l.pos {
        ch := l.input[l.pos]
        return resolveTokenType(ch)
    }
    return resolveTokenType(0) // EOF
}

func (l *Lexer) newToken(tokenType token.Type, lit string) token.Token {
    t := token.Token{
        Type:    tokenType,
        Literal: lit,
        Start:   l.pos,
        End:     l.pos,
    }
    // remember, l.pos is the last char
    // and we want to include both start and end,
    // so it's easy for the user to spot the error by just marking the expression
    if l.pos > 1 && len(lit) > 1 {
        t.End = l.pos - 1
        t.Start = t.End - len(lit) + 1
    }

    return t
}

func (l *Lexer) newTokenRune(tokenType token.Type, ch byte) token.Token {
    return l.newToken(tokenType, string(ch))
}

func (l *Lexer) skipWhitespace() {
    for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
        l.readChar()
    }
}

func (l *Lexer) readIdentifier() string {
    pos := l.pos
    for isLetter(l.ch) || isDigit(l.ch) {
        l.readChar()
    }
    return l.input[pos:l.pos]
}

func isLetter(ch byte) bool {
    return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
}

func (l *Lexer) readNumber() string {
    pos := l.pos
    for isDigit(l.ch) {
        l.readChar()
    }
    return l.input[pos:l.pos]
}

func isDigit(ch byte) bool {
    return '0' <= ch && ch <= '9'
}
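As a quick sanity check of the token stream, a minimal sketch (not part of this commit) that feeds a macro expression to the lexer and prints every token until EOF; it uses only `lexer.New`, `NextToken` and the `token.Token` fields shown above.

package main

import (
    "fmt"

    "github.com/kataras/iris/macro/interpreter/lexer"
    "github.com/kataras/iris/macro/interpreter/token"
)

func main() {
    l := lexer.New("{id:number min(1) else 404}")

    // NextToken moves the cursor forward; stop on EOF.
    for t := l.NextToken(); t.Type != token.EOF; t = l.NextToken() {
        fmt.Printf("%d-%d\t%v\t%q\n", t.Start, t.End, t.Type, t.Literal)
    }
}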
54
macro/interpreter/lexer/lexer_test.go
Normal file
@@ -0,0 +1,54 @@
package lexer

import (
    "testing"

    "github.com/kataras/iris/macro/interpreter/token"
)

func TestNextToken(t *testing.T) {
    input := `{id:number min(1) max(5) else 404}`

    tests := []struct {
        expectedType    token.Type
        expectedLiteral string
    }{
        {token.LBRACE, "{"},     // 0
        {token.IDENT, "id"},     // 1
        {token.COLON, ":"},      // 2
        {token.IDENT, "number"}, // 3
        {token.IDENT, "min"},    // 4
        {token.LPAREN, "("},     // 5
        {token.INT, "1"},        // 6
        {token.RPAREN, ")"},     // 7
        {token.IDENT, "max"},    // 8
        {token.LPAREN, "("},     // 9
        {token.INT, "5"},        // 10
        {token.RPAREN, ")"},     // 11
        {token.ELSE, "else"},    // 12
        {token.INT, "404"},      // 13
        {token.RBRACE, "}"},     // 14
    }

    l := New(input)

    for i, tt := range tests {
        tok := l.NextToken()

        if tok.Type != tt.expectedType {
            t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
                i, tt.expectedType, tok.Type)
        }

        if tok.Literal != tt.expectedLiteral {
            t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
                i, tt.expectedLiteral, tok.Literal)
        }

    }
}

// STOPPED AT:
// page 30/232 of "Writing An Interpreter In Go".
// I didn't do the skipWhitespaces part because I'm still thinking about
// whether I'll need it; I'll see tomorrow.
192
macro/interpreter/parser/parser.go
Normal file
@@ -0,0 +1,192 @@
package parser

import (
    "fmt"
    "strconv"
    "strings"

    "github.com/kataras/iris/macro/interpreter/ast"
    "github.com/kataras/iris/macro/interpreter/lexer"
    "github.com/kataras/iris/macro/interpreter/token"
)

// Parse takes a route "fullpath"
// and returns its param statements
// or an error if it failed.
func Parse(fullpath string, paramTypes []ast.ParamType) ([]*ast.ParamStatement, error) {
    if len(paramTypes) == 0 {
        return nil, fmt.Errorf("empty parameter types")
    }

    pathParts := strings.SplitN(fullpath, "/", -1)
    p := new(ParamParser)
    statements := make([]*ast.ParamStatement, 0)
    for i, s := range pathParts {
        if s == "" { // if starts with /
            continue
        }

        // if it's not a named path parameter of the new syntax then continue to the next
        if s[0] != lexer.Begin || s[len(s)-1] != lexer.End {
            continue
        }

        p.Reset(s)
        stmt, err := p.Parse(paramTypes)
        if err != nil {
            // exit on first error
            return nil, err
        }
        // if we have a trailing param type (e.g. path) but it's not the last path part
        if ast.IsTrailing(stmt.Type) && i < len(pathParts)-1 {
            return nil, fmt.Errorf("%s: parameter type \"%s\" should be registered to the very last of a path", s, stmt.Type.Indent())
        }

        statements = append(statements, stmt)
    }

    return statements, nil
}

// ParamParser is the parser
// which is being used by the Parse function
// to parse path segments one by one
// and return their parsed parameter statements (param name, param type, its functions and the inline route's functions).
type ParamParser struct {
    src    string
    errors []string
}

// NewParamParser receives a "src" of a single parameter
// and returns a new ParamParser, ready to Parse.
func NewParamParser(src string) *ParamParser {
    p := new(ParamParser)
    p.Reset(src)
    return p
}

// Reset resets this ParamParser,
// resets the errors and sets the source to the input "src".
func (p *ParamParser) Reset(src string) {
    p.src = src
    p.errors = []string{}
}

func (p *ParamParser) appendErr(format string, a ...interface{}) {
    p.errors = append(p.errors, fmt.Sprintf(format, a...))
}

const (
    // DefaultParamErrorCode is the default http error code, 404 not found,
    // per-parameter. An error code can be set via
    // the "else" keyword inside a route's path.
    DefaultParamErrorCode = 404
)

// func parseParamFuncArg(t token.Token) (a ast.ParamFuncArg, err error) {
// 	if t.Type == token.INT {
// 		return ast.ParamFuncArgToInt(t.Literal)
// 	}
// 	// act all as strings here, because of int vs int64 vs uint64 and etc.
// 	return t.Literal, nil
// }

func parseParamFuncArg(t token.Token) (a string, err error) {
    // act all as strings here, because of int vs int64 vs uint64 and etc.
    return t.Literal, nil
}

// Error returns all collected parse errors joined as one error, or nil if none.
func (p ParamParser) Error() error {
    if len(p.errors) > 0 {
        return fmt.Errorf(strings.Join(p.errors, "\n"))
    }
    return nil
}

// Parse parses the p.src based on the given param types and returns its param statement
// and an error on failure.
func (p *ParamParser) Parse(paramTypes []ast.ParamType) (*ast.ParamStatement, error) {
    l := lexer.New(p.src)

    stmt := &ast.ParamStatement{
        ErrorCode: DefaultParamErrorCode,
        Type:      ast.GetMasterParamType(paramTypes...),
        Src:       p.src,
    }

    lastParamFunc := ast.ParamFunc{}

    for {
        t := l.NextToken()
        if t.Type == token.EOF {
            if stmt.Name == "" {
                p.appendErr("[1:] parameter name is missing")
            }
            break
        }

        switch t.Type {
        case token.LBRACE:
            // the name can accept letters or numbers only.
            nextTok := l.NextToken()
            stmt.Name = nextTok.Literal
        case token.COLON:
            // the type can accept both letters and numbers but not symbols, of course.
            nextTok := l.NextToken()
            paramType, found := ast.LookupParamType(nextTok.Literal, paramTypes...)

            if !found {
                p.appendErr("[%d:%d] unexpected parameter type: %s", t.Start, t.End, nextTok.Literal)
            }
            stmt.Type = paramType
        // param func
        case token.IDENT:
            lastParamFunc.Name = t.Literal
        case token.LPAREN:
            // param function without arguments ()
            if l.PeekNextTokenType() == token.RPAREN {
                // do nothing, just continue to the RPAREN
                continue
            }

            // catch anything from "(" and forward, until ")", because we need to
            // be able to use a regex expression as a macro type's func argument too.
            argValTok := l.NextDynamicToken()

            // fmt.Printf("argValTok: %#v\n", argValTok)
            // fmt.Printf("argVal: %#v\n", argVal)
            lastParamFunc.Args = append(lastParamFunc.Args, argValTok.Literal)

        case token.COMMA:
            argValTok := l.NextToken()
            lastParamFunc.Args = append(lastParamFunc.Args, argValTok.Literal)
        case token.RPAREN:
            stmt.Funcs = append(stmt.Funcs, lastParamFunc)
            lastParamFunc = ast.ParamFunc{} // reset
        case token.ELSE:
            errCodeTok := l.NextToken()
            if errCodeTok.Type != token.INT {
                p.appendErr("[%d:%d] expected error code to be an integer but got %s", t.Start, t.End, errCodeTok.Literal)
                continue
            }
            errCode, err := strconv.Atoi(errCodeTok.Literal)
            if err != nil {
                // this is a bug in the lexer if it throws, because we already checked for token.INT
                p.appendErr("[%d:%d] unexpected lexer error while trying to convert error code to an integer, %s", t.Start, t.End, err.Error())
                continue
            }
            stmt.ErrorCode = errCode
        case token.RBRACE:
            // check if } but not {
            if stmt.Name == "" {
                p.appendErr("[%d:%d] illegal token: }, forgot '{' ?", t.Start, t.End)
            }
            break
        case token.ILLEGAL:
            p.appendErr("[%d:%d] illegal token: %s", t.Start, t.End, t.Literal)
        default:
            p.appendErr("[%d:%d] unexpected token type: %q with value %s", t.Start, t.End, t.Type, t.Literal)
        }
    }

    return stmt, p.Error()
}
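To see the whole pipeline end-to-end, a minimal sketch (assumptions: the `numberType` type and the example route are made up for illustration; `parser.Parse`, `ast.ParamType` and `ast.ParamStatement` are taken from the files in this commit):

package main

import (
    "fmt"

    "github.com/kataras/iris/macro/interpreter/ast"
    "github.com/kataras/iris/macro/interpreter/parser"
)

// numberType is a hypothetical master param type used only for this sketch.
type numberType string

func (pt numberType) Indent() string { return string(pt) }
func (pt numberType) Master() bool   { return true }

func main() {
    types := []ast.ParamType{numberType("number")}

    stmts, err := parser.Parse("/api/users/{id:number min(1) else 402}", types)
    if err != nil {
        panic(err)
    }

    for _, stmt := range stmts {
        // expected: name=id type=number funcs=[{min [1]}] code=402
        fmt.Printf("name=%s type=%s funcs=%v code=%d\n",
            stmt.Name, stmt.Type.Indent(), stmt.Funcs, stmt.ErrorCode)
    }
}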
339
macro/interpreter/parser/parser_test.go
Normal file
@@ -0,0 +1,339 @@
package parser

import (
    "fmt"
    "reflect"
    "strings"
    "testing"

    "github.com/kataras/iris/macro/interpreter/ast"
)

type simpleParamType string

func (pt simpleParamType) Indent() string { return string(pt) }

type masterParamType simpleParamType

func (pt masterParamType) Indent() string { return string(pt) }
func (pt masterParamType) Master() bool   { return true }

type wildcardParamType string

func (pt wildcardParamType) Indent() string { return string(pt) }
func (pt wildcardParamType) Trailing() bool { return true }

type aliasedParamType []string

func (pt aliasedParamType) Indent() string { return string(pt[0]) }
func (pt aliasedParamType) Alias() string  { return pt[1] }

var (
    paramTypeString       = masterParamType("string")
    paramTypeNumber       = aliasedParamType{"number", "int"}
    paramTypeInt64        = aliasedParamType{"int64", "long"}
    paramTypeUint8        = simpleParamType("uint8")
    paramTypeUint64       = simpleParamType("uint64")
    paramTypeBool         = aliasedParamType{"bool", "boolean"}
    paramTypeAlphabetical = simpleParamType("alphabetical")
    paramTypeFile         = simpleParamType("file")
    paramTypePath         = wildcardParamType("path")
)

var testParamTypes = []ast.ParamType{
    paramTypeString,
    paramTypeNumber, paramTypeInt64, paramTypeUint8, paramTypeUint64,
    paramTypeBool,
    paramTypeAlphabetical, paramTypeFile, paramTypePath,
}

func TestParseParamError(t *testing.T) {
    // fail
    illegalChar := '$'

    input := "{id" + string(illegalChar) + "int range(1,5) else 404}"
    p := NewParamParser(input)

    _, err := p.Parse(testParamTypes)

    if err == nil {
        t.Fatalf("expecting not empty error on input '%s'", input)
    }

    illIdx := strings.IndexRune(input, illegalChar)
    expectedErr := fmt.Sprintf("[%d:%d] illegal token: %s", illIdx, illIdx, "$")
    if got := err.Error(); got != expectedErr {
        t.Fatalf("expecting error to be '%s' but got: %s", expectedErr, got)
    }
    //

    // success
    input2 := "{id:uint64 range(1,5) else 404}"
    p.Reset(input2)
    _, err = p.Parse(testParamTypes)

    if err != nil {
        t.Fatalf("expecting empty error on input '%s', but got: %s", input2, err.Error())
    }
    //
}

// mustLookupParamType is the same as `ast.LookupParamType` but it panics if "indent" does not match a valid param type.
func mustLookupParamType(indent string) ast.ParamType {
    pt, found := ast.LookupParamType(indent, testParamTypes...)
    if !found {
        panic("param type '" + indent + "' is not part of the provided param types")
    }

    return pt
}

func TestParseParam(t *testing.T) {
    tests := []struct {
        valid             bool
        expectedStatement ast.ParamStatement
    }{
        {true,
            ast.ParamStatement{
                Src:  "{id:number min(1) max(5) else 404}",
                Name: "id",
                Type: mustLookupParamType("number"),
                Funcs: []ast.ParamFunc{
                    {
                        Name: "min",
                        Args: []string{"1"}},
                    {
                        Name: "max",
                        Args: []string{"5"}},
                },
                ErrorCode: 404,
            }}, // 0

        {true,
            ast.ParamStatement{
                Src:  "{id:number range(1,5)}",
                Name: "id",
                Type: mustLookupParamType("number"),
                Funcs: []ast.ParamFunc{
                    {
                        Name: "range",
                        Args: []string{"1", "5"}},
                },
                ErrorCode: 404,
            }}, // 1
        {true,
            ast.ParamStatement{
                Src:  "{file:path contains(.)}",
                Name: "file",
                Type: mustLookupParamType("path"),
                Funcs: []ast.ParamFunc{
                    {
                        Name: "contains",
                        Args: []string{"."}},
                },
                ErrorCode: 404,
            }}, // 2
        {true,
            ast.ParamStatement{
                Src:       "{username:alphabetical}",
                Name:      "username",
                Type:      mustLookupParamType("alphabetical"),
                ErrorCode: 404,
            }}, // 3
        {true,
            ast.ParamStatement{
                Src:       "{myparam}",
                Name:      "myparam",
                Type:      mustLookupParamType("string"),
                ErrorCode: 404,
            }}, // 4
        {false,
            ast.ParamStatement{
                Src:       "{myparam_:thisianunexpected}",
                Name:      "myparam_",
                Type:      nil,
                ErrorCode: 404,
            }}, // 5
        {true,
            ast.ParamStatement{
                Src:       "{myparam2}",
                Name:      "myparam2", // we now allow integers in the parameter names.
                Type:      ast.GetMasterParamType(testParamTypes...),
                ErrorCode: 404,
            }}, // 6
        {true,
            ast.ParamStatement{
                Src:  "{id:number even()}", // test param funcs without any arguments (LPAREN peek for RPAREN)
                Name: "id",
                Type: mustLookupParamType("number"),
                Funcs: []ast.ParamFunc{
                    {
                        Name: "even"},
                },
                ErrorCode: 404,
            }}, // 7
        {true,
            ast.ParamStatement{
                Src:       "{id:int64 else 404}",
                Name:      "id",
                Type:      mustLookupParamType("int64"),
                ErrorCode: 404,
            }}, // 8
        {true,
            ast.ParamStatement{
                Src:       "{id:long else 404}", // backwards-compatible test.
                Name:      "id",
                Type:      mustLookupParamType("int64"),
                ErrorCode: 404,
            }}, // 9
        {true,
            ast.ParamStatement{
                Src:       "{id:long else 404}",
                Name:      "id",
                Type:      mustLookupParamType("int64"), // backwards-compatible test of LookupParamType.
                ErrorCode: 404,
            }}, // 10
        {true,
            ast.ParamStatement{
                Src:       "{has:bool else 404}",
                Name:      "has",
                Type:      mustLookupParamType("bool"),
                ErrorCode: 404,
            }}, // 11
        {true,
            ast.ParamStatement{
                Src:       "{has:boolean else 404}", // backwards-compatible test.
                Name:      "has",
                Type:      mustLookupParamType("bool"),
                ErrorCode: 404,
            }}, // 12

    }

    p := new(ParamParser)
    for i, tt := range tests {
        p.Reset(tt.expectedStatement.Src)
        resultStmt, err := p.Parse(testParamTypes)

        if tt.valid && err != nil {
            t.Fatalf("tests[%d] - error %s", i, err.Error())
        } else if !tt.valid && err == nil {
            t.Fatalf("tests[%d] - expected to be a failure", i)
        }

        if resultStmt != nil { // is valid here
            if !reflect.DeepEqual(tt.expectedStatement, *resultStmt) {
                t.Fatalf("tests[%d] - wrong statement, expected and result differ. Details:\n%#v\n%#v", i, tt.expectedStatement, *resultStmt)
            }
        }

    }
}

func TestParse(t *testing.T) {
    tests := []struct {
        path               string
        valid              bool
        expectedStatements []ast.ParamStatement
    }{
        {"/api/users/{id:number min(1) max(5) else 404}", true,
            []ast.ParamStatement{{
                Src:  "{id:number min(1) max(5) else 404}",
                Name: "id",
                Type: paramTypeNumber,
                Funcs: []ast.ParamFunc{
                    {
                        Name: "min",
                        Args: []string{"1"}},
                    {
                        Name: "max",
                        Args: []string{"5"}},
                },
                ErrorCode: 404,
            },
            }}, // 0
        {"/admin/{id:uint64 range(1,5)}", true,
            []ast.ParamStatement{{
                Src:  "{id:uint64 range(1,5)}",
                Name: "id",
                Type: paramTypeUint64,
                Funcs: []ast.ParamFunc{
                    {
                        Name: "range",
                        Args: []string{"1", "5"}},
                },
                ErrorCode: 404,
            },
            }}, // 1
        {"/files/{file:path contains(.)}", true,
            []ast.ParamStatement{{
                Src:  "{file:path contains(.)}",
                Name: "file",
                Type: paramTypePath,
                Funcs: []ast.ParamFunc{
                    {
                        Name: "contains",
                        Args: []string{"."}},
                },
                ErrorCode: 404,
            },
            }}, // 2
        {"/profile/{username:alphabetical}", true,
            []ast.ParamStatement{{
                Src:       "{username:alphabetical}",
                Name:      "username",
                Type:      paramTypeAlphabetical,
                ErrorCode: 404,
            },
            }}, // 3
        {"/something/here/{myparam}", true,
            []ast.ParamStatement{{
                Src:       "{myparam}",
                Name:      "myparam",
                Type:      paramTypeString,
                ErrorCode: 404,
            },
            }}, // 4
        {"/unexpected/{myparam_:thisianunexpected}", false,
            []ast.ParamStatement{{
                Src:       "{myparam_:thisianunexpected}",
                Name:      "myparam_",
                Type:      nil,
                ErrorCode: 404,
            },
            }}, // 5
        {"/p2/{myparam2}", true,
            []ast.ParamStatement{{
                Src:       "{myparam2}",
                Name:      "myparam2", // we now allow integers in the parameter names.
                Type:      paramTypeString,
                ErrorCode: 404,
            },
            }}, // 6
        {"/assets/{file:path}/invalid", false, // a path param should be the last segment
            []ast.ParamStatement{{
                Src:       "{file:path}",
                Name:      "file",
                Type:      paramTypePath,
                ErrorCode: 404,
            },
            }}, // 7
    }
    for i, tt := range tests {
        statements, err := Parse(tt.path, testParamTypes)

        if tt.valid && err != nil {
            t.Fatalf("tests[%d] - error %s", i, err.Error())
        } else if !tt.valid && err == nil {
            t.Fatalf("tests[%d] - expected to be a failure", i)
        }
        for j := range statements {
            for l := range tt.expectedStatements {
                if !reflect.DeepEqual(tt.expectedStatements[l], *statements[j]) {
                    t.Fatalf("tests[%d] - wrong statements, expected and result differ. Details:\n%#v\n%#v", i, tt.expectedStatements[l], *statements[j])
                }
            }
        }

    }
}
53
macro/interpreter/token/token.go
Normal file
@@ -0,0 +1,53 @@
package token

// Type is a specific type of int which describes the symbols.
type Type int

// Token describes the letter(s) or symbol; it is a result of the lexer.
type Token struct {
    Type    Type
    Literal string
    Start   int // including the first char
    End     int // including the last char
}

// /about/{fullname:alphabetical}
// /profile/{anySpecialName:string}
// {id:uint64 range(1,5) else 404}
// /admin/{id:number eq(1) else 402}
// /file/{filepath:file else 405}
const (
    EOF = iota // 0
    ILLEGAL

    // Identifiers + literals
    LBRACE // {
    RBRACE // }
    // PARAM_IDENTIFIER // id
    COLON  // :
    LPAREN // (
    RPAREN // )
    // PARAM_FUNC_ARG // 1
    COMMA
    IDENT // string or keyword
    // Keywords
    // keywords_start
    ELSE // else
    // keywords_end
    INT // 42
)

const eof rune = 0

var keywords = map[string]Type{
    "else": ELSE,
}

// LookupIdent receives a series of chars
// and tries to resolve the token type.
func LookupIdent(ident string) Type {
    if tok, ok := keywords[ident]; ok {
        return tok
    }
    return IDENT
}
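A tiny usage sketch (not part of the commit), assuming only the API above: LookupIdent maps the single registered keyword "else" to its keyword token, and anything else falls back to IDENT.

package main

import (
    "fmt"

    "github.com/kataras/iris/macro/interpreter/token"
)

func main() {
    // "else" is the only registered keyword; any other identifier resolves to IDENT.
    fmt.Println(token.LookupIdent("else") == token.ELSE) // true
    fmt.Println(token.LookupIdent("min") == token.IDENT) // true
}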