
Remove internal/cmd/gen; it has no use for the end developer.

Former-commit-id: 12bead9d379c6644e391c482fe71b2e88263376c
commit 74989ad0a1 (parent d031ad55b8)
Author: kataras
Date: 2017-06-11 23:58:34 +03:00

19 changed files with 60 additions and 409 deletions

View File

@@ -12,6 +12,9 @@ import (
 	"time"
 )
 
+// NewDevLogger returns a new logger of io.Writer which
+// formats its log message input and writes it
+// to os.Stdout.
 func NewDevLogger(omitTimeFor ...string) io.Writer {
 	mu := &sync.Mutex{} // for now and last log
 	lastLog := time.Now()
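Since NewDevLogger returns a plain io.Writer, anything that accepts a writer can log through it. A minimal sketch, assuming the package lives at github.com/kataras/iris/core/logger (the import path is not shown in this hunk):

package main

import (
	"fmt"

	"github.com/kataras/iris/core/logger" // assumed import path
)

func main() {
	// w formats each message and writes it to os.Stdout,
	// per the doc comment added above.
	w := logger.NewDevLogger()

	// Any io.Writer consumer works, e.g. fmt.Fprintln.
	fmt.Fprintln(w, "server started")
}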

View File

@@ -26,7 +26,7 @@ type stringWriter interface {
 	WriteString(string) (int, error)
 }
 
-// Log sends a message to the defined io.Writer logger, it's
+// Log sends a message to the defined logger of io.Writer; it's
 // just a help function for internal use but it can be used by a custom middleware too.
 //
 // See AttachLogger too.

View File

@@ -4,4 +4,6 @@
 package logger
 
+// NoOpLogger is a non-operational logger of io.Writer;
+// it does nothing with any form of input.
 var NoOpLogger = writerFunc(func([]byte) (int, error) { return -1, nil })
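For that one-liner to compile, the unexported writerFunc must adapt a plain function to io.Writer, much like http.HandlerFunc does for http.Handler. A sketch of what such an adapter looks like (the actual definition lives elsewhere in the package and is not shown in this diff):

package logger

// writerFunc adapts an ordinary function to the io.Writer interface (sketch).
type writerFunc func([]byte) (int, error)

// Write implements io.Writer by delegating to the function itself.
func (w writerFunc) Write(p []byte) (int, error) { return w(p) }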

View File

@@ -5,7 +5,6 @@
 package router
 
 import (
-	"fmt"
 	"html"
 	"net/http"
 	"sort"
@@ -53,10 +52,6 @@ func (h *routerHandler) getTree(method, subdomain string) *tree {
 }
 
 func (h *routerHandler) addRoute(method, subdomain, path string, handlers context.Handlers) error {
-	if len(path) == 0 || path[0] != '/' {
-		return fmt.Errorf("router: path %q must begin with %q", path, "/")
-	}
-
 	t := h.getTree(method, subdomain)
 	if t == nil {
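For reference, the guard removed here rejected route paths that do not begin with a slash. The same check as a standalone, hypothetical helper (not part of iris) to show its behavior:

package main

import "fmt"

// validatePath mirrors the removed guard (hypothetical helper).
func validatePath(path string) error {
	if len(path) == 0 || path[0] != '/' {
		return fmt.Errorf("router: path %q must begin with %q", path, "/")
	}
	return nil
}

func main() {
	fmt.Println(validatePath("users"))  // router: path "users" must begin with "/"
	fmt.Println(validatePath("/users")) // <nil>
}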

View File

@@ -8,6 +8,7 @@ import (
 	"github.com/kataras/iris/core/router/macro/interpreter/token"
 )
 
+// Lexer helps us to read/scan characters of a source and resolve their token types.
 type Lexer struct {
 	input string
 	pos   int // current pos in input, current char
@@ -15,6 +16,8 @@ type Lexer struct {
 	ch byte // current char under examination
 }
 
+// New takes a source, a series of chars, and returns
+// a new lexer, ready to read from the first letter.
 func New(src string) *Lexer {
 	l := &Lexer{
 		input: src,
@@ -35,11 +38,13 @@ func (l *Lexer) readChar() {
 }
 
 const (
+	// Begin is the symbol which the lexer should scan forward to.
 	Begin = '{' // token.LBRACE
-	End   = '}' // token.RBRACE
+	// End is the symbol at which the lexer should stop scanning.
+	End = '}' // token.RBRACE
 )
 
-func resolveTokenType(ch byte) token.TokenType {
+func resolveTokenType(ch byte) token.Type {
 	switch ch {
 	case Begin:
 		return token.LBRACE
@@ -71,6 +76,11 @@ func resolveTokenType(ch byte) token.TokenType {
 }
 
+// NextToken returns the next token in the series of characters.
+// It can be a single symbol, a token type or a literal.
+// It's able to return an EOF token too.
+//
+// It moves the cursor forward.
 func (l *Lexer) NextToken() (t token.Token) {
 	l.skipWhitespace()
 	typ := resolveTokenType(l.ch)
@@ -101,6 +111,13 @@ func (l *Lexer) NextToken() (t token.Token) {
 	return
 }
 
+// NextDynamicToken doesn't care about the grammar.
+// It reads numbers or any unknown symbol;
+// it's being used by the parser to skip all characters
+// between a parameter function's arguments inside parentheses,
+// in order to allow custom regexp in the end-language too.
+//
+// It moves the cursor forward.
 func (l *Lexer) NextDynamicToken() (t token.Token) {
 	// calculate anything, even spaces.
@@ -124,8 +141,11 @@ func (l *Lexer) readIdentifierFuncArgument() string {
 	return l.input[pos:l.pos]
 }
 
-// useful when we want to peek but no continue, i.e empty param functions 'even()'
-func (l *Lexer) PeekNextTokenType() token.TokenType {
+// PeekNextTokenType returns only the token type
+// of the next character; it does not move the cursor forward.
+// It's being used by the parser to recognise empty functions, i.e. `even()`,
+// as valid functions with zero input arguments.
+func (l *Lexer) PeekNextTokenType() token.Type {
 	if len(l.input)-1 > l.pos {
 		ch := l.input[l.pos]
 		return resolveTokenType(ch)
@@ -133,7 +153,7 @@ func (l *Lexer) PeekNextTokenType() token.TokenType {
 	return resolveTokenType(0) // EOF
 }
 
-func (l *Lexer) newToken(tokenType token.TokenType, lit string) token.Token {
+func (l *Lexer) newToken(tokenType token.Type, lit string) token.Token {
 	t := token.Token{
 		Type:    tokenType,
 		Literal: lit,
@@ -151,7 +171,7 @@ func (l *Lexer) newToken(tokenType token.TokenType, lit string) token.Token {
 	return t
 }
 
-func (l *Lexer) newTokenRune(tokenType token.TokenType, ch byte) token.Token {
+func (l *Lexer) newTokenRune(tokenType token.Type, ch byte) token.Token {
 	return l.newToken(tokenType, string(ch))
 }
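Taken together, the lexer is driven by calling NextToken until it reports end of input. A small sketch, assuming a lexer package alongside the token import shown above and a token.EOF type (NextToken's doc comment says it can return an EOF token):

package main

import (
	"fmt"

	"github.com/kataras/iris/core/router/macro/interpreter/lexer" // assumed path
	"github.com/kataras/iris/core/router/macro/interpreter/token"
)

func main() {
	l := lexer.New("{id:int min(1) max(5) else 404}")

	// NextToken moves the cursor forward, so a plain loop drains the input.
	for t := l.NextToken(); t.Type != token.EOF; t = l.NextToken() {
		fmt.Printf("type=%v literal=%q chars=%d-%d\n", t.Type, t.Literal, t.Start, t.End)
	}
}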

View File

@@ -14,7 +14,7 @@ func TestNextToken(t *testing.T) {
 	input := `{id:int min(1) max(5) else 404}`
 
 	tests := []struct {
-		expectedType    token.TokenType
+		expectedType    token.Type
 		expectedLiteral string
 	}{
 		{token.LBRACE, "{"}, // 0
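The assertion loop for such a table-driven test typically walks the lexer once per expected entry. A plausible shape, as a sketch (not necessarily the repo's exact code; import paths assumed as above):

package lexer_test

import (
	"testing"

	"github.com/kataras/iris/core/router/macro/interpreter/lexer" // assumed path
	"github.com/kataras/iris/core/router/macro/interpreter/token"
)

func TestNextTokenSketch(t *testing.T) {
	input := `{id:int min(1) max(5) else 404}`
	tests := []struct {
		expectedType    token.Type
		expectedLiteral string
	}{
		{token.LBRACE, "{"},
		// ... more cases, as in the real table above
	}

	l := lexer.New(input)
	for i, tt := range tests {
		tok := l.NextToken()
		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - wrong token type, expected %v but got %v", i, tt.expectedType, tok.Type)
		}
		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - wrong literal, expected %q but got %q", i, tt.expectedLiteral, tok.Literal)
		}
	}
}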

View File

@@ -4,10 +4,12 @@
 package token
 
-type TokenType int
+// Type is a specific type of int which describes the symbols.
+type Type int
 
+// Token describes the letter(s) or symbol; it is a result of the lexer.
 type Token struct {
-	Type    TokenType
+	Type    Type
 	Literal string
 	Start   int // including the first char
 	End     int // including the last char
 }
@@ -33,19 +35,21 @@ const (
 	COMMA
 	IDENT // string or keyword
 
 	// Keywords
-	keywords_start
+	// keywords_start
 	ELSE // else
-	keywords_end
+	// keywords_end
 	INT // 42
 )
 
 const eof rune = 0
 
-var keywords = map[string]TokenType{
+var keywords = map[string]Type{
 	"else": ELSE,
 }
 
-func LookupIdent(ident string) TokenType {
+// LookupIdent receives a series of chars
+// and tries to resolve the token type.
+func LookupIdent(ident string) Type {
 	if tok, ok := keywords[ident]; ok {
 		return tok
 	}
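A quick illustration of LookupIdent's behavior, assuming the non-keyword fallback returns IDENT (that fallback return is outside this hunk):

package main

import (
	"fmt"

	"github.com/kataras/iris/core/router/macro/interpreter/token"
)

func main() {
	fmt.Println(token.LookupIdent("else") == token.ELSE) // true: "else" is a registered keyword
	fmt.Println(token.LookupIdent("id") == token.IDENT)  // true, assuming IDENT is the fallback
}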