feat(goctl): Add api parser (#2585)

This commit is contained in:
anqiansong 2023-03-28 23:45:26 +08:00 committed by GitHub
parent 455a6c8f97
commit 50bc361430
59 changed files with 11633 additions and 6 deletions

View File

@ -13,8 +13,11 @@ import (
"github.com/spf13/cobra"
"github.com/zeromicro/go-zero/core/errorx"
"github.com/zeromicro/go-zero/tools/goctl/api/parser"
"github.com/zeromicro/go-zero/tools/goctl/api/util"
"github.com/zeromicro/go-zero/tools/goctl/pkg/env"
apiF "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/format"
"github.com/zeromicro/go-zero/tools/goctl/util/pathx"
)
@ -90,6 +93,10 @@ func apiFormatReader(reader io.Reader, filename string, skipCheckDeclare bool) e
// ApiFormatByPath format api from file path
func ApiFormatByPath(apiFilePath string, skipCheckDeclare bool) error {
if env.UseExperimental() {
return apiF.File(apiFilePath)
}
data, err := os.ReadFile(apiFilePath)
if err != nil {
return err

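Note: the experimental branch above delegates formatting to the new pkg/parser/api/format package. A minimal sketch of calling it directly, hedged on what this hunk shows (File takes an api file path and returns an error; greet.api is a hypothetical file):

package main

import (
	"log"

	apiformat "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/format"
)

func main() {
	// Run the experimental formatter on an api file; whether it rewrites
	// the file in place or only validates it is not shown in this diff.
	if err := apiformat.File("greet.api"); err != nil {
		log.Fatal(err)
	}
}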
View File

@ -158,7 +158,7 @@ func sweep() error {
tm := time.Unix(seconds, 0)
if tm.Before(keepTime) {
if err := os.Remove(fpath); err != nil {
if err := os.RemoveAll(fpath); err != nil {
fmt.Println(aurora.Red(fmt.Sprintf("failed to remove file: %s", fpath)))
return err
}

View File

@ -8,6 +8,8 @@ import (
"github.com/zeromicro/go-zero/tools/goctl/api/parser/g4/ast"
"github.com/zeromicro/go-zero/tools/goctl/api/parser/g4/gen/api"
"github.com/zeromicro/go-zero/tools/goctl/api/spec"
"github.com/zeromicro/go-zero/tools/goctl/pkg/env"
apiParser "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
)
type parser struct {
@ -17,6 +19,10 @@ type parser struct {
// Parse parses the api file
func Parse(filename string) (*spec.ApiSpec, error) {
if env.UseExperimental() {
return apiParser.Parse(filename, "")
}
astParser := ast.NewParser(ast.WithParserPrefix(filepath.Base(filename)), ast.WithParserDebug())
parsedApi, err := astParser.Parse(filename)
if err != nil {

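Note: likewise, the experimental parser can be driven directly. A sketch assuming only what this hunk shows: parser.Parse takes the filename plus a second string argument (empty here, mirroring the call above) and returns (*spec.ApiSpec, error).

package main

import (
	"fmt"
	"log"

	apiparser "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
)

func main() {
	// Parse a hypothetical greet.api with the experimental parser.
	apiSpec, err := apiparser.Parse("greet.api", "")
	if err != nil {
		log.Fatal(err)
	}
	// ApiSpec.Types is declared in the spec changes later in this diff.
	fmt.Println("parsed types:", len(apiSpec.Types))
}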
View File

@ -11,6 +11,11 @@
}
// service doc
@server(
group: test
middleware: m1,m2
prefix: v1
)
service greet-api {
// handler doc
@handler GreetHandler // handler comment

View File

@ -21,9 +21,9 @@ type (
// ApiSpec describes an api file
ApiSpec struct {
Info Info
Syntax ApiSyntax
Imports []Import
Info Info // Deprecated: no longer used.
Syntax ApiSyntax // Deprecated: no longer used.
Imports []Import // Deprecated: no longer used.
Types []Type
Service Service
}
@ -70,6 +70,7 @@ type (
// Route describes api route
Route struct {
// Deprecated: Use Service AtServer instead.
AtServerAnnotation Annotation
Method string
Path string

View File

@ -6,7 +6,7 @@ import (
)
// BuildVersion is the version of goctl.
const BuildVersion = "1.5.0"
const BuildVersion = "1.5.1"
var tag = map[string]int{"pre-alpha": 0, "alpha": 1, "pre-bata": 2, "beta": 3, "released": 4, "": 5}

View File

@ -25,11 +25,14 @@ const (
GoctlDebug = "GOCTL_DEBUG"
GoctlCache = "GOCTL_CACHE"
GoctlVersion = "GOCTL_VERSION"
GoctlExperimental = "GOCTL_EXPERIMENTAL"
ProtocVersion = "PROTOC_VERSION"
ProtocGenGoVersion = "PROTOC_GEN_GO_VERSION"
ProtocGenGoGRPCVersion = "PROTO_GEN_GO_GRPC_VERSION"
envFileDir = "env"
envFileDir = "env"
ExperimentalOn = "on"
ExperimentalOff = "off"
)
// init initializes the goctl environment variables, the environment variables of the function are set in order,
@ -56,6 +59,8 @@ func init() {
if value := existsEnv.GetStringOr(GoctlCache, ""); value != "" {
goctlEnv.SetKV(GoctlCache, value)
}
experimental := existsEnv.GetOr(GoctlExperimental, ExperimentalOff)
goctlEnv.SetKV(GoctlExperimental, experimental)
}
if !goctlEnv.HasKey(GoctlHome) {
goctlEnv.SetKV(GoctlHome, defaultGoctlHome)
@ -69,7 +74,12 @@ func init() {
goctlEnv.SetKV(GoctlCache, cacheDir)
}
if !goctlEnv.HasKey(GoctlExperimental) {
goctlEnv.SetKV(GoctlExperimental, ExperimentalOff)
}
goctlEnv.SetKV(GoctlVersion, version.BuildVersion)
protocVer, _ := protoc.Version()
goctlEnv.SetKV(ProtocVersion, protocVer)
@ -92,6 +102,10 @@ func GetOr(key, def string) string {
return goctlEnv.GetStringOr(key, def)
}
func UseExperimental() bool {
return GetOr(GoctlExperimental, ExperimentalOff) == ExperimentalOn
}
func readEnv(goctlHome string) *sortedmap.SortedMap {
envFile := filepath.Join(goctlHome, envFileDir)
data, err := os.ReadFile(envFile)

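Note: GOCTL_EXPERIMENTAL defaults to "off" and is surfaced through UseExperimental(); enabling the new parser therefore means exporting GOCTL_EXPERIMENTAL=on (or writing it into goctl's env file) before goctl starts, since the value is resolved in init(). A small sketch of gating a code path on it:

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/env"
)

func main() {
	// UseExperimental reports whether GOCTL_EXPERIMENTAL resolves to "on",
	// taken from the goctl env file or the process environment at init time.
	if env.UseExperimental() {
		fmt.Println("experimental api parser enabled")
	} else {
		fmt.Println("experimental api parser disabled")
	}
}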
View File

@ -0,0 +1,32 @@
package assertx
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
// ErrorOrigin asserts that every given error is non-nil, printing the source and each error.
func ErrorOrigin(t *testing.T, source string, err ...error) {
if len(err) == 0 {
t.Fatalf("expected errors, got 0 error")
return
}
for _, e := range err {
fmt.Printf("<%s>: %v\n", source, e)
assert.Error(t, e)
}
}
// Error asserts that every given error is non-nil.
func Error(t *testing.T, err ...error) {
if len(err) == 0 {
t.Fatalf("expected errors, got 0 error")
return
}
for _, e := range err {
fmt.Println(e)
assert.Error(t, e)
}
}

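Note: a sketch of how these helpers would be used in a test; the import path is assumed to sit alongside the other new files under pkg/parser/api, and the source snippet and error values are illustrative only.

package assertx_test

import (
	"errors"
	"testing"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
)

func TestErrorHelpers(t *testing.T) {
	// Assert that each error in a batch is non-nil, printing the
	// offending source snippet next to every error.
	assertx.ErrorOrigin(t, `service greet-api {`, errors.New("syntax error: missing routes"))

	// Assert a plain list of errors without source context.
	assertx.Error(t, errors.New("duplicate declaration"))
}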
View File

@ -0,0 +1,223 @@
package ast
import (
"fmt"
"io"
"strings"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
"github.com/zeromicro/go-zero/tools/goctl/util"
)
// Node represents a node in the AST.
type Node interface {
// Pos returns the position of the first character belonging to the node.
Pos() token.Position
// End returns the position of the first character immediately after the node.
End() token.Position
// Format returns the node's text after formatting.
Format(...string) string
// HasHeadCommentGroup returns true if the node has a head comment group.
HasHeadCommentGroup() bool
// HasLeadingCommentGroup returns true if the node has a leading comment group.
HasLeadingCommentGroup() bool
// CommentGroup returns the node's head comment group and leading comment group.
CommentGroup() (head, leading CommentGroup)
}
// Stmt represents a statement in the AST.
type Stmt interface {
Node
stmtNode()
}
// Expr represents an expression in the AST.
type Expr interface {
Node
exprNode()
}
// AST represents a parsed API file.
type AST struct {
Filename string
Stmts []Stmt
readPosition int
}
// TokenNode represents a token node in the AST.
type TokenNode struct {
// HeadCommentGroup holds the comments on the lines before the token.
HeadCommentGroup CommentGroup
// Token is the token of the node.
Token token.Token
// LeadingCommentGroup holds the trailing comments on the same line as the token.
LeadingCommentGroup CommentGroup
// headFlag and leadingFlag are comment flags used only when transferring another Node to a TokenNode;
// headFlag being true does not imply that HeadCommentGroup is non-empty,
// and leadingFlag being true does not imply that LeadingCommentGroup is non-empty.
headFlag, leadingFlag bool
}
// NewTokenNode creates and returns a new TokenNode.
func NewTokenNode(tok token.Token) *TokenNode {
return &TokenNode{Token: tok}
}
// IsEmptyString returns true if the node is empty string.
func (t *TokenNode) IsEmptyString() bool {
return t.Equal("")
}
// IsZeroString returns true if the node is zero string.
func (t *TokenNode) IsZeroString() bool {
return t.Equal(`""`) || t.Equal("``")
}
// Equal returns true if the node's text is equal to the given text.
func (t *TokenNode) Equal(s string) bool {
return t.Token.Text == s
}
// SetLeadingCommentGroup sets the node's leading comment group.
func (t *TokenNode) SetLeadingCommentGroup(cg CommentGroup) {
t.LeadingCommentGroup = cg
}
func (t *TokenNode) HasLeadingCommentGroup() bool {
return t.LeadingCommentGroup.Valid() || t.leadingFlag
}
func (t *TokenNode) HasHeadCommentGroup() bool {
return t.HeadCommentGroup.Valid() || t.headFlag
}
func (t *TokenNode) CommentGroup() (head, leading CommentGroup) {
return t.HeadCommentGroup, t.LeadingCommentGroup
}
// PeekFirstLeadingComment returns the first leading comment of the node.
func (t *TokenNode) PeekFirstLeadingComment() *CommentStmt {
if len(t.LeadingCommentGroup) > 0 {
return t.LeadingCommentGroup[0]
}
return nil
}
// PeekFirstHeadComment returns the first head comment of the node.
func (t *TokenNode) PeekFirstHeadComment() *CommentStmt {
if len(t.HeadCommentGroup) > 0 {
return t.HeadCommentGroup[0]
}
return nil
}
func (t *TokenNode) Format(prefix ...string) string {
p := peekOne(prefix)
var textList []string
for _, v := range t.HeadCommentGroup {
textList = append(textList, v.Format(p))
}
var tokenText = p + t.Token.Text
var validLeadingCommentGroup CommentGroup
for _, e := range t.LeadingCommentGroup {
if util.IsEmptyStringOrWhiteSpace(e.Comment.Text) {
continue
}
validLeadingCommentGroup = append(validLeadingCommentGroup, e)
}
if len(validLeadingCommentGroup) > 0 {
tokenText = tokenText + WhiteSpace + t.LeadingCommentGroup.Join(WhiteSpace)
}
textList = append(textList, tokenText)
return strings.Join(textList, NewLine)
}
func (t *TokenNode) Pos() token.Position {
if len(t.HeadCommentGroup) > 0 {
return t.PeekFirstHeadComment().Pos()
}
return t.Token.Position
}
func (t *TokenNode) End() token.Position {
if len(t.LeadingCommentGroup) > 0 {
return t.LeadingCommentGroup[len(t.LeadingCommentGroup)-1].End()
}
return t.Token.Position
}
// Format formats the AST.
func (a *AST) Format(w io.Writer) {
fw := NewWriter(w)
defer fw.Flush()
for idx, e := range a.Stmts {
if e.Format() == NilIndent {
continue
}
fw.Write(withNode(e))
fw.NewLine()
switch e.(type) {
case *SyntaxStmt:
fw.NewLine()
case *ImportGroupStmt:
fw.NewLine()
case *ImportLiteralStmt:
if idx < len(a.Stmts)-1 {
_, ok := a.Stmts[idx+1].(*ImportLiteralStmt)
if !ok {
fw.NewLine()
}
}
case *InfoStmt:
fw.NewLine()
case *ServiceStmt:
fw.NewLine()
case *TypeGroupStmt:
fw.NewLine()
case *TypeLiteralStmt:
fw.NewLine()
case *CommentStmt:
}
}
}
// FormatForUnitTest formats the AST for unit test.
func (a *AST) FormatForUnitTest(w io.Writer) {
fw := NewWriter(w)
defer fw.Flush()
for _, e := range a.Stmts {
text := e.Format()
if text == NilIndent {
continue
}
fw.WriteText(text)
}
}
// Print prints the AST.
func (a *AST) Print() {
_ = Print(a)
}
// SyntaxError represents a syntax error.
func SyntaxError(pos token.Position, format string, v ...interface{}) error {
return fmt.Errorf("syntax error: %s %s", pos.String(), fmt.Sprintf(format, v...))
}
// DuplicateStmtError represents a duplicate statement error.
func DuplicateStmtError(pos token.Position, msg string) error {
return fmt.Errorf("duplicate declaration: %s %s", pos.String(), msg)
}
func peekOne(list []string) string {
if len(list) == 0 {
return ""
}
return list[0]
}

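Note: to make the TokenNode formatting rules concrete, a minimal sketch that builds a bare token node and formats it with a prefix; it uses only the fields shown above (Token.Text) and assumes the ast and token packages live under pkg/parser/api as the imports in this diff suggest.

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	// With no head or leading comments, Format returns prefix + token text.
	node := ast.NewTokenNode(token.Token{Text: "syntax"})
	fmt.Printf("%q\n", node.Format("\t")) // "\tsyntax"
}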
View File

@ -0,0 +1,75 @@
package ast
import (
"strings"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
"github.com/zeromicro/go-zero/tools/goctl/util"
)
// CommentGroup represents a list of comments.
type CommentGroup []*CommentStmt
// List returns the list of comments.
func (cg CommentGroup) List() []string {
var list = make([]string, 0, len(cg))
for _, v := range cg {
comment := v.Comment.Text
if util.IsEmptyStringOrWhiteSpace(comment) {
continue
}
list = append(list, comment)
}
return list
}
// String joins and returns the comment text.
func (cg CommentGroup) String() string {
return cg.Join(" ")
}
// Join joins the comments with the given separator.
func (cg CommentGroup) Join(sep string) string {
if !cg.Valid() {
return ""
}
list := cg.List()
return strings.Join(list, sep)
}
// Valid returns true if the comment group is not empty.
func (cg CommentGroup) Valid() bool {
return len(cg) > 0
}
// CommentStmt represents a comment statement.
type CommentStmt struct {
// Comment is the comment token.
Comment token.Token
}
func (c *CommentStmt) HasHeadCommentGroup() bool {
return false
}
func (c *CommentStmt) HasLeadingCommentGroup() bool {
return false
}
func (c *CommentStmt) CommentGroup() (head, leading CommentGroup) {
return
}
func (c *CommentStmt) stmtNode() {}
func (c *CommentStmt) Pos() token.Position {
return c.Comment.Position
}
func (c *CommentStmt) End() token.Position {
return c.Comment.Position
}
func (c *CommentStmt) Format(prefix ...string) string {
return peekOne(prefix) + c.Comment.Text
}

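Note: a short sketch of CommentGroup in use, relying only on the Comment field and the Join/Valid methods above (whitespace-only comments are dropped by List):

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	cg := ast.CommentGroup{
		{Comment: token.Token{Text: "// greet service"}},
		{Comment: token.Token{Text: "   "}}, // whitespace only, filtered out
		{Comment: token.Token{Text: "// v1 routes"}},
	}
	fmt.Println(cg.Valid())     // true
	fmt.Println(cg.Join(" | ")) // // greet service | // v1 routes
}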
View File

@ -0,0 +1,111 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
// ImportStmt represents an import statement.
type ImportStmt interface {
Stmt
importNode()
}
// ImportLiteralStmt represents an import literal statement.
type ImportLiteralStmt struct {
// Import is the import token.
Import *TokenNode
// Value is the import value.
Value *TokenNode
}
func (i *ImportLiteralStmt) HasHeadCommentGroup() bool {
return i.Import.HasHeadCommentGroup()
}
func (i *ImportLiteralStmt) HasLeadingCommentGroup() bool {
return i.Value.HasLeadingCommentGroup()
}
func (i *ImportLiteralStmt) CommentGroup() (head, leading CommentGroup) {
return i.Import.HeadCommentGroup, i.Value.LeadingCommentGroup
}
func (i *ImportLiteralStmt) Format(prefix ...string) (result string) {
if i.Value.IsZeroString() {
return ""
}
w := NewBufferWriter()
importNode := transferTokenNode(i.Import, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
w.Write(withNode(importNode, i.Value), withMode(ModeExpectInSameLine))
return w.String()
}
func (i *ImportLiteralStmt) End() token.Position {
return i.Value.End()
}
func (i *ImportLiteralStmt) importNode() {}
func (i *ImportLiteralStmt) Pos() token.Position {
return i.Import.Pos()
}
func (i *ImportLiteralStmt) stmtNode() {}
type ImportGroupStmt struct {
// Import is the import token.
Import *TokenNode
// LParen is the left parenthesis token.
LParen *TokenNode
// Values is the import values.
Values []*TokenNode
// RParen is the right parenthesis token.
RParen *TokenNode
}
func (i *ImportGroupStmt) HasHeadCommentGroup() bool {
return i.Import.HasHeadCommentGroup()
}
func (i *ImportGroupStmt) HasLeadingCommentGroup() bool {
return i.RParen.HasLeadingCommentGroup()
}
func (i *ImportGroupStmt) CommentGroup() (head, leading CommentGroup) {
return i.Import.HeadCommentGroup, i.RParen.LeadingCommentGroup
}
func (i *ImportGroupStmt) Format(prefix ...string) string {
var textList []string
for _, v := range i.Values {
if v.IsZeroString() {
continue
}
textList = append(textList, v.Format(Indent))
}
if len(textList) == 0 {
return ""
}
importNode := transferTokenNode(i.Import, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
w := NewBufferWriter()
w.Write(withNode(importNode, i.LParen), expectSameLine())
w.NewLine()
for _, v := range i.Values {
node := transferTokenNode(v, withTokenNodePrefix(peekOne(prefix)+Indent))
w.Write(withNode(node), expectSameLine())
w.NewLine()
}
w.Write(withNode(transferTokenNode(i.RParen, withTokenNodePrefix(prefix...))))
return w.String()
}
func (i *ImportGroupStmt) End() token.Position {
return i.RParen.End()
}
func (i *ImportGroupStmt) importNode() {}
func (i *ImportGroupStmt) Pos() token.Position {
return i.Import.Pos()
}
func (i *ImportGroupStmt) stmtNode() {}

View File

@ -0,0 +1,65 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
// InfoStmt is the info statement.
type InfoStmt struct {
// Info is the info keyword.
Info *TokenNode
// LParen is the left parenthesis.
LParen *TokenNode
// Values is the info values.
Values []*KVExpr
// RParen is the right parenthesis.
RParen *TokenNode
}
func (i *InfoStmt) HasHeadCommentGroup() bool {
return i.Info.HasHeadCommentGroup()
}
func (i *InfoStmt) HasLeadingCommentGroup() bool {
return i.RParen.HasLeadingCommentGroup()
}
func (i *InfoStmt) CommentGroup() (head, leading CommentGroup) {
return i.Info.HeadCommentGroup, i.RParen.LeadingCommentGroup
}
func (i *InfoStmt) Format(prefix ...string) string {
if len(i.Values) == 0 {
return ""
}
var textList []string
for _, v := range i.Values {
if v.Value.IsZeroString() {
continue
}
textList = append(textList, v.Format(Indent))
}
if len(textList) == 0 {
return ""
}
w := NewBufferWriter()
infoNode := transferTokenNode(i.Info, withTokenNodePrefix(prefix...), ignoreLeadingComment())
w.Write(withNode(infoNode, i.LParen))
w.NewLine()
for _, v := range i.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}
w.Write(withNode(transferTokenNode(i.RParen, withTokenNodePrefix(prefix...))))
return w.String()
}
func (i *InfoStmt) End() token.Position {
return i.RParen.End()
}
func (i *InfoStmt) Pos() token.Position {
return i.Info.Pos()
}
func (i *InfoStmt) stmtNode() {}

View File

@ -0,0 +1,39 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
// KVExpr is a key value expression.
type KVExpr struct {
// Key is the key of the key value expression.
Key *TokenNode
// Value is the value of the key value expression.
Value *TokenNode
}
func (i *KVExpr) HasHeadCommentGroup() bool {
return i.Key.HasHeadCommentGroup()
}
func (i *KVExpr) HasLeadingCommentGroup() bool {
return i.Value.HasLeadingCommentGroup()
}
func (i *KVExpr) CommentGroup() (head, leading CommentGroup) {
return i.Key.HeadCommentGroup, i.Value.LeadingCommentGroup
}
func (i *KVExpr) Format(prefix ...string) string {
w := NewBufferWriter()
w.Write(withNode(i.Key, i.Value), withPrefix(prefix...), withInfix(Indent), withRawText())
return w.String()
}
func (i *KVExpr) End() token.Position {
return i.Value.End()
}
func (i *KVExpr) Pos() token.Position {
return i.Key.Pos()
}
func (i *KVExpr) exprNode() {}

View File

@ -0,0 +1,237 @@
package ast
import (
"fmt"
"go/token"
"io"
"os"
"reflect"
apitoken "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)
// A FieldFilter may be provided to Fprint to control the output.
type FieldFilter func(name string, value reflect.Value) bool
// NotNilFilter returns true for field values that are not nil;
// it returns false otherwise.
func NotNilFilter(_ string, v reflect.Value) bool {
switch v.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice:
return !v.IsNil()
}
return true
}
// Fprint prints the value of x to the writer w.
func Fprint(w io.Writer, x interface{}, f FieldFilter) error {
return fprint(w, x, f)
}
func fprint(w io.Writer, x interface{}, f FieldFilter) (err error) {
// setup printer
p := printer{
output: w,
filter: f,
ptrmap: make(map[interface{}]int),
last: '\n', // force printing of line number on first line
}
// install error handler
defer func() {
if e := recover(); e != nil {
err = e.(localError).err // re-panics if it's not a localError
}
}()
// print x
if x == nil {
p.printf("nil\n")
return
}
p.print(reflect.ValueOf(x))
p.printf("\n")
return
}
func Print(x interface{}) error {
return Fprint(os.Stdout, x, NotNilFilter)
}
type printer struct {
output io.Writer
filter FieldFilter
ptrmap map[interface{}]int // *T -> line number
prefixIndent int // current indentation level
last byte // the last byte processed by Write
line int // current line number
}
var prefixIndent = []byte(". ")
// Write implements io.Writer.
func (p *printer) Write(data []byte) (n int, err error) {
var m int
for i, b := range data {
// invariant: data[0:n] has been written
if b == '\n' {
m, err = p.output.Write(data[n : i+1])
n += m
if err != nil {
return
}
p.line++
} else if p.last == '\n' {
_, err = fmt.Fprintf(p.output, "%6d ", p.line)
if err != nil {
return
}
for j := p.prefixIndent; j > 0; j-- {
_, err = p.output.Write(prefixIndent)
if err != nil {
return
}
}
}
p.last = b
}
if len(data) > n {
m, err = p.output.Write(data[n:])
n += m
}
return
}
// localError wraps locally caught errors so we can distinguish
// them from genuine panics which we don't want to return as errors.
type localError struct {
err error
}
// printf is a convenience wrapper that takes care of print errors.
func (p *printer) printf(format string, args ...interface{}) {
if _, err := fmt.Fprintf(p, format, args...); err != nil {
panic(localError{err})
}
}
// Implementation note: Print is written for AST nodes but could be
// used to print arbitrary data structures; such a version should
// probably be in a different package.
//
// Note: This code detects (some) cycles created via pointers but
// not cycles that are created via slices or maps containing the
// same slice or map. Code for general data structures probably
// should catch those as well.
func (p *printer) print(x reflect.Value) {
if !NotNilFilter("", x) {
p.printf("nil")
return
}
switch x.Kind() {
case reflect.Interface:
p.print(x.Elem())
case reflect.Map:
p.printf("%s (len = %d) {", x.Type(), x.Len())
if x.Len() > 0 {
p.prefixIndent++
p.printf("\n")
for _, key := range x.MapKeys() {
p.print(key)
p.printf(": ")
p.print(x.MapIndex(key))
p.printf("\n")
}
p.prefixIndent--
}
p.printf("}")
case reflect.Pointer:
p.printf("*")
// type-checked ASTs may contain cycles - use ptrmap
// to keep track of objects that have been printed
// already and print the respective line number instead
ptr := x.Interface()
if line, exists := p.ptrmap[ptr]; exists {
p.printf("(obj @ %d)", line)
} else {
p.ptrmap[ptr] = p.line
p.print(x.Elem())
}
case reflect.Array:
p.printf("%s {", x.Type())
if x.Len() > 0 {
p.prefixIndent++
p.printf("\n")
for i, n := 0, x.Len(); i < n; i++ {
p.printf("%d: ", i)
p.print(x.Index(i))
p.printf("\n")
}
p.prefixIndent--
}
p.printf("}")
case reflect.Slice:
if s, ok := x.Interface().([]byte); ok {
p.printf("%#q", s)
return
}
p.printf("%s (len = %d) {", x.Type(), x.Len())
if x.Len() > 0 {
p.prefixIndent++
p.printf("\n")
for i, n := 0, x.Len(); i < n; i++ {
p.printf("%d: ", i)
p.print(x.Index(i))
p.printf("\n")
}
p.prefixIndent--
}
p.printf("}")
case reflect.Struct:
if val, ok := x.Interface().(apitoken.Position); ok {
p.printf(val.String())
return
}
t := x.Type()
p.printf("%s {", t)
p.prefixIndent++
first := true
for i, n := 0, t.NumField(); i < n; i++ {
// exclude non-exported fields because their
// values cannot be accessed via reflection
if name := t.Field(i).Name; token.IsExported(name) {
value := x.Field(i)
if p.filter == nil || p.filter(name, value) {
if first {
p.printf("\n")
first = false
}
p.printf("%s: ", name)
p.print(value)
p.printf("\n")
}
}
}
p.prefixIndent--
p.printf("}")
default:
v := x.Interface()
switch v := v.(type) {
case string:
// print strings in quotes
p.printf("%q", v)
return
}
// default
p.printf("%v", v)
}
}

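Note: Fprint and Print work on arbitrary values via reflection, not just AST nodes; a sketch that dumps a value with NotNilFilter, using only the exported API above (the demo struct is illustrative):

package main

import (
	"log"
	"os"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
)

type demo struct {
	Name  string
	Items []int
	skip  bool // unexported fields are not printed by the printer
}

func main() {
	// Writes a line-numbered, indented dump of the value, omitting nil fields.
	if err := ast.Fprint(os.Stdout, demo{Name: "greet", Items: []int{1, 2}}, ast.NotNilFilter); err != nil {
		log.Fatal(err)
	}
}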
View File

@ -0,0 +1,577 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
// AtServerStmt represents @server statement.
type AtServerStmt struct {
// AtServer is the @server token.
AtServer *TokenNode
// LParen is the left parenthesis token.
LParen *TokenNode
// Values is the key-value pairs.
Values []*KVExpr
// RParen is the right parenthesis token.
RParen *TokenNode
}
func (a *AtServerStmt) HasHeadCommentGroup() bool {
return a.AtServer.HasHeadCommentGroup()
}
func (a *AtServerStmt) HasLeadingCommentGroup() bool {
return a.RParen.HasLeadingCommentGroup()
}
func (a *AtServerStmt) CommentGroup() (head, leading CommentGroup) {
return a.AtServer.HeadCommentGroup, a.RParen.LeadingCommentGroup
}
func (a *AtServerStmt) Format(prefix ...string) string {
if len(a.Values) == 0 {
return ""
}
var textList []string
for _, v := range a.Values {
if v.Value.IsZeroString() {
continue
}
textList = append(textList, v.Format())
}
if len(textList) == 0 {
return ""
}
w := NewBufferWriter()
atServerNode := transferTokenNode(a.AtServer, withTokenNodePrefix(prefix...), ignoreLeadingComment())
w.Write(withNode(atServerNode, a.LParen), expectSameLine())
w.NewLine()
for _, v := range a.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}
w.Write(withNode(transferTokenNode(a.RParen, withTokenNodePrefix(prefix...))))
return w.String()
}
func (a *AtServerStmt) End() token.Position {
return a.RParen.End()
}
func (a *AtServerStmt) Pos() token.Position {
return a.AtServer.Pos()
}
func (a *AtServerStmt) stmtNode() {}
type AtDocStmt interface {
Stmt
atDocNode()
}
type AtDocLiteralStmt struct {
AtDoc *TokenNode
Value *TokenNode
}
func (a *AtDocLiteralStmt) HasHeadCommentGroup() bool {
return a.AtDoc.HasHeadCommentGroup()
}
func (a *AtDocLiteralStmt) HasLeadingCommentGroup() bool {
return a.Value.HasLeadingCommentGroup()
}
func (a *AtDocLiteralStmt) CommentGroup() (head, leading CommentGroup) {
return a.AtDoc.HeadCommentGroup, a.Value.LeadingCommentGroup
}
func (a *AtDocLiteralStmt) Format(prefix ...string) string {
if a.Value.IsZeroString() {
return ""
}
w := NewBufferWriter()
atDocNode := transferTokenNode(a.AtDoc, withTokenNodePrefix(prefix...), ignoreLeadingComment())
valueNode := transferTokenNode(a.Value, ignoreHeadComment())
w.Write(withNode(atDocNode, valueNode), expectSameLine())
return w.String()
}
func (a *AtDocLiteralStmt) End() token.Position {
return a.Value.End()
}
func (a *AtDocLiteralStmt) atDocNode() {}
func (a *AtDocLiteralStmt) Pos() token.Position {
return a.AtDoc.Pos()
}
func (a *AtDocLiteralStmt) stmtNode() {}
type AtDocGroupStmt struct {
AtDoc *TokenNode
LParen *TokenNode
Values []*KVExpr
RParen *TokenNode
}
func (a *AtDocGroupStmt) HasHeadCommentGroup() bool {
return a.AtDoc.HasHeadCommentGroup()
}
func (a *AtDocGroupStmt) HasLeadingCommentGroup() bool {
return a.RParen.HasLeadingCommentGroup()
}
func (a *AtDocGroupStmt) CommentGroup() (head, leading CommentGroup) {
return a.AtDoc.HeadCommentGroup, a.RParen.LeadingCommentGroup
}
func (a *AtDocGroupStmt) Format(prefix ...string) string {
if len(a.Values) == 0 {
return ""
}
var textList []string
for _, v := range a.Values {
if v.Value.IsZeroString() {
continue
}
textList = append(textList, v.Format(peekOne(prefix)+Indent))
}
if len(textList) == 0 {
return ""
}
w := NewBufferWriter()
atDocNode := transferTokenNode(a.AtDoc, withTokenNodePrefix(prefix...), ignoreLeadingComment())
w.Write(withNode(atDocNode, a.LParen), expectSameLine())
w.NewLine()
for _, v := range a.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}
w.Write(withNode(transferTokenNode(a.RParen, withTokenNodePrefix(prefix...))))
return w.String()
}
func (a *AtDocGroupStmt) End() token.Position {
return a.RParen.End()
}
func (a *AtDocGroupStmt) atDocNode() {}
func (a *AtDocGroupStmt) Pos() token.Position {
return a.AtDoc.Pos()
}
func (a *AtDocGroupStmt) stmtNode() {}
type ServiceStmt struct {
AtServerStmt *AtServerStmt
Service *TokenNode
Name *ServiceNameExpr
LBrace *TokenNode
Routes []*ServiceItemStmt
RBrace *TokenNode
}
func (s *ServiceStmt) HasHeadCommentGroup() bool {
if s.AtServerStmt != nil {
return s.AtServerStmt.HasHeadCommentGroup()
}
return s.Service.HasHeadCommentGroup()
}
func (s *ServiceStmt) HasLeadingCommentGroup() bool {
return s.RBrace.HasLeadingCommentGroup()
}
func (s *ServiceStmt) CommentGroup() (head, leading CommentGroup) {
if s.AtServerStmt != nil {
head, _ = s.AtServerStmt.CommentGroup()
return head, s.RBrace.LeadingCommentGroup
}
return s.Service.HeadCommentGroup, s.RBrace.LeadingCommentGroup
}
func (s *ServiceStmt) Format(prefix ...string) string {
w := NewBufferWriter()
if s.AtServerStmt != nil {
text := s.AtServerStmt.Format()
if len(text) > 0 {
w.WriteText(text)
w.NewLine()
}
}
serviceNode := transferTokenNode(s.Service, withTokenNodePrefix(prefix...))
w.Write(withNode(serviceNode, s.Name, s.LBrace), expectSameLine())
if len(s.Routes) == 0 {
w.Write(withNode(transferTokenNode(s.RBrace, withTokenNodePrefix(prefix...))))
return w.String()
}
w.NewLine()
for idx, route := range s.Routes {
routeNode := transfer2TokenNode(route, false, withTokenNodePrefix(peekOne(prefix)+Indent))
w.Write(withNode(routeNode))
if idx < len(s.Routes)-1 {
w.NewLine()
}
}
w.Write(withNode(transferTokenNode(s.RBrace, withTokenNodePrefix(prefix...))))
return w.String()
}
func (s *ServiceStmt) End() token.Position {
return s.RBrace.End()
}
func (s *ServiceStmt) Pos() token.Position {
if s.AtServerStmt != nil {
return s.AtServerStmt.Pos()
}
return s.Service.Pos()
}
func (s *ServiceStmt) stmtNode() {}
type ServiceNameExpr struct {
Name *TokenNode
}
func (s *ServiceNameExpr) HasHeadCommentGroup() bool {
return s.Name.HasHeadCommentGroup()
}
func (s *ServiceNameExpr) HasLeadingCommentGroup() bool {
return s.Name.HasLeadingCommentGroup()
}
func (s *ServiceNameExpr) CommentGroup() (head, leading CommentGroup) {
return s.Name.HeadCommentGroup, s.Name.LeadingCommentGroup
}
func (s *ServiceNameExpr) Format(...string) string {
w := NewBufferWriter()
w.WriteText(s.Name.Format())
return w.String()
}
func (s *ServiceNameExpr) End() token.Position {
return s.Name.End()
}
func (s *ServiceNameExpr) Pos() token.Position {
return s.Name.Pos()
}
func (s *ServiceNameExpr) exprNode() {}
type AtHandlerStmt struct {
AtHandler *TokenNode
Name *TokenNode
}
func (a *AtHandlerStmt) HasHeadCommentGroup() bool {
return a.AtHandler.HasHeadCommentGroup()
}
func (a *AtHandlerStmt) HasLeadingCommentGroup() bool {
return a.Name.HasLeadingCommentGroup()
}
func (a *AtHandlerStmt) CommentGroup() (head, leading CommentGroup) {
return a.AtHandler.HeadCommentGroup, a.Name.LeadingCommentGroup
}
func (a *AtHandlerStmt) Format(prefix ...string) string {
w := NewBufferWriter()
atDocNode := transferTokenNode(a.AtHandler, withTokenNodePrefix(prefix...), ignoreLeadingComment())
nameNode := transferTokenNode(a.Name, ignoreHeadComment())
w.Write(withNode(atDocNode, nameNode), expectSameLine())
return w.String()
}
func (a *AtHandlerStmt) End() token.Position {
return a.Name.End()
}
func (a *AtHandlerStmt) Pos() token.Position {
return a.AtHandler.Pos()
}
func (a *AtHandlerStmt) stmtNode() {}
type ServiceItemStmt struct {
AtDoc AtDocStmt
AtHandler *AtHandlerStmt
Route *RouteStmt
}
func (s *ServiceItemStmt) HasHeadCommentGroup() bool {
if s.AtDoc != nil {
return s.AtDoc.HasHeadCommentGroup()
}
return s.AtHandler.HasHeadCommentGroup()
}
func (s *ServiceItemStmt) HasLeadingCommentGroup() bool {
return s.Route.HasLeadingCommentGroup()
}
func (s *ServiceItemStmt) CommentGroup() (head, leading CommentGroup) {
_, leading = s.Route.CommentGroup()
if s.AtDoc != nil {
head, _ = s.AtDoc.CommentGroup()
return head, leading
}
head, _ = s.AtHandler.CommentGroup()
return head, leading
}
func (s *ServiceItemStmt) Format(prefix ...string) string {
w := NewBufferWriter()
if s.AtDoc != nil {
w.WriteText(s.AtDoc.Format(prefix...))
w.NewLine()
}
w.WriteText(s.AtHandler.Format(prefix...))
w.NewLine()
routeNode := transfer2TokenNode(s.Route, false, withTokenNodePrefix(prefix...))
w.Write(withNode(routeNode))
w.NewLine()
return w.String()
}
func (s *ServiceItemStmt) End() token.Position {
return s.Route.End()
}
func (s *ServiceItemStmt) Pos() token.Position {
if s.AtDoc != nil {
return s.AtDoc.Pos()
}
return s.AtHandler.Pos()
}
func (s *ServiceItemStmt) stmtNode() {}
type RouteStmt struct {
Method *TokenNode
Path *PathExpr
Request *BodyStmt
Returns *TokenNode
Response *BodyStmt
}
func (r *RouteStmt) HasHeadCommentGroup() bool {
return r.Method.HasHeadCommentGroup()
}
func (r *RouteStmt) HasLeadingCommentGroup() bool {
if r.Response != nil {
return r.Response.HasLeadingCommentGroup()
} else if r.Returns != nil {
return r.Returns.HasLeadingCommentGroup()
} else if r.Request != nil {
return r.Request.HasLeadingCommentGroup()
}
return r.Path.HasLeadingCommentGroup()
}
func (r *RouteStmt) CommentGroup() (head, leading CommentGroup) {
head, _ = r.Method.CommentGroup()
if r.Response != nil {
_, leading = r.Response.CommentGroup()
} else if r.Returns != nil {
_, leading = r.Returns.CommentGroup()
} else if r.Request != nil {
_, leading = r.Request.CommentGroup()
}
return head, leading
}
func (r *RouteStmt) Format(prefix ...string) string {
w := NewBufferWriter()
methodNode := transferTokenNode(r.Method, withTokenNodePrefix(prefix...), ignoreLeadingComment())
if r.Response != nil {
if r.Response.Body == nil {
r.Response.RParen = transferTokenNode(r.Response.RParen, ignoreHeadComment())
if r.Request != nil {
w.Write(withNode(methodNode, r.Path, r.Request), expectSameLine())
} else {
w.Write(withNode(methodNode, r.Path), expectSameLine())
}
} else {
r.Response.RParen = transferTokenNode(r.Response.RParen, ignoreHeadComment())
if r.Request != nil {
w.Write(withNode(methodNode, r.Path, r.Request, r.Returns, r.Response), expectSameLine())
} else {
w.Write(withNode(methodNode, r.Path, r.Returns, r.Response), expectSameLine())
}
}
} else if r.Request != nil {
r.Request.RParen = transferTokenNode(r.Request.RParen, ignoreHeadComment())
w.Write(withNode(methodNode, r.Path, r.Request), expectSameLine())
} else {
pathNode := transferTokenNode(r.Path.Value, ignoreHeadComment())
w.Write(withNode(methodNode, pathNode), expectSameLine())
}
return w.String()
}
func (r *RouteStmt) End() token.Position {
if r.Response != nil {
return r.Response.End()
}
if r.Returns != nil {
return r.Returns.Pos()
}
if r.Request != nil {
return r.Request.End()
}
return r.Path.End()
}
func (r *RouteStmt) Pos() token.Position {
return r.Method.Pos()
}
func (r *RouteStmt) stmtNode() {}
type PathExpr struct {
Value *TokenNode
}
func (p *PathExpr) HasHeadCommentGroup() bool {
return p.Value.HasHeadCommentGroup()
}
func (p *PathExpr) HasLeadingCommentGroup() bool {
return p.Value.HasLeadingCommentGroup()
}
func (p *PathExpr) CommentGroup() (head, leading CommentGroup) {
return p.Value.CommentGroup()
}
func (p *PathExpr) Format(prefix ...string) string {
pathNode := transferTokenNode(p.Value, ignoreComment())
return pathNode.Format(prefix...)
}
func (p *PathExpr) End() token.Position {
return p.Value.End()
}
func (p *PathExpr) Pos() token.Position {
return p.Value.Pos()
}
func (p *PathExpr) exprNode() {}
type BodyStmt struct {
LParen *TokenNode
Body *BodyExpr
RParen *TokenNode
}
func (b *BodyStmt) HasHeadCommentGroup() bool {
return b.LParen.HasHeadCommentGroup()
}
func (b *BodyStmt) HasLeadingCommentGroup() bool {
return b.RParen.HasLeadingCommentGroup()
}
func (b *BodyStmt) CommentGroup() (head, leading CommentGroup) {
return b.LParen.HeadCommentGroup, b.RParen.LeadingCommentGroup
}
func (b *BodyStmt) Format(...string) string {
w := NewBufferWriter()
if b.Body == nil {
return ""
}
w.Write(withNode(b.LParen, b.Body, b.RParen), withInfix(NilIndent), expectSameLine())
return w.String()
}
func (b *BodyStmt) End() token.Position {
return b.RParen.End()
}
func (b *BodyStmt) Pos() token.Position {
return b.LParen.Pos()
}
func (b *BodyStmt) stmtNode() {}
type BodyExpr struct {
LBrack *TokenNode
RBrack *TokenNode
Star *TokenNode
Value *TokenNode
}
func (e *BodyExpr) HasHeadCommentGroup() bool {
if e.LBrack != nil {
return e.LBrack.HasHeadCommentGroup()
} else if e.Star != nil {
return e.Star.HasHeadCommentGroup()
} else {
return e.Value.HasHeadCommentGroup()
}
}
func (e *BodyExpr) HasLeadingCommentGroup() bool {
return e.Value.HasLeadingCommentGroup()
}
func (e *BodyExpr) CommentGroup() (head, leading CommentGroup) {
if e.LBrack != nil {
head = e.LBrack.HeadCommentGroup
} else if e.Star != nil {
head = e.Star.HeadCommentGroup
} else {
head = e.Value.HeadCommentGroup
}
return head, e.Value.LeadingCommentGroup
}
func (e *BodyExpr) End() token.Position {
return e.Value.End()
}
func (e *BodyExpr) Format(...string) string {
w := NewBufferWriter()
if e.LBrack != nil {
lbrackNode := transferTokenNode(e.LBrack, ignoreComment())
rbrackNode := transferTokenNode(e.RBrack, ignoreComment())
if e.Star != nil {
starNode := transferTokenNode(e.Star, ignoreComment())
w.Write(withNode(lbrackNode, rbrackNode, starNode, e.Value), withInfix(NilIndent), expectSameLine())
} else {
w.Write(withNode(lbrackNode, rbrackNode, e.Value), withInfix(NilIndent), expectSameLine())
}
} else if e.Star != nil {
starNode := transferTokenNode(e.Star, ignoreComment())
w.Write(withNode(starNode, e.Value), withInfix(NilIndent), expectSameLine())
} else {
w.Write(withNode(e.Value))
}
return w.String()
}
func (e *BodyExpr) Pos() token.Position {
if e.LBrack != nil {
return e.LBrack.Pos()
}
if e.Star != nil {
return e.Star.Pos()
}
return e.Value.Pos()
}
func (e *BodyExpr) exprNode() {}

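Note: the service statement nodes can be constructed and formatted on their own; a hedged sketch for AtHandlerStmt, using only exported fields from above (the exact whitespace in the output comes from the package's tabwriter, so the comment shows the rough shape only):

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	handler := &ast.AtHandlerStmt{
		AtHandler: ast.NewTokenNode(token.Token{Text: "@handler"}),
		Name:      ast.NewTokenNode(token.Token{Text: "GreetHandler"}),
	}
	// Roughly: @handler GreetHandler
	fmt.Println(handler.Format())
}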
View File

@ -0,0 +1,44 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
// SyntaxStmt represents a syntax statement.
type SyntaxStmt struct {
// Syntax is the syntax token.
Syntax *TokenNode
// Assign is the assign token.
Assign *TokenNode
// Value is the syntax value.
Value *TokenNode
}
func (s *SyntaxStmt) HasHeadCommentGroup() bool {
return s.Syntax.HasHeadCommentGroup()
}
func (s *SyntaxStmt) HasLeadingCommentGroup() bool {
return s.Value.HasLeadingCommentGroup()
}
func (s *SyntaxStmt) CommentGroup() (head, leading CommentGroup) {
return s.Syntax.HeadCommentGroup, s.Syntax.LeadingCommentGroup
}
func (s *SyntaxStmt) Format(prefix ...string) string {
w := NewBufferWriter()
syntaxNode := transferTokenNode(s.Syntax,
withTokenNodePrefix(prefix...), ignoreLeadingComment())
assignNode := transferTokenNode(s.Assign, ignoreLeadingComment())
w.Write(withNode(syntaxNode, assignNode, s.Value), withPrefix(prefix...), expectSameLine())
return w.String()
}
func (s *SyntaxStmt) End() token.Position {
return s.Value.End()
}
func (s *SyntaxStmt) Pos() token.Position {
return s.Syntax.Pos()
}
func (s *SyntaxStmt) stmtNode() {}

View File

@ -0,0 +1,797 @@
package ast
import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
/*******************TypeStmt Begin********************/
// TypeStmt is the interface for type statement.
type TypeStmt interface {
Stmt
typeNode()
}
// TypeLiteralStmt is the type statement for type literal.
type TypeLiteralStmt struct {
// Type is the type keyword.
Type *TokenNode
// Expr is the type expression.
Expr *TypeExpr
}
func (t *TypeLiteralStmt) HasHeadCommentGroup() bool {
return t.Type.HasHeadCommentGroup()
}
func (t *TypeLiteralStmt) HasLeadingCommentGroup() bool {
return t.Expr.HasLeadingCommentGroup()
}
func (t *TypeLiteralStmt) CommentGroup() (head, leading CommentGroup) {
_, leading = t.Expr.CommentGroup()
return t.Type.HeadCommentGroup, leading
}
func (t *TypeLiteralStmt) Format(prefix ...string) string {
w := NewBufferWriter()
w.Write(withNode(t.Type, t.Expr), withPrefix(prefix...), expectSameLine())
return w.String()
}
func (t *TypeLiteralStmt) End() token.Position {
return t.Expr.End()
}
func (t *TypeLiteralStmt) Pos() token.Position {
return t.Type.Pos()
}
func (t *TypeLiteralStmt) stmtNode() {}
func (t *TypeLiteralStmt) typeNode() {}
// TypeGroupStmt is the type statement for type group.
type TypeGroupStmt struct {
// Type is the type keyword.
Type *TokenNode
// LParen is the left parenthesis.
LParen *TokenNode
// ExprList is the type expression list.
ExprList []*TypeExpr
// RParen is the right parenthesis.
RParen *TokenNode
}
func (t *TypeGroupStmt) HasHeadCommentGroup() bool {
return t.Type.HasHeadCommentGroup()
}
func (t *TypeGroupStmt) HasLeadingCommentGroup() bool {
return t.RParen.HasLeadingCommentGroup()
}
func (t *TypeGroupStmt) CommentGroup() (head, leading CommentGroup) {
return t.Type.HeadCommentGroup, t.RParen.LeadingCommentGroup
}
func (t *TypeGroupStmt) Format(prefix ...string) string {
if len(t.ExprList) == 0 {
return ""
}
w := NewBufferWriter()
typeNode := transferTokenNode(t.Type, withTokenNodePrefix(prefix...))
w.Write(withNode(typeNode, t.LParen), expectSameLine())
w.NewLine()
for _, e := range t.ExprList {
w.Write(withNode(e), withPrefix(peekOne(prefix)+Indent))
w.NewLine()
}
w.WriteText(t.RParen.Format(prefix...))
return w.String()
}
func (t *TypeGroupStmt) End() token.Position {
return t.RParen.End()
}
func (t *TypeGroupStmt) Pos() token.Position {
return t.Type.Pos()
}
func (t *TypeGroupStmt) stmtNode() {}
func (t *TypeGroupStmt) typeNode() {}
/*******************TypeStmt End********************/
/*******************TypeExpr Begin********************/
// TypeExpr is the type expression.
type TypeExpr struct {
// Name is the type name.
Name *TokenNode
// Assign is the assign operator.
Assign *TokenNode
// DataType is the data type.
DataType DataType
}
func (e *TypeExpr) HasHeadCommentGroup() bool {
return e.Name.HasHeadCommentGroup()
}
func (e *TypeExpr) HasLeadingCommentGroup() bool {
return e.DataType.HasLeadingCommentGroup()
}
func (e *TypeExpr) CommentGroup() (head, leading CommentGroup) {
_, leading = e.DataType.CommentGroup()
return e.Name.HeadCommentGroup, leading
}
func (e *TypeExpr) Format(prefix ...string) string {
w := NewBufferWriter()
nameNode := transferTokenNode(e.Name, withTokenNodePrefix(prefix...))
dataTypeNode := transfer2TokenNode(e.DataType, false, withTokenNodePrefix(prefix...))
if e.Assign != nil {
w.Write(withNode(nameNode, e.Assign, dataTypeNode), expectSameLine())
} else {
w.Write(withNode(nameNode, dataTypeNode), expectSameLine())
}
return w.String()
}
func (e *TypeExpr) End() token.Position {
return e.DataType.End()
}
func (e *TypeExpr) Pos() token.Position {
return e.Name.Pos()
}
func (e *TypeExpr) exprNode() {}
func (e *TypeExpr) isStruct() bool {
return e.DataType.ContainsStruct()
}
/*******************TypeExpr End********************/
/*******************Elem Begin********************/
// ElemExpr is the element expression.
type ElemExpr struct {
// Name is the field element name.
Name []*TokenNode
// DataType is the field data type.
DataType DataType
// Tag is the field tag.
Tag *TokenNode
}
// IsAnonymous returns true if the element is anonymous.
func (e *ElemExpr) IsAnonymous() bool {
return len(e.Name) == 0
}
func (e *ElemExpr) HasHeadCommentGroup() bool {
if e.IsAnonymous() {
return e.DataType.HasHeadCommentGroup()
}
return e.Name[0].HasHeadCommentGroup()
}
func (e *ElemExpr) HasLeadingCommentGroup() bool {
if e.Tag != nil {
return e.Tag.HasLeadingCommentGroup()
}
return e.DataType.HasLeadingCommentGroup()
}
func (e *ElemExpr) CommentGroup() (head, leading CommentGroup) {
if e.Tag != nil {
leading = e.Tag.LeadingCommentGroup
} else {
_, leading = e.DataType.CommentGroup()
}
if e.IsAnonymous() {
head, _ := e.DataType.CommentGroup()
return head, leading
}
return e.Name[0].HeadCommentGroup, leading
}
func (e *ElemExpr) Format(prefix ...string) string {
w := NewBufferWriter()
var nameNodeList []*TokenNode
for idx, n := range e.Name {
if idx == 0 {
nameNodeList = append(nameNodeList,
transferTokenNode(n, ignoreLeadingComment()))
} else if idx < len(e.Name)-1 {
nameNodeList = append(nameNodeList,
transferTokenNode(n, ignoreLeadingComment(), ignoreHeadComment()))
} else {
nameNodeList = append(nameNodeList, transferTokenNode(n, ignoreHeadComment()))
}
}
var dataTypeOption []tokenNodeOption
if e.DataType.ContainsStruct() {
dataTypeOption = append(dataTypeOption, withTokenNodePrefix(peekOne(prefix)+Indent))
} else {
dataTypeOption = append(dataTypeOption, withTokenNodePrefix(prefix...))
}
dataTypeNode := transfer2TokenNode(e.DataType, false, dataTypeOption...)
if len(nameNodeList) > 0 {
nameNode := transferNilInfixNode(nameNodeList,
withTokenNodePrefix(prefix...), withTokenNodeInfix(", "))
if e.Tag != nil {
w.Write(withNode(nameNode, dataTypeNode, e.Tag), expectIndentInfix(), expectSameLine())
} else {
w.Write(withNode(nameNode, dataTypeNode), expectIndentInfix(), expectSameLine())
}
} else {
if e.Tag != nil {
w.Write(withNode(dataTypeNode, e.Tag), expectIndentInfix(), expectSameLine())
} else {
w.Write(withNode(dataTypeNode), expectIndentInfix(), expectSameLine())
}
}
return w.String()
}
func (e *ElemExpr) End() token.Position {
if e.Tag != nil {
return e.Tag.End()
}
return e.DataType.End()
}
func (e *ElemExpr) Pos() token.Position {
if len(e.Name) > 0 {
return e.Name[0].Pos()
}
return token.IllegalPosition
}
func (e *ElemExpr) exprNode() {}
/*******************Elem End********************/
/*******************ElemExprList Begin********************/
// ElemExprList is the element expression list.
type ElemExprList []*ElemExpr
/*******************ElemExprList End********************/
/*******************DataType Begin********************/
// DataType represents the data type.
type DataType interface {
Expr
dataTypeNode()
// CanEqual returns true if the data type can be equal.
CanEqual() bool
// ContainsStruct returns true if the data type contains struct.
ContainsStruct() bool
// RawText returns the raw text of the data type.
RawText() string
}
// AnyDataType is the any data type.
type AnyDataType struct {
// Any is the any token node.
Any *TokenNode
isChild bool
}
func (t *AnyDataType) HasHeadCommentGroup() bool {
return t.Any.HasHeadCommentGroup()
}
func (t *AnyDataType) HasLeadingCommentGroup() bool {
return t.Any.HasLeadingCommentGroup()
}
func (t *AnyDataType) CommentGroup() (head, leading CommentGroup) {
return t.Any.HeadCommentGroup, t.Any.LeadingCommentGroup
}
func (t *AnyDataType) Format(prefix ...string) string {
return t.Any.Format(prefix...)
}
func (t *AnyDataType) End() token.Position {
return t.Any.End()
}
func (t *AnyDataType) RawText() string {
return t.Any.Token.Text
}
func (t *AnyDataType) ContainsStruct() bool {
return false
}
func (t *AnyDataType) Pos() token.Position {
return t.Any.Pos()
}
func (t *AnyDataType) exprNode() {}
func (t *AnyDataType) dataTypeNode() {}
func (t *AnyDataType) CanEqual() bool {
return true
}
// ArrayDataType is the array data type.
type ArrayDataType struct {
// LBrack is the left bracket token node.
LBrack *TokenNode
// Length is the array length.
Length *TokenNode
// RBrack is the right bracket token node.
RBrack *TokenNode
// DataType is the array data type.
DataType DataType
isChild bool
}
func (t *ArrayDataType) HasHeadCommentGroup() bool {
return t.LBrack.HasHeadCommentGroup()
}
func (t *ArrayDataType) HasLeadingCommentGroup() bool {
return t.DataType.HasLeadingCommentGroup()
}
func (t *ArrayDataType) CommentGroup() (head, leading CommentGroup) {
_, leading = t.DataType.CommentGroup()
return t.LBrack.HeadCommentGroup, leading
}
func (t *ArrayDataType) Format(prefix ...string) string {
w := NewBufferWriter()
lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
lengthNode := transferTokenNode(t.Length, ignoreLeadingComment())
rbrack := transferTokenNode(t.RBrack, ignoreHeadComment())
var dataType *TokenNode
var options []tokenNodeOption
options = append(options, withTokenNodePrefix(prefix...))
if t.isChild {
options = append(options, ignoreComment())
} else {
options = append(options, ignoreHeadComment())
}
dataType = transfer2TokenNode(t.DataType, false, options...)
node := transferNilInfixNode([]*TokenNode{lbrack, lengthNode, rbrack, dataType})
w.Write(withNode(node))
return w.String()
}
func (t *ArrayDataType) End() token.Position {
return t.DataType.End()
}
func (t *ArrayDataType) RawText() string {
return t.Format("")
}
func (t *ArrayDataType) ContainsStruct() bool {
return t.DataType.ContainsStruct()
}
func (t *ArrayDataType) CanEqual() bool {
return t.DataType.CanEqual()
}
func (t *ArrayDataType) Pos() token.Position {
return t.LBrack.Pos()
}
func (t *ArrayDataType) exprNode() {}
func (t *ArrayDataType) dataTypeNode() {}
// BaseDataType is a basic identifier type, one of bool, uint8, uint16, uint32,
// uint64, int8, int16, int32, int64, float32, float64, complex64, complex128,
// string, int, uint, uintptr, byte, rune, or any.
type BaseDataType struct {
// Base is the base token node.
Base *TokenNode
isChild bool
}
func (t *BaseDataType) HasHeadCommentGroup() bool {
return t.Base.HasHeadCommentGroup()
}
func (t *BaseDataType) HasLeadingCommentGroup() bool {
return t.Base.HasLeadingCommentGroup()
}
func (t *BaseDataType) CommentGroup() (head, leading CommentGroup) {
return t.Base.HeadCommentGroup, t.Base.LeadingCommentGroup
}
func (t *BaseDataType) Format(prefix ...string) string {
return t.Base.Format(prefix...)
}
func (t *BaseDataType) End() token.Position {
return t.Base.End()
}
func (t *BaseDataType) RawText() string {
return t.Base.Token.Text
}
func (t *BaseDataType) ContainsStruct() bool {
return false
}
func (t *BaseDataType) CanEqual() bool {
return true
}
func (t *BaseDataType) Pos() token.Position {
return t.Base.Pos()
}
func (t *BaseDataType) exprNode() {}
func (t *BaseDataType) dataTypeNode() {}
// InterfaceDataType is the interface data type.
type InterfaceDataType struct {
// Interface is the interface token node.
Interface *TokenNode
isChild bool
}
func (t *InterfaceDataType) HasHeadCommentGroup() bool {
return t.Interface.HasHeadCommentGroup()
}
func (t *InterfaceDataType) HasLeadingCommentGroup() bool {
return t.Interface.HasLeadingCommentGroup()
}
func (t *InterfaceDataType) CommentGroup() (head, leading CommentGroup) {
return t.Interface.HeadCommentGroup, t.Interface.LeadingCommentGroup
}
func (t *InterfaceDataType) Format(prefix ...string) string {
return t.Interface.Format(prefix...)
}
func (t *InterfaceDataType) End() token.Position {
return t.Interface.End()
}
func (t *InterfaceDataType) RawText() string {
return t.Interface.Token.Text
}
func (t *InterfaceDataType) ContainsStruct() bool {
return false
}
func (t *InterfaceDataType) CanEqual() bool {
return true
}
func (t *InterfaceDataType) Pos() token.Position {
return t.Interface.Pos()
}
func (t *InterfaceDataType) exprNode() {}
func (t *InterfaceDataType) dataTypeNode() {}
// MapDataType is the map data type.
type MapDataType struct {
// Map is the map token node.
Map *TokenNode
// Lbrack is the left bracket token node.
LBrack *TokenNode
// Key is the map key data type.
Key DataType
// Rbrack is the right bracket token node.
RBrack *TokenNode
// Value is the map value data type.
Value DataType
isChild bool
}
func (t *MapDataType) HasHeadCommentGroup() bool {
return t.Map.HasHeadCommentGroup()
}
func (t *MapDataType) HasLeadingCommentGroup() bool {
return t.Value.HasLeadingCommentGroup()
}
func (t *MapDataType) CommentGroup() (head, leading CommentGroup) {
_, leading = t.Value.CommentGroup()
return t.Map.HeadCommentGroup, leading
}
func (t *MapDataType) Format(prefix ...string) string {
w := NewBufferWriter()
mapNode := transferTokenNode(t.Map, ignoreLeadingComment())
lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
rbrack := transferTokenNode(t.RBrack, ignoreComment())
var keyOption, valueOption []tokenNodeOption
keyOption = append(keyOption, ignoreComment())
valueOption = append(valueOption, withTokenNodePrefix(prefix...))
if t.isChild {
valueOption = append(valueOption, ignoreComment())
} else {
valueOption = append(valueOption, ignoreHeadComment())
}
keyDataType := transfer2TokenNode(t.Key, true, keyOption...)
valueDataType := transfer2TokenNode(t.Value, false, valueOption...)
node := transferNilInfixNode([]*TokenNode{mapNode, lbrack, keyDataType, rbrack, valueDataType})
w.Write(withNode(node))
return w.String()
}
func (t *MapDataType) End() token.Position {
return t.Value.End()
}
func (t *MapDataType) RawText() string {
return t.Format("")
}
func (t *MapDataType) ContainsStruct() bool {
return t.Key.ContainsStruct() || t.Value.ContainsStruct()
}
func (t *MapDataType) CanEqual() bool {
return false
}
func (t *MapDataType) Pos() token.Position {
return t.Map.Pos()
}
func (t *MapDataType) exprNode() {}
func (t *MapDataType) dataTypeNode() {}
// PointerDataType is the pointer data type.
type PointerDataType struct {
// Star is the star token node.
Star *TokenNode
// DataType is the pointer data type.
DataType DataType
isChild bool
}
func (t *PointerDataType) HasHeadCommentGroup() bool {
return t.Star.HasHeadCommentGroup()
}
func (t *PointerDataType) HasLeadingCommentGroup() bool {
return t.DataType.HasLeadingCommentGroup()
}
func (t *PointerDataType) CommentGroup() (head, leading CommentGroup) {
_, leading = t.DataType.CommentGroup()
return t.Star.HeadCommentGroup, leading
}
func (t *PointerDataType) Format(prefix ...string) string {
w := NewBufferWriter()
star := transferTokenNode(t.Star, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
var dataTypeOption []tokenNodeOption
dataTypeOption = append(dataTypeOption, ignoreHeadComment())
dataType := transfer2TokenNode(t.DataType, false, dataTypeOption...)
node := transferNilInfixNode([]*TokenNode{star, dataType})
w.Write(withNode(node))
return w.String()
}
func (t *PointerDataType) End() token.Position {
return t.DataType.End()
}
func (t *PointerDataType) RawText() string {
return t.Format("")
}
func (t *PointerDataType) ContainsStruct() bool {
return t.DataType.ContainsStruct()
}
func (t *PointerDataType) CanEqual() bool {
return t.DataType.CanEqual()
}
func (t *PointerDataType) Pos() token.Position {
return t.Star.Pos()
}
func (t *PointerDataType) exprNode() {}
func (t *PointerDataType) dataTypeNode() {}
// SliceDataType is the slice data type.
type SliceDataType struct {
// Lbrack is the left bracket token node.
LBrack *TokenNode
// Rbrack is the right bracket token node.
RBrack *TokenNode
// DataType is the slice data type.
DataType DataType
isChild bool
}
func (t *SliceDataType) HasHeadCommentGroup() bool {
return t.LBrack.HasHeadCommentGroup()
}
func (t *SliceDataType) HasLeadingCommentGroup() bool {
return t.DataType.HasLeadingCommentGroup()
}
func (t *SliceDataType) CommentGroup() (head, leading CommentGroup) {
_, leading = t.DataType.CommentGroup()
return t.LBrack.HeadCommentGroup, leading
}
func (t *SliceDataType) Format(prefix ...string) string {
w := NewBufferWriter()
lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
rbrack := transferTokenNode(t.RBrack, ignoreHeadComment())
dataType := transfer2TokenNode(t.DataType, false, withTokenNodePrefix(prefix...), ignoreHeadComment())
node := transferNilInfixNode([]*TokenNode{lbrack, rbrack, dataType})
w.Write(withNode(node))
return w.String()
}
func (t *SliceDataType) End() token.Position {
return t.DataType.End()
}
func (t *SliceDataType) RawText() string {
return t.Format("")
}
func (t *SliceDataType) ContainsStruct() bool {
return t.DataType.ContainsStruct()
}
func (t *SliceDataType) CanEqual() bool {
return false
}
func (t *SliceDataType) Pos() token.Position {
return t.LBrack.Pos()
}
func (t *SliceDataType) exprNode() {}
func (t *SliceDataType) dataTypeNode() {}
// StructDataType is the structure data type.
type StructDataType struct {
// Lbrace is the left brace token node.
LBrace *TokenNode
// Elements is the structure elements.
Elements ElemExprList
// Rbrace is the right brace token node.
RBrace *TokenNode
isChild bool
}
func (t *StructDataType) HasHeadCommentGroup() bool {
return t.LBrace.HasHeadCommentGroup()
}
func (t *StructDataType) HasLeadingCommentGroup() bool {
return t.RBrace.HasLeadingCommentGroup()
}
func (t *StructDataType) CommentGroup() (head, leading CommentGroup) {
return t.LBrace.HeadCommentGroup, t.RBrace.LeadingCommentGroup
}
func (t *StructDataType) Format(prefix ...string) string {
w := NewBufferWriter()
if len(t.Elements) == 0 {
lbrace := transferTokenNode(t.LBrace, withTokenNodePrefix(prefix...), ignoreLeadingComment())
rbrace := transferTokenNode(t.RBrace, ignoreHeadComment())
brace := transferNilInfixNode([]*TokenNode{lbrace, rbrace})
w.Write(withNode(brace), expectSameLine())
return w.String()
}
w.WriteText(t.LBrace.Format(NilIndent))
w.NewLine()
for _, e := range t.Elements {
var nameNodeList []*TokenNode
if len(e.Name) > 0 {
for idx, n := range e.Name {
if idx == 0 {
nameNodeList = append(nameNodeList,
transferTokenNode(n, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment()))
} else if idx < len(e.Name)-1 {
nameNodeList = append(nameNodeList,
transferTokenNode(n, ignoreLeadingComment(), ignoreHeadComment()))
} else {
nameNodeList = append(nameNodeList, transferTokenNode(n, ignoreHeadComment()))
}
}
}
var dataTypeOption []tokenNodeOption
if e.DataType.ContainsStruct() || e.IsAnonymous() {
dataTypeOption = append(dataTypeOption, withTokenNodePrefix(peekOne(prefix)+Indent))
} else {
dataTypeOption = append(dataTypeOption, withTokenNodePrefix(prefix...))
}
dataTypeNode := transfer2TokenNode(e.DataType, false, dataTypeOption...)
if len(nameNodeList) > 0 {
nameNode := transferNilInfixNode(nameNodeList, withTokenNodeInfix(", "))
if e.Tag != nil {
if e.DataType.ContainsStruct() {
w.Write(withNode(nameNode, dataTypeNode, e.Tag), expectSameLine())
} else {
w.Write(withNode(nameNode, e.DataType, e.Tag), expectIndentInfix(), expectSameLine())
}
} else {
if e.DataType.ContainsStruct() {
w.Write(withNode(nameNode, dataTypeNode), expectSameLine())
} else {
w.Write(withNode(nameNode, e.DataType), expectIndentInfix(), expectSameLine())
}
}
} else {
if e.Tag != nil {
if e.DataType.ContainsStruct() {
w.Write(withNode(dataTypeNode, e.Tag), expectSameLine())
} else {
w.Write(withNode(e.DataType, e.Tag), expectIndentInfix(), expectSameLine())
}
} else {
if e.DataType.ContainsStruct() {
w.Write(withNode(dataTypeNode), expectSameLine())
} else {
w.Write(withNode(dataTypeNode), expectIndentInfix(), expectSameLine())
}
}
}
w.NewLine()
}
w.WriteText(t.RBrace.Format(prefix...))
return w.String()
}
func (t *StructDataType) End() token.Position {
return t.RBrace.End()
}
func (t *StructDataType) RawText() string {
return t.Format("")
}
func (t *StructDataType) ContainsStruct() bool {
return true
}
func (t *StructDataType) CanEqual() bool {
for _, v := range t.Elements {
if !v.DataType.CanEqual() {
return false
}
}
return true
}
func (t *StructDataType) Pos() token.Position {
return t.LBrace.Pos()
}
func (t *StructDataType) exprNode() {}
func (t *StructDataType) dataTypeNode() {}
/*******************DataType End********************/
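The ContainsStruct/CanEqual hooks implemented above are what later passes key off, for example when deciding whether a data type may be used as a map key. A minimal sketch of that kind of check, assuming only the ast package from this change (the package name and the validateMapKey helper are illustrative, not part of the commit):

package apicheck

import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"

// validateMapKey rejects data types that report themselves as
// non-comparable, mirroring how CanEqual is consumed downstream.
func validateMapKey(dt ast.DataType) error {
	if !dt.CanEqual() {
		return ast.SyntaxError(dt.Pos(), "map key <%T> must be a comparable type", dt)
	}
	return nil
}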

View File

@ -0,0 +1,403 @@
package ast
import (
"bytes"
"fmt"
"io"
"strings"
"text/tabwriter"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
"github.com/zeromicro/go-zero/tools/goctl/util"
)
const (
NilIndent = ""
WhiteSpace = " "
Indent = "\t"
NewLine = "\n"
)
const (
_ WriteMode = 1 << iota
// ModeAuto is the default mode, which will automatically
// determine whether to write a newline.
ModeAuto
// ModeExpectInSameLine will write in the same line.
ModeExpectInSameLine
)
type Option func(o *option)
type option struct {
prefix string
infix string
mode WriteMode
nodes []Node
rawText bool
}
type tokenNodeOption func(o *tokenNodeOpt)
type tokenNodeOpt struct {
prefix string
infix string
ignoreHeadComment bool
ignoreLeadingComment bool
}
// WriteMode is the mode of writing.
type WriteMode int
// Writer is the writer of ast.
type Writer struct {
tw *tabwriter.Writer
writer io.Writer
}
func transfer2TokenNode(node Node, isChild bool, opt ...tokenNodeOption) *TokenNode {
option := new(tokenNodeOpt)
for _, o := range opt {
o(option)
}
var copyOpt = append([]tokenNodeOption(nil), opt...)
var tn *TokenNode
switch val := node.(type) {
case *AnyDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.Any, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.Any = tn
case *ArrayDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.LBrack, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.LBrack = tn
case *BaseDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.Base, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.Base = tn
case *InterfaceDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.Interface, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.Interface = tn
case *MapDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.Map, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.Map = tn
case *PointerDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.Star, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.Star = tn
case *SliceDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.LBrack, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.LBrack = tn
case *StructDataType:
copyOpt = append(copyOpt, withTokenNodePrefix(NilIndent))
tn = transferTokenNode(val.LBrace, copyOpt...)
if option.ignoreHeadComment {
tn.HeadCommentGroup = nil
}
if option.ignoreLeadingComment {
tn.LeadingCommentGroup = nil
}
val.isChild = isChild
val.LBrace = tn
default:
}
return &TokenNode{
headFlag: node.HasHeadCommentGroup(),
leadingFlag: node.HasLeadingCommentGroup(),
Token: token.Token{
Text: node.Format(option.prefix),
Position: node.Pos(),
},
LeadingCommentGroup: CommentGroup{
{
token.Token{Position: node.End()},
},
},
}
}
func transferNilInfixNode(nodes []*TokenNode, opt ...tokenNodeOption) *TokenNode {
result := &TokenNode{}
var option = new(tokenNodeOpt)
for _, o := range opt {
o(option)
}
var list []string
for _, n := range nodes {
list = append(list, n.Token.Text)
}
result.Token = token.Token{
Text: option.prefix + strings.Join(list, option.infix),
Position: nodes[0].Pos(),
}
if !option.ignoreHeadComment {
result.HeadCommentGroup = nodes[0].HeadCommentGroup
}
if !option.ignoreLeadingComment {
result.LeadingCommentGroup = nodes[len(nodes)-1].LeadingCommentGroup
}
return result
}
func transferTokenNode(node *TokenNode, opt ...tokenNodeOption) *TokenNode {
result := &TokenNode{}
var option = new(tokenNodeOpt)
for _, o := range opt {
o(option)
}
result.Token = token.Token{
Type: node.Token.Type,
Text: option.prefix + node.Token.Text,
Position: node.Token.Position,
}
if !option.ignoreHeadComment {
for _, v := range node.HeadCommentGroup {
result.HeadCommentGroup = append(result.HeadCommentGroup,
&CommentStmt{Comment: token.Token{
Type: v.Comment.Type,
Text: option.prefix + v.Comment.Text,
Position: v.Comment.Position,
}})
}
}
if !option.ignoreLeadingComment {
for _, v := range node.LeadingCommentGroup {
result.LeadingCommentGroup = append(result.LeadingCommentGroup, v)
}
}
return result
}
func ignoreHeadComment() tokenNodeOption {
return func(o *tokenNodeOpt) {
o.ignoreHeadComment = true
}
}
func ignoreLeadingComment() tokenNodeOption {
return func(o *tokenNodeOpt) {
o.ignoreLeadingComment = true
}
}
func ignoreComment() tokenNodeOption {
return func(o *tokenNodeOpt) {
o.ignoreHeadComment = true
o.ignoreLeadingComment = true
}
}
func withTokenNodePrefix(prefix ...string) tokenNodeOption {
return func(o *tokenNodeOpt) {
for _, p := range prefix {
o.prefix = p
}
}
}
func withTokenNodeInfix(infix string) tokenNodeOption {
return func(o *tokenNodeOpt) {
o.infix = infix
}
}
func expectSameLine() Option {
return func(o *option) {
o.mode = ModeExpectInSameLine
}
}
func expectIndentInfix() Option {
return func(o *option) {
o.infix = Indent
}
}
func withNode(nodes ...Node) Option {
return func(o *option) {
o.nodes = nodes
}
}
func withMode(mode WriteMode) Option {
return func(o *option) {
o.mode = mode
}
}
func withPrefix(prefix ...string) Option {
return func(o *option) {
for _, p := range prefix {
o.prefix = p
}
}
}
func withInfix(infix string) Option {
return func(o *option) {
o.infix = infix
}
}
func withRawText() Option {
return func(o *option) {
o.rawText = true
}
}
// NewWriter returns a new Writer.
func NewWriter(writer io.Writer) *Writer {
return &Writer{
tw: tabwriter.NewWriter(writer, 1, 8, 1, ' ', tabwriter.TabIndent),
writer: writer,
}
}
// NewBufferWriter returns a new buffer Writer.
func NewBufferWriter() *Writer {
writer := bytes.NewBuffer(nil)
return &Writer{
tw: tabwriter.NewWriter(writer, 1, 8, 1, ' ', tabwriter.TabIndent),
writer: writer,
}
}
// String returns the string of the buffer.
func (w *Writer) String() string {
buffer, ok := w.writer.(*bytes.Buffer)
if !ok {
return ""
}
w.Flush()
return buffer.String()
}
// Flush flushes the buffer.
func (w *Writer) Flush() {
_ = w.tw.Flush()
}
// NewLine writes a new line.
func (w *Writer) NewLine() {
_, _ = fmt.Fprint(w.tw, NewLine)
}
// Write writes the node.
func (w *Writer) Write(opts ...Option) {
if len(opts) == 0 {
return
}
var opt = new(option)
opt.mode = ModeAuto
opt.prefix = NilIndent
opt.infix = WhiteSpace
for _, v := range opts {
v(opt)
}
w.write(opt)
}
// WriteText writes the text.
func (w *Writer) WriteText(text string) {
_, _ = fmt.Fprint(w.tw, text)
}
func (w *Writer) write(opt *option) {
if len(opt.nodes) == 0 {
return
}
var textList []string
line := opt.nodes[0].End().Line
for idx, node := range opt.nodes {
mode := opt.mode
preIdx := idx - 1
var preNodeHasLeading bool
if preIdx > -1 && preIdx < len(opt.nodes) {
preNode := opt.nodes[preIdx]
preNodeHasLeading = preNode.HasLeadingCommentGroup()
}
if node.HasHeadCommentGroup() || preNodeHasLeading {
mode = ModeAuto
}
if mode == ModeAuto && node.Pos().Line > line {
textList = append(textList, NewLine)
}
line = node.End().Line
if util.TrimWhiteSpace(node.Format()) == "" {
continue
}
textList = append(textList, node.Format(opt.prefix))
}
text := strings.Join(textList, opt.infix)
text = strings.ReplaceAll(text, " \n", "\n")
text = strings.ReplaceAll(text, "\n ", "\n")
if opt.rawText {
_, _ = fmt.Fprint(w.writer, text)
return
}
_, _ = fmt.Fprint(w.tw, text)
}
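For orientation, a rough sketch of how this buffer-backed Writer is intended to be driven; the literal text written below is illustrative only:

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
)

func main() {
	w := ast.NewBufferWriter()
	// Cells separated by tabs are aligned by the underlying tabwriter.
	w.WriteText("Name\tstring\t`json:\"name\"`")
	w.NewLine()
	w.WriteText("Age\tint\t`json:\"age\"`")
	w.NewLine()
	// String flushes the tabwriter and returns the aligned output.
	fmt.Print(w.String())
}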

View File

@ -0,0 +1,45 @@
package format
import (
"bytes"
"io"
"io/ioutil"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
)
// File formats the api file.
func File(filename string) error {
data, err := ioutil.ReadFile(filename)
if err != nil {
return err
}
buffer := bytes.NewBuffer(nil)
if err := Source(data, buffer); err != nil {
return err
}
return ioutil.WriteFile(filename, buffer.Bytes(), 0666)
}
// Source formats the api source.
func Source(source []byte, w io.Writer) error {
p := parser.New("", source)
result := p.Parse()
if err := p.CheckErrors(); err != nil {
return err
}
result.Format(w)
return nil
}
func formatForUnitTest(source []byte, w io.Writer) error {
p := parser.New("", source)
result := p.Parse()
if err := p.CheckErrors(); err != nil {
return err
}
result.FormatForUnitTest(w)
return nil
}
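A minimal sketch of calling Source from user code; the inline api snippet is illustrative:

package main

import (
	"bytes"
	"fmt"
	"log"

	apiformat "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/format"
)

func main() {
	src := []byte("syntax = \"v1\"\ntype Foo {\nName string `json:\"name\"`\n}")
	var buf bytes.Buffer
	if err := apiformat.Source(src, &buf); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // prints the re-formatted api content
}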

File diff suppressed because it is too large

View File

@ -0,0 +1,36 @@
// server foo
@server ( // server
// key-value form
key1: value1
key2: value2
jwt: Auth // enable jwt
prefix: /v1 // the route prefix
)
// service foo
service foo { // foo
// example1
@doc "example1"
@handler example1 // handler declare
get /path/example1 // no body
@doc ( // doc group
key1: "value1"
key11: "value11"
key111: "value111"
)
@handler example2 // handler example2
get /path/example2/:id // path arg
@doc "example3"
@handler example3
get /path/example3/:id (Foo) // no response
@doc "example4"
@handler example4
post /path/example4/a-b returns (Foo) // no request
@doc "example5"
@handler example5
// example5
post /path/example5/a-b (Foo) returns (Bar) // do not comment between path and body
}

View File

@ -0,0 +1,37 @@
/*aa*/
type (
/*bb*/
T /*cc*/ { // T.bg
// Name head1
/*Name head2*/
Name string `json:"name"` // name
Age int `json:"age"` // age
Extra
Address string
Hobby []{
Name string // hobby.name
Rate string
} `json:"hobby"` // hobby
Child { // child.bg
Name string `json:"name"` // child.name
Gender int `json:"gender"` // child.gender
Birthday string `json:"birthday"` // child.birthday
Desc string // son.desc
Son { // son.bg
Name string `json:"name"` // son.name
Gender int `json:"gender"` // son.gender
Birthday string `json:"birthday"` // son.birthday
Desc string // son.desc
Hobby []{
Name string // hobby.name
Description string
// Map
Map map[string]{
Name string `json:"name"`
Age string `json:"age"`
} `json:"map"`
} `json:"hobby"` // hobby
} // son.end
} // child.end
} // T.end
)

View File

@ -0,0 +1,34 @@
/*aa*/
type /*bb*/ T /*cc*/ { // T.bg
// Name head1
/*Name head2*/
Name string `json:"name"` // name
Age int `json:"age"` // age
Extra
Address string
Hobby []{
Name string // hobby.name
Rate string
} `json:"hobby"` // hobby
Child { // child.bg
Name string `json:"name"` // child.name
Gender int `json:"gender"` // child.gender
Birthday string `json:"birthday"` // child.birthday
Desc string // son.desc
Son { // son.bg
Name string `json:"name"` // son.name
Gender int `json:"gender"` // son.gender
Birthday string `json:"birthday"` // son.birthday
Desc string // son.desc
Hobby []{
Name string // hobby.name
Description string
// Map
Map map[string]{
Name string `json:"name"`
Age string `json:"age"`
} `json:"map"`
} `json:"hobby"` // hobby
} // son.end
} // child.end
} // T.end

View File

@ -0,0 +1,154 @@
syntax = "v1"
import "example_base1.api"
import (
"example_base2.api"
)
info (
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type GetFormReq {
Name string `form:"name"`
Age int `form:"age"`
Hobbits []string `form:"hobbits"`
Any any `form:"any"`
}
type GetFormREsp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
type (
PostFormReq {
Name string `form:"name"`
Age int `form:"age"`
Hobbits []string `form:"hobbits"`
}
PostFormResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
)
type (
PostJsonReq {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
PostJsonResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
Extra map[string]string `json:"extra"`
Data interface{} `json:"data"`
}
)
type (
PostPathReq {
Id string `path:"id"`
}
PostPathResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
Extra map[string]string `json:"extra"`
Data interface{} `json:"data"`
}
)
type (
DemoOfArrayReq {
In string `json:"in"`
}
DemoOfArrayResp {
Out string `json:"out"`
}
)
type (
Nest {
Name string `json:"name"`
}
NestDemoReq {
Nest *Nest `json:"nest"`
}
NestDemoResp {
Nest []*Nest `json:"nest"`
}
)
@server (
group: form
)
service example {
@handler getForm
get /example/form (GetFormReq) returns (GetFormREsp)
@handler postForm
post /example/form (PostFormReq) returns (PostFormResp)
}
@server (
group: json
jwt: Auth
)
service example {
@doc "json demo"
@handler postJson
post /example/json (PostJsonReq) returns (PostJsonResp)
}
@server (
group: path
middleware: Path
prefix: /v1/v2
)
service example {
@doc (
desc: "path demo"
)
@handler postPath
post /example/path (PostPathReq) returns (PostPathResp)
}
@server (
group: array
prefix: /array
)
service example {
@doc (
desc: "array response demo"
)
@handler getArray
post /example/array (DemoOfArrayReq) returns ([]DemoOfArrayResp)
@doc (
desc: "array pointer response demo"
)
@handler getArrayPointer
post /example/array/pointer (DemoOfArrayReq) returns ([]*DemoOfArrayResp)
@doc (
desc: "array base response demo"
)
@handler getArrayBase
post /example/array/pointer (DemoOfArrayReq) returns ([]string)
}
service example {
@handler nestDemo
post /example/nest (NestDemoReq) returns (NestDemoResp)
}

View File

@ -0,0 +1,35 @@
// server foo
@server(// server
// key-value form
key1:value1
key2:value2
jwt:Auth // enable jwt
prefix: /v1 // the route prefix
)
// service foo
service foo{// foo
// example1
@doc "example1"
@handler example1 // handler declare
get /path/example1// no body
@doc( // doc group
key1:"value1"
key11:"value11"
key111:"value111"
)
@handler example2 // handler example2
get /path/example2/:id// path arg
@doc
"example3"
@handler
example3
get /path/example3/:id
( Foo )// no response
@doc "example4"
@handler example4
post /path/example4/a-b returns ( Foo )// no request
@doc "example5"
@handler example5
// example5
post /path/example5/a-b ( Foo ) returns ( Bar ) // do not comment between path and body
}

View File

@ -0,0 +1,34 @@
/*aa*/type (
/*bb*/T /*cc*/{// T.bg
// Name head1
/*Name head2*/Name string `json:"name"`// name
Age int `json:"age"` // age
Extra
Address string
Hobby []{
Name string // hobby.name
Rate string
} `json:"hobby"` // hobby
Child {// child.bg
Name string `json:"name"`// child.name
Gender int `json:"gender"`// child.gender
Birthday string `json:"birthday"`// child.birthday
Desc string // son.desc
Son {// son.bg
Name string `json:"name"`// son.name
Gender int `json:"gender"`// son.gender
Birthday string `json:"birthday"`// son.birthday
Desc string // son.desc
Hobby []{
Name string // hobby.name
Description string
// Map
Map map[string]{
Name string `json:"name"`
Age string `json:"age"`
}`json:"map"`
} `json:"hobby"` // hobby
}// son.end
}// child.end
}// T.end
)

View File

@ -0,0 +1,32 @@
/*aa*/type /*bb*/T /*cc*/{// T.bg
// Name head1
/*Name head2*/Name string `json:"name"`// name
Age int `json:"age"` // age
Extra
Address string
Hobby []{
Name string // hobby.name
Rate string
} `json:"hobby"` // hobby
Child {// child.bg
Name string `json:"name"`// child.name
Gender int `json:"gender"`// child.gender
Birthday string `json:"birthday"`// child.birthday
Desc string // son.desc
Son {// son.bg
Name string `json:"name"`// son.name
Gender int `json:"gender"`// son.gender
Birthday string `json:"birthday"`// son.birthday
Desc string // son.desc
Hobby []{
Name string // hobby.name
Description string
// Map
Map map[string]{
Name string `json:"name"`
Age string `json:"age"`
}`json:"map"`
} `json:"hobby"` // hobby
}// son.end
}// child.end
}// T.end

View File

@ -0,0 +1,412 @@
package parser
import (
"fmt"
"strings"
"github.com/zeromicro/go-zero/tools/goctl/api/spec"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)
// Analyzer analyzes the ast and converts it to spec.
type Analyzer struct {
api *API
spec *spec.ApiSpec
}
func (a *Analyzer) astTypeToSpec(in ast.DataType) (spec.Type, error) {
isLiteralType := func(dt ast.DataType) bool {
_, ok := dt.(*ast.BaseDataType)
if ok {
return true
}
_, ok = dt.(*ast.AnyDataType)
return ok
}
switch v := (in).(type) {
case *ast.BaseDataType:
raw := v.RawText()
if IsBaseType(raw) {
return spec.PrimitiveType{
RawName: raw,
}, nil
}
return spec.DefineStruct{RawName: raw}, nil
case *ast.AnyDataType:
return nil, ast.SyntaxError(v.Pos(), "unsupported any type")
case *ast.StructDataType:
// TODO(keson) feature: can be extended
case *ast.InterfaceDataType:
return spec.InterfaceType{RawName: v.RawText()}, nil
case *ast.MapDataType:
if !isLiteralType(v.Key) {
return nil, ast.SyntaxError(v.Pos(), "expected literal type, got <%T>", v)
}
if !v.Key.CanEqual() {
return nil, ast.SyntaxError(v.Pos(), "map key <%T> must be equal data type", v)
}
value, err := a.astTypeToSpec(v.Value)
if err != nil {
return nil, err
}
return spec.MapType{
RawName: v.RawText(),
Key: v.RawText(),
Value: value,
}, nil
case *ast.PointerDataType:
raw := v.DataType.RawText()
if IsBaseType(raw) {
return spec.PointerType{RawName: v.RawText(), Type: spec.PrimitiveType{RawName: raw}}, nil
}
value, err := a.astTypeToSpec(v.DataType)
if err != nil {
return nil, err
}
return spec.PointerType{
RawName: v.RawText(),
Type: value,
}, nil
case *ast.ArrayDataType:
if v.Length.Token.Type == token.ELLIPSIS {
return nil, ast.SyntaxError(v.Pos(), "Array: unsupported dynamic length")
}
value, err := a.astTypeToSpec(v.DataType)
if err != nil {
return nil, err
}
return spec.ArrayType{
RawName: v.RawText(),
Value: value,
}, nil
case *ast.SliceDataType:
value, err := a.astTypeToSpec(v.DataType)
if err != nil {
return nil, err
}
return spec.ArrayType{
RawName: v.RawText(),
Value: value,
}, nil
}
return nil, ast.SyntaxError(in.Pos(), "unsupported type <%T>", in)
}
func (a *Analyzer) convert2Spec() error {
if err := a.fillTypes(); err != nil {
return err
}
return a.fillService()
}
func (a *Analyzer) convertAtDoc(atDoc ast.AtDocStmt) spec.AtDoc {
var ret spec.AtDoc
switch val := atDoc.(type) {
case *ast.AtDocLiteralStmt:
ret.Text = val.Value.Token.Text
case *ast.AtDocGroupStmt:
ret.Properties = a.convertKV(val.Values)
}
return ret
}
func (a *Analyzer) convertKV(kv []*ast.KVExpr) map[string]string {
var ret = map[string]string{}
for _, v := range kv {
key := strings.TrimSuffix(v.Key.Token.Text, ":")
ret[key] = v.Value.Token.Text
}
return ret
}
func (a *Analyzer) fieldToMember(field *ast.ElemExpr) (spec.Member, error) {
var name []string
for _, v := range field.Name {
name = append(name, v.Token.Text)
}
tp, err := a.astTypeToSpec(field.DataType)
if err != nil {
return spec.Member{}, err
}
head, leading := field.CommentGroup()
m := spec.Member{
Name: strings.Join(name, ", "),
Type: tp,
Docs: head.List(),
Comment: leading.String(),
IsInline: field.IsAnonymous(),
}
if field.Tag != nil {
m.Tag = field.Tag.Token.Text
}
return m, nil
}
func (a *Analyzer) fillRouteType(route *spec.Route) error {
if route.RequestType != nil {
switch route.RequestType.(type) {
case spec.DefineStruct:
tp, err := a.findDefinedType(route.RequestType.Name())
if err != nil {
return err
}
route.RequestType = tp
}
}
if route.ResponseType != nil {
switch route.ResponseType.(type) {
case spec.DefineStruct:
tp, err := a.findDefinedType(route.ResponseType.Name())
if err != nil {
return err
}
route.ResponseType = tp
}
}
return nil
}
func (a *Analyzer) fillService() error {
var groups []spec.Group
for _, item := range a.api.ServiceStmts {
var group spec.Group
if item.AtServerStmt != nil {
group.Annotation.Properties = a.convertKV(item.AtServerStmt.Values)
}
for _, astRoute := range item.Routes {
head, leading := astRoute.CommentGroup()
route := spec.Route{
Method: astRoute.Route.Method.Token.Text,
Path: astRoute.Route.Path.Format(""),
Doc: head.List(),
Comment: leading.List(),
}
if astRoute.AtDoc != nil {
route.AtDoc = a.convertAtDoc(astRoute.AtDoc)
}
if astRoute.AtHandler != nil {
route.Handler = astRoute.AtHandler.Name.Token.Text
head, leading := astRoute.AtHandler.CommentGroup()
route.HandlerDoc = head.List()
route.HandlerComment = leading.List()
}
if astRoute.Route.Request != nil && astRoute.Route.Request.Body != nil {
requestType, err := a.getType(astRoute.Route.Request)
if err != nil {
return err
}
route.RequestType = requestType
}
if astRoute.Route.Response != nil && astRoute.Route.Response.Body != nil {
responseType, err := a.getType(astRoute.Route.Response)
if err != nil {
return err
}
route.ResponseType = responseType
}
if err := a.fillRouteType(&route); err != nil {
return err
}
group.Routes = append(group.Routes, route)
name := item.Name.Format("")
if len(a.spec.Service.Name) > 0 && a.spec.Service.Name != name {
return ast.SyntaxError(item.Name.Pos(), "multiple service names defined <%s> and <%s>", name, a.spec.Service.Name)
}
a.spec.Service.Name = name
}
groups = append(groups, group)
}
a.spec.Service.Groups = groups
return nil
}
func (a *Analyzer) fillTypes() error {
for _, item := range a.api.TypeStmt {
switch v := (item).(type) {
case *ast.TypeLiteralStmt:
err := a.fillTypeExpr(v.Expr)
if err != nil {
return err
}
case *ast.TypeGroupStmt:
for _, expr := range v.ExprList {
err := a.fillTypeExpr(expr)
if err != nil {
return err
}
}
}
}
var types []spec.Type
for _, item := range a.spec.Types {
switch v := (item).(type) {
case spec.DefineStruct:
var members []spec.Member
for _, member := range v.Members {
switch v := member.Type.(type) {
case spec.DefineStruct:
tp, err := a.findDefinedType(v.RawName)
if err != nil {
return err
}
member.Type = tp
}
members = append(members, member)
}
v.Members = members
types = append(types, v)
default:
return fmt.Errorf("unknown type %+v", v)
}
}
a.spec.Types = types
return nil
}
func (a *Analyzer) fillTypeExpr(expr *ast.TypeExpr) error {
head, _ := expr.CommentGroup()
switch val := expr.DataType.(type) {
case *ast.StructDataType:
var members []spec.Member
for _, item := range val.Elements {
m, err := a.fieldToMember(item)
if err != nil {
return err
}
members = append(members, m)
}
a.spec.Types = append(a.spec.Types, spec.DefineStruct{
RawName: expr.Name.Token.Text,
Members: members,
Docs: head.List(),
})
return nil
default:
return ast.SyntaxError(expr.Pos(), "expected <struct> expr, got <%T>", expr.DataType)
}
}
func (a *Analyzer) findDefinedType(name string) (spec.Type, error) {
for _, item := range a.spec.Types {
if _, ok := item.(spec.DefineStruct); ok {
if item.Name() == name {
return item, nil
}
}
}
return nil, fmt.Errorf("type %s not defined", name)
}
func (a *Analyzer) getType(expr *ast.BodyStmt) (spec.Type, error) {
body := expr.Body
var tp spec.Type
var err error
var rawText = body.Format("")
if IsBaseType(body.Value.Token.Text) {
tp = spec.PrimitiveType{RawName: body.Value.Token.Text}
} else {
tp, err = a.findDefinedType(body.Value.Token.Text)
if err != nil {
return nil, err
}
}
if body.LBrack != nil {
if body.Star != nil {
return spec.PointerType{
RawName: rawText,
Type: tp,
}, nil
}
return spec.ArrayType{
RawName: rawText,
Value: tp,
}, nil
}
if body.Star != nil {
return spec.PointerType{
RawName: rawText,
Type: tp,
}, nil
}
return tp, nil
}
// Parse parses the given file and returns the parsed spec.
func Parse(filename string, src interface{}) (*spec.ApiSpec, error) {
p := New(filename, src)
ast := p.Parse()
if err := p.CheckErrors(); err != nil {
return nil, err
}
var importManager = make(map[string]placeholder.Type)
importManager[ast.Filename] = placeholder.PlaceHolder
api, err := convert2API(ast, importManager)
if err != nil {
return nil, err
}
var result = new(spec.ApiSpec)
analyzer := Analyzer{
api: api,
spec: result,
}
err = analyzer.convert2Spec()
if err != nil {
return nil, err
}
return result, nil
}
var kind = map[string]placeholder.Type{
"bool": placeholder.PlaceHolder,
"int": placeholder.PlaceHolder,
"int8": placeholder.PlaceHolder,
"int16": placeholder.PlaceHolder,
"int32": placeholder.PlaceHolder,
"int64": placeholder.PlaceHolder,
"uint": placeholder.PlaceHolder,
"uint8": placeholder.PlaceHolder,
"uint16": placeholder.PlaceHolder,
"uint32": placeholder.PlaceHolder,
"uint64": placeholder.PlaceHolder,
"uintptr": placeholder.PlaceHolder,
"float32": placeholder.PlaceHolder,
"float64": placeholder.PlaceHolder,
"complex64": placeholder.PlaceHolder,
"complex128": placeholder.PlaceHolder,
"string": placeholder.PlaceHolder,
"byte": placeholder.PlaceHolder,
"rune": placeholder.PlaceHolder,
"any": placeholder.PlaceHolder,
}
// IsBaseType returns true if the given type is a base type.
func IsBaseType(text string) bool {
_, ok := kind[text]
return ok
}
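As a usage sketch, the spec produced by Parse can be walked like any other *spec.ApiSpec; the greet.api file name below is illustrative and assumed to exist on disk:

package main

import (
	"fmt"
	"log"

	apiParser "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
)

func main() {
	spec, err := apiParser.Parse("greet.api", nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("service:", spec.Service.Name)
	for _, group := range spec.Service.Groups {
		for _, route := range group.Routes {
			// Method, Path and Handler are filled in by fillService above.
			fmt.Println(route.Method, route.Path, route.Handler)
		}
	}
}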

View File

@ -0,0 +1,45 @@
package parser
import (
"bytes"
"fmt"
"io/ioutil"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
)
func Test_Parse(t *testing.T) {
t.Run("valid", func(t *testing.T) {
_, err := Parse("./testdata/example.api", nil)
assert.Nil(t, err)
})
t.Run("invalid", func(t *testing.T) {
data, err := ioutil.ReadFile("./testdata/invalid.api")
assert.NoError(t, err)
splits := bytes.Split(data, []byte("-----"))
var testFile []string
for idx, split := range splits {
replacer := strings.NewReplacer(" ", "", "\t", "", "\n", "", "\r", "", "\f", "")
r := replacer.Replace(string(split))
if len(r) == 0 {
continue
}
filename := filepath.Join(t.TempDir(), fmt.Sprintf("invalid%d.api", idx))
err := ioutil.WriteFile(filename, split, 0666)
assert.NoError(t, err)
testFile = append(testFile, filename)
}
for _, v := range testFile {
_, err := Parse(v, nil)
assertx.Error(t, err)
}
})
t.Run("circleImport", func(t *testing.T) {
_, err := Parse("./testdata/base.api", nil)
assertx.Error(t, err)
})
}

View File

@ -0,0 +1,311 @@
package parser
import (
"fmt"
"path/filepath"
"strings"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)
// API is the parsed api file.
type API struct {
Filename string
Syntax *ast.SyntaxStmt
info *ast.InfoStmt // Info block does not participate in code generation.
importStmt []ast.ImportStmt // ImportStmt block does not participate in code generation.
TypeStmt []ast.TypeStmt
ServiceStmts []*ast.ServiceStmt
importManager map[string]placeholder.Type
}
func convert2API(a *ast.AST, importManager map[string]placeholder.Type) (*API, error) {
var api = new(API)
api.importManager = make(map[string]placeholder.Type)
api.Filename = a.Filename
for k, v := range importManager {
api.importManager[k] = v
}
one := a.Stmts[0]
syntax, ok := one.(*ast.SyntaxStmt)
if !ok {
return nil, ast.SyntaxError(one.Pos(), "expected syntax statement, got <%T>", one)
}
api.Syntax = syntax
for i := 1; i < len(a.Stmts); i++ {
one := a.Stmts[i]
switch val := one.(type) {
case *ast.SyntaxStmt:
return nil, ast.DuplicateStmtError(val.Pos(), "duplicate syntax statement")
case *ast.InfoStmt:
if api.info != nil {
return nil, ast.DuplicateStmtError(val.Pos(), "duplicate info statement")
}
api.info = val
case ast.ImportStmt:
api.importStmt = append(api.importStmt, val)
case ast.TypeStmt:
api.TypeStmt = append(api.TypeStmt, val)
case *ast.ServiceStmt:
api.ServiceStmts = append(api.ServiceStmts, val)
}
}
if err := api.SelfCheck(); err != nil {
return nil, err
}
return api, nil
}
func (api *API) checkImportStmt() error {
f := newFilter()
b := f.addCheckItem("import value expression")
for _, v := range api.importStmt {
switch val := v.(type) {
case *ast.ImportLiteralStmt:
b.check(val.Value)
case *ast.ImportGroupStmt:
b.check(val.Values...)
}
}
return f.error()
}
func (api *API) checkInfoStmt() error {
if api.info == nil {
return nil
}
f := newFilter()
b := f.addCheckItem("info key expression")
for _, v := range api.info.Values {
b.check(v.Key)
}
return f.error()
}
func (api *API) checkServiceStmt() error {
f := newFilter()
serviceNameChecker := f.addCheckItem("service name expression")
handlerChecker := f.addCheckItem("handler expression")
pathChecker := f.addCheckItem("path expression")
var serviceName string
for _, v := range api.ServiceStmts {
name := strings.TrimSuffix(v.Name.Format(""), "-api")
if len(serviceName) > 0 && serviceName != name {
serviceNameChecker.errorManager.add(ast.SyntaxError(v.Name.Pos(), "multiple service name"))
} else {
serviceName = name
}
var group = api.getAtServerValue(v.AtServerStmt, "prefix")
for _, item := range v.Routes {
handlerChecker.check(item.AtHandler.Name)
path := fmt.Sprintf("[%s]:%s", group, item.Route.Format(""))
pathChecker.check(ast.NewTokenNode(token.Token{
Text: path,
Position: item.Route.Pos(),
}))
}
}
return f.error()
}
func (api *API) checkTypeStmt() error {
f := newFilter()
b := f.addCheckItem("type expression")
for _, v := range api.TypeStmt {
switch val := v.(type) {
case *ast.TypeLiteralStmt:
b.check(val.Expr.Name)
case *ast.TypeGroupStmt:
for _, expr := range val.ExprList {
b.check(expr.Name)
}
}
}
return f.error()
}
func (api *API) checkTypeDeclareContext() error {
var typeMap = map[string]placeholder.Type{}
for _, v := range api.TypeStmt {
switch tp := v.(type) {
case *ast.TypeLiteralStmt:
typeMap[tp.Expr.Name.Token.Text] = placeholder.PlaceHolder
case *ast.TypeGroupStmt:
for _, v := range tp.ExprList {
typeMap[v.Name.Token.Text] = placeholder.PlaceHolder
}
}
}
return api.checkTypeContext(typeMap)
}
func (api *API) checkTypeContext(declareContext map[string]placeholder.Type) error {
var em = newErrorManager()
for _, v := range api.TypeStmt {
switch tp := v.(type) {
case *ast.TypeLiteralStmt:
em.add(api.checkTypeExprContext(declareContext, tp.Expr.DataType))
case *ast.TypeGroupStmt:
for _, v := range tp.ExprList {
em.add(api.checkTypeExprContext(declareContext, v.DataType))
}
}
}
return em.error()
}
func (api *API) checkTypeExprContext(declareContext map[string]placeholder.Type, tp ast.DataType) error {
switch val := tp.(type) {
case *ast.ArrayDataType:
return api.checkTypeExprContext(declareContext, val.DataType)
case *ast.BaseDataType:
if IsBaseType(val.Base.Token.Text) {
return nil
}
_, ok := declareContext[val.Base.Token.Text]
if !ok {
return ast.SyntaxError(val.Base.Pos(), "unresolved type <%s>", val.Base.Token.Text)
}
return nil
case *ast.MapDataType:
var manager = newErrorManager()
manager.add(api.checkTypeExprContext(declareContext, val.Key))
manager.add(api.checkTypeExprContext(declareContext, val.Value))
return manager.error()
case *ast.PointerDataType:
return api.checkTypeExprContext(declareContext, val.DataType)
case *ast.SliceDataType:
return api.checkTypeExprContext(declareContext, val.DataType)
case *ast.StructDataType:
var manager = newErrorManager()
for _, e := range val.Elements {
manager.add(api.checkTypeExprContext(declareContext, e.DataType))
}
return manager.error()
}
return nil
}
func (api *API) getAtServerValue(atServer *ast.AtServerStmt, key string) string {
if atServer == nil {
return ""
}
for _, val := range atServer.Values {
if val.Key.Token.Text == key {
return val.Value.Token.Text
}
}
return ""
}
func (api *API) mergeAPI(in *API) error {
for k, v := range in.importManager {
api.importManager[k] = v
}
if api.Syntax.Value.Format() != in.Syntax.Value.Format() {
return ast.SyntaxError(in.Syntax.Value.Pos(),
"multiple syntax value expression, expected <%s>, got <%s>",
api.Syntax.Value.Format(),
in.Syntax.Value.Format(),
)
}
api.TypeStmt = append(api.TypeStmt, in.TypeStmt...)
api.ServiceStmts = append(api.ServiceStmts, in.ServiceStmts...)
return nil
}
func (api *API) parseImportedAPI(imports []ast.ImportStmt) ([]*API, error) {
var list []*API
if len(imports) == 0 {
return list, nil
}
var importValueSet = map[string]token.Token{}
for _, imp := range imports {
switch val := imp.(type) {
case *ast.ImportLiteralStmt:
importValueSet[strings.ReplaceAll(val.Value.Token.Text, `"`, "")] = val.Value.Token
case *ast.ImportGroupStmt:
for _, v := range val.Values {
importValueSet[strings.ReplaceAll(v.Token.Text, `"`, "")] = v.Token
}
}
}
dir := filepath.Dir(api.Filename)
for impPath, tok := range importValueSet {
if !filepath.IsAbs(impPath) {
impPath = filepath.Join(dir, impPath)
}
// import cycle check
if _, ok := api.importManager[impPath]; ok {
return nil, ast.SyntaxError(tok.Position, "import circle not allowed")
} else {
api.importManager[impPath] = placeholder.PlaceHolder
}
p := New(impPath, "")
ast := p.Parse()
if err := p.CheckErrors(); err != nil {
return nil, err
}
nestedApi, err := convert2API(ast, api.importManager)
if err != nil {
return nil, err
}
if err = nestedApi.parseReverse(); err != nil {
return nil, err
}
list = append(list, nestedApi)
}
return list, nil
}
func (api *API) parseReverse() error {
list, err := api.parseImportedAPI(api.importStmt)
if err != nil {
return err
}
for _, e := range list {
if err = api.mergeAPI(e); err != nil {
return err
}
}
return nil
}
func (api *API) SelfCheck() error {
if err := api.parseReverse(); err != nil {
return err
}
if err := api.checkImportStmt(); err != nil {
return err
}
if err := api.checkInfoStmt(); err != nil {
return err
}
if err := api.checkTypeStmt(); err != nil {
return err
}
if err := api.checkServiceStmt(); err != nil {
return err
}
return api.checkTypeDeclareContext()
}
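A sketch of how these self-checks surface to callers, written as a test in the same parser package (the test name and inline source are illustrative):

package parser

import (
	"testing"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
)

func TestSelfCheck_duplicateType(t *testing.T) {
	// Two declarations of Foo should be rejected by checkTypeStmt.
	src := `
syntax = "v1"
type Foo {}
type Foo {}
`
	_, err := Parse("duplicate.api", src)
	assertx.Error(t, err)
}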

View File

@ -0,0 +1,28 @@
package parser
import (
"fmt"
"strings"
)
type errorManager struct {
errors []string
}
func newErrorManager() *errorManager {
return &errorManager{}
}
func (e *errorManager) add(err error) {
if err == nil {
return
}
e.errors = append(e.errors, err.Error())
}
func (e *errorManager) error() error {
if len(e.errors) == 0 {
return nil
}
return errors.New(strings.Join(e.errors, "\n"))
}
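errorManager simply accumulates non-nil errors and joins their messages; a tiny in-package sketch of the intended behaviour (illustrative only):

package parser

import (
	"errors"
	"fmt"
)

func exampleErrorManager() {
	em := newErrorManager()
	em.add(errors.New("first problem"))
	em.add(nil) // nil errors are ignored
	em.add(errors.New("second problem"))
	// Prints both messages separated by a newline.
	fmt.Println(em.error())
}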

View File

@ -0,0 +1,55 @@
package parser
import (
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
)
type filterBuilder struct {
m map[string]placeholder.Type
checkExprName string
errorManager *errorManager
}
func (b *filterBuilder) check(nodes ...*ast.TokenNode) {
for _, node := range nodes {
if _, ok := b.m[node.Token.Text]; ok {
b.errorManager.add(ast.DuplicateStmtError(node.Pos(), "duplicate "+b.checkExprName))
} else {
b.m[node.Token.Text] = placeholder.PlaceHolder
}
}
}
func (b *filterBuilder) error() error {
return b.errorManager.error()
}
type filter struct {
builders []*filterBuilder
}
func newFilter() *filter {
return &filter{}
}
func (f *filter) addCheckItem(checkExprName string) *filterBuilder {
b := &filterBuilder{
m: make(map[string]placeholder.Type),
checkExprName: checkExprName,
errorManager: newErrorManager(),
}
f.builders = append(f.builders, b)
return b
}
func (f *filter) error() error {
if len(f.builders) == 0 {
return nil
}
var errorManager = newErrorManager()
for _, b := range f.builders {
errorManager.add(b.error())
}
return errorManager.error()
}
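A matching sketch for filter: each addCheckItem tracks one class of expression and flags repeats (the token text below is illustrative):

package parser

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func exampleFilter() {
	f := newFilter()
	handlers := f.addCheckItem("handler expression")
	handlers.check(ast.NewTokenNode(token.Token{Text: "getUser"}))
	handlers.check(ast.NewTokenNode(token.Token{Text: "getUser"})) // duplicate, recorded as an error
	// Reports a "duplicate handler expression" error for the repeated name.
	fmt.Println(f.error())
}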

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,5 @@
@doc(
foo: "foo"
bar: "bar"
baz: ""
)

View File

@ -0,0 +1,3 @@
@doc ""
@doc "foo"
@doc "bar"

View File

@ -0,0 +1,3 @@
@handler foo
@handler foo1
@handler _bar

View File

@ -0,0 +1,16 @@
@server(
foo: bar
bar: baz
baz: foo
qux: /v1
quux: /v1/v2
middleware: M1,M2
timeout1: 1h
timeout2: 10m
timeout3: 10s
timeout4: 10ms
timeout5: 10µs
timeout6: 10ns
timeout7: 1h10m10s10ms10µs10ns
maxBytes: 1024
)

View File

@ -0,0 +1,11 @@
syntax = "v1"
import "base1.api"
info (
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)

View File

@ -0,0 +1,11 @@
syntax = "v1"
import "base2.api"
info (
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)

View File

@ -0,0 +1,11 @@
syntax = "v1"
import "base.api"
info (
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)

View File

@ -0,0 +1,4 @@
// foo
// bar
/*foo*/
/*bar*/ //baz

View File

@ -0,0 +1,167 @@
syntax = "v1"
import "example_base1.api"
import (
"example_base2.api"
)
info (
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type GetFormReq {
Name string `form:"name"`
Age int `form:"age"`
Hobbits []string `form:"hobbits"`
}
type GetFormREsp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
type (
PostFormReq {
Name string `form:"name"`
Age int `form:"age"`
Hobbits []string `form:"hobbits"`
}
PostFormResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
)
type (
PostJsonReq {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
}
PostJsonResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
Extra map[string]string `json:"extra"`
Data interface{} `json:"data"`
}
)
type (
PostPathReq {
Id string `path:"id"`
}
PostPathResp {
Name string `json:"name"`
Age int `json:"age"`
Hobbits []string `json:"hobbits"`
Hobbits2 [2]string `json:"hobbits2"`
Extra map[string]string `json:"extra"`
Data interface{} `json:"data"`
}
)
type (
DemoOfArrayReq {
In string `json:"in"`
}
DemoOfArrayResp {
Out string `json:"out"`
}
)
type (
Nest {
Name string `json:"name"`
}
NestDemoReq1 {
Nest *Nest `json:"nest"`
}
NestDemoResp1 {
Nest []*Nest `json:"nest"`
}
NestDemoReq2 {
*Nest
}
NestDemoResp2 {
*Nest `json:"nest"`
}
)
@server (
group: form
timeout: 3s
)
service example {
@handler getForm
get /example/form (GetFormReq) returns (GetFormREsp)
@handler postForm
post /example/form (PostFormReq) returns (PostFormResp)
}
@server (
group: json
jwt: Auth
timeout: 3m
)
service example {
@doc "json demo"
@handler postJson
post /example/json (PostJsonReq) returns (PostJsonResp)
}
@server (
group: path
middleware: Path
prefix: /v1/v2
timeout: 100ms
)
service example {
@doc (
desc: "path demo"
)
@handler postPath
post /example/path (PostPathReq) returns (PostPathResp)
}
@server (
group: array
prefix: /array
maxBytes: 1024
)
service example {
@doc (
desc: "array response demo"
)
@handler getArray
post /example/array (DemoOfArrayReq) returns ([]DemoOfArrayResp)
@doc (
desc: "array pointer response demo"
)
@handler getArrayPointer
post /example/array/pointer (DemoOfArrayReq) returns ([]*DemoOfArrayResp)
@doc (
desc: "array base response demo"
)
@handler getArrayBase
post /example/array/base (DemoOfArrayReq) returns ([]string)
}
service example {
@handler nestDemo1
post /example/nest (NestDemoReq1) returns (NestDemoResp1)
@handler nestDemo2
post /example/nest2 (NestDemoReq2) returns (NestDemoResp2)
}

View File

@ -0,0 +1,12 @@
syntax = "v1"
info(
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type BaseReq1{}
type BaseResp1{}

View File

@ -0,0 +1,12 @@
syntax = "v1"
info(
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type BaseReq2{}
type BaseResp2{}

View File

@ -0,0 +1,5 @@
import (
""
"foo"
"bar"
)

View File

@ -0,0 +1,3 @@
import ""
import "foo"
import "bar"

View File

@ -0,0 +1,7 @@
info(
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)

View File

@ -0,0 +1,136 @@
// test case: expected syntax statement
info ()
-----
// test case: duplicate syntax statement
syntax = "v1"
syntax = "v1"
-----
// test case: duplicate info statement
syntax = "v1"
info()
info()
-----
// test case: duplicate type
syntax = "v1"
type Foo{}
type Foo{}
-----
// test case: duplicate type
syntax = "v1"
type Baz{}
type (
Baz{}
)
-----
// test case: multiple service name
syntax = "v1"
service foo{
@handler foo
get /foo
}
service bar{
@handler foo
get /foo
}
-----
// test case: duplicate handler
syntax = "v1"
service foo{
@handler foo
get /foo
@handler foo
get /bar
}
-----
// test case: duplicate path
syntax = "v1"
service foo{
@handler foo
get /foo
@handler bar
get /foo
@handler qux
get /v1/baz
}
@server(
prefix: /v1
)
service foo{
@handler qux
get /baz
@handler quux
get /baz
}
-----
// test case: type declare context
syntax = "v1"
type Foo {
Bar Bar `json:"bar"`
}
-----
// test case: map key expected literal type
syntax = "v1"
type Foo {
Bar map[[]int]string `json:"bar"`
}
-----
// test case: map key expected literal type
syntax = "v1"
type Foo {
Bar map[[]int]string `json:"bar"`
}
-----
// test case: map key expected literal type
syntax = "v1"
type Foo {
Bar *map[[]int]string `json:"bar"`
}
-----
// test case: map value expected literal type
syntax = "v1"
type Foo {
Bar *map[string]{} `json:"bar"`
}
-----
// test case: invalid slice
syntax = "v1"
type Foo {
Bar []map[[]int]string `json:"bar"`
}
-----
// test case: array
syntax = "v1"
type Foo {
Bar [...]int `json:"bar"`
}
-----
// test case: any
syntax = "v1"
type Foo {
Bar any `json:"bar"`
}
-----
// test case: unresolved type
syntax = "v1"
service example {
@handler nestDemo
post /example/nest (NestDemoReq) returns (NestDemoResp)
}

View File

@ -0,0 +1,37 @@
service foo {
@handler bar
get /ping
@handler bar
get /ping;
}
service bar {
@doc "bar"
@handler foo
get /foo/:bar (Foo)
@doc "bar"
@handler foo
get /foo/:bar (Foo) returns ();
@handler foo
get /foo/:bar returns (Foo)
@handler foo
get /foo/:bar () returns (Foo);
}
service baz-api {
@handler foo
post /foo/:bar/foo-bar-baz (Foo) returns (*Bar)
@handler foo
post /foo/:bar/foo-bar-baz (Foo) returns (*Bar);
@handler bar
post /foo ([]Foo) returns ([]*Bar)
@handler bar
post /foo ([]Foo) returns ([]*Bar);
}

View File

@ -0,0 +1,160 @@
// aaaa
/*bb*/ syntax /*cc*/ = /*dd*/ "v1" /*syntax doc*/ // syntax stmt
// bbb
info ( // info stmt
title: "type title here" // title expr
/*ee*/
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type AliasInt int
type AliasString = string
type AliasArray [2]int8
type AliasArray2 [...]int8
type AliasSlice []int8
type AliasMap map[string]int
type Any interface{}
type AliasMapKeyStruct map[{
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
}
}
}
}]int
type AliasMapValueStruct map[string]{
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
}
}
}
}
type Foo {
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
Bar {
Name string `json:"name"`
Age int `json:"age"`
}
}
}
}
type Bar {
Base int `json:"base"`
Array1 [2]int `json:"array1"`
Array2 [...]int `json:"array2"`
Slice []int `json:"slice"`
Map1 map[string]int `json:"map1"`
Map2 map[string]*int `json:"map2"`
Map3 map[string][]int `json:"map3"`
Map4 map[string][]*int `json:"map4"`
Map5 map[string][2]int `json:"map5"`
Map6 map[string][...]int `json:"map6"`
Interface interface{} `json:"interface"`
Any any `json:"any"`
Foo Foo `json:"foo"`
Baz {
F1 int `json:"f1"`
F2 int `json:"f2"`
} `json:"baz"`
Qux *string `json:"qux"`
Quux bool `json:"quux"`
}
type (
GroupAliasInt int
GroupAliasString = string
GroupAliasArray [2]int8
GroupAliasArray2 [...]int8
GroupAliasSlice []int8
GroupAliasMap map[string]int
GroupAny interface{}
GroupFoo {}
GroupBar {
Base int `json:"base"`
Array1 [2]int `json:"array1"`
Array2 [...]int `json:"array2"`
Slice []int `json:"slice"`
Map1 map[string]int `json:"map1"`
Map2 map[string]*int `json:"map2"`
Map3 map[string][]int `json:"map3"`
Map4 map[string][]*int `json:"map4"`
Map5 map[string][2]int `json:"map5"`
Map6 map[string][...]int `json:"map6"`
Interface interface{} `json:"interface"`
Any any `json:"any"`
Foo Foo `json:"foo"`
Baz {
F1 int `json:"f1"`
F2 int `json:"f2"`
} `json:"baz"`
Qux *string `json:"qux"`
Quux bool `json:"quux"`
}
)
@server ()
service test {
@handler foo
get /test/foo
}
@server (
jwt: Auth
group: Group1
)
service test {
@doc "ping"
@handler foo
get /test/foo
@doc (
key1: "value1"
key2: "value2"
)
@handler bar
get /test/foo (Foo)
@handler baz
post /test/foo/baz returns (Bar)
@handler qux
post /test/foo/baz/:qux (Foo) returns (Bar)
@handler quux
post /test/foo/baz/:qux/qu-ux (Foo) returns (Bar)
@handler foobar
post /foo/bar (*Foo) returns ([]Bar)
@handler barbaz
post /bar/baz ([]*Foo) returns ([]int)
}
// terminal
// terminal2
/*
kkk
*/

View File

@ -0,0 +1,50 @@
// format api demo
syntax ="v1" // dd
info()
info(foo:"")
info(foo:""
bar: ""
quux: "")
info(foo:""
bar: ""
quux: ""
)
// info statement
// info statement
info (// Info bloack
title: "type title here" // title comment
desc: "type desc here"
author: "type author here"
/*aaa*/
/*
bbb
*/
email: "type email here" // eamil comment
/*aaa*/version:/*bbb*/ "type version here"// version comment
)
import ""
import "aaa"
import"bb"
import "cc"
import()
import(
)
import (
)
import ("aa")
import ("aa" "bb")
import ("aa"
"bb"
)
import ("aa"
"bb")
import (
"aa"
"bb"
)

View File

@ -0,0 +1,7 @@
package placeholder
// Type is the placeholder type.
type Type struct{}
// PlaceHolder is the placeholder.
var PlaceHolder Type

View File

@ -0,0 +1,667 @@
package scanner
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"log"
"path/filepath"
"strings"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)
const (
initMode mode = iota
// document mode begin
documentHalfOpen
documentOpen
documentHalfClose
documentClose
// document mode end
// string mode begin
stringOpen
stringClose
// string mode end
)
var missingInput = errors.New("missing input")
type mode int
// Scanner is a lexical scanner.
type Scanner struct {
filename string
size int
data []rune
position int // current position in input (points to current char)
readPosition int // current reading position in input (after current char)
ch rune
lines []int
}
// NextToken returns the next token.
func (s *Scanner) NextToken() (token.Token, error) {
s.skipWhiteSpace()
switch s.ch {
case '/':
peekOne := s.peekRune()
switch peekOne {
case '/':
return s.scanLineComment(), nil
case '*':
return s.scanDocument()
default:
return s.newToken(token.QUO), nil
}
case '-':
return s.newToken(token.SUB), nil
case '*':
return s.newToken(token.MUL), nil
case '(':
return s.newToken(token.LPAREN), nil
case '[':
return s.newToken(token.LBRACK), nil
case '{':
return s.newToken(token.LBRACE), nil
case ',':
return s.newToken(token.COMMA), nil
case '.':
position := s.position
peekOne := s.peekRune()
if peekOne != '.' {
return s.newToken(token.DOT), nil
}
s.readRune()
peekOne = s.peekRune()
if peekOne != '.' {
return s.newToken(token.DOT), nil
}
s.readRune()
s.readRune()
return token.Token{
Type: token.ELLIPSIS,
Text: "...",
Position: s.newPosition(position),
}, nil
case ')':
return s.newToken(token.RPAREN), nil
case ']':
return s.newToken(token.RBRACK), nil
case '}':
return s.newToken(token.RBRACE), nil
case ';':
return s.newToken(token.SEMICOLON), nil
case ':':
return s.newToken(token.COLON), nil
case '=':
return s.newToken(token.ASSIGN), nil
case '@':
return s.scanAt()
case '"':
return s.scanString('"', token.STRING)
case '`':
return s.scanString('`', token.RAW_STRING)
case 0:
return token.EofToken, nil
default:
if s.isIdentifierLetter(s.ch) {
return s.scanIdent(), nil
}
if s.isDigit(s.ch) {
return s.scanIntOrDuration(), nil
}
tok := token.NewIllegalToken(s.ch, s.newPosition(s.position))
s.readRune()
return tok, nil
}
}
func (s *Scanner) newToken(tp token.Type) token.Token {
tok := token.Token{
Type: tp,
Text: string(s.ch),
Position: s.positionAt(),
}
s.readRune()
return tok
}
func (s *Scanner) readRune() {
if s.readPosition >= s.size {
s.ch = 0
} else {
s.ch = s.data[s.readPosition]
}
s.position = s.readPosition
s.readPosition += 1
}
func (s *Scanner) peekRune() rune {
if s.readPosition >= s.size {
return 0
}
return s.data[s.readPosition]
}
func (s *Scanner) scanString(delim rune, tp token.Type) (token.Token, error) {
position := s.position
var stringMode = initMode
for {
switch s.ch {
case delim:
switch stringMode {
case initMode:
stringMode = stringOpen
case stringOpen:
stringMode = stringClose
s.readRune()
return token.Token{
Type: tp,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}, nil
}
case 0:
switch stringMode {
case initMode: // assert: dead code
return token.ErrorToken, s.assertExpected(token.EOF, tp)
case stringOpen:
return token.ErrorToken, s.assertExpectedString(token.EOF.String(), string(delim))
case stringClose: // assert: dead code
return token.Token{
Type: tp,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}, nil
}
}
s.readRune()
}
}
func (s *Scanner) scanAt() (token.Token, error) {
position := s.position
peek := s.peekRune()
if !s.isLetter(peek) {
if peek == 0 {
return token.NewIllegalToken(s.ch, s.positionAt()), nil
}
return token.ErrorToken, s.assertExpectedString(string(peek), token.IDENT.String())
}
s.readRune()
letters := s.scanLetterSet()
switch letters {
case "handler":
return token.Token{
Type: token.AT_HANDLER,
Text: "@handler",
Position: s.newPosition(position),
}, nil
case "server":
return token.Token{
Type: token.AT_SERVER,
Text: "@server",
Position: s.newPosition(position),
}, nil
case "doc":
return token.Token{
Type: token.AT_DOC,
Text: "@doc",
Position: s.newPosition(position),
}, nil
default:
return token.ErrorToken, s.assertExpectedString(
"@"+letters,
token.AT_DOC.String(),
token.AT_HANDLER.String(),
token.AT_SERVER.String())
}
}
func (s *Scanner) scanIntOrDuration() token.Token {
position := s.position
for s.isDigit(s.ch) {
s.readRune()
}
switch s.ch {
case 'n', 'µ', 'm', 's', 'h':
return s.scanDuration(position)
default:
return token.Token{
Type: token.INT,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
}
}
// scanDuration scans a duration literal, for example "1ns", "1µs", "1ms", "1s", "1m", "1h".
func (s *Scanner) scanDuration(bgPos int) token.Token {
switch s.ch {
case 'n':
return s.scanNanosecond(bgPos)
case 'µ':
return s.scanMicrosecond(bgPos)
case 'm':
return s.scanMillisecondOrMinute(bgPos)
case 's':
return s.scanSecond(bgPos)
case 'h':
return s.scanHour(bgPos)
default:
return s.illegalToken()
}
}
func (s *Scanner) scanNanosecond(bgPos int) token.Token {
s.readRune()
if s.ch != 's' {
return s.illegalToken()
}
s.readRune()
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
func (s *Scanner) scanMicrosecond(bgPos int) token.Token {
s.readRune()
if s.ch != 's' {
return s.illegalToken()
}
s.readRune()
if !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
for s.isDigit(s.ch) {
s.readRune()
}
if s.ch != 'n' {
return s.illegalToken()
}
return s.scanNanosecond(bgPos)
}
func (s *Scanner) scanMillisecondOrMinute(bgPos int) token.Token {
s.readRune()
if s.ch != 's' { // minute
if s.ch == 0 || !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
return s.scanMinute(bgPos)
}
return s.scanMillisecond(bgPos)
}
func (s *Scanner) scanMillisecond(bgPos int) token.Token {
s.readRune()
if !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
for s.isDigit(s.ch) {
s.readRune()
}
switch s.ch {
case 'n':
return s.scanNanosecond(bgPos)
case 'µ':
return s.scanMicrosecond(bgPos)
default:
return s.illegalToken()
}
}
func (s *Scanner) scanSecond(bgPos int) token.Token {
s.readRune()
if !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
for s.isDigit(s.ch) {
s.readRune()
}
switch s.ch {
case 'n':
return s.scanNanosecond(bgPos)
case 'µ':
return s.scanMicrosecond(bgPos)
case 'm':
s.readRune()
if s.ch != 's' {
return s.illegalToken()
}
return s.scanMillisecond(bgPos)
default:
return s.illegalToken()
}
}
func (s *Scanner) scanMinute(bgPos int) token.Token {
if !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
for s.isDigit(s.ch) {
s.readRune()
}
switch s.ch {
case 'n':
return s.scanNanosecond(bgPos)
case 'µ':
return s.scanMicrosecond(bgPos)
case 'm':
s.readRune()
if s.ch != 's' {
return s.illegalToken()
}
return s.scanMillisecond(bgPos)
case 's':
return s.scanSecond(bgPos)
default:
return s.illegalToken()
}
}
func (s *Scanner) scanHour(bgPos int) token.Token {
s.readRune()
if !s.isDigit(s.ch) {
return token.Token{
Type: token.DURATION,
Text: string(s.data[bgPos:s.position]),
Position: s.newPosition(bgPos),
}
}
for s.isDigit(s.ch) {
s.readRune()
}
switch s.ch {
case 'n':
return s.scanNanosecond(bgPos)
case 'µ':
return s.scanMicrosecond(bgPos)
case 'm':
return s.scanMillisecondOrMinute(bgPos)
case 's':
return s.scanSecond(bgPos)
default:
return s.illegalToken()
}
}
func (s *Scanner) illegalToken() token.Token {
tok := token.NewIllegalToken(s.ch, s.newPosition(s.position))
s.readRune()
return tok
}
func (s *Scanner) scanIdent() token.Token {
position := s.position
for s.isIdentifierLetter(s.ch) || s.isDigit(s.ch) {
s.readRune()
}
ident := string(s.data[position:s.position])
if s.ch == ':' {
s.readRune()
return token.Token{
Type: token.KEY,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
}
if ident == "interface" && s.ch == '{' && s.peekRune() == '}' {
s.readRune()
s.readRune()
return token.Token{
Type: token.ANY,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
}
return token.Token{
Type: token.IDENT,
Text: ident,
Position: s.newPosition(position),
}
}
func (s *Scanner) scanLetterSet() string {
position := s.position
for s.isLetter(s.ch) {
s.readRune()
}
return string(s.data[position:s.position])
}
func (s *Scanner) scanLineComment() token.Token {
position := s.position
for s.ch != '\n' && s.ch != 0 {
s.readRune()
}
return token.Token{
Type: token.COMMENT,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
}
func (s *Scanner) scanDocument() (token.Token, error) {
position := s.position
var documentMode = initMode
for {
switch s.ch {
case '*':
switch documentMode {
case documentHalfOpen:
documentMode = documentOpen // /*
case documentOpen, documentHalfClose:
documentMode = documentHalfClose // (?m)\/\*\*+
}
case 0:
switch documentMode {
case initMode, documentHalfOpen: // assert: dead code
return token.ErrorToken, s.assertExpected(token.EOF, token.MUL)
case documentOpen:
return token.ErrorToken, s.assertExpected(token.EOF, token.MUL)
case documentHalfClose:
return token.ErrorToken, s.assertExpected(token.EOF, token.QUO)
}
case '/':
switch documentMode {
case initMode: // /
documentMode = documentHalfOpen
case documentHalfOpen: // assert: dead code
return token.ErrorToken, s.assertExpected(token.QUO, token.MUL)
case documentHalfClose:
documentMode = documentClose // /*\*+*/
s.readRune()
tok := token.Token{
Type: token.DOCUMENT,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
return tok, nil
}
}
s.readRune()
}
}
func (s *Scanner) assertExpected(actual token.Type, expected ...token.Type) error {
var expects []string
for _, v := range expected {
expects = append(expects, fmt.Sprintf("'%s'", v.String()))
}
text := fmt.Sprint(s.positionAt().String(), " ", fmt.Sprintf(
"expected %s, got '%s'",
strings.Join(expects, " | "),
actual.String(),
))
return errors.New(text)
}
func (s *Scanner) assertExpectedString(actual string, expected ...string) error {
var expects []string
for _, v := range expected {
expects = append(expects, fmt.Sprintf("'%s'", v))
}
text := fmt.Sprint(s.positionAt().String(), " ", fmt.Sprintf(
"expected %s, got '%s'",
strings.Join(expects, " | "),
actual,
))
return errors.New(text)
}
func (s *Scanner) positionAt() token.Position {
return s.newPosition(s.position)
}
func (s *Scanner) newPosition(position int) token.Position {
line := s.lineCount()
return token.Position{
Filename: s.filename,
Line: line,
Column: position - s.lines[line-1],
}
}
func (s *Scanner) lineCount() int {
return len(s.lines)
}
func (s *Scanner) skipWhiteSpace() {
for s.isWhiteSpace(s.ch) {
s.readRune()
}
}
func (s *Scanner) isDigit(b rune) bool {
return b >= '0' && b <= '9'
}
func (s *Scanner) isLetter(b rune) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')
}
func (s *Scanner) isIdentifierLetter(b rune) bool {
if s.isLetter(b) {
return true
}
return b == '_'
}
// isWhiteSpace reports whether b is a whitespace rune; as a side effect it
// records the offset of every newline so token positions can be resolved later.
func (s *Scanner) isWhiteSpace(b rune) bool {
if b == '\n' {
s.lines = append(s.lines, s.position)
}
return b == ' ' || b == '\t' || b == '\r' || b == '\f' || b == '\v' || b == '\n'
}
// MustNewScanner returns a new scanner for the given filename and data, and exits the process if the scanner cannot be created.
func MustNewScanner(filename string, src interface{}) *Scanner {
sc, err := NewScanner(filename, src)
if err != nil {
log.Fatalln(err)
}
return sc
}
// NewScanner returns a new scanner for the given filename and data.
func NewScanner(filename string, src interface{}) (*Scanner, error) {
data, err := readData(filename, src)
if err != nil {
return nil, err
}
if len(data) == 0 {
return nil, missingInput
}
var runeList []rune
for _, r := range string(data) {
runeList = append(runeList, r)
}
filename = filepath.Base(filename)
s := &Scanner{
filename: filename,
size: len(runeList),
data: runeList,
lines: []int{-1},
readPosition: 0,
}
s.readRune()
return s, nil
}
// readData returns the content of filename if the file can be read; otherwise
// it falls back to src, which may be a []byte, *bytes.Buffer or string.
func readData(filename string, src interface{}) ([]byte, error) {
data, err := ioutil.ReadFile(filename)
if err == nil {
return data, nil
}
switch v := src.(type) {
case []byte:
data = append(data, v...)
case *bytes.Buffer:
data = v.Bytes()
case string:
data = []byte(v)
default:
return nil, fmt.Errorf("unsupported type: %T", src)
}
return data, nil
}
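The constructor and readData above give the scanner a simple fallback contract: the file on disk wins, and the src argument is only scanned when the file cannot be read. The following minimal sketch shows how it can be driven; it is not part of the commit, and the import paths plus the NextToken method (defined in the part of scanner.go not shown here) are assumptions.

package main

import (
	"fmt"
	"log"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/scanner"
	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	// If "demo.api" does not exist on disk, the string passed as src is scanned instead.
	sc, err := scanner.NewScanner("demo.api", `syntax = "v1"`)
	if err != nil {
		log.Fatal(err)
	}
	for {
		tok, err := sc.NextToken() // assumed API from the unshown part of scanner.go
		if err != nil {
			log.Fatal(err)
		}
		if tok.Type == token.EOF {
			break
		}
		fmt.Println(tok) // e.g. "demo.api 1:1 IDENT syntax"
	}
}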

File diff suppressed because it is too large

View File

@ -0,0 +1,28 @@
syntax = "v1"
info(
title: "type title here"
desc: "type desc here"
author: "type author here"
email: "type email here"
version: "type version here"
)
type request {
// TODO: add members here and delete this comment
}
type response {
// TODO: add members here and delete this comment
}
@server(
jwt: Auth
group: template
)
service template {
@doc "foo" /*foo*/
@handler handlerName // TODO: replace handler name and delete this comment
get /users/id/:userId (request) returns (response)
}

View File

@ -0,0 +1,21 @@
package token
import "fmt"
// IllegalPosition is a position that is not valid.
var IllegalPosition = Position{}
// Position represents a rune position in the source code.
type Position struct {
Filename string
Line int
Column int
}
// String returns a string representation of the position.
func (p Position) String() string {
if len(p.Filename) == 0 {
return fmt.Sprint(p.Line, ":", p.Column)
}
return fmt.Sprint(p.Filename, " ", p.Line, ":", p.Column)
}
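For illustration, the two output forms of Position.String (a sketch, not part of the commit; the import path is assumed from this commit's package layout):

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	p := token.Position{Filename: "greet.api", Line: 3, Column: 7}
	fmt.Println(p)                                  // greet.api 3:7
	fmt.Println(token.Position{Line: 3, Column: 7}) // 3:7
}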

View File

@ -0,0 +1,357 @@
package token
import (
"fmt"
"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
"github.com/zeromicro/go-zero/tools/goctl/util"
)
const (
Syntax = "syntax"
Info = "info"
Service = "service"
Returns = "returns"
Any = "any"
TypeKeyword = "type"
MapKeyword = "map"
ImportKeyword = "import"
)
// Type is the type of token.
type Type int
// EofToken is the end of file token.
var EofToken = Token{Type: EOF}
// ErrorToken is the error token.
var ErrorToken = Token{Type: error}
// Token represents a lexical token of the api source.
type Token struct {
Type Type
Text string
Position Position
}
// Fork forks token for a given Type.
func (t Token) Fork(tp Type) Token {
return Token{
Type: tp,
Text: t.Text,
Position: t.Position,
}
}
// IsEmptyString returns true if the token is an empty string.
func (t Token) IsEmptyString() bool {
if t.Type != STRING && t.Type != RAW_STRING {
return false
}
text := util.TrimWhiteSpace(t.Text)
return text == `""` || text == "``"
}
// IsComment returns true if the token is a comment.
func (t Token) IsComment() bool {
return t.IsType(COMMENT)
}
// IsDocument returns true if the token is a document (block) comment.
func (t Token) IsDocument() bool {
return t.IsType(DOCUMENT)
}
// IsType returns true if the token is the given type.
func (t Token) IsType(tp Type) bool {
return t.Type == tp
}
// Line returns the line number of the token.
func (t Token) Line() int {
return t.Position.Line
}
// String returns the string of the token.
func (t Token) String() string {
if t == ErrorToken {
return t.Type.String()
}
return fmt.Sprintf("%s %s %s", t.Position.String(), t.Type.String(), t.Text)
}
// Valid returns true if the token is valid.
func (t Token) Valid() bool {
return t.Type != token_bg
}
// IsKeyword returns true if the token is a Go keyword.
func (t Token) IsKeyword() bool {
return golang_keyword_beg < t.Type && t.Type < golang_keyword_end
}
// IsBaseType returns true if the token is a basic data type.
func (t Token) IsBaseType() bool {
_, ok := baseDataType[t.Text]
return ok
}
// IsHttpMethod returns true if the token is an HTTP method.
func (t Token) IsHttpMethod() bool {
_, ok := httpMethod[t.Text]
return ok
}
// Is returns true if the token text matches one of the given texts.
func (t Token) Is(text ...string) bool {
for _, v := range text {
if t.Text == v {
return true
}
}
return false
}
const (
token_bg Type = iota
error
ILLEGAL
EOF
COMMENT
DOCUMENT
literal_beg
IDENT // main
INT // 123
DURATION // 3s,3ms
STRING // "abc"
RAW_STRING // `abc`
PATH // `abc`
KEY // `abc:`
literal_end
operator_beg
SUB // -
MUL // *
QUO // /
ASSIGN // =
LPAREN // (
LBRACK // [
LBRACE // {
COMMA // ,
DOT // .
RPAREN // )
RBRACE // }
RBRACK // ]
SEMICOLON // ;
COLON // :
ELLIPSIS
operator_end
golang_keyword_beg
BREAK
CASE
CHAN
CONST
CONTINUE
DEFAULT
DEFER
ELSE
FALLTHROUGH
FOR
FUNC
GO
GOTO
IF
IMPORT
INTERFACE
MAP
PACKAGE
RANGE
RETURN
SELECT
STRUCT
SWITCH
TYPE
VAR
golang_keyword_end
api_keyword_bg
AT_DOC
AT_HANDLER
AT_SERVER
ANY
api_keyword_end
token_end
)
// String returns the string of the token type.
func (t Type) String() string {
if t >= token_bg && t < token_end {
return tokens[t]
}
return ""
}
var tokens = [...]string{
ILLEGAL: "ILLEGAL",
EOF: "EOF",
COMMENT: "COMMENT",
DOCUMENT: "DOCUMENT",
IDENT: "IDENT",
INT: "INT",
DURATION: "DURATION",
STRING: "STRING",
RAW_STRING: "RAW_STRING",
PATH: "PATH",
KEY: "KEY",
SUB: "-",
MUL: "*",
QUO: "/",
ASSIGN: "=",
LPAREN: "(",
LBRACK: "[",
LBRACE: "{",
COMMA: ",",
DOT: ".",
RPAREN: ")",
RBRACK: "]",
RBRACE: "}",
SEMICOLON: ";",
COLON: ":",
ELLIPSIS: "...",
BREAK: "break",
CASE: "case",
CHAN: "chan",
CONST: "const",
CONTINUE: "continue",
DEFAULT: "default",
DEFER: "defer",
ELSE: "else",
FALLTHROUGH: "fallthrough",
FOR: "for",
FUNC: "func",
GO: "go",
GOTO: "goto",
IF: "if",
IMPORT: "import",
INTERFACE: "interface",
MAP: "map",
PACKAGE: "package",
RANGE: "range",
RETURN: "return",
SELECT: "select",
STRUCT: "struct",
SWITCH: "switch",
TYPE: "type",
VAR: "var",
AT_DOC: "@doc",
AT_HANDLER: "@handler",
AT_SERVER: "@server",
ANY: "interface{}",
}
// HttpMethods lists the supported HTTP methods.
var HttpMethods = []interface{}{"get", "head", "post", "put", "patch", "delete", "connect", "options", "trace"}
var httpMethod = map[string]placeholder.Type{
"get": placeholder.PlaceHolder,
"head": placeholder.PlaceHolder,
"post": placeholder.PlaceHolder,
"put": placeholder.PlaceHolder,
"patch": placeholder.PlaceHolder,
"delete": placeholder.PlaceHolder,
"connect": placeholder.PlaceHolder,
"options": placeholder.PlaceHolder,
"trace": placeholder.PlaceHolder,
}
var keywords = map[string]Type{
// golang_keyword_bg
"break": BREAK,
"case": CASE,
"chan": CHAN,
"const": CONST,
"continue": CONTINUE,
"default": DEFAULT,
"defer": DEFER,
"else": ELSE,
"fallthrough": FALLTHROUGH,
"for": FOR,
"func": FUNC,
"go": GO,
"goto": GOTO,
"if": IF,
"import": IMPORT,
"interface": INTERFACE,
"map": MAP,
"package": PACKAGE,
"range": RANGE,
"return": RETURN,
"select": SELECT,
"struct": STRUCT,
"switch": SWITCH,
"type": TYPE,
"var": VAR,
// golang_keyword_end
}
var baseDataType = map[string]placeholder.Type{
"bool": placeholder.PlaceHolder,
"uint8": placeholder.PlaceHolder,
"uint16": placeholder.PlaceHolder,
"uint32": placeholder.PlaceHolder,
"uint64": placeholder.PlaceHolder,
"int8": placeholder.PlaceHolder,
"int16": placeholder.PlaceHolder,
"int32": placeholder.PlaceHolder,
"int64": placeholder.PlaceHolder,
"float32": placeholder.PlaceHolder,
"float64": placeholder.PlaceHolder,
"complex64": placeholder.PlaceHolder,
"complex128": placeholder.PlaceHolder,
"string": placeholder.PlaceHolder,
"int": placeholder.PlaceHolder,
"uint": placeholder.PlaceHolder,
"uintptr": placeholder.PlaceHolder,
"byte": placeholder.PlaceHolder,
"rune": placeholder.PlaceHolder,
"any": placeholder.PlaceHolder,
}
// LookupKeyword returns the keyword type if the given ident is a Go keyword.
func LookupKeyword(ident string) (Type, bool) {
tp, ok := keywords[ident]
return tp, ok
}
// NewIllegalToken returns a new illegal token.
func NewIllegalToken(b rune, pos Position) Token {
return Token{
Type: ILLEGAL,
Text: string(b),
Position: pos,
}
}
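A short sketch (not part of the commit) of the helpers defined above; the import path is assumed from this commit's package layout:

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
)

func main() {
	// Keyword lookup: "map" is a Go keyword, "handler" is not.
	if tp, ok := token.LookupKeyword("map"); ok {
		fmt.Println(tp) // map
	}

	// Fork keeps the text and position but swaps the type.
	key := token.Token{Type: token.KEY, Text: "group:"}
	fmt.Println(key.Fork(token.IDENT).Type) // IDENT

	// Classification helpers.
	get := token.Token{Type: token.IDENT, Text: "get"}
	fmt.Println(get.IsHttpMethod()) // true
	fmt.Println(get.IsBaseType())   // false
	fmt.Println(token.NewIllegalToken('?', token.IllegalPosition).Type) // ILLEGAL
}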

View File

@ -111,3 +111,13 @@ func isGolangKeyword(s string) bool {
_, ok := goKeyword[s]
return ok
}
// TrimWhiteSpace removes all whitespace characters (space, tab, newline, form feed, carriage return) from s.
func TrimWhiteSpace(s string) string {
r := strings.NewReplacer(" ", "", "\t", "", "\n", "", "\f", "", "\r", "")
return r.Replace(s)
}
// IsEmptyStringOrWhiteSpace reports whether s is empty or consists only of whitespace.
func IsEmptyStringOrWhiteSpace(s string) bool {
v := TrimWhiteSpace(s)
return len(v) == 0
}
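For reference, the intended behavior of the two helpers added here (a sketch, not part of the commit; the util import path is the one already used by token.go above):

package main

import (
	"fmt"

	"github.com/zeromicro/go-zero/tools/goctl/util"
)

func main() {
	fmt.Println(util.TrimWhiteSpace(" foo \t bar\n"))      // foobar
	fmt.Println(util.IsEmptyStringOrWhiteSpace(" \t\r\n")) // true
	fmt.Println(util.IsEmptyStringOrWhiteSpace("``"))      // false
}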