fix syntax of the key expression (#4586)

kesonan 2025-01-18 23:46:24 +08:00 committed by GitHub
parent 3d931d7030
commit b650c8c425
9 changed files with 127 additions and 69 deletions
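
At a glance, the change drops the scanner's fused KEY token (an identifier with its trailing colon, such as "title:") in favour of separate IDENT and COLON tokens, and threads the new colon node through the AST, parser, formatter, and tests. A minimal sketch of the before and after token streams, using simplified stand-in types rather than the goctl token package:

// Illustrative only: tok is a simplified stand-in for the goctl token type,
// used to show the shape of the two token streams.
package main

import "fmt"

type tok struct {
	typ  string
	text string
}

func main() {
	// Before: the scanner fused the identifier and the colon into one KEY token,
	// so `title: "x"` produced two tokens.
	before := []tok{{"KEY", `title:`}, {"STRING", `"x"`}}

	// After: the identifier and the colon are independent tokens, so whitespace
	// between them (as in `title : "x"`) no longer changes the stream.
	after := []tok{{"IDENT", `title`}, {"COLON", `:`}, {"STRING", `"x"`}}

	fmt.Println(before)
	fmt.Println(after)
}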

View File

@@ -46,7 +46,8 @@ func (i *InfoStmt) Format(prefix ...string) string {
w.Write(withNode(infoNode, i.LParen))
w.NewLine()
for _, v := range i.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
node := transferNilInfixNode([]*TokenNode{v.Key, v.Colon})
node = transferTokenNode(node, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}

View File

@@ -6,6 +6,8 @@ import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
type KVExpr struct {
// Key is the key of the key value expression.
Key *TokenNode
// Colon is the colon of the key value expression.
Colon *TokenNode
// Value is the value of the key value expression.
Value *TokenNode
}
@@ -24,7 +26,8 @@ func (i *KVExpr) CommentGroup() (head, leading CommentGroup) {
func (i *KVExpr) Format(prefix ...string) string {
w := NewBufferWriter()
w.Write(withNode(i.Key, i.Value), withPrefix(prefix...), withInfix(Indent), withRawText())
node := transferNilInfixNode([]*TokenNode{i.Key, i.Colon})
w.Write(withNode(node, i.Value), withPrefix(prefix...), withInfix(Indent), withRawText())
return w.String()
}
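
Since the colon now lives in its own Colon field, KVExpr.Format first merges Key and Colon back into a single node (transferNilInfixNode joins them with no infix) and then writes that merged node next to the value, so the output still reads "key: value". A rough standalone analogue of the merge step, with a hypothetical joinWithoutInfix helper standing in for the goctl internals:

// Simplified analogue of the merge the formatter performs; joinWithoutInfix
// is a hypothetical stand-in for transferNilInfixNode, not the goctl API.
package main

import (
	"fmt"
	"strings"
)

type tokenNode struct{ text string }

// joinWithoutInfix concatenates the nodes with no separator, so "title" and
// ":" become a single "title:" node before the value is appended.
func joinWithoutInfix(nodes ...*tokenNode) *tokenNode {
	var b strings.Builder
	for _, n := range nodes {
		b.WriteString(n.text)
	}
	return &tokenNode{text: b.String()}
}

func main() {
	key, colon, value := &tokenNode{"title"}, &tokenNode{":"}, &tokenNode{`"type title here"`}
	merged := joinWithoutInfix(key, colon)
	fmt.Println(merged.text, value.text) // title: "type title here"
}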

View File

@@ -46,7 +46,8 @@ func (a *AtServerStmt) Format(prefix ...string) string {
w.Write(withNode(atServerNode, a.LParen), expectSameLine())
w.NewLine()
for _, v := range a.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
node := transferNilInfixNode([]*TokenNode{v.Key, v.Colon})
node = transferTokenNode(node, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}
@@ -148,7 +149,8 @@ func (a *AtDocGroupStmt) Format(prefix ...string) string {
w.Write(withNode(atDocNode, a.LParen), expectSameLine())
w.NewLine()
for _, v := range a.Values {
node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
node := transferNilInfixNode([]*TokenNode{v.Key, v.Colon})
node = transferTokenNode(node, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
w.NewLine()
}

View File

@@ -545,7 +545,7 @@ func (p *Parser) parseAtDocGroupStmt() ast.AtDocStmt {
}
stmt.Values = append(stmt.Values, expr)
if p.notExpectPeekToken(token.RPAREN, token.KEY) {
if p.notExpectPeekToken(token.RPAREN, token.IDENT) {
return nil
}
}
@@ -605,7 +605,7 @@ func (p *Parser) parseAtServerStmt() *ast.AtServerStmt {
}
stmt.Values = append(stmt.Values, expr)
if p.notExpectPeekToken(token.RPAREN, token.KEY) {
if p.notExpectPeekToken(token.RPAREN, token.IDENT) {
return nil
}
}
@@ -1115,7 +1115,7 @@ func (p *Parser) parseInfoStmt() *ast.InfoStmt {
}
stmt.Values = append(stmt.Values, expr)
if p.notExpectPeekToken(token.RPAREN, token.KEY) {
if p.notExpectPeekToken(token.RPAREN, token.IDENT) {
return nil
}
}
@@ -1134,12 +1134,17 @@ func (p *Parser) parseAtServerKVExpression() *ast.KVExpr {
var expr = &ast.KVExpr{}
// token IDENT
if !p.advanceIfPeekTokenIs(token.KEY, token.RPAREN) {
if !p.advanceIfPeekTokenIs(token.IDENT, token.RPAREN) {
return nil
}
expr.Key = p.curTokenNode()
if !p.advanceIfPeekTokenIs(token.COLON) {
return nil
}
expr.Colon = p.curTokenNode()
var valueTok token.Token
var leadingCommentGroup ast.CommentGroup
if p.notExpectPeekToken(token.QUO, token.DURATION, token.IDENT, token.INT, token.STRING) {
@@ -1324,12 +1329,18 @@ func (p *Parser) parseKVExpression() *ast.KVExpr {
var expr = &ast.KVExpr{}
// token IDENT
if !p.advanceIfPeekTokenIs(token.KEY) {
if !p.advanceIfPeekTokenIs(token.IDENT) {
return nil
}
expr.Key = p.curTokenNode()
// token COLON
if !p.advanceIfPeekTokenIs(token.COLON) {
return nil
}
expr.Colon = p.curTokenNode()
// token STRING
if !p.advanceIfPeekTokenIs(token.STRING) {
return nil
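
Both parseAtServerKVExpression and parseKVExpression now consume a pair as three tokens in order, IDENT, then COLON, then the value, instead of a single KEY token. The sketch below mirrors that expectation chain with a toy parser; the type and helper names are simplifications, not the goctl parser API:

// Minimal sketch of the new expectation order; the types and helpers here
// are hypothetical simplifications, not the goctl parser.
package main

import "fmt"

type token struct{ typ, text string }

type parser struct {
	toks []token
	pos  int
}

// advanceIf consumes the next token only when it has the wanted type.
func (p *parser) advanceIf(typ string) (token, bool) {
	if p.pos >= len(p.toks) || p.toks[p.pos].typ != typ {
		return token{}, false
	}
	t := p.toks[p.pos]
	p.pos++
	return t, true
}

type kvExpr struct{ key, colon, value token }

// parseKV mirrors the new order: IDENT, then COLON, then the value token.
func (p *parser) parseKV() (kvExpr, bool) {
	var kv kvExpr
	var ok bool
	if kv.key, ok = p.advanceIf("IDENT"); !ok {
		return kv, false
	}
	if kv.colon, ok = p.advanceIf("COLON"); !ok {
		return kv, false
	}
	if kv.value, ok = p.advanceIf("STRING"); !ok {
		return kv, false
	}
	return kv, true
}

func main() {
	p := &parser{toks: []token{{"IDENT", "title"}, {"COLON", ":"}, {"STRING", `"x"`}}}
	fmt.Println(p.parseKV())
}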

View File

@@ -125,11 +125,11 @@ var infoTestAPI string
func TestParser_Parse_infoStmt(t *testing.T) {
t.Run("valid", func(t *testing.T) {
expected := map[string]string{
"title:": `"type title here"`,
"desc:": `"type desc here"`,
"author:": `"type author here"`,
"email:": `"type email here"`,
"version:": `"type version here"`,
"title": `"type title here"`,
"desc": `"type desc here"`,
"author": `"type author here"`,
"email": `"type email here"`,
"version": `"type version here"`,
}
p := New("foo.api", infoTestAPI)
result := p.Parse()
@@ -285,27 +285,27 @@ var atServerTestAPI string
func TestParser_Parse_atServerStmt(t *testing.T) {
t.Run("valid", func(t *testing.T) {
var expectedData = map[string]string{
"foo:": `bar`,
"bar:": `baz`,
"baz:": `foo`,
"qux:": `/v1`,
"quux:": `/v1/v2`,
"middleware:": `M1,M2`,
"timeout1:": "1h",
"timeout2:": "10m",
"timeout3:": "10s",
"timeout4:": "10ms",
"timeout5:": "10µs",
"timeout6:": "10ns",
"timeout7:": "1h10m10s10ms10µs10ns",
"maxBytes:": `1024`,
"prefix:": "/v1",
"prefix1:": "/v1/v2_test/v2-beta",
"prefix2:": "v1/v2_test/v2-beta",
"prefix3:": "v1/v2_",
"prefix4:": "a-b-c",
"summary:": `"test"`,
"key:": `"bar"`,
"foo": `bar`,
"bar": `baz`,
"baz": `foo`,
"qux": `/v1`,
"quux": `/v1/v2`,
"middleware": `M1,M2`,
"timeout1": "1h",
"timeout2": "10m",
"timeout3": "10s",
"timeout4": "10ms",
"timeout5": "10µs",
"timeout6": "10ns",
"timeout7": "1h10m10s10ms10µs10ns",
"maxBytes": `1024`,
"prefix": "/v1",
"prefix1": "/v1/v2_test/v2-beta",
"prefix2": "v1/v2_test/v2-beta",
"prefix3": "v1/v2_",
"prefix4": "a-b-c",
"summary": `"test"`,
"key": `"bar"`,
}
p := New("foo.api", atServerTestAPI)

View File

@@ -151,13 +151,13 @@ service example {
@doc (
desc: "path demo"
)
@handler postPath
post /example/path (PostPathReq) returns (PostPathResp)
@handler getPath
get /example/path (PostPathReq) returns (PostPathResp)
}
@server (
group: array
prefix: /array
group : array
prefix : /array
maxBytes: 1024
)
service example {
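
The regenerated testdata shows the user-visible effect: "group : array" and "prefix : /array", with whitespace before the colon, are now legal spellings of the same key/value pairs as "group: array" and "prefix: /array". A hypothetical check of that equivalence (splitKV below is illustrative only, not part of goctl):

// Hypothetical illustration: both spellings reduce to the same key/value
// pair once the colon is scanned as its own token.
package main

import (
	"fmt"
	"strings"
)

// splitKV splits a "key: value" line on the first colon and trims whitespace.
func splitKV(line string) (key, value string) {
	k, v, _ := strings.Cut(line, ":")
	return strings.TrimSpace(k), strings.TrimSpace(v)
}

func main() {
	k1, v1 := splitKV("group: array")
	k2, v2 := splitKV("group : array")
	fmt.Println(k1 == k2 && v1 == v2, k1, v1) // true group array
}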

View File

@@ -444,16 +444,6 @@ func (s *Scanner) scanIdent() token.Token {
}
ident := string(s.data[position:s.position])
if s.ch == ':' {
s.readRune()
return token.Token{
Type: token.KEY,
Text: string(s.data[position:s.position]),
Position: s.newPosition(position),
}
}
if ident == "interface" && s.ch == '{' && s.peekRune() == '}' {
s.readRune()
s.readRune()
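
With the KEY branch removed, scanIdent stops at the end of the identifier and leaves ':' to be scanned later as its own COLON token. The sketch below shows that stopping behaviour with a far simpler scanner than goctl's; scanIdent here is a stand-in, not the real implementation:

// Much simplified sketch of the scanning split; this scanIdent is a
// stand-in, not the goctl scanner.
package main

import (
	"fmt"
	"unicode"
)

// scanIdent reads letters, digits, and underscores and stops at anything
// else, including ':', which is emitted later as its own token.
func scanIdent(input string, pos int) (ident string, next int) {
	start := pos
	for pos < len(input) {
		r := rune(input[pos])
		if !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != '_' {
			break
		}
		pos++
	}
	return input[start:pos], pos
}

func main() {
	input := "group : array"
	ident, next := scanIdent(input, 0)
	fmt.Printf("IDENT %q, rest %q\n", ident, input[next:]) // IDENT "group", rest " : array"
}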

View File

@@ -581,8 +581,8 @@ func TestScanner_NextToken_Key(t *testing.T) {
},
},
{
Type: token.KEY,
Text: "foo:",
Type: token.IDENT,
Text: "foo",
Position: token.Position{
Filename: "foo.api",
Line: 2,
@@ -590,14 +590,32 @@ func TestScanner_NextToken_Key(t *testing.T) {
},
},
{
Type: token.KEY,
Text: "bar:",
Type: token.COLON,
Text: ":",
Position: token.Position{
Filename: "foo.api",
Line: 2,
Column: 4,
},
},
{
Type: token.IDENT,
Text: "bar",
Position: token.Position{
Filename: "foo.api",
Line: 3,
Column: 1,
},
},
{
Type: token.COLON,
Text: ":",
Position: token.Position{
Filename: "foo.api",
Line: 3,
Column: 4,
},
},
{
Type: token.COLON,
Text: ":",
@@ -1090,50 +1108,75 @@ func TestScanner_NextToken(t *testing.T) {
Position: position(3, 5),
},
{
Type: token.KEY,
Text: `title:`,
Type: token.IDENT,
Text: `title`,
Position: position(4, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(4, 10),
},
{
Type: token.STRING,
Text: `"type title here"`,
Position: position(4, 12),
},
{
Type: token.KEY,
Text: `desc:`,
Type: token.IDENT,
Text: `desc`,
Position: position(5, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(5, 9),
},
{
Type: token.STRING,
Text: `"type desc here"`,
Position: position(5, 11),
},
{
Type: token.KEY,
Text: `author:`,
Type: token.IDENT,
Text: `author`,
Position: position(6, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(6, 11),
},
{
Type: token.STRING,
Text: `"type author here"`,
Position: position(6, 13),
},
{
Type: token.KEY,
Text: `email:`,
Type: token.IDENT,
Text: `email`,
Position: position(7, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(7, 10),
},
{
Type: token.STRING,
Text: `"type email here"`,
Position: position(7, 12),
},
{
Type: token.KEY,
Text: `version:`,
Type: token.IDENT,
Text: `version`,
Position: position(8, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(8, 12),
},
{
Type: token.STRING,
Text: `"type version here"`,
@@ -1205,20 +1248,30 @@ func TestScanner_NextToken(t *testing.T) {
Position: position(20, 8),
},
{
Type: token.KEY,
Text: `jwt:`,
Type: token.IDENT,
Text: `jwt`,
Position: position(21, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(21, 8),
},
{
Type: token.IDENT,
Text: `Auth`,
Position: position(21, 10),
},
{
Type: token.KEY,
Text: `group:`,
Type: token.IDENT,
Text: `group`,
Position: position(22, 5),
},
{
Type: token.COLON,
Text: `:`,
Position: position(22, 10),
},
{
Type: token.IDENT,
Text: `template`,

View File

@@ -127,7 +127,6 @@ const (
STRING // "abc"
RAW_STRING // `abc`
PATH // `abc`
KEY // `abc:`
literal_end
operator_beg
@@ -213,7 +212,6 @@ var tokens = [...]string{
STRING: "STRING",
RAW_STRING: "RAW_STRING",
PATH: "PATH",
KEY: "KEY",
SUB: "-",
MUL: "*",
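
Dropping KEY from the literal block also means dropping its entry from the tokens name table, since the two must stay in sync. A miniature, purely illustrative version of that pairing:

// Illustrative only: a tiny const/iota token set plus its name table,
// showing why removing a token type (here KEY) touches both the constant
// block and the lookup table.
package main

import "fmt"

type Type int

const (
	IDENT Type = iota
	STRING
	COLON
	// KEY used to sit here; with the colon scanned separately it is no longer needed.
)

var names = [...]string{
	IDENT:  "IDENT",
	STRING: "STRING",
	COLON:  ":",
}

func main() {
	fmt.Println(names[IDENT], names[COLON])
}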