Browse Source

feat(goctl): Add api parser (#2585)

anqiansong 2 years ago
parent
commit
50bc361430
59 changed files with 11633 additions and 6 deletions
  1. 7 0
      tools/goctl/api/format/format.go
  2. 1 1
      tools/goctl/api/gogen/gen.go
  3. 6 0
      tools/goctl/api/parser/parser.go
  4. 5 0
      tools/goctl/api/parser/testdata/test.api
  5. 4 3
      tools/goctl/api/spec/spec.go
  6. 1 1
      tools/goctl/internal/version/version.go
  7. 15 1
      tools/goctl/pkg/env/env.go
  8. 32 0
      tools/goctl/pkg/parser/api/assertx/error.go
  9. 223 0
      tools/goctl/pkg/parser/api/ast/ast.go
  10. 75 0
      tools/goctl/pkg/parser/api/ast/comment.go
  11. 111 0
      tools/goctl/pkg/parser/api/ast/importstatement.go
  12. 65 0
      tools/goctl/pkg/parser/api/ast/infostatement.go
  13. 39 0
      tools/goctl/pkg/parser/api/ast/kvexpression.go
  14. 237 0
      tools/goctl/pkg/parser/api/ast/print.go
  15. 577 0
      tools/goctl/pkg/parser/api/ast/servicestatement.go
  16. 44 0
      tools/goctl/pkg/parser/api/ast/syntaxstatement.go
  17. 797 0
      tools/goctl/pkg/parser/api/ast/typestatement.go
  18. 403 0
      tools/goctl/pkg/parser/api/ast/writer.go
  19. 45 0
      tools/goctl/pkg/parser/api/format/format.go
  20. 1510 0
      tools/goctl/pkg/parser/api/format/format_test.go
  21. 36 0
      tools/goctl/pkg/parser/api/format/testdata/expected_service.api
  22. 37 0
      tools/goctl/pkg/parser/api/format/testdata/expected_type_struct_group.api
  23. 34 0
      tools/goctl/pkg/parser/api/format/testdata/expected_type_struct_lit.api
  24. 154 0
      tools/goctl/pkg/parser/api/format/testdata/test_format.api
  25. 35 0
      tools/goctl/pkg/parser/api/format/testdata/test_service.api
  26. 34 0
      tools/goctl/pkg/parser/api/format/testdata/test_type_struct_group.api
  27. 32 0
      tools/goctl/pkg/parser/api/format/testdata/test_type_struct_lit.api
  28. 412 0
      tools/goctl/pkg/parser/api/parser/analyzer.go
  29. 45 0
      tools/goctl/pkg/parser/api/parser/analyzer_test.go
  30. 311 0
      tools/goctl/pkg/parser/api/parser/api.go
  31. 28 0
      tools/goctl/pkg/parser/api/parser/error.go
  32. 55 0
      tools/goctl/pkg/parser/api/parser/filter.go
  33. 1506 0
      tools/goctl/pkg/parser/api/parser/parser.go
  34. 1484 0
      tools/goctl/pkg/parser/api/parser/parser_test.go
  35. 5 0
      tools/goctl/pkg/parser/api/parser/testdata/atdoc_group_test.api
  36. 3 0
      tools/goctl/pkg/parser/api/parser/testdata/atdoc_literal_test.api
  37. 3 0
      tools/goctl/pkg/parser/api/parser/testdata/athandler_test.api
  38. 16 0
      tools/goctl/pkg/parser/api/parser/testdata/atserver_test.api
  39. 11 0
      tools/goctl/pkg/parser/api/parser/testdata/base.api
  40. 11 0
      tools/goctl/pkg/parser/api/parser/testdata/base1.api
  41. 11 0
      tools/goctl/pkg/parser/api/parser/testdata/base2.api
  42. 4 0
      tools/goctl/pkg/parser/api/parser/testdata/comment_test.api
  43. 167 0
      tools/goctl/pkg/parser/api/parser/testdata/example.api
  44. 12 0
      tools/goctl/pkg/parser/api/parser/testdata/example_base1.api
  45. 12 0
      tools/goctl/pkg/parser/api/parser/testdata/example_base2.api
  46. 5 0
      tools/goctl/pkg/parser/api/parser/testdata/import_group_test.api
  47. 3 0
      tools/goctl/pkg/parser/api/parser/testdata/import_literal_test.api
  48. 7 0
      tools/goctl/pkg/parser/api/parser/testdata/info_test.api
  49. 136 0
      tools/goctl/pkg/parser/api/parser/testdata/invalid.api
  50. 37 0
      tools/goctl/pkg/parser/api/parser/testdata/service_test.api
  51. 160 0
      tools/goctl/pkg/parser/api/parser/testdata/test.api
  52. 50 0
      tools/goctl/pkg/parser/api/parser/testdata/test_format.api
  53. 7 0
      tools/goctl/pkg/parser/api/placeholder/placeholder.go
  54. 667 0
      tools/goctl/pkg/parser/api/scanner/scanner.go
  55. 1490 0
      tools/goctl/pkg/parser/api/scanner/scanner_test.go
  56. 28 0
      tools/goctl/pkg/parser/api/scanner/test.api
  57. 21 0
      tools/goctl/pkg/parser/api/token/position.go
  58. 357 0
      tools/goctl/pkg/parser/api/token/token.go
  59. 10 0
      tools/goctl/util/string.go

+ 7 - 0
tools/goctl/api/format/format.go

@@ -13,8 +13,11 @@ import (
 
 	"github.com/spf13/cobra"
 	"github.com/zeromicro/go-zero/core/errorx"
+
 	"github.com/zeromicro/go-zero/tools/goctl/api/parser"
 	"github.com/zeromicro/go-zero/tools/goctl/api/util"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/env"
+	apiF "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/format"
 	"github.com/zeromicro/go-zero/tools/goctl/util/pathx"
 )
 
@@ -90,6 +93,10 @@ func apiFormatReader(reader io.Reader, filename string, skipCheckDeclare bool) e
 
 // ApiFormatByPath format api from file path
 func ApiFormatByPath(apiFilePath string, skipCheckDeclare bool) error {
+	if env.UseExperimental() {
+		return apiF.File(apiFilePath)
+	}
+
 	data, err := os.ReadFile(apiFilePath)
 	if err != nil {
 		return err

+ 1 - 1
tools/goctl/api/gogen/gen.go

@@ -158,7 +158,7 @@ func sweep() error {
 
 			tm := time.Unix(seconds, 0)
 			if tm.Before(keepTime) {
-				if err := os.Remove(fpath); err != nil {
+				if err := os.RemoveAll(fpath); err != nil {
 					fmt.Println(aurora.Red(fmt.Sprintf("failed to remove file: %s", fpath)))
 					return err
 				}

+ 6 - 0
tools/goctl/api/parser/parser.go

@@ -8,6 +8,8 @@ import (
 	"github.com/zeromicro/go-zero/tools/goctl/api/parser/g4/ast"
 	"github.com/zeromicro/go-zero/tools/goctl/api/parser/g4/gen/api"
 	"github.com/zeromicro/go-zero/tools/goctl/api/spec"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/env"
+	apiParser "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
 )
 
 type parser struct {
@@ -17,6 +19,10 @@ type parser struct {
 
 // Parse parses the api file
 func Parse(filename string) (*spec.ApiSpec, error) {
+	if env.UseExperimental() {
+		return apiParser.Parse(filename, "")
+	}
+
 	astParser := ast.NewParser(ast.WithParserPrefix(filepath.Base(filename)), ast.WithParserDebug())
 	parsedApi, err := astParser.Parse(filename)
 	if err != nil {

+ 5 - 0
tools/goctl/api/parser/testdata/test.api

@@ -11,6 +11,11 @@
     }
 
     // service doc
+    @server(
+        group: test
+        middleware: m1,m2
+        prefix: v1
+    )
     service greet-api {
         // handler doc
         @handler GreetHandler // handler comment

+ 4 - 3
tools/goctl/api/spec/spec.go

@@ -21,9 +21,9 @@ type (
 
 	// ApiSpec describes an api file
 	ApiSpec struct {
-		Info    Info
-		Syntax  ApiSyntax
-		Imports []Import
+		Info    Info      // Deprecated: useless expression
+		Syntax  ApiSyntax // Deprecated: useless expression
+		Imports []Import  // Deprecated: useless expression
 		Types   []Type
 		Service Service
 	}
@@ -70,6 +70,7 @@ type (
 
 	// Route describes api route
 	Route struct {
+		// Deprecated: Use Service AtServer instead.
 		AtServerAnnotation Annotation
 		Method             string
 		Path               string

+ 1 - 1
tools/goctl/internal/version/version.go

@@ -6,7 +6,7 @@ import (
 )
 
 // BuildVersion is the version of goctl.
-const BuildVersion = "1.5.0"
+const BuildVersion = "1.5.1"
 
 var tag = map[string]int{"pre-alpha": 0, "alpha": 1, "pre-bata": 2, "beta": 3, "released": 4, "": 5}
 

+ 15 - 1
tools/goctl/pkg/env/env.go

@@ -25,11 +25,14 @@ const (
 	GoctlDebug             = "GOCTL_DEBUG"
 	GoctlCache             = "GOCTL_CACHE"
 	GoctlVersion           = "GOCTL_VERSION"
+	GoctlExperimental      = "GOCTL_EXPERIMENTAL"
 	ProtocVersion          = "PROTOC_VERSION"
 	ProtocGenGoVersion     = "PROTOC_GEN_GO_VERSION"
 	ProtocGenGoGRPCVersion = "PROTO_GEN_GO_GRPC_VERSION"
 
-	envFileDir = "env"
+	envFileDir      = "env"
+	ExperimentalOn  = "on"
+	ExperimentalOff = "off"
 )
 
 // init initializes the goctl environment variables, the environment variables of the function are set in order,
@@ -56,6 +59,8 @@ func init() {
 		if value := existsEnv.GetStringOr(GoctlCache, ""); value != "" {
 			goctlEnv.SetKV(GoctlCache, value)
 		}
+		experimental := existsEnv.GetOr(GoctlExperimental, ExperimentalOff)
+		goctlEnv.SetKV(GoctlExperimental, experimental)
 	}
 	if !goctlEnv.HasKey(GoctlHome) {
 		goctlEnv.SetKV(GoctlHome, defaultGoctlHome)
@@ -69,7 +74,12 @@ func init() {
 		goctlEnv.SetKV(GoctlCache, cacheDir)
 	}
 
+	if !goctlEnv.HasKey(GoctlExperimental) {
+		goctlEnv.SetKV(GoctlExperimental, ExperimentalOff)
+	}
+
 	goctlEnv.SetKV(GoctlVersion, version.BuildVersion)
+
 	protocVer, _ := protoc.Version()
 	goctlEnv.SetKV(ProtocVersion, protocVer)
 
@@ -92,6 +102,10 @@ func GetOr(key, def string) string {
 	return goctlEnv.GetStringOr(key, def)
 }
 
+func UseExperimental() bool {
+	return GetOr(GoctlExperimental, ExperimentalOff) == ExperimentalOn
+}
+
 func readEnv(goctlHome string) *sortedmap.SortedMap {
 	envFile := filepath.Join(goctlHome, envFileDir)
 	data, err := os.ReadFile(envFile)

+ 32 - 0
tools/goctl/pkg/parser/api/assertx/error.go

@@ -0,0 +1,32 @@
+package assertx
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+// ErrorOrigin is used to assert error and print source and error.
+func ErrorOrigin(t *testing.T, source string, err ...error) {
+	if len(err) == 0 {
+		t.Fatalf("expected errors, got 0 error")
+		return
+	}
+	for _, e := range err {
+		fmt.Printf("<%s>: %v\n", source, e)
+		assert.Error(t, e)
+	}
+}
+
+// Error is used to assert error.
+func Error(t *testing.T, err ...error) {
+	if len(err) == 0 {
+		t.Fatalf("expected errors, got 0 error")
+		return
+	}
+	for _, e := range err {
+		fmt.Println(e)
+		assert.Error(t, e)
+	}
+}

+ 223 - 0
tools/goctl/pkg/parser/api/ast/ast.go

@@ -0,0 +1,223 @@
+package ast
+
+import (
+	"fmt"
+	"io"
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+	"github.com/zeromicro/go-zero/tools/goctl/util"
+)
+
+// Node represents a node in the AST.
+type Node interface {
+	// Pos returns the position of the first character belonging to the node.
+	Pos() token.Position
+	// End returns the position of the first character immediately after the node.
+	End() token.Position
+	// Format returns the node's text after format.
+	Format(...string) string
+	// HasHeadCommentGroup returns true if the node has head comment group.
+	HasHeadCommentGroup() bool
+	// HasLeadingCommentGroup returns true if the node has leading comment group.
+	HasLeadingCommentGroup() bool
+	// CommentGroup returns the node's head comment group and leading comment group.
+	CommentGroup() (head, leading CommentGroup)
+}
+
+// Stmt represents a statement in the AST.
+type Stmt interface {
+	Node
+	stmtNode()
+}
+
+// Expr represents an expression in the AST.
+type Expr interface {
+	Node
+	exprNode()
+}
+
+// AST represents a parsed API file.
+type AST struct {
+	Filename     string
+	Stmts        []Stmt
+	readPosition int
+}
+
+// TokenNode represents a token node in the AST.
+type TokenNode struct {
+	// HeadCommentGroup are the comments in the previous lines.
+	HeadCommentGroup CommentGroup
+	// Token is the token of the node.
+	Token token.Token
+	// LeadingCommentGroup are the trailing comments in the same line.
+	LeadingCommentGroup CommentGroup
+
+	// headFlag and leadingFlag are comment flags used only when transferring another Node to a TokenNode.
+	// A true headFlag does not imply that HeadCommentGroup is non-empty,
+	// and a true leadingFlag does not imply that LeadingCommentGroup is non-empty.
+	headFlag, leadingFlag bool
+}
+
+// NewTokenNode creates and returns a new TokenNode.
+func NewTokenNode(tok token.Token) *TokenNode {
+	return &TokenNode{Token: tok}
+}
+
+// IsEmptyString returns true if the node is empty string.
+func (t *TokenNode) IsEmptyString() bool {
+	return t.Equal("")
+}
+
+// IsZeroString returns true if the node is zero string.
+func (t *TokenNode) IsZeroString() bool {
+	return t.Equal(`""`) || t.Equal("``")
+}
+
+// Equal returns true if the node's text is equal to the given text.
+func (t *TokenNode) Equal(s string) bool {
+	return t.Token.Text == s
+}
+
+// SetLeadingCommentGroup sets the node's leading comment group.
+func (t *TokenNode) SetLeadingCommentGroup(cg CommentGroup) {
+	t.LeadingCommentGroup = cg
+}
+
+func (t *TokenNode) HasLeadingCommentGroup() bool {
+	return t.LeadingCommentGroup.Valid() || t.leadingFlag
+}
+
+func (t *TokenNode) HasHeadCommentGroup() bool {
+	return t.HeadCommentGroup.Valid() || t.headFlag
+}
+
+func (t *TokenNode) CommentGroup() (head, leading CommentGroup) {
+	return t.HeadCommentGroup, t.LeadingCommentGroup
+}
+
+// PeekFirstLeadingComment returns the first leading comment of the node.
+func (t *TokenNode) PeekFirstLeadingComment() *CommentStmt {
+	if len(t.LeadingCommentGroup) > 0 {
+		return t.LeadingCommentGroup[0]
+	}
+	return nil
+}
+
+// PeekFirstHeadComment returns the first head comment of the node.
+func (t *TokenNode) PeekFirstHeadComment() *CommentStmt {
+	if len(t.HeadCommentGroup) > 0 {
+		return t.HeadCommentGroup[0]
+	}
+	return nil
+}
+
+func (t *TokenNode) Format(prefix ...string) string {
+	p := peekOne(prefix)
+	var textList []string
+	for _, v := range t.HeadCommentGroup {
+		textList = append(textList, v.Format(p))
+	}
+
+	var tokenText = p + t.Token.Text
+	var validLeadingCommentGroup CommentGroup
+	for _, e := range t.LeadingCommentGroup {
+		if util.IsEmptyStringOrWhiteSpace(e.Comment.Text) {
+			continue
+		}
+		validLeadingCommentGroup = append(validLeadingCommentGroup, e)
+	}
+
+	if len(validLeadingCommentGroup) > 0 {
+		tokenText = tokenText + WhiteSpace + t.LeadingCommentGroup.Join(WhiteSpace)
+	}
+
+	textList = append(textList, tokenText)
+	return strings.Join(textList, NewLine)
+}
+
+func (t *TokenNode) Pos() token.Position {
+	if len(t.HeadCommentGroup) > 0 {
+		return t.PeekFirstHeadComment().Pos()
+	}
+	return t.Token.Position
+}
+
+func (t *TokenNode) End() token.Position {
+	if len(t.LeadingCommentGroup) > 0 {
+		return t.LeadingCommentGroup[len(t.LeadingCommentGroup)-1].End()
+	}
+	return t.Token.Position
+}
+
+// Format formats the AST.
+func (a *AST) Format(w io.Writer) {
+	fw := NewWriter(w)
+	defer fw.Flush()
+	for idx, e := range a.Stmts {
+		if e.Format() == NilIndent {
+			continue
+		}
+
+		fw.Write(withNode(e))
+		fw.NewLine()
+		switch e.(type) {
+		case *SyntaxStmt:
+			fw.NewLine()
+		case *ImportGroupStmt:
+			fw.NewLine()
+		case *ImportLiteralStmt:
+			if idx < len(a.Stmts)-1 {
+				_, ok := a.Stmts[idx+1].(*ImportLiteralStmt)
+				if !ok {
+					fw.NewLine()
+				}
+			}
+		case *InfoStmt:
+			fw.NewLine()
+		case *ServiceStmt:
+			fw.NewLine()
+		case *TypeGroupStmt:
+			fw.NewLine()
+		case *TypeLiteralStmt:
+			fw.NewLine()
+		case *CommentStmt:
+		}
+	}
+}
+
+// FormatForUnitTest formats the AST for unit test.
+func (a *AST) FormatForUnitTest(w io.Writer) {
+	fw := NewWriter(w)
+	defer fw.Flush()
+	for _, e := range a.Stmts {
+		text := e.Format()
+		if text == NilIndent {
+			continue
+		}
+
+		fw.WriteText(text)
+	}
+}
+
+// Print prints the AST.
+func (a *AST) Print() {
+	_ = Print(a)
+}
+
+// SyntaxError represents a syntax error.
+func SyntaxError(pos token.Position, format string, v ...interface{}) error {
+	return fmt.Errorf("syntax error: %s %s", pos.String(), fmt.Sprintf(format, v...))
+}
+
+// DuplicateStmtError represents a duplicate statement error.
+func DuplicateStmtError(pos token.Position, msg string) error {
+	return fmt.Errorf("duplicate declaration: %s %s", pos.String(), msg)
+}
+
+func peekOne(list []string) string {
+	if len(list) == 0 {
+		return ""
+	}
+	return list[0]
+}

+ 75 - 0
tools/goctl/pkg/parser/api/ast/comment.go

@@ -0,0 +1,75 @@
+package ast
+
+import (
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+	"github.com/zeromicro/go-zero/tools/goctl/util"
+)
+
+// CommentGroup represents a list of comments.
+type CommentGroup []*CommentStmt
+
+// List returns the list of comments.
+func (cg CommentGroup) List() []string {
+	var list = make([]string, 0, len(cg))
+	for _, v := range cg {
+		comment := v.Comment.Text
+		if util.IsEmptyStringOrWhiteSpace(comment) {
+			continue
+		}
+		list = append(list, comment)
+	}
+	return list
+}
+
+// String joins and returns the comment text.
+func (cg CommentGroup) String() string {
+	return cg.Join(" ")
+}
+
+// Join joins the comments with the given separator.
+func (cg CommentGroup) Join(sep string) string {
+	if !cg.Valid() {
+		return ""
+	}
+	list := cg.List()
+	return strings.Join(list, sep)
+}
+
+// Valid reports whether the comment group contains at least one comment.
+func (cg CommentGroup) Valid() bool {
+	return len(cg) > 0
+}
+
+// CommentStmt represents a comment statement.
+type CommentStmt struct {
+	// Comment is the comment token.
+	Comment token.Token
+}
+
+func (c *CommentStmt) HasHeadCommentGroup() bool {
+	return false
+}
+
+func (c *CommentStmt) HasLeadingCommentGroup() bool {
+	return false
+}
+
+func (c *CommentStmt) CommentGroup() (head, leading CommentGroup) {
+	return
+}
+
+func (c *CommentStmt) stmtNode() {}
+
+func (c *CommentStmt) Pos() token.Position {
+	return c.Comment.Position
+}
+
+func (c *CommentStmt) End() token.Position {
+	return c.Comment.Position
+}
+
+func (c *CommentStmt) Format(prefix ...string) string {
+	return peekOne(prefix) + c.Comment.Text
+}

+ 111 - 0
tools/goctl/pkg/parser/api/ast/importstatement.go

@@ -0,0 +1,111 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
+// ImportStmt represents an import statement.
+type ImportStmt interface {
+	Stmt
+	importNode()
+}
+
+// ImportLiteralStmt represents an import literal statement.
+type ImportLiteralStmt struct {
+	// Import is the import token.
+	Import *TokenNode
+	// Value is the import value.
+	Value *TokenNode
+}
+
+func (i *ImportLiteralStmt) HasHeadCommentGroup() bool {
+	return i.Import.HasHeadCommentGroup()
+}
+
+func (i *ImportLiteralStmt) HasLeadingCommentGroup() bool {
+	return i.Value.HasLeadingCommentGroup()
+}
+
+func (i *ImportLiteralStmt) CommentGroup() (head, leading CommentGroup) {
+	return i.Import.HeadCommentGroup, i.Value.LeadingCommentGroup
+}
+
+func (i *ImportLiteralStmt) Format(prefix ...string) (result string) {
+	if i.Value.IsZeroString() {
+		return ""
+	}
+	w := NewBufferWriter()
+	importNode := transferTokenNode(i.Import, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
+	w.Write(withNode(importNode, i.Value), withMode(ModeExpectInSameLine))
+	return w.String()
+}
+
+func (i *ImportLiteralStmt) End() token.Position {
+	return i.Value.End()
+}
+
+func (i *ImportLiteralStmt) importNode() {}
+
+func (i *ImportLiteralStmt) Pos() token.Position {
+	return i.Import.Pos()
+}
+
+func (i *ImportLiteralStmt) stmtNode() {}
+
+type ImportGroupStmt struct {
+	// Import is the import token.
+	Import *TokenNode
+	// LParen is the left parenthesis token.
+	LParen *TokenNode
+	// Values is the import values.
+	Values []*TokenNode
+	// RParen is the right parenthesis token.
+	RParen *TokenNode
+}
+
+func (i *ImportGroupStmt) HasHeadCommentGroup() bool {
+	return i.Import.HasHeadCommentGroup()
+}
+
+func (i *ImportGroupStmt) HasLeadingCommentGroup() bool {
+	return i.RParen.HasLeadingCommentGroup()
+}
+
+func (i *ImportGroupStmt) CommentGroup() (head, leading CommentGroup) {
+	return i.Import.HeadCommentGroup, i.RParen.LeadingCommentGroup
+}
+
+func (i *ImportGroupStmt) Format(prefix ...string) string {
+	var textList []string
+	for _, v := range i.Values {
+		if v.IsZeroString() {
+			continue
+		}
+		textList = append(textList, v.Format(Indent))
+	}
+	if len(textList) == 0 {
+		return ""
+	}
+
+	importNode := transferTokenNode(i.Import, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
+	w := NewBufferWriter()
+	w.Write(withNode(importNode, i.LParen), expectSameLine())
+	w.NewLine()
+	for _, v := range i.Values {
+		node := transferTokenNode(v, withTokenNodePrefix(peekOne(prefix)+Indent))
+		w.Write(withNode(node), expectSameLine())
+		w.NewLine()
+	}
+	w.Write(withNode(transferTokenNode(i.RParen, withTokenNodePrefix(prefix...))))
+	return w.String()
+}
+
+func (i *ImportGroupStmt) End() token.Position {
+	return i.RParen.End()
+}
+
+func (i *ImportGroupStmt) importNode() {}
+
+func (i *ImportGroupStmt) Pos() token.Position {
+	return i.Import.Pos()
+}
+
+func (i *ImportGroupStmt) stmtNode() {}

+ 65 - 0
tools/goctl/pkg/parser/api/ast/infostatement.go

@@ -0,0 +1,65 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
+// InfoStmt is the info statement.
+type InfoStmt struct {
+	// Info is the info keyword.
+	Info *TokenNode
+	// LParen is the left parenthesis.
+	LParen *TokenNode
+	// Values is the info values.
+	Values []*KVExpr
+	// RParen is the right parenthesis.
+	RParen *TokenNode
+}
+
+func (i *InfoStmt) HasHeadCommentGroup() bool {
+	return i.Info.HasHeadCommentGroup()
+}
+
+func (i *InfoStmt) HasLeadingCommentGroup() bool {
+	return i.RParen.HasLeadingCommentGroup()
+}
+
+func (i *InfoStmt) CommentGroup() (head, leading CommentGroup) {
+	return i.Info.HeadCommentGroup, i.RParen.LeadingCommentGroup
+}
+
+func (i *InfoStmt) Format(prefix ...string) string {
+	if len(i.Values) == 0 {
+		return ""
+	}
+	var textList []string
+	for _, v := range i.Values {
+		if v.Value.IsZeroString() {
+			continue
+		}
+		textList = append(textList, v.Format(Indent))
+	}
+	if len(textList) == 0 {
+		return ""
+	}
+
+	w := NewBufferWriter()
+	infoNode := transferTokenNode(i.Info, withTokenNodePrefix(prefix...), ignoreLeadingComment())
+	w.Write(withNode(infoNode, i.LParen))
+	w.NewLine()
+	for _, v := range i.Values {
+		node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
+		w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
+		w.NewLine()
+	}
+	w.Write(withNode(transferTokenNode(i.RParen, withTokenNodePrefix(prefix...))))
+	return w.String()
+}
+
+func (i *InfoStmt) End() token.Position {
+	return i.RParen.End()
+}
+
+func (i *InfoStmt) Pos() token.Position {
+	return i.Info.Pos()
+}
+
+func (i *InfoStmt) stmtNode() {}

+ 39 - 0
tools/goctl/pkg/parser/api/ast/kvexpression.go

@@ -0,0 +1,39 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
+// KVExpr is a key value expression.
+type KVExpr struct {
+	// Key is the key of the key value expression.
+	Key *TokenNode
+	// Value is the value of the key value expression.
+	Value *TokenNode
+}
+
+func (i *KVExpr) HasHeadCommentGroup() bool {
+	return i.Key.HasHeadCommentGroup()
+}
+
+func (i *KVExpr) HasLeadingCommentGroup() bool {
+	return i.Value.HasLeadingCommentGroup()
+}
+
+func (i *KVExpr) CommentGroup() (head, leading CommentGroup) {
+	return i.Key.HeadCommentGroup, i.Value.LeadingCommentGroup
+}
+
+func (i *KVExpr) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	w.Write(withNode(i.Key, i.Value), withPrefix(prefix...), withInfix(Indent), withRawText())
+	return w.String()
+}
+
+func (i *KVExpr) End() token.Position {
+	return i.Value.End()
+}
+
+func (i *KVExpr) Pos() token.Position {
+	return i.Key.Pos()
+}
+
+func (i *KVExpr) exprNode() {}

+ 237 - 0
tools/goctl/pkg/parser/api/ast/print.go

@@ -0,0 +1,237 @@
+package ast
+
+import (
+	"fmt"
+	"go/token"
+	"io"
+	"os"
+	"reflect"
+
+	apitoken "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+// A FieldFilter may be provided to Fprint to control the output.
+type FieldFilter func(name string, value reflect.Value) bool
+
+// NotNilFilter returns true for field values that are not nil,
+// it returns false otherwise.
+func NotNilFilter(_ string, v reflect.Value) bool {
+	switch v.Kind() {
+	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice:
+		return !v.IsNil()
+	}
+	return true
+}
+
+// Fprint prints the value of x to the writer w.
+func Fprint(w io.Writer, x interface{}, f FieldFilter) error {
+	return fprint(w, x, f)
+}
+
+func fprint(w io.Writer, x interface{}, f FieldFilter) (err error) {
+	// setup printer
+	p := printer{
+		output: w,
+		filter: f,
+		ptrmap: make(map[interface{}]int),
+		last:   '\n', // force printing of line number on first line
+	}
+
+	// install error handler
+	defer func() {
+		if e := recover(); e != nil {
+			err = e.(localError).err // re-panics if it's not a localError
+		}
+	}()
+
+	// print x
+	if x == nil {
+		p.printf("nil\n")
+		return
+	}
+	p.print(reflect.ValueOf(x))
+	p.printf("\n")
+
+	return
+}
+
+func Print(x interface{}) error {
+	return Fprint(os.Stdout, x, NotNilFilter)
+}
+
+type printer struct {
+	output       io.Writer
+	filter       FieldFilter
+	ptrmap       map[interface{}]int // *T -> line number
+	prefixIndent int         // current indentation level
+	last         byte        // the last byte processed by Write
+	line         int         // current line number
+}
+
+var prefixIndent = []byte(".  ")
+
+// Write implements io.Writer.
+func (p *printer) Write(data []byte) (n int, err error) {
+	var m int
+	for i, b := range data {
+		// invariant: data[0:n] has been written
+		if b == '\n' {
+			m, err = p.output.Write(data[n : i+1])
+			n += m
+			if err != nil {
+				return
+			}
+			p.line++
+		} else if p.last == '\n' {
+			_, err = fmt.Fprintf(p.output, "%6d  ", p.line)
+			if err != nil {
+				return
+			}
+			for j := p.prefixIndent; j > 0; j-- {
+				_, err = p.output.Write(prefixIndent)
+				if err != nil {
+					return
+				}
+			}
+		}
+		p.last = b
+	}
+	if len(data) > n {
+		m, err = p.output.Write(data[n:])
+		n += m
+	}
+	return
+}
+
+// localError wraps locally caught errors so we can distinguish
+// them from genuine panics which we don't want to return as errors.
+type localError struct {
+	err error
+}
+
+// printf is a convenience wrapper that takes care of print errors.
+func (p *printer) printf(format string, args ...interface{}) {
+	if _, err := fmt.Fprintf(p, format, args...); err != nil {
+		panic(localError{err})
+	}
+}
+
+// Implementation note: Print is written for AST nodes but could be
+// used to print arbitrary data structures; such a version should
+// probably be in a different package.
+//
+// Note: This code detects (some) cycles created via pointers but
+// not cycles that are created via slices or maps containing the
+// same slice or map. Code for general data structures probably
+// should catch those as well.
+
+func (p *printer) print(x reflect.Value) {
+	if !NotNilFilter("", x) {
+		p.printf("nil")
+		return
+	}
+
+	switch x.Kind() {
+	case reflect.Interface:
+		p.print(x.Elem())
+
+	case reflect.Map:
+		p.printf("%s (len = %d) {", x.Type(), x.Len())
+		if x.Len() > 0 {
+			p.prefixIndent++
+			p.printf("\n")
+			for _, key := range x.MapKeys() {
+				p.print(key)
+				p.printf(": ")
+				p.print(x.MapIndex(key))
+				p.printf("\n")
+			}
+			p.prefixIndent--
+		}
+		p.printf("}")
+
+	case reflect.Pointer:
+		p.printf("*")
+		// type-checked ASTs may contain cycles - use ptrmap
+		// to keep track of objects that have been printed
+		// already and print the respective line number instead
+		ptr := x.Interface()
+		if line, exists := p.ptrmap[ptr]; exists {
+			p.printf("(obj @ %d)", line)
+		} else {
+			p.ptrmap[ptr] = p.line
+			p.print(x.Elem())
+		}
+
+	case reflect.Array:
+		p.printf("%s {", x.Type())
+		if x.Len() > 0 {
+			p.prefixIndent++
+			p.printf("\n")
+			for i, n := 0, x.Len(); i < n; i++ {
+				p.printf("%d: ", i)
+				p.print(x.Index(i))
+				p.printf("\n")
+			}
+			p.prefixIndent--
+		}
+		p.printf("}")
+
+	case reflect.Slice:
+		if s, ok := x.Interface().([]byte); ok {
+			p.printf("%#q", s)
+			return
+		}
+		p.printf("%s (len = %d) {", x.Type(), x.Len())
+		if x.Len() > 0 {
+			p.prefixIndent++
+			p.printf("\n")
+			for i, n := 0, x.Len(); i < n; i++ {
+				p.printf("%d: ", i)
+				p.print(x.Index(i))
+				p.printf("\n")
+			}
+			p.prefixIndent--
+		}
+		p.printf("}")
+
+	case reflect.Struct:
+		if val, ok := x.Interface().(apitoken.Position); ok {
+			p.printf("%s", val.String())
+			return
+		}
+		t := x.Type()
+		p.printf("%s {", t)
+		p.prefixIndent++
+		first := true
+		for i, n := 0, t.NumField(); i < n; i++ {
+			// exclude non-exported fields because their
+			// values cannot be accessed via reflection
+			if name := t.Field(i).Name; token.IsExported(name) {
+				value := x.Field(i)
+				if p.filter == nil || p.filter(name, value) {
+					if first {
+						p.printf("\n")
+						first = false
+					}
+					p.printf("%s: ", name)
+					p.print(value)
+					p.printf("\n")
+				}
+			}
+		}
+		p.prefixIndent--
+		p.printf("}")
+
+	default:
+		v := x.Interface()
+		switch v := v.(type) {
+		case string:
+			// print strings in quotes
+			p.printf("%q", v)
+			return
+		}
+		// default
+		p.printf("%v", v)
+	}
+}

+ 577 - 0
tools/goctl/pkg/parser/api/ast/servicestatement.go

@@ -0,0 +1,577 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
// AtServerStmt represents an @server statement, e.g.
//
//	@server (
//		group:  foo
//		prefix: /v1
//	)
type AtServerStmt struct {
	// AtServer is the @server token.
	AtServer *TokenNode
	// LParen is the left parenthesis token.
	LParen *TokenNode
	// Values is the key-value pairs.
	Values []*KVExpr
	// RParen is the right parenthesis token.
	RParen *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the @server token.
func (a *AtServerStmt) HasHeadCommentGroup() bool {
	return a.AtServer.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the right parenthesis.
func (a *AtServerStmt) HasLeadingCommentGroup() bool {
	return a.RParen.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the @server token and the
// trailing comments of the right parenthesis.
func (a *AtServerStmt) CommentGroup() (head, leading CommentGroup) {
	return a.AtServer.HeadCommentGroup, a.RParen.LeadingCommentGroup
}

// Format returns the formatted @server block, or an empty string when
// there are no values or every value is zero-valued.
func (a *AtServerStmt) Format(prefix ...string) string {
	if len(a.Values) == 0 {
		return ""
	}
	// textList is only used to detect whether at least one non-zero value
	// exists. NOTE(review): the write loop below still emits zero-valued
	// entries — confirm this asymmetry is intended.
	var textList []string
	for _, v := range a.Values {
		if v.Value.IsZeroString() {
			continue
		}
		textList = append(textList, v.Format())
	}
	if len(textList) == 0 {
		return ""
	}

	w := NewBufferWriter()
	atServerNode := transferTokenNode(a.AtServer, withTokenNodePrefix(prefix...), ignoreLeadingComment())
	w.Write(withNode(atServerNode, a.LParen), expectSameLine())
	w.NewLine()
	for _, v := range a.Values {
		node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
		w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
		w.NewLine()
	}
	w.Write(withNode(transferTokenNode(a.RParen, withTokenNodePrefix(prefix...))))
	return w.String()
}

// End returns the end position of the statement (the right parenthesis).
func (a *AtServerStmt) End() token.Position {
	return a.RParen.End()
}

// Pos returns the starting position of the statement (the @server token).
func (a *AtServerStmt) Pos() token.Position {
	return a.AtServer.Pos()
}

// stmtNode marks AtServerStmt as a Stmt.
func (a *AtServerStmt) stmtNode() {}
+
// AtDocStmt is the interface implemented by @doc statements,
// i.e. AtDocLiteralStmt and AtDocGroupStmt.
type AtDocStmt interface {
	Stmt
	// atDocNode restricts implementations to this package.
	atDocNode()
}
+
// AtDocLiteralStmt represents an @doc statement with a single literal
// value, e.g. `@doc "foo"`.
type AtDocLiteralStmt struct {
	// AtDoc is the @doc token.
	AtDoc *TokenNode
	// Value is the literal value token.
	Value *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the @doc token.
func (a *AtDocLiteralStmt) HasHeadCommentGroup() bool {
	return a.AtDoc.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the value token.
func (a *AtDocLiteralStmt) HasLeadingCommentGroup() bool {
	return a.Value.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the @doc token and the
// trailing comments of the value token.
func (a *AtDocLiteralStmt) CommentGroup() (head, leading CommentGroup) {
	return a.AtDoc.HeadCommentGroup, a.Value.LeadingCommentGroup
}

// Format returns the formatted statement on a single line, or an empty
// string when the value is zero-valued.
func (a *AtDocLiteralStmt) Format(prefix ...string) string {
	if a.Value.IsZeroString() {
		return ""
	}
	w := NewBufferWriter()
	atDocNode := transferTokenNode(a.AtDoc, withTokenNodePrefix(prefix...), ignoreLeadingComment())
	valueNode := transferTokenNode(a.Value, ignoreHeadComment())
	w.Write(withNode(atDocNode, valueNode), expectSameLine())
	return w.String()
}

// End returns the end position of the statement (the value token).
func (a *AtDocLiteralStmt) End() token.Position {
	return a.Value.End()
}

// atDocNode marks AtDocLiteralStmt as an AtDocStmt.
func (a *AtDocLiteralStmt) atDocNode() {}

// Pos returns the starting position of the statement (the @doc token).
func (a *AtDocLiteralStmt) Pos() token.Position {
	return a.AtDoc.Pos()
}

// stmtNode marks AtDocLiteralStmt as a Stmt.
func (a *AtDocLiteralStmt) stmtNode() {}
+
// AtDocGroupStmt represents a parenthesized, multi-value @doc statement.
type AtDocGroupStmt struct {
	// AtDoc is the @doc token.
	AtDoc *TokenNode
	// LParen is the left parenthesis token.
	LParen *TokenNode
	// Values is the key-value pairs.
	Values []*KVExpr
	// RParen is the right parenthesis token.
	RParen *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the @doc token.
func (a *AtDocGroupStmt) HasHeadCommentGroup() bool {
	return a.AtDoc.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the right parenthesis.
func (a *AtDocGroupStmt) HasLeadingCommentGroup() bool {
	return a.RParen.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the @doc token and the
// trailing comments of the right parenthesis.
func (a *AtDocGroupStmt) CommentGroup() (head, leading CommentGroup) {
	return a.AtDoc.HeadCommentGroup, a.RParen.LeadingCommentGroup
}

// Format returns the formatted @doc block, or an empty string when there
// are no values or every value is zero-valued.
func (a *AtDocGroupStmt) Format(prefix ...string) string {
	if len(a.Values) == 0 {
		return ""
	}
	// textList is only used to detect whether at least one non-zero value
	// exists. NOTE(review): the write loop below still emits zero-valued
	// entries — confirm this asymmetry is intended.
	var textList []string
	for _, v := range a.Values {
		if v.Value.IsZeroString() {
			continue
		}
		textList = append(textList, v.Format(peekOne(prefix)+Indent))
	}
	if len(textList) == 0 {
		return ""
	}

	w := NewBufferWriter()
	atDocNode := transferTokenNode(a.AtDoc, withTokenNodePrefix(prefix...), ignoreLeadingComment())
	w.Write(withNode(atDocNode, a.LParen), expectSameLine())
	w.NewLine()
	for _, v := range a.Values {
		node := transferTokenNode(v.Key, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment())
		w.Write(withNode(node, v.Value), expectIndentInfix(), expectSameLine())
		w.NewLine()
	}
	w.Write(withNode(transferTokenNode(a.RParen, withTokenNodePrefix(prefix...))))
	return w.String()
}

// End returns the end position of the statement (the right parenthesis).
func (a *AtDocGroupStmt) End() token.Position {
	return a.RParen.End()
}

// atDocNode marks AtDocGroupStmt as an AtDocStmt.
func (a *AtDocGroupStmt) atDocNode() {}

// Pos returns the starting position of the statement (the @doc token).
func (a *AtDocGroupStmt) Pos() token.Position {
	return a.AtDoc.Pos()
}

// stmtNode marks AtDocGroupStmt as a Stmt.
func (a *AtDocGroupStmt) stmtNode() {}
+
// ServiceStmt represents a service declaration block, optionally preceded
// by an @server statement.
type ServiceStmt struct {
	// AtServerStmt is the optional @server statement; may be nil.
	AtServerStmt *AtServerStmt
	// Service is the service keyword token.
	Service *TokenNode
	// Name is the service name expression.
	Name *ServiceNameExpr
	// LBrace is the left brace token.
	LBrace *TokenNode
	// Routes is the list of service items (handler + route pairs).
	Routes []*ServiceItemStmt
	// RBrace is the right brace token.
	RBrace *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the statement,
// considering the @server statement first when present.
func (s *ServiceStmt) HasHeadCommentGroup() bool {
	if s.AtServerStmt != nil {
		return s.AtServerStmt.HasHeadCommentGroup()
	}
	return s.Service.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the right brace.
func (s *ServiceStmt) HasLeadingCommentGroup() bool {
	return s.RBrace.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments (from the @server statement when
// present, otherwise the service token) and the trailing comments of the
// right brace.
func (s *ServiceStmt) CommentGroup() (head, leading CommentGroup) {
	if s.AtServerStmt != nil {
		head, _ = s.AtServerStmt.CommentGroup()
		return head, s.RBrace.LeadingCommentGroup
	}
	return s.Service.HeadCommentGroup, s.RBrace.LeadingCommentGroup
}

// Format returns the formatted service block, including the optional
// @server statement above it.
func (s *ServiceStmt) Format(prefix ...string) string {
	w := NewBufferWriter()
	if s.AtServerStmt != nil {
		text := s.AtServerStmt.Format()
		if len(text) > 0 {
			w.WriteText(text)
			w.NewLine()
		}
	}
	serviceNode := transferTokenNode(s.Service, withTokenNodePrefix(prefix...))
	w.Write(withNode(serviceNode, s.Name, s.LBrace), expectSameLine())
	if len(s.Routes) == 0 {
		// Empty service: close the block immediately.
		w.Write(withNode(transferTokenNode(s.RBrace, withTokenNodePrefix(prefix...))))
		return w.String()
	}
	w.NewLine()
	for idx, route := range s.Routes {
		routeNode := transfer2TokenNode(route, false, withTokenNodePrefix(peekOne(prefix)+Indent))
		w.Write(withNode(routeNode))
		// Insert a line break between routes, but not after the last one.
		if idx < len(s.Routes)-1 {
			w.NewLine()
		}
	}
	w.Write(withNode(transferTokenNode(s.RBrace, withTokenNodePrefix(prefix...))))
	return w.String()
}

// End returns the end position of the statement (the right brace).
func (s *ServiceStmt) End() token.Position {
	return s.RBrace.End()
}

// Pos returns the starting position of the statement, preferring the
// @server statement when present.
func (s *ServiceStmt) Pos() token.Position {
	if s.AtServerStmt != nil {
		return s.AtServerStmt.Pos()
	}
	return s.Service.Pos()
}

// stmtNode marks ServiceStmt as a Stmt.
func (s *ServiceStmt) stmtNode() {}
+
+type ServiceNameExpr struct {
+	Name *TokenNode
+}
+
+func (s *ServiceNameExpr) HasHeadCommentGroup() bool {
+	return s.Name.HasHeadCommentGroup()
+}
+
+func (s *ServiceNameExpr) HasLeadingCommentGroup() bool {
+	return s.Name.HasLeadingCommentGroup()
+}
+
+func (s *ServiceNameExpr) CommentGroup() (head, leading CommentGroup) {
+	return s.Name.HeadCommentGroup, s.Name.LeadingCommentGroup
+}
+
+func (s *ServiceNameExpr) Format(...string) string {
+	w := NewBufferWriter()
+	w.WriteText(s.Name.Format())
+	return w.String()
+}
+
+func (s *ServiceNameExpr) End() token.Position {
+	return s.Name.End()
+}
+
+func (s *ServiceNameExpr) Pos() token.Position {
+	return s.Name.Pos()
+}
+
+func (s *ServiceNameExpr) exprNode() {}
+
+type AtHandlerStmt struct {
+	AtHandler *TokenNode
+	Name      *TokenNode
+}
+
+func (a *AtHandlerStmt) HasHeadCommentGroup() bool {
+	return a.AtHandler.HasHeadCommentGroup()
+}
+
+func (a *AtHandlerStmt) HasLeadingCommentGroup() bool {
+	return a.Name.HasLeadingCommentGroup()
+}
+
+func (a *AtHandlerStmt) CommentGroup() (head, leading CommentGroup) {
+	return a.AtHandler.HeadCommentGroup, a.Name.LeadingCommentGroup
+}
+
+func (a *AtHandlerStmt) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	atDocNode := transferTokenNode(a.AtHandler, withTokenNodePrefix(prefix...), ignoreLeadingComment())
+	nameNode := transferTokenNode(a.Name, ignoreHeadComment())
+	w.Write(withNode(atDocNode, nameNode), expectSameLine())
+	return w.String()
+}
+
+func (a *AtHandlerStmt) End() token.Position {
+	return a.Name.End()
+}
+
+func (a *AtHandlerStmt) Pos() token.Position {
+	return a.AtHandler.Pos()
+}
+
+func (a *AtHandlerStmt) stmtNode() {}
+
// ServiceItemStmt represents a single item of a service block: an
// optional @doc statement, an @handler statement and a route.
type ServiceItemStmt struct {
	// AtDoc is the optional @doc statement; may be nil.
	AtDoc AtDocStmt
	// AtHandler is the @handler statement.
	AtHandler *AtHandlerStmt
	// Route is the route statement.
	Route *RouteStmt
}

// HasHeadCommentGroup reports whether comments precede the item,
// considering the @doc statement first when present.
func (s *ServiceItemStmt) HasHeadCommentGroup() bool {
	if s.AtDoc != nil {
		return s.AtDoc.HasHeadCommentGroup()
	}
	return s.AtHandler.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the route.
func (s *ServiceItemStmt) HasLeadingCommentGroup() bool {
	return s.Route.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the first statement of the
// item (@doc when present, otherwise @handler) and the trailing comments
// of the route.
func (s *ServiceItemStmt) CommentGroup() (head, leading CommentGroup) {
	_, leading = s.Route.CommentGroup()
	if s.AtDoc != nil {
		head, _ = s.AtDoc.CommentGroup()
		return head, leading
	}
	head, _ = s.AtHandler.CommentGroup()
	return head, leading
}

// Format returns the formatted item: optional @doc, then @handler, then
// the route, each on its own line.
// NOTE(review): AtHandler and Route are dereferenced unconditionally here
// and in Pos/End — presumably the parser guarantees both are non-nil;
// confirm against the parser.
func (s *ServiceItemStmt) Format(prefix ...string) string {
	w := NewBufferWriter()
	if s.AtDoc != nil {
		w.WriteText(s.AtDoc.Format(prefix...))
		w.NewLine()
	}
	w.WriteText(s.AtHandler.Format(prefix...))
	w.NewLine()
	routeNode := transfer2TokenNode(s.Route, false, withTokenNodePrefix(prefix...))
	w.Write(withNode(routeNode))
	w.NewLine()
	return w.String()
}

// End returns the end position of the item (the route).
func (s *ServiceItemStmt) End() token.Position {
	return s.Route.End()
}

// Pos returns the starting position of the item, preferring the @doc
// statement when present.
func (s *ServiceItemStmt) Pos() token.Position {
	if s.AtDoc != nil {
		return s.AtDoc.Pos()
	}
	return s.AtHandler.Pos()
}

// stmtNode marks ServiceItemStmt as a Stmt.
func (s *ServiceItemStmt) stmtNode() {}
+
+type RouteStmt struct {
+	Method   *TokenNode
+	Path     *PathExpr
+	Request  *BodyStmt
+	Returns  *TokenNode
+	Response *BodyStmt
+}
+
+func (r *RouteStmt) HasHeadCommentGroup() bool {
+	return r.Method.HasHeadCommentGroup()
+}
+
+func (r *RouteStmt) HasLeadingCommentGroup() bool {
+	if r.Response != nil {
+		return r.Response.HasLeadingCommentGroup()
+	} else if r.Returns != nil {
+		return r.Returns.HasLeadingCommentGroup()
+	} else if r.Request != nil {
+		return r.Request.HasLeadingCommentGroup()
+	}
+	return r.Path.HasLeadingCommentGroup()
+}
+
+func (r *RouteStmt) CommentGroup() (head, leading CommentGroup) {
+	head, _ = r.Method.CommentGroup()
+	if r.Response != nil {
+		_, leading = r.Response.CommentGroup()
+	} else if r.Returns != nil {
+		_, leading = r.Returns.CommentGroup()
+	} else if r.Request != nil {
+		_, leading = r.Request.CommentGroup()
+	}
+	return head, leading
+}
+
+func (r *RouteStmt) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	methodNode := transferTokenNode(r.Method, withTokenNodePrefix(prefix...), ignoreLeadingComment())
+	if r.Response != nil {
+		if r.Response.Body == nil {
+			r.Response.RParen = transferTokenNode(r.Response.RParen, ignoreHeadComment())
+			if r.Request != nil {
+				w.Write(withNode(methodNode, r.Path, r.Request), expectSameLine())
+			} else {
+				w.Write(withNode(methodNode, r.Path), expectSameLine())
+			}
+		} else {
+			r.Response.RParen = transferTokenNode(r.Response.RParen, ignoreHeadComment())
+			if r.Request != nil {
+				w.Write(withNode(methodNode, r.Path, r.Request, r.Returns, r.Response), expectSameLine())
+			} else {
+				w.Write(withNode(methodNode, r.Path, r.Returns, r.Response), expectSameLine())
+			}
+		}
+	} else if r.Request != nil {
+		r.Request.RParen = transferTokenNode(r.Request.RParen, ignoreHeadComment())
+		w.Write(withNode(methodNode, r.Path, r.Request), expectSameLine())
+	} else {
+		pathNode := transferTokenNode(r.Path.Value, ignoreHeadComment())
+		w.Write(withNode(methodNode, pathNode), expectSameLine())
+	}
+	return w.String()
+}
+
+func (r *RouteStmt) End() token.Position {
+	if r.Response != nil {
+		return r.Response.End()
+	}
+	if r.Returns != nil {
+		return r.Returns.Pos()
+	}
+	if r.Request != nil {
+		return r.Request.End()
+	}
+	return r.Path.End()
+}
+
+func (r *RouteStmt) Pos() token.Position {
+	return r.Method.Pos()
+}
+
+func (r *RouteStmt) stmtNode() {}
+
+type PathExpr struct {
+	Value *TokenNode
+}
+
+func (p *PathExpr) HasHeadCommentGroup() bool {
+	return p.Value.HasHeadCommentGroup()
+}
+
+func (p *PathExpr) HasLeadingCommentGroup() bool {
+	return p.Value.HasLeadingCommentGroup()
+}
+
+func (p *PathExpr) CommentGroup() (head, leading CommentGroup) {
+	return p.Value.CommentGroup()
+}
+
+func (p *PathExpr) Format(prefix ...string) string {
+	pathNode := transferTokenNode(p.Value, ignoreComment())
+	return pathNode.Format(prefix...)
+}
+
+func (p *PathExpr) End() token.Position {
+	return p.Value.End()
+}
+
+func (p *PathExpr) Pos() token.Position {
+	return p.Value.Pos()
+}
+
+func (p *PathExpr) exprNode() {}
+
+type BodyStmt struct {
+	LParen *TokenNode
+	Body   *BodyExpr
+	RParen *TokenNode
+}
+
+func (b *BodyStmt) HasHeadCommentGroup() bool {
+	return b.LParen.HasHeadCommentGroup()
+}
+
+func (b *BodyStmt) HasLeadingCommentGroup() bool {
+	return b.RParen.HasLeadingCommentGroup()
+}
+
+func (b *BodyStmt) CommentGroup() (head, leading CommentGroup) {
+	return b.LParen.HeadCommentGroup, b.RParen.LeadingCommentGroup
+}
+
+func (b *BodyStmt) Format(...string) string {
+	w := NewBufferWriter()
+	if b.Body == nil {
+		return ""
+	}
+	w.Write(withNode(b.LParen, b.Body, b.RParen), withInfix(NilIndent), expectSameLine())
+	return w.String()
+}
+
+func (b *BodyStmt) End() token.Position {
+	return b.RParen.End()
+}
+
+func (b *BodyStmt) Pos() token.Position {
+	return b.LParen.Pos()
+}
+
+func (b *BodyStmt) stmtNode() {}
+
// BodyExpr represents the inner expression of a request/response body,
// e.g. `Foo`, `*Foo`, `[]Foo` or `[]*Foo`.
type BodyExpr struct {
	// LBrack is the left bracket token; set for slice forms, else nil.
	LBrack *TokenNode
	// RBrack is the right bracket token; set for slice forms, else nil.
	RBrack *TokenNode
	// Star is the star token; set for pointer forms, else nil.
	Star *TokenNode
	// Value is the type name token.
	Value *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the first token of
// the expression (bracket, star or value).
func (e *BodyExpr) HasHeadCommentGroup() bool {
	if e.LBrack != nil {
		return e.LBrack.HasHeadCommentGroup()
	} else if e.Star != nil {
		return e.Star.HasHeadCommentGroup()
	} else {
		return e.Value.HasHeadCommentGroup()
	}
}

// HasLeadingCommentGroup reports whether a trailing comment follows the value token.
func (e *BodyExpr) HasLeadingCommentGroup() bool {
	return e.Value.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the first token of the
// expression and the trailing comments of the value token.
func (e *BodyExpr) CommentGroup() (head, leading CommentGroup) {
	if e.LBrack != nil {
		head = e.LBrack.HeadCommentGroup
	} else if e.Star != nil {
		head = e.Star.HeadCommentGroup
	} else {
		head = e.Value.HeadCommentGroup
	}
	return head, e.Value.LeadingCommentGroup
}

// End returns the end position of the expression (the value token).
func (e *BodyExpr) End() token.Position {
	return e.Value.End()
}

// Format returns the formatted body expression with no spacing between
// its tokens, e.g. `[]*Foo`.
func (e *BodyExpr) Format(...string) string {
	w := NewBufferWriter()
	if e.LBrack != nil {
		lbrackNode := transferTokenNode(e.LBrack, ignoreComment())
		rbrackNode := transferTokenNode(e.RBrack, ignoreComment())
		if e.Star != nil {
			// Slice of pointer: emit `[]`, `*`, then the type name.
			starNode := transferTokenNode(e.Star, ignoreComment())
			w.Write(withNode(lbrackNode, rbrackNode, starNode, e.Value), withInfix(NilIndent), expectSameLine())
		} else {
			w.Write(withNode(lbrackNode, rbrackNode, e.Value), withInfix(NilIndent), expectSameLine())
		}
	} else if e.Star != nil {
		starNode := transferTokenNode(e.Star, ignoreComment())
		w.Write(withNode(starNode, e.Value), withInfix(NilIndent), expectSameLine())
	} else {
		w.Write(withNode(e.Value))
	}
	return w.String()
}

// Pos returns the starting position of the expression: the bracket, star
// or value token, whichever comes first.
func (e *BodyExpr) Pos() token.Position {
	if e.LBrack != nil {
		return e.LBrack.Pos()
	}
	if e.Star != nil {
		return e.Star.Pos()
	}
	return e.Value.Pos()
}

// exprNode marks BodyExpr as an Expr.
func (e *BodyExpr) exprNode() {}

+ 44 - 0
tools/goctl/pkg/parser/api/ast/syntaxstatement.go

@@ -0,0 +1,44 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
+// SyntaxStmt represents a syntax statement.
+type SyntaxStmt struct {
+	// Syntax is the syntax token.
+	Syntax *TokenNode
+	// Assign is the assign token.
+	Assign *TokenNode
+	// Value is the syntax value.
+	Value *TokenNode
+}
+
+func (s *SyntaxStmt) HasHeadCommentGroup() bool {
+	return s.Syntax.HasHeadCommentGroup()
+}
+
+func (s *SyntaxStmt) HasLeadingCommentGroup() bool {
+	return s.Value.HasLeadingCommentGroup()
+}
+
+func (s *SyntaxStmt) CommentGroup() (head, leading CommentGroup) {
+	return s.Syntax.HeadCommentGroup, s.Syntax.LeadingCommentGroup
+}
+
+func (s *SyntaxStmt) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	syntaxNode := transferTokenNode(s.Syntax,
+		withTokenNodePrefix(prefix...), ignoreLeadingComment())
+	assignNode := transferTokenNode(s.Assign, ignoreLeadingComment())
+	w.Write(withNode(syntaxNode, assignNode, s.Value), withPrefix(prefix...), expectSameLine())
+	return w.String()
+}
+
+func (s *SyntaxStmt) End() token.Position {
+	return s.Value.End()
+}
+
+func (s *SyntaxStmt) Pos() token.Position {
+	return s.Syntax.Pos()
+}
+
+func (s *SyntaxStmt) stmtNode() {}

+ 797 - 0
tools/goctl/pkg/parser/api/ast/typestatement.go

@@ -0,0 +1,797 @@
+package ast
+
+import "github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+
+/*******************TypeStmt Begin********************/
+
// TypeStmt is the interface for type statement, implemented by
// TypeLiteralStmt and TypeGroupStmt.
type TypeStmt interface {
	Stmt
	// typeNode restricts implementations to this package.
	typeNode()
}
+
// TypeLiteralStmt is the type statement for a single (non-grouped) type
// declaration, e.g. `type Foo { ... }`.
type TypeLiteralStmt struct {
	// Type is the type keyword.
	Type *TokenNode
	// Expr is the type expression.
	Expr *TypeExpr
}

// HasHeadCommentGroup reports whether comments precede the type keyword.
func (t *TypeLiteralStmt) HasHeadCommentGroup() bool {
	return t.Type.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the type expression.
func (t *TypeLiteralStmt) HasLeadingCommentGroup() bool {
	return t.Expr.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the type keyword and the
// trailing comments of the type expression.
func (t *TypeLiteralStmt) CommentGroup() (head, leading CommentGroup) {
	_, leading = t.Expr.CommentGroup()
	return t.Type.HeadCommentGroup, leading
}

// Format returns the formatted type declaration.
func (t *TypeLiteralStmt) Format(prefix ...string) string {
	w := NewBufferWriter()
	w.Write(withNode(t.Type, t.Expr), withPrefix(prefix...), expectSameLine())
	return w.String()
}

// End returns the end position of the statement (the type expression).
func (t *TypeLiteralStmt) End() token.Position {
	return t.Expr.End()
}

// Pos returns the starting position of the statement (the type keyword).
func (t *TypeLiteralStmt) Pos() token.Position {
	return t.Type.Pos()
}

// stmtNode marks TypeLiteralStmt as a Stmt.
func (t *TypeLiteralStmt) stmtNode() {}

// typeNode marks TypeLiteralStmt as a TypeStmt.
func (t *TypeLiteralStmt) typeNode() {}
+
// TypeGroupStmt is the type statement for a parenthesized group of type
// declarations, e.g. `type ( ... )`.
type TypeGroupStmt struct {
	// Type is the type keyword.
	Type *TokenNode
	// LParen is the left parenthesis.
	LParen *TokenNode
	// ExprList is the type expression list.
	ExprList []*TypeExpr
	// RParen is the right parenthesis.
	RParen *TokenNode
}

// HasHeadCommentGroup reports whether comments precede the type keyword.
func (t *TypeGroupStmt) HasHeadCommentGroup() bool {
	return t.Type.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the right parenthesis.
func (t *TypeGroupStmt) HasLeadingCommentGroup() bool {
	return t.RParen.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the type keyword and the
// trailing comments of the right parenthesis.
func (t *TypeGroupStmt) CommentGroup() (head, leading CommentGroup) {
	return t.Type.HeadCommentGroup, t.RParen.LeadingCommentGroup
}

// Format returns the formatted type group, or an empty string when the
// group contains no expressions.
func (t *TypeGroupStmt) Format(prefix ...string) string {
	if len(t.ExprList) == 0 {
		return ""
	}
	w := NewBufferWriter()
	typeNode := transferTokenNode(t.Type, withTokenNodePrefix(prefix...))
	w.Write(withNode(typeNode, t.LParen), expectSameLine())
	w.NewLine()
	for _, e := range t.ExprList {
		// Each member is indented one level deeper than the group.
		w.Write(withNode(e), withPrefix(peekOne(prefix)+Indent))
		w.NewLine()
	}
	w.WriteText(t.RParen.Format(prefix...))
	return w.String()
}

// End returns the end position of the statement (the right parenthesis).
func (t *TypeGroupStmt) End() token.Position {
	return t.RParen.End()
}

// Pos returns the starting position of the statement (the type keyword).
func (t *TypeGroupStmt) Pos() token.Position {
	return t.Type.Pos()
}

// stmtNode marks TypeGroupStmt as a Stmt.
func (t *TypeGroupStmt) stmtNode() {}

// typeNode marks TypeGroupStmt as a TypeStmt.
func (t *TypeGroupStmt) typeNode() {}
+
+/*******************TypeStmt End********************/
+
+/*******************TypeExpr Begin********************/
+
// TypeExpr is the type expression: a name, an optional assign operator
// and the underlying data type.
type TypeExpr struct {
	// Name is the type name.
	Name *TokenNode
	// Assign is the assign operator; may be nil.
	Assign *TokenNode
	// DataType is the data type.
	DataType DataType
}

// HasHeadCommentGroup reports whether comments precede the type name.
func (e *TypeExpr) HasHeadCommentGroup() bool {
	return e.Name.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the data type.
func (e *TypeExpr) HasLeadingCommentGroup() bool {
	return e.DataType.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the type name and the
// trailing comments of the data type.
func (e *TypeExpr) CommentGroup() (head, leading CommentGroup) {
	_, leading = e.DataType.CommentGroup()
	return e.Name.HeadCommentGroup, leading
}

// Format returns the formatted type expression on a single line.
func (e *TypeExpr) Format(prefix ...string) string {
	w := NewBufferWriter()
	nameNode := transferTokenNode(e.Name, withTokenNodePrefix(prefix...))
	dataTypeNode := transfer2TokenNode(e.DataType, false, withTokenNodePrefix(prefix...))
	if e.Assign != nil {
		w.Write(withNode(nameNode, e.Assign, dataTypeNode), expectSameLine())
	} else {
		w.Write(withNode(nameNode, dataTypeNode), expectSameLine())
	}
	return w.String()
}

// End returns the end position of the expression (the data type).
func (e *TypeExpr) End() token.Position {
	return e.DataType.End()
}

// Pos returns the starting position of the expression (the type name).
func (e *TypeExpr) Pos() token.Position {
	return e.Name.Pos()
}

// exprNode marks TypeExpr as an Expr.
func (e *TypeExpr) exprNode() {}

// isStruct reports whether the underlying data type contains a struct.
func (e *TypeExpr) isStruct() bool {
	return e.DataType.ContainsStruct()
}
+
/*******************TypeExpr End********************/
+
+/*******************Elem Begin********************/
+
// ElemExpr is the element (field) expression of a struct body.
type ElemExpr struct {
	// Name is the field element name list; empty for anonymous fields.
	Name []*TokenNode
	// DataType is the field data type.
	DataType DataType
	// Tag is the field tag; may be nil.
	Tag *TokenNode
}

// IsAnonymous returns true if the element is anonymous (has no name).
func (e *ElemExpr) IsAnonymous() bool {
	return len(e.Name) == 0
}

// HasHeadCommentGroup reports whether comments precede the field,
// checking the data type for anonymous fields and the first name
// otherwise.
func (e *ElemExpr) HasHeadCommentGroup() bool {
	if e.IsAnonymous() {
		return e.DataType.HasHeadCommentGroup()
	}
	return e.Name[0].HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the
// field (the tag when present, otherwise the data type).
func (e *ElemExpr) HasLeadingCommentGroup() bool {
	if e.Tag != nil {
		return e.Tag.HasLeadingCommentGroup()
	}
	return e.DataType.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the first node of the field
// and the trailing comments of its last node.
func (e *ElemExpr) CommentGroup() (head, leading CommentGroup) {
	if e.Tag != nil {
		leading = e.Tag.LeadingCommentGroup
	} else {
		_, leading = e.DataType.CommentGroup()
	}
	if e.IsAnonymous() {
		head, _ := e.DataType.CommentGroup()
		return head, leading
	}
	return e.Name[0].HeadCommentGroup, leading
}

// Format returns the formatted struct field.
func (e *ElemExpr) Format(prefix ...string) string {
	w := NewBufferWriter()
	// Keep the head comment only on the first name and the leading comment
	// only on the last one so comments are not duplicated between the
	// comma-separated names.
	// NOTE(review): with a single name the idx == 0 branch wins, so that
	// name's leading comment is dropped — confirm intended.
	var nameNodeList []*TokenNode
	for idx, n := range e.Name {
		if idx == 0 {
			nameNodeList = append(nameNodeList,
				transferTokenNode(n, ignoreLeadingComment()))
		} else if idx < len(e.Name)-1 {
			nameNodeList = append(nameNodeList,
				transferTokenNode(n, ignoreLeadingComment(), ignoreHeadComment()))
		} else {
			nameNodeList = append(nameNodeList, transferTokenNode(n, ignoreHeadComment()))
		}
	}

	// Struct-valued fields get an extra level of indentation for their body.
	var dataTypeOption []tokenNodeOption
	if e.DataType.ContainsStruct() {
		dataTypeOption = append(dataTypeOption, withTokenNodePrefix(peekOne(prefix)+Indent))
	} else {
		dataTypeOption = append(dataTypeOption, withTokenNodePrefix(prefix...))
	}
	dataTypeNode := transfer2TokenNode(e.DataType, false, dataTypeOption...)
	if len(nameNodeList) > 0 {
		nameNode := transferNilInfixNode(nameNodeList,
			withTokenNodePrefix(prefix...), withTokenNodeInfix(", "))
		if e.Tag != nil {
			w.Write(withNode(nameNode, dataTypeNode, e.Tag), expectIndentInfix(), expectSameLine())
		} else {
			w.Write(withNode(nameNode, dataTypeNode), expectIndentInfix(), expectSameLine())
		}
	} else {
		if e.Tag != nil {
			w.Write(withNode(dataTypeNode, e.Tag), expectIndentInfix(), expectSameLine())
		} else {
			w.Write(withNode(dataTypeNode), expectIndentInfix(), expectSameLine())
		}
	}
	return w.String()
}

// End returns the end position of the field (the tag when present,
// otherwise the data type).
func (e *ElemExpr) End() token.Position {
	if e.Tag != nil {
		return e.Tag.End()
	}
	return e.DataType.End()
}

// Pos returns the starting position of the field (its first name).
// NOTE(review): anonymous fields yield token.IllegalPosition rather than
// the data type position — confirm intended.
func (e *ElemExpr) Pos() token.Position {
	if len(e.Name) > 0 {
		return e.Name[0].Pos()
	}
	return token.IllegalPosition
}

// exprNode marks ElemExpr as an Expr.
func (e *ElemExpr) exprNode() {}
+
+/*******************Elem End********************/
+
+/*******************ElemExprList Begin********************/
+
// ElemExprList is the list of field element expressions of a struct body.
type ElemExprList []*ElemExpr
+
/*******************ElemExprList End********************/
+
+/*******************DataType Begin********************/
+
// DataType represents the data type.
type DataType interface {
	Expr
	// dataTypeNode restricts implementations to this package.
	dataTypeNode()
	// CanEqual returns true if the data type can be equal (usable with ==).
	CanEqual() bool
	// ContainsStruct returns true if the data type contains struct.
	ContainsStruct() bool
	// RawText returns the raw text of the data type.
	RawText() string
}
+
+// AnyDataType is the any data type.
+type AnyDataType struct {
+	// Any is the any token node.
+	Any     *TokenNode
+	isChild bool
+}
+
+func (t *AnyDataType) HasHeadCommentGroup() bool {
+	return t.Any.HasHeadCommentGroup()
+}
+
+func (t *AnyDataType) HasLeadingCommentGroup() bool {
+	return t.Any.HasLeadingCommentGroup()
+}
+
+func (t *AnyDataType) CommentGroup() (head, leading CommentGroup) {
+	return t.Any.HeadCommentGroup, t.Any.LeadingCommentGroup
+}
+
+func (t *AnyDataType) Format(prefix ...string) string {
+	return t.Any.Format(prefix...)
+}
+
+func (t *AnyDataType) End() token.Position {
+	return t.Any.End()
+}
+
+func (t *AnyDataType) RawText() string {
+	return t.Any.Token.Text
+}
+
+func (t *AnyDataType) ContainsStruct() bool {
+	return false
+}
+
+func (t *AnyDataType) Pos() token.Position {
+	return t.Any.Pos()
+}
+
+func (t *AnyDataType) exprNode() {}
+
+func (t *AnyDataType) dataTypeNode() {}
+
+func (t *AnyDataType) CanEqual() bool {
+	return true
+}
+
// ArrayDataType is the array data type, e.g. [2]int.
type ArrayDataType struct {
	// LBrack is the left bracket token node.
	LBrack *TokenNode
	// Length is the array length token node.
	Length *TokenNode
	// RBrack is the right bracket token node.
	RBrack *TokenNode
	// DataType is the array element data type.
	DataType DataType
	// isChild marks this node as nested inside another data type.
	isChild bool
}

// HasHeadCommentGroup reports whether comments precede the left bracket.
func (t *ArrayDataType) HasHeadCommentGroup() bool {
	return t.LBrack.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the element type.
func (t *ArrayDataType) HasLeadingCommentGroup() bool {
	return t.DataType.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the left bracket and the
// trailing comments of the element type.
func (t *ArrayDataType) CommentGroup() (head, leading CommentGroup) {
	_, leading = t.DataType.CommentGroup()
	return t.LBrack.HeadCommentGroup, leading
}

// Format returns the formatted array type with no spacing between tokens.
func (t *ArrayDataType) Format(prefix ...string) string {
	w := NewBufferWriter()
	lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
	lengthNode := transferTokenNode(t.Length, ignoreLeadingComment())
	rbrack := transferTokenNode(t.RBrack, ignoreHeadComment())
	var dataType *TokenNode
	var options []tokenNodeOption
	options = append(options, withTokenNodePrefix(prefix...))
	// Nested types drop all comments; top-level ones keep leading comments.
	if t.isChild {
		options = append(options, ignoreComment())
	} else {
		options = append(options, ignoreHeadComment())
	}

	dataType = transfer2TokenNode(t.DataType, false, options...)
	node := transferNilInfixNode([]*TokenNode{lbrack, lengthNode, rbrack, dataType})
	w.Write(withNode(node))
	return w.String()
}

// End returns the end position of the type (the element type).
func (t *ArrayDataType) End() token.Position {
	return t.DataType.End()
}

// RawText returns the formatted text of the array type.
func (t *ArrayDataType) RawText() string {
	return t.Format("")
}

// ContainsStruct reports whether the element type contains a struct.
func (t *ArrayDataType) ContainsStruct() bool {
	return t.DataType.ContainsStruct()
}

// CanEqual reports whether the element type is comparable.
func (t *ArrayDataType) CanEqual() bool {
	return t.DataType.CanEqual()
}

// Pos returns the starting position of the type (the left bracket).
func (t *ArrayDataType) Pos() token.Position {
	return t.LBrack.Pos()
}

// exprNode marks ArrayDataType as an Expr.
func (t *ArrayDataType) exprNode()     {}
// dataTypeNode marks ArrayDataType as a DataType.
func (t *ArrayDataType) dataTypeNode() {}
+
+// BaseDataType is a common id type which contains bool, uint8, uint16, uint32,
+// uint64, int8, int16, int32, int64, float32, float64, complex64, complex128,
+// string, int, uint, uintptr, byte, rune, any.
+type BaseDataType struct {
+	// Base is the base token node.
+	Base    *TokenNode
+	isChild bool
+}
+
+func (t *BaseDataType) HasHeadCommentGroup() bool {
+	return t.Base.HasHeadCommentGroup()
+}
+
+func (t *BaseDataType) HasLeadingCommentGroup() bool {
+	return t.Base.HasLeadingCommentGroup()
+}
+
+func (t *BaseDataType) CommentGroup() (head, leading CommentGroup) {
+	return t.Base.HeadCommentGroup, t.Base.LeadingCommentGroup
+}
+
+func (t *BaseDataType) Format(prefix ...string) string {
+	return t.Base.Format(prefix...)
+}
+
+func (t *BaseDataType) End() token.Position {
+	return t.Base.End()
+}
+
+func (t *BaseDataType) RawText() string {
+	return t.Base.Token.Text
+}
+
+func (t *BaseDataType) ContainsStruct() bool {
+	return false
+}
+
+func (t *BaseDataType) CanEqual() bool {
+	return true
+}
+
+func (t *BaseDataType) Pos() token.Position {
+	return t.Base.Pos()
+}
+
+func (t *BaseDataType) exprNode()     {}
+func (t *BaseDataType) dataTypeNode() {}
+
+// InterfaceDataType is the interface data type.
+type InterfaceDataType struct {
+	// Interface is the interface token node.
+	Interface *TokenNode
+	isChild   bool
+}
+
+func (t *InterfaceDataType) HasHeadCommentGroup() bool {
+	return t.Interface.HasHeadCommentGroup()
+}
+
+func (t *InterfaceDataType) HasLeadingCommentGroup() bool {
+	return t.Interface.HasLeadingCommentGroup()
+}
+
+func (t *InterfaceDataType) CommentGroup() (head, leading CommentGroup) {
+	return t.Interface.HeadCommentGroup, t.Interface.LeadingCommentGroup
+}
+
+func (t *InterfaceDataType) Format(prefix ...string) string {
+	return t.Interface.Format(prefix...)
+}
+
+func (t *InterfaceDataType) End() token.Position {
+	return t.Interface.End()
+}
+
+func (t *InterfaceDataType) RawText() string {
+	return t.Interface.Token.Text
+}
+
+func (t *InterfaceDataType) ContainsStruct() bool {
+	return false
+}
+
+func (t *InterfaceDataType) CanEqual() bool {
+	return true
+}
+
+func (t *InterfaceDataType) Pos() token.Position {
+	return t.Interface.Pos()
+}
+
+func (t *InterfaceDataType) exprNode() {}
+
+func (t *InterfaceDataType) dataTypeNode() {}
+
// MapDataType is the map data type, e.g. map[string]int.
type MapDataType struct {
	// Map is the map token node.
	Map *TokenNode
	// LBrack is the left bracket token node.
	LBrack *TokenNode
	// Key is the map key data type.
	Key DataType
	// RBrack is the right bracket token node.
	RBrack *TokenNode
	// Value is the map value data type.
	Value DataType
	// isChild marks this node as nested inside another data type.
	isChild bool
}

// HasHeadCommentGroup reports whether comments precede the map token.
func (t *MapDataType) HasHeadCommentGroup() bool {
	return t.Map.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the value type.
func (t *MapDataType) HasLeadingCommentGroup() bool {
	return t.Value.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the map token and the
// trailing comments of the value type.
func (t *MapDataType) CommentGroup() (head, leading CommentGroup) {
	_, leading = t.Value.CommentGroup()
	return t.Map.HeadCommentGroup, leading
}

// Format returns the formatted map type with no spacing between tokens.
func (t *MapDataType) Format(prefix ...string) string {
	w := NewBufferWriter()
	mapNode := transferTokenNode(t.Map, ignoreLeadingComment())
	lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
	rbrack := transferTokenNode(t.RBrack, ignoreComment())
	var keyOption, valueOption []tokenNodeOption
	// Key comments are always dropped; value comments depend on nesting.
	keyOption = append(keyOption, ignoreComment())
	valueOption = append(valueOption, withTokenNodePrefix(prefix...))

	if t.isChild {
		valueOption = append(valueOption, ignoreComment())
	} else {
		valueOption = append(valueOption, ignoreHeadComment())
	}

	keyDataType := transfer2TokenNode(t.Key, true, keyOption...)
	valueDataType := transfer2TokenNode(t.Value, false, valueOption...)
	node := transferNilInfixNode([]*TokenNode{mapNode, lbrack, keyDataType, rbrack, valueDataType})
	w.Write(withNode(node))
	return w.String()
}

// End returns the end position of the type (the value type).
func (t *MapDataType) End() token.Position {
	return t.Value.End()
}

// RawText returns the formatted text of the map type.
func (t *MapDataType) RawText() string {
	return t.Format("")
}

// ContainsStruct reports whether the key or value type contains a struct.
func (t *MapDataType) ContainsStruct() bool {
	return t.Key.ContainsStruct() || t.Value.ContainsStruct()
}

// CanEqual reports false: maps are not comparable.
func (t *MapDataType) CanEqual() bool {
	return false
}

// Pos returns the starting position of the type (the map token).
func (t *MapDataType) Pos() token.Position {
	return t.Map.Pos()
}

// exprNode marks MapDataType as an Expr.
func (t *MapDataType) exprNode()     {}
// dataTypeNode marks MapDataType as a DataType.
func (t *MapDataType) dataTypeNode() {}
+
// PointerDataType is the pointer data type, e.g. *Foo.
type PointerDataType struct {
	// Star is the star token node.
	Star *TokenNode
	// DataType is the pointed-to data type.
	DataType DataType
	// isChild marks this node as nested inside another data type.
	isChild bool
}

// HasHeadCommentGroup reports whether comments precede the star token.
func (t *PointerDataType) HasHeadCommentGroup() bool {
	return t.Star.HasHeadCommentGroup()
}

// HasLeadingCommentGroup reports whether a trailing comment follows the pointed-to type.
func (t *PointerDataType) HasLeadingCommentGroup() bool {
	return t.DataType.HasLeadingCommentGroup()
}

// CommentGroup returns the head comments of the star token and the
// trailing comments of the pointed-to type.
func (t *PointerDataType) CommentGroup() (head, leading CommentGroup) {
	_, leading = t.DataType.CommentGroup()
	return t.Star.HeadCommentGroup, leading
}

// Format returns the formatted pointer type with no spacing between tokens.
func (t *PointerDataType) Format(prefix ...string) string {
	w := NewBufferWriter()
	star := transferTokenNode(t.Star, ignoreLeadingComment(), withTokenNodePrefix(prefix...))
	var dataTypeOption []tokenNodeOption
	dataTypeOption = append(dataTypeOption, ignoreHeadComment())
	dataType := transfer2TokenNode(t.DataType, false, dataTypeOption...)
	node := transferNilInfixNode([]*TokenNode{star, dataType})
	w.Write(withNode(node))
	return w.String()
}

// End returns the end position of the type (the pointed-to type).
func (t *PointerDataType) End() token.Position {
	return t.DataType.End()
}

// RawText returns the formatted text of the pointer type.
func (t *PointerDataType) RawText() string {
	return t.Format("")
}

// ContainsStruct reports whether the pointed-to type contains a struct.
func (t *PointerDataType) ContainsStruct() bool {
	return t.DataType.ContainsStruct()
}

// CanEqual reports whether the pointed-to type is comparable.
func (t *PointerDataType) CanEqual() bool {
	return t.DataType.CanEqual()
}

// Pos returns the starting position of the type (the star token).
func (t *PointerDataType) Pos() token.Position {
	return t.Star.Pos()
}

// exprNode marks PointerDataType as an Expr.
func (t *PointerDataType) exprNode()     {}
// dataTypeNode marks PointerDataType as a DataType.
func (t *PointerDataType) dataTypeNode() {}
+
+// SliceDataType is the slice data type, e.g. `[]int`.
+type SliceDataType struct {
+	// LBrack is the left bracket token node.
+	LBrack *TokenNode
+	// RBrack is the right bracket token node.
+	RBrack *TokenNode
+	// DataType is the slice element data type.
+	DataType DataType
+	// isChild marks this type as nested inside another data type;
+	// it is set by transfer2TokenNode during formatting.
+	isChild  bool
+}
+
+// HasHeadCommentGroup reports whether comments precede the left bracket.
+func (t *SliceDataType) HasHeadCommentGroup() bool {
+	return t.LBrack.HasHeadCommentGroup()
+}
+
+// HasLeadingCommentGroup reports whether the element type carries trailing comments.
+func (t *SliceDataType) HasLeadingCommentGroup() bool {
+	return t.DataType.HasLeadingCommentGroup()
+}
+
+// CommentGroup returns the left bracket's head comments and the element type's
+// leading comments.
+func (t *SliceDataType) CommentGroup() (head, leading CommentGroup) {
+	_, leading = t.DataType.CommentGroup()
+	return t.LBrack.HeadCommentGroup, leading
+}
+
+// Format renders `[]` immediately followed by the element type; comments that
+// would break the brackets apart are stripped.
+func (t *SliceDataType) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	lbrack := transferTokenNode(t.LBrack, ignoreLeadingComment())
+	rbrack := transferTokenNode(t.RBrack, ignoreHeadComment())
+	dataType := transfer2TokenNode(t.DataType, false, withTokenNodePrefix(prefix...), ignoreHeadComment())
+	node := transferNilInfixNode([]*TokenNode{lbrack, rbrack, dataType})
+	w.Write(withNode(node))
+	return w.String()
+}
+
+// End returns the position right after the element data type.
+func (t *SliceDataType) End() token.Position {
+	return t.DataType.End()
+}
+
+// RawText returns the formatted text without an indent prefix.
+func (t *SliceDataType) RawText() string {
+	return t.Format("")
+}
+
+// ContainsStruct reports whether the element type contains a struct literal.
+func (t *SliceDataType) ContainsStruct() bool {
+	return t.DataType.ContainsStruct()
+}
+
+// CanEqual reports whether values of this type are comparable; slices never are.
+func (t *SliceDataType) CanEqual() bool {
+	return false
+}
+
+// Pos returns the position of the left bracket token.
+func (t *SliceDataType) Pos() token.Position {
+	return t.LBrack.Pos()
+}
+
+func (t *SliceDataType) exprNode()     {}
+func (t *SliceDataType) dataTypeNode() {}
+
+// StructDataType is the structure data type, e.g. `{ Name string }`.
+type StructDataType struct {
+	// LBrace is the left brace token node.
+	LBrace *TokenNode
+	// Elements is the structure elements.
+	Elements ElemExprList
+	// RBrace is the right brace token node.
+	RBrace  *TokenNode
+	// isChild marks this struct as nested inside another data type;
+	// it is set by transfer2TokenNode during formatting.
+	isChild bool
+}
+
+// HasHeadCommentGroup reports whether comments precede the opening brace.
+func (t *StructDataType) HasHeadCommentGroup() bool {
+	return t.LBrace.HasHeadCommentGroup()
+}
+
+// HasLeadingCommentGroup reports whether comments trail the closing brace.
+func (t *StructDataType) HasLeadingCommentGroup() bool {
+	return t.RBrace.HasLeadingCommentGroup()
+}
+
+// CommentGroup returns the comments before the opening brace and after the
+// closing brace.
+func (t *StructDataType) CommentGroup() (head, leading CommentGroup) {
+	return t.LBrace.HeadCommentGroup, t.RBrace.LeadingCommentGroup
+}
+
+// Format renders the struct literal. An empty struct collapses to `{}` on one
+// line; otherwise each element is written on its own line between the braces,
+// indented one level past the given prefix.
+func (t *StructDataType) Format(prefix ...string) string {
+	w := NewBufferWriter()
+	if len(t.Elements) == 0 {
+		// Fast path: `{}` with no interior newline.
+		lbrace := transferTokenNode(t.LBrace, withTokenNodePrefix(prefix...), ignoreLeadingComment())
+		rbrace := transferTokenNode(t.RBrace, ignoreHeadComment())
+		brace := transferNilInfixNode([]*TokenNode{lbrace, rbrace})
+		w.Write(withNode(brace), expectSameLine())
+		return w.String()
+	}
+	w.WriteText(t.LBrace.Format(NilIndent))
+	w.NewLine()
+	for _, e := range t.Elements {
+		var nameNodeList []*TokenNode
+		if len(e.Name) > 0 {
+			// The first name carries the field indent; middle names drop all
+			// comments; the last keeps its trailing comments.
+			for idx, n := range e.Name {
+				if idx == 0 {
+					nameNodeList = append(nameNodeList,
+						transferTokenNode(n, withTokenNodePrefix(peekOne(prefix)+Indent), ignoreLeadingComment()))
+				} else if idx < len(e.Name)-1 {
+					nameNodeList = append(nameNodeList,
+						transferTokenNode(n, ignoreLeadingComment(), ignoreHeadComment()))
+				} else {
+					nameNodeList = append(nameNodeList, transferTokenNode(n, ignoreHeadComment()))
+				}
+			}
+		}
+		var dataTypeOption []tokenNodeOption
+		// Nested structs and anonymous fields indent one level deeper.
+		if e.DataType.ContainsStruct() || e.IsAnonymous() {
+			dataTypeOption = append(dataTypeOption, withTokenNodePrefix(peekOne(prefix)+Indent))
+		} else {
+			dataTypeOption = append(dataTypeOption, withTokenNodePrefix(prefix...))
+		}
+		dataTypeNode := transfer2TokenNode(e.DataType, false, dataTypeOption...)
+		if len(nameNodeList) > 0 {
+			nameNode := transferNilInfixNode(nameNodeList, withTokenNodeInfix(", "))
+			if e.Tag != nil {
+				if e.DataType.ContainsStruct() {
+					w.Write(withNode(nameNode, dataTypeNode, e.Tag), expectSameLine())
+				} else {
+					w.Write(withNode(nameNode, e.DataType, e.Tag), expectIndentInfix(), expectSameLine())
+				}
+			} else {
+				if e.DataType.ContainsStruct() {
+					w.Write(withNode(nameNode, dataTypeNode), expectSameLine())
+				} else {
+					w.Write(withNode(nameNode, e.DataType), expectIndentInfix(), expectSameLine())
+				}
+			}
+		} else {
+			if e.Tag != nil {
+				if e.DataType.ContainsStruct() {
+					w.Write(withNode(dataTypeNode, e.Tag), expectSameLine())
+				} else {
+					w.Write(withNode(e.DataType, e.Tag), expectIndentInfix(), expectSameLine())
+				}
+			} else {
+				if e.DataType.ContainsStruct() {
+					w.Write(withNode(dataTypeNode), expectSameLine())
+				} else {
+					// NOTE(review): this branch writes dataTypeNode while the
+					// corresponding named-field branch above writes e.DataType —
+					// confirm the asymmetry is intended.
+					w.Write(withNode(dataTypeNode), expectIndentInfix(), expectSameLine())
+				}
+			}
+		}
+		w.NewLine()
+	}
+	w.WriteText(t.RBrace.Format(prefix...))
+	return w.String()
+}
+
+// End returns the position right after the closing brace.
+func (t *StructDataType) End() token.Position {
+	return t.RBrace.End()
+}
+
+// RawText returns the formatted text without an indent prefix.
+func (t *StructDataType) RawText() string {
+	return t.Format("")
+}
+
+// ContainsStruct always reports true for a struct literal.
+func (t *StructDataType) ContainsStruct() bool {
+	return true
+}
+
+// CanEqual reports whether every element's type is comparable.
+func (t *StructDataType) CanEqual() bool {
+	for _, v := range t.Elements {
+		if !v.DataType.CanEqual() {
+			return false
+		}
+	}
+	return true
+}
+
+// Pos returns the position of the opening brace token.
+func (t *StructDataType) Pos() token.Position {
+	return t.LBrace.Pos()
+}
+
+func (t *StructDataType) exprNode()     {}
+func (t *StructDataType) dataTypeNode() {}
+
+/*******************DataType End********************/

+ 403 - 0
tools/goctl/pkg/parser/api/ast/writer.go

@@ -0,0 +1,403 @@
+package ast
+
+import (
+	"bytes"
+	"fmt"
+	"io"
+	"strings"
+	"text/tabwriter"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+	"github.com/zeromicro/go-zero/tools/goctl/util"
+)
+
+// Characters and separators used when emitting formatted output.
+const (
+	NilIndent  = ""
+	WhiteSpace = " "
+	Indent     = "\t"
+	NewLine    = "\n"
+)
+
+const (
+	_ WriteMode = 1 << iota
+	// ModeAuto is the default mode, which will automatically
+	// determine whether to write a newline.
+	ModeAuto
+
+	// ModeExpectInSameLine will write in the same line.
+	ModeExpectInSameLine
+)
+
+// Option is a functional option for Writer.Write.
+type Option func(o *option)
+
+// option holds the accumulated settings for a single Write call.
+type option struct {
+	// prefix is prepended to each rendered node.
+	prefix  string
+	// infix joins the rendered nodes (Write defaults it to a single space).
+	infix   string
+	// mode controls newline insertion between nodes.
+	mode    WriteMode
+	// nodes are the nodes to render.
+	nodes   []Node
+	// rawText bypasses the tabwriter and writes directly to the underlying writer.
+	rawText bool
+}
+
+// tokenNodeOption is a functional option for the token-node transfer helpers.
+type tokenNodeOption func(o *tokenNodeOpt)
+// tokenNodeOpt holds the accumulated settings for one transfer call.
+type tokenNodeOpt struct {
+	// prefix is prepended to the token text and to each head comment.
+	prefix               string
+	// infix joins multiple token texts in transferNilInfixNode.
+	infix                string
+	// ignoreHeadComment drops the token's HeadCommentGroup.
+	ignoreHeadComment    bool
+	// ignoreLeadingComment drops the token's LeadingCommentGroup.
+	ignoreLeadingComment bool
+}
+
+// WriteMode is the mode of writing.
+type WriteMode int
+
+// Writer is the writer of ast. It renders through a tabwriter so that
+// tab-separated columns line up.
+type Writer struct {
+	tw     *tabwriter.Writer
+	writer io.Writer
+}
+
+// transfer2TokenNode flattens the given data-type node into a single TokenNode
+// whose text is the node's formatted representation. The node is mutated in
+// place: its leading token is rewritten with a nil indent prefix (and comment
+// groups dropped per the options) and its isChild flag is set, so nested types
+// format without duplicated prefixes. Non-data-type nodes pass through with
+// no mutation.
+func transfer2TokenNode(node Node, isChild bool, opt ...tokenNodeOption) *TokenNode {
+	option := new(tokenNodeOpt)
+	for _, o := range opt {
+		o(option)
+	}
+
+	// Every data-type case rewrites its leading token the same way: copy the
+	// caller's options, force a nil indent prefix, then drop comment groups on
+	// demand. Centralize that in one helper instead of repeating it per case.
+	copyOpt := append(append([]tokenNodeOption(nil), opt...), withTokenNodePrefix(NilIndent))
+	rewrite := func(tn *TokenNode) *TokenNode {
+		result := transferTokenNode(tn, copyOpt...)
+		if option.ignoreHeadComment {
+			result.HeadCommentGroup = nil
+		}
+		if option.ignoreLeadingComment {
+			result.LeadingCommentGroup = nil
+		}
+		return result
+	}
+
+	switch val := node.(type) {
+	case *AnyDataType:
+		val.isChild = isChild
+		val.Any = rewrite(val.Any)
+	case *ArrayDataType:
+		val.isChild = isChild
+		val.LBrack = rewrite(val.LBrack)
+	case *BaseDataType:
+		val.isChild = isChild
+		val.Base = rewrite(val.Base)
+	case *InterfaceDataType:
+		val.isChild = isChild
+		val.Interface = rewrite(val.Interface)
+	case *MapDataType:
+		val.isChild = isChild
+		val.Map = rewrite(val.Map)
+	case *PointerDataType:
+		val.isChild = isChild
+		val.Star = rewrite(val.Star)
+	case *SliceDataType:
+		val.isChild = isChild
+		val.LBrack = rewrite(val.LBrack)
+	case *StructDataType:
+		val.isChild = isChild
+		val.LBrace = rewrite(val.LBrace)
+	}
+
+	// Format is called after the mutation above, matching the original order,
+	// so the flattened text reflects the rewritten leading token.
+	return &TokenNode{
+		headFlag:    node.HasHeadCommentGroup(),
+		leadingFlag: node.HasLeadingCommentGroup(),
+		Token: token.Token{
+			Text:     node.Format(option.prefix),
+			Position: node.Pos(),
+		},
+		LeadingCommentGroup: CommentGroup{
+			{
+				token.Token{Position: node.End()},
+			},
+		},
+	}
+}
+
+// transferNilInfixNode joins the texts of the given token nodes into a single
+// node using the configured infix, keeping the head comments of the first node
+// and the leading comments of the last one (unless ignored by options).
+// An empty node list yields an empty TokenNode.
+func transferNilInfixNode(nodes []*TokenNode, opt ...tokenNodeOption) *TokenNode {
+	result := &TokenNode{}
+	var option = new(tokenNodeOpt)
+	for _, o := range opt {
+		o(option)
+	}
+
+	// Guard: nodes[0] below would panic on an empty list.
+	if len(nodes) == 0 {
+		return result
+	}
+
+	list := make([]string, 0, len(nodes))
+	for _, n := range nodes {
+		list = append(list, n.Token.Text)
+	}
+
+	result.Token = token.Token{
+		Text:     option.prefix + strings.Join(list, option.infix),
+		Position: nodes[0].Pos(),
+	}
+
+	if !option.ignoreHeadComment {
+		result.HeadCommentGroup = nodes[0].HeadCommentGroup
+	}
+	if !option.ignoreLeadingComment {
+		result.LeadingCommentGroup = nodes[len(nodes)-1].LeadingCommentGroup
+	}
+
+	return result
+}
+
+// transferTokenNode returns a copy of node with the configured prefix applied
+// to its text and to each head comment, optionally dropping the head and/or
+// leading comment groups.
+func transferTokenNode(node *TokenNode, opt ...tokenNodeOption) *TokenNode {
+	result := &TokenNode{}
+	var option = new(tokenNodeOpt)
+	for _, o := range opt {
+		o(option)
+	}
+	result.Token = token.Token{
+		Type:     node.Token.Type,
+		Text:     option.prefix + node.Token.Text,
+		Position: node.Token.Position,
+	}
+	if !option.ignoreHeadComment {
+		// Head comments are copied so the prefix can be applied per comment.
+		for _, v := range node.HeadCommentGroup {
+			result.HeadCommentGroup = append(result.HeadCommentGroup,
+				&CommentStmt{Comment: token.Token{
+					Type:     v.Comment.Type,
+					Text:     option.prefix + v.Comment.Text,
+					Position: v.Comment.Position,
+				}})
+		}
+	}
+	if !option.ignoreLeadingComment {
+		// Leading comments are shared unchanged: append the whole group at once
+		// (staticcheck S1011) instead of element by element.
+		result.LeadingCommentGroup = append(result.LeadingCommentGroup,
+			node.LeadingCommentGroup...)
+	}
+	return result
+}
+
+// ignoreHeadComment drops the token's HeadCommentGroup (comments rendered
+// before the token).
+func ignoreHeadComment() tokenNodeOption {
+	return func(o *tokenNodeOpt) {
+		o.ignoreHeadComment = true
+	}
+}
+
+// ignoreLeadingComment drops the token's LeadingCommentGroup (comments
+// rendered after the token).
+func ignoreLeadingComment() tokenNodeOption {
+	return func(o *tokenNodeOpt) {
+		o.ignoreLeadingComment = true
+	}
+}
+
+// ignoreComment drops both the head and the leading comment groups.
+func ignoreComment() tokenNodeOption {
+	return func(o *tokenNodeOpt) {
+		o.ignoreHeadComment = true
+		o.ignoreLeadingComment = true
+	}
+}
+
+// withTokenNodePrefix sets the prefix to the last of the given values; callers
+// pass zero or one prefix, the variadic form only mirrors the Format signature.
+func withTokenNodePrefix(prefix ...string) tokenNodeOption {
+	return func(o *tokenNodeOpt) {
+		// Equivalent to the original loop, which kept only the last element.
+		if len(prefix) > 0 {
+			o.prefix = prefix[len(prefix)-1]
+		}
+	}
+}
+
+// withTokenNodeInfix sets the string used to join token texts.
+func withTokenNodeInfix(infix string) tokenNodeOption {
+	return func(o *tokenNodeOpt) {
+		o.infix = infix
+	}
+}
+
+// expectSameLine asks write to keep all nodes on one line; write still falls
+// back to ModeAuto around nodes that carry comments.
+func expectSameLine() Option {
+	return func(o *option) {
+		o.mode = ModeExpectInSameLine
+	}
+}
+
+// expectIndentInfix joins nodes with a tab so the tabwriter can align columns.
+func expectIndentInfix() Option {
+	return func(o *option) {
+		o.infix = Indent
+	}
+}
+
+// withNode sets the nodes to render.
+func withNode(nodes ...Node) Option {
+	return func(o *option) {
+		o.nodes = nodes
+	}
+}
+
+// withMode sets the write mode explicitly.
+func withMode(mode WriteMode) Option {
+	return func(o *option) {
+		o.mode = mode
+	}
+}
+
+// withPrefix sets the prefix to the last of the given values.
+func withPrefix(prefix ...string) Option {
+	return func(o *option) {
+		for _, p := range prefix {
+			o.prefix = p
+		}
+	}
+}
+
+// withInfix sets the string used to join rendered nodes.
+func withInfix(infix string) Option {
+	return func(o *option) {
+		o.infix = infix
+	}
+}
+
+// withRawText writes directly to the underlying writer, bypassing the
+// tabwriter alignment.
+func withRawText() Option {
+	return func(o *option) {
+		o.rawText = true
+	}
+}
+
+// NewWriter returns a new Writer that renders through a tabwriter
+// (minwidth 1, tabwidth 8, padding 1) so tab-separated columns align.
+func NewWriter(writer io.Writer) *Writer {
+	return &Writer{
+		tw:     tabwriter.NewWriter(writer, 1, 8, 1, ' ', tabwriter.TabIndent),
+		writer: writer,
+	}
+}
+
+// NewBufferWriter returns a new buffer Writer.
+func NewBufferWriter() *Writer {
+	writer := bytes.NewBuffer(nil)
+	return &Writer{
+		tw:     tabwriter.NewWriter(writer, 1, 8, 1, ' ', tabwriter.TabIndent),
+		writer: writer,
+	}
+}
+
+// String flushes pending output and returns the buffered content. It returns
+// an empty string for Writers not backed by a bytes.Buffer (i.e. not created
+// by NewBufferWriter).
+func (w *Writer) String() string {
+	buffer, ok := w.writer.(*bytes.Buffer)
+	if !ok {
+		return ""
+	}
+	w.Flush()
+	return buffer.String()
+}
+
+// Flush flushes the underlying tabwriter; the flush error is deliberately ignored.
+func (w *Writer) Flush() {
+	_ = w.tw.Flush()
+}
+
+// NewLine writes a single newline character.
+func (w *Writer) NewLine() {
+	_, _ = fmt.Fprint(w.tw, NewLine)
+}
+
+// Write writes the node.
+func (w *Writer) Write(opts ...Option) {
+	if len(opts) == 0 {
+		return
+	}
+
+	var opt = new(option)
+	opt.mode = ModeAuto
+	opt.prefix = NilIndent
+	opt.infix = WhiteSpace
+	for _, v := range opts {
+		v(opt)
+	}
+
+	w.write(opt)
+}
+
+// WriteText writes the given text verbatim to the underlying tabwriter.
+// Fprint is used instead of Fprintf: text is not a format string, and a
+// literal '%' in it must not be interpreted as a verb (go vet printf check).
+func (w *Writer) WriteText(text string) {
+	_, _ = fmt.Fprint(w.tw, text)
+}
+
+// write renders opt.nodes joined by opt.infix, inserting a newline between
+// nodes that started on different source lines (in ModeAuto) and collapsing
+// the infix spaces around the newlines it emits.
+func (w *Writer) write(opt *option) {
+	if len(opt.nodes) == 0 {
+		return
+	}
+
+	var textList []string
+	line := opt.nodes[0].End().Line
+	for idx, node := range opt.nodes {
+		mode := opt.mode
+		preIdx := idx - 1
+		var preNodeHasLeading bool
+		// NOTE(review): preIdx < len(opt.nodes) is always true here; only the
+		// preIdx > -1 check matters.
+		if preIdx > -1 && preIdx < len(opt.nodes) {
+			preNode := opt.nodes[preIdx]
+			preNodeHasLeading = preNode.HasLeadingCommentGroup()
+		}
+		// Comments force ModeAuto: a head comment on this node or a leading
+		// comment on the previous one must end the current line.
+		if node.HasHeadCommentGroup() || preNodeHasLeading {
+			mode = ModeAuto
+		}
+
+		if mode == ModeAuto && node.Pos().Line > line {
+			textList = append(textList, NewLine)
+		}
+		line = node.End().Line
+		// Skip nodes whose formatted text is pure whitespace.
+		if util.TrimWhiteSpace(node.Format()) == "" {
+			continue
+		}
+
+		textList = append(textList, node.Format(opt.prefix))
+	}
+
+	text := strings.Join(textList, opt.infix)
+	// Drop the infix spaces that Join placed around explicit newline entries.
+	text = strings.ReplaceAll(text, " \n", "\n")
+	text = strings.ReplaceAll(text, "\n ", "\n")
+	if opt.rawText {
+		// Raw mode bypasses the tabwriter (no column alignment).
+		_, _ = fmt.Fprint(w.writer, text)
+		return
+	}
+	_, _ = fmt.Fprint(w.tw, text)
+}

+ 45 - 0
tools/goctl/pkg/parser/api/format/format.go

@@ -0,0 +1,45 @@
+package format
+
+import (
+	"bytes"
+	"io"
+	"io/ioutil"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
+)
+
+// File formats the api file in place: it reads filename, formats the content,
+// and writes the result back with permission bits 0666.
+//
+// NOTE(review): io/ioutil is deprecated since Go 1.16; os.ReadFile and
+// os.WriteFile are drop-in replacements here.
+func File(filename string) error {
+	data, err := ioutil.ReadFile(filename)
+	if err != nil {
+		return err
+	}
+	buffer := bytes.NewBuffer(nil)
+	if err := Source(data, buffer); err != nil {
+		return err
+	}
+	return ioutil.WriteFile(filename, buffer.Bytes(), 0666)
+}
+
+// Source parses the api source and writes the formatted output to w.
+// Parser errors are returned before anything is written.
+func Source(source []byte, w io.Writer) error {
+	p := parser.New("", source)
+	result := p.Parse()
+	if err := p.CheckErrors(); err != nil {
+		return err
+	}
+
+	result.Format(w)
+	return nil
+}
+
+// formatForUnitTest is like Source but renders via the AST's FormatForUnitTest
+// method instead of Format; parser errors are returned before writing.
+func formatForUnitTest(source []byte, w io.Writer) error {
+	p := parser.New("", source)
+	result := p.Parse()
+	if err := p.CheckErrors(); err != nil {
+		return err
+	}
+
+	result.FormatForUnitTest(w)
+	return nil
+}

+ 1510 - 0
tools/goctl/pkg/parser/api/format/format_test.go

@@ -0,0 +1,1510 @@
+package format
+
+import (
+	"bytes"
+	_ "embed"
+	"os"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/parser"
+)
+
+// formatData describes one formatting test case: the raw input, the expected
+// formatter output, and an optional converter applied to the actual result
+// before comparison.
+type formatData struct {
+	input     string
+	expected  string
+	converter formatResultConvert
+}
+
+// formatResultConvert post-processes formatted output before it is asserted.
+type formatResultConvert func(s string) string
+
+// TestFormat formats the on-disk fixture in place and only asserts no error.
+// EXPERIMENTAL: just for view format code.
+func TestFormat(t *testing.T) {
+	assert.NoError(t, File("testdata/test_format.api"))
+}
+
+// testStructLitData is the raw struct-literal test input.
+//
+//go:embed testdata/test_type_struct_lit.api
+var testStructLitData string
+
+// expectedStructLitData is the expected formatted output for testStructLitData.
+//
+//go:embed testdata/expected_type_struct_lit.api
+var expectedStructLitData string
+
+// TestFormat_ImportLiteralStmt checks formatting of single-line import
+// statements: empty imports are removed, spacing is normalized, and head
+// comments move to their own line while trailing comments stay attached.
+func TestFormat_ImportLiteralStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    `import ""`,
+			expected: ``,
+		},
+		{
+			input:    `import"aa"`,
+			expected: `import "aa"`,
+		},
+		{
+			input: `/*aa*/import "aa"`,
+			expected: `/*aa*/
+import "aa"`,
+		},
+		{
+			input: `/*aa*/import /*bb*/"aa"`,
+			expected: `/*aa*/
+import "aa"`,
+		},
+		{
+			input: `/*aa*/import /*bb*/"aa"// cc`,
+			expected: `/*aa*/
+import "aa" // cc`,
+		},
+	})
+}
+
+// TestFormat_ImportGroupStmt checks formatting of grouped import statements:
+// empty groups are removed, each path goes on its own indented line, and
+// comments are re-anchored to the group tokens they belong to.
+func TestFormat_ImportGroupStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    `import()`,
+			expected: ``,
+		},
+		{
+			input: `import("aa")`,
+			expected: `import (
+	"aa"
+)`,
+		},
+		{
+			input: `import(
+"aa")`,
+			expected: `import (
+	"aa"
+)`,
+		},
+		{
+			input: `import(
+"aa"
+)`,
+			expected: `import (
+	"aa"
+)`,
+		},
+		{
+			input: `import("aa""bb")`,
+			expected: `import (
+	"aa"
+	"bb"
+)`,
+		},
+		{
+			input: `/*aa*/import("aa""bb")`,
+			expected: `/*aa*/
+import (
+	"aa"
+	"bb"
+)`,
+		},
+		{
+			input: `/*aa*/import("aa""bb")// bb`,
+			expected: `/*aa*/
+import (
+	"aa"
+	"bb"
+) // bb`,
+		},
+		{
+			input: `/*aa*/import(// bb
+"aa""bb")// cc`,
+			expected: `/*aa*/
+import ( // bb
+	"aa"
+	"bb"
+) // cc`,
+		},
+		{
+			input: `import(// aa
+"aa" // bb
+"bb" // cc
+)// dd`,
+			expected: `import ( // aa
+	"aa" // bb
+	"bb" // cc
+) // dd`,
+		},
+		{
+			input: `import (// aa
+/*bb*/
+	"aa" // cc
+/*dd*/
+	"bb" // ee
+) // ff`,
+			expected: `import ( // aa
+	/*bb*/
+	"aa" // cc
+	/*dd*/
+	"bb" // ee
+) // ff`,
+		},
+	})
+}
+
+// TestFormat_InfoStmt checks formatting of info blocks: empty blocks are
+// removed, key/value pairs go one per line with values column-aligned, and
+// inline comments between tokens are either re-anchored or dropped.
+func TestFormat_InfoStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    `info()`,
+			expected: ``,
+		},
+		{
+			input: `info(foo:"foo")`,
+			expected: `info (
+	foo: "foo"
+)`,
+		},
+		{
+			input: `info(foo:"foo" bar:"bar")`,
+			expected: `info (
+	foo: "foo"
+	bar: "bar"
+)`,
+		},
+		{
+			input: `info(foo:"foo" bar:"bar" quux:"quux")`,
+			expected: `info (
+	foo:  "foo"
+	bar:  "bar"
+	quux: "quux"
+)`,
+		},
+		{
+			input: `info(foo:"foo"
+bar: "bar")`,
+			expected: `info (
+	foo: "foo"
+	bar: "bar"
+)`,
+		},
+		{
+			input: `info(foo:"foo"// aa
+bar: "bar"// bb
+)`,
+			expected: `info (
+	foo: "foo" // aa
+	bar: "bar" // bb
+)`,
+		},
+		{
+			input: `info(// aa
+foo:"foo"// bb
+bar: "bar"// cc
+)`,
+			expected: `info ( // aa
+	foo: "foo" // bb
+	bar: "bar" // cc
+)`,
+		},
+		{
+			input: `/*aa*/info(// bb
+foo:"foo"// cc
+bar: "bar"// dd
+)`,
+			expected: `/*aa*/
+info ( // bb
+	foo: "foo" // cc
+	bar: "bar" // dd
+)`,
+		},
+		{
+			input: `/*aa*/
+info(// bb
+foo:"foo"// cc
+bar: "bar"// dd
+)// ee`,
+			expected: `/*aa*/
+info ( // bb
+	foo: "foo" // cc
+	bar: "bar" // dd
+) // ee`,
+		},
+		{
+			input: `/*aa*/
+info ( // bb
+	/*cc*/foo: "foo" // dd
+	/*ee*/bar: "bar" // ff
+) // gg`,
+			expected: `/*aa*/
+info ( // bb
+	/*cc*/
+	foo: "foo" // dd
+	/*ee*/
+	bar: "bar" // ff
+) // gg`,
+		},
+		{
+			input: `/*aa*/
+info/*xx*/( // bb
+	/*cc*/foo:/*xx*/ "foo" // dd
+	/*ee*/bar:/*xx*/ "bar" // ff
+) // gg`,
+			expected: `/*aa*/
+info ( // bb
+	/*cc*/
+	foo: "foo" // dd
+	/*ee*/
+	bar: "bar" // ff
+) // gg`,
+		},
+	})
+}
+
+// TestFormat_SyntaxStmt checks formatting of the syntax declaration: spacing
+// around `=` is normalized, multi-line forms collapse to one line, block
+// comments between tokens are dropped, and head comments keep their own line.
+func TestFormat_SyntaxStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    `syntax="v1"`,
+			expected: `syntax = "v1"`,
+		},
+		{
+			input:    `syntax="v1"// aa`,
+			expected: `syntax = "v1" // aa`,
+		},
+		{
+			input: `syntax
+="v1"// aa`,
+			expected: `syntax = "v1" // aa`,
+		},
+		{
+			input: `syntax=
+"v1"// aa`,
+			expected: `syntax = "v1" // aa`,
+		},
+		{
+			input: `/*aa*/syntax="v1"// bb`,
+			expected: `/*aa*/
+syntax = "v1" // bb`,
+		},
+		{
+			input: `/*aa*/
+syntax="v1"// bb`,
+			expected: `/*aa*/
+syntax = "v1" // bb`,
+		},
+		{
+			input:    `syntax/*xx*/=/*xx*/"v1"// bb`,
+			expected: `syntax = "v1" // bb`,
+		},
+	})
+}
+
+func TestFormat_TypeLiteralStmt(t *testing.T) {
+	t.Run("any", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type Any any`,
+				expected: `type Any any`,
+			},
+			{
+				input: `type
+Any
+any
+`,
+				expected: `type Any any`,
+			},
+			{
+				input:    `type Any=any`,
+				expected: `type Any = any`,
+			},
+			{
+				input: `
+type
+Any
+=
+any
+`,
+				expected: `type Any = any`,
+			},
+			{
+				input: `type // aa
+Any  // bb
+any // cc
+`,
+				expected: `type // aa
+Any // bb
+any // cc`,
+			},
+			{
+				input: `
+type
+Any
+=
+any`,
+				expected: `type Any = any`,
+			},
+			{
+				input: `
+type
+Any
+=
+any
+`,
+				expected: `type Any = any`,
+			},
+			{
+				input:    `type Any any// aa`,
+				expected: `type Any any // aa`,
+			},
+			{
+				input:    `type Any=any// aa`,
+				expected: `type Any = any // aa`,
+			},
+			{
+				input:    `type Any any/*aa*/// bb`,
+				expected: `type Any any /*aa*/ // bb`,
+			},
+			{
+				input:    `type Any = any/*aa*/// bb`,
+				expected: `type Any = any /*aa*/ // bb`,
+			},
+			{
+				input:    `type Any/*aa*/ =/*bb*/ any/*cc*/// dd`,
+				expected: `type Any /*aa*/ = /*bb*/ any /*cc*/ // dd`,
+			},
+			{
+				input: `/*aa*/type Any any/*bb*/// cc`,
+				expected: `/*aa*/
+type Any any /*bb*/ // cc`,
+			},
+			{
+				input: `/*aa*/
+type
+/*bb*/
+Any
+/*cc*/
+any/*dd*/// ee`,
+				expected: `/*aa*/
+type
+/*bb*/
+Any
+/*cc*/
+any /*dd*/ // ee`,
+			},
+		})
+	})
+	t.Run("array", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type A [2]int`,
+				expected: `type A [2]int`,
+			},
+			{
+				input: `type
+A
+[2]int
+`,
+				expected: `type A [2]int`,
+			},
+			{
+				input:    `type A=[2]int`,
+				expected: `type A = [2]int`,
+			},
+			{
+				input: `type
+A
+=
+[2]int
+`,
+				expected: `type A = [2]int`,
+			},
+			{
+				input:    `type A [/*xx*/2/*xx*/]/*xx*/int// aa`,
+				expected: `type A [2]int // aa`,
+			},
+			{
+				input: `/*aa*/type/*bb*/A/*cc*/[/*xx*/2/*xx*/]/*xx*/int// dd`,
+				expected: `/*aa*/
+type /*bb*/ A /*cc*/ [2]int // dd`,
+			},
+			{
+				input: `/*aa*/type
+/*bb*/A
+/*cc*/[/*xx*/2/*xx*/]/*xx*/int// dd`,
+				expected: `/*aa*/
+type
+/*bb*/
+A
+/*cc*/
+[2]int // dd`,
+			},
+			{
+				input:    `type A [ 2 ] int`,
+				expected: `type A [2]int`,
+			},
+			{
+				input: `type A [
+2
+]
+int`,
+				expected: `type A [2]int`,
+			},
+			{
+				input: `type A [// aa
+2 // bb
+] // cc
+int`,
+				expected: `type A [2]int`,
+			},
+			{
+				input: `type A [// aa
+/*xx*/
+2 // bb
+/*xx*/
+] // cc
+/*xx*/
+int`,
+				expected: `type A [2]int`,
+			},
+			{
+				input:    `type A [...]int`,
+				expected: `type A [...]int`,
+			},
+			{
+				input:    `type A=[...]int`,
+				expected: `type A = [...]int`,
+			},
+			{
+				input:    `type A/*aa*/[/*xx*/.../*xx*/]/*xx*/int// bb`,
+				expected: `type A /*aa*/ [...]int // bb`,
+			},
+			{
+				input: `/*aa*/
+// bb
+type /*cc*/
+// dd
+A /*ee*/
+// ff
+[/*xx*/.../*xx*/]/*xx*/int// bb`,
+				expected: `/*aa*/
+// bb
+type /*cc*/
+// dd
+A /*ee*/
+// ff
+[...]int // bb`,
+			},
+			{
+				input:    `type A [2][2]int`,
+				expected: `type A [2][2]int`,
+			},
+			{
+				input:    `type A=[2][2]int`,
+				expected: `type A = [2][2]int`,
+			},
+			{
+				input:    `type A [2][]int`,
+				expected: `type A [2][]int`,
+			},
+			{
+				input:    `type A=[2][]int`,
+				expected: `type A = [2][]int`,
+			},
+		})
+	})
+	t.Run("base", func(t *testing.T) {
+		testRun(t, []formatData{
+			// base
+			{
+				input:    `type A int`,
+				expected: `type A int`,
+			},
+			{
+				input:    `type A =int`,
+				expected: `type A = int`,
+			},
+			{
+				input:    `type/*aa*/A/*bb*/ int// cc`,
+				expected: `type /*aa*/ A /*bb*/ int // cc`,
+			},
+			{
+				input:    `type/*aa*/A/*bb*/ =int// cc`,
+				expected: `type /*aa*/ A /*bb*/ = int // cc`,
+			},
+			{
+				input:    `type A int// aa`,
+				expected: `type A int // aa`,
+			},
+			{
+				input:    `type A=int// aa`,
+				expected: `type A = int // aa`,
+			},
+			{
+				input: `/*aa*/type A int`,
+				expected: `/*aa*/
+type A int`,
+			},
+			{
+				input: `/*aa*/type A = int`,
+				expected: `/*aa*/
+type A = int`,
+			},
+			{
+				input: `/*aa*/type/*bb*/ A/*cc*/ int// dd`,
+				expected: `/*aa*/
+type /*bb*/ A /*cc*/ int // dd`,
+			},
+			{
+				input: `/*aa*/type/*bb*/ A/*cc*/ = /*dd*/int// ee`,
+				expected: `/*aa*/
+type /*bb*/ A /*cc*/ = /*dd*/ int // ee`,
+			},
+			{
+				input: `/*aa*/
+type 
+/*bb*/
+A 
+/*cc*/
+int`,
+				expected: `/*aa*/
+type
+/*bb*/
+A
+/*cc*/
+int`,
+			},
+		})
+	})
+	t.Run("interface", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type any interface{}`,
+				expected: `type any interface{}`,
+			},
+			{
+				input:    `type any=interface{}`,
+				expected: `type any = interface{}`,
+			},
+			{
+				input: `type
+any
+interface{}
+`,
+				expected: `type any interface{}`,
+			},
+			{
+				input: `/*aa*/type /*bb*/any /*cc*/interface{} // dd`,
+				expected: `/*aa*/
+type /*bb*/ any /*cc*/ interface{} // dd`,
+			},
+			{
+				input: `/*aa*/type 
+/*bb*/any 
+/*cc*/interface{} // dd`,
+				expected: `/*aa*/
+type
+/*bb*/
+any
+/*cc*/
+interface{} // dd`,
+			},
+			{
+				input: `/*aa*/type 
+// bb
+any 
+// cc
+interface{} // dd`,
+				expected: `/*aa*/
+type
+// bb
+any
+// cc
+interface{} // dd`,
+			},
+		})
+	})
+	t.Run("map", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type M map[int]int`,
+				expected: `type M map[int]int`,
+			},
+			{
+				input:    `type M map [ int ] int`,
+				expected: `type M map[int]int`,
+			},
+			{
+				input:    `type M map [/*xx*/int/*xx*/]/*xx*/int // aa`,
+				expected: `type M map[int]int // aa`,
+			},
+			{
+				input: `/*aa*/type /*bb*/ M/*cc*/map[int]int // dd`,
+				expected: `/*aa*/
+type /*bb*/ M /*cc*/ map[int]int // dd`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+M // dd
+// ee
+map // ff
+[int]// gg
+// hh
+int // dd`,
+				expected: `/*aa*/
+type // bb
+// cc
+M // dd
+// ee
+map[int]int // dd`,
+			},
+			{
+				input:    `type M map[string][2]int // aa`,
+				expected: `type M map[string][2]int // aa`,
+			},
+			{
+				input:    `type M map[string]any`,
+				expected: `type M map[string]any`,
+			},
+			{
+				input:    `type M /*aa*/map/*xx*/[/*xx*/string/*xx*/]/*xx*/[/*xx*/2/*xx*/]/*xx*/int// bb`,
+				expected: `type M /*aa*/ map[string][2]int // bb`,
+			},
+			{
+				input: `type M /*aa*/
+// bb
+map/*xx*/
+//
+[/*xx*/
+//
+string/*xx*/
+//
+]/*xx*/
+//
+[/*xx*/
+//
+2/*xx*/
+//
+]/*xx*/
+//
+int// bb`,
+				expected: `type M /*aa*/
+// bb
+map[string][2]int // bb`,
+			},
+			{
+				input:    `type M map[int]map[string]int`,
+				expected: `type M map[int]map[string]int`,
+			},
+			{
+				input:    `type M map/*xx*/[/*xx*/int/*xx*/]/*xx*/map/*xx*/[/*xx*/string/*xx*/]/*xx*/int// aa`,
+				expected: `type M map[int]map[string]int // aa`,
+			},
+			{
+				input:    `type M map/*xx*/[/*xx*/map/*xx*/[/*xx*/string/*xx*/]/*xx*/int/*xx*/]/*xx*/string // aa`,
+				expected: `type M map[map[string]int]string // aa`,
+			},
+			{
+				input:    `type M map[[2]int]int`,
+				expected: `type M map[[2]int]int`,
+			},
+			{
+				input:    `type M map/*xx*/[/*xx*/[/*xx*/2/*xx*/]/*xx*/int/*xx*/]/*xx*/int// aa`,
+				expected: `type M map[[2]int]int // aa`,
+			},
+		})
+	})
+	t.Run("pointer", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type P *int`,
+				expected: `type P *int`,
+			},
+			{
+				input:    `type P=*int`,
+				expected: `type P = *int`,
+			},
+			{
+				input: `type 
+P 
+*int
+`,
+				expected: `type P *int`,
+			},
+			{
+				input: `/*aa*/type // bb
+/*cc*/
+P // dd
+/*ee*/
+*/*ff*/int // gg
+`,
+				expected: `/*aa*/
+type // bb
+/*cc*/
+P // dd
+/*ee*/
+*int // gg`,
+			},
+			{
+				input:    `type P *bool`,
+				expected: `type P *bool`,
+			},
+			{
+				input:    `type P *[2]int`,
+				expected: `type P *[2]int`,
+			},
+			{
+				input:    `type P=*[2]int`,
+				expected: `type P = *[2]int`,
+			},
+			{
+				input: `/*aa*/type /*bb*/P /*cc*/*/*xx*/[/*xx*/2/*xx*/]/*xx*/int // dd`,
+				expected: `/*aa*/
+type /*bb*/ P /*cc*/ *[2]int // dd`,
+			},
+			{
+				input:    `type P *[...]int`,
+				expected: `type P *[...]int`,
+			},
+			{
+				input:    `type P=*[...]int`,
+				expected: `type P = *[...]int`,
+			},
+			{
+				input: `/*aa*/type /*bb*/P /*cc*/*/*xx*/[/*xx*/.../*xx*/]/*xx*/int // dd`,
+				expected: `/*aa*/
+type /*bb*/ P /*cc*/ *[...]int // dd`,
+			},
+			{
+				input:    `type P *map[string]int`,
+				expected: `type P *map[string]int`,
+			},
+			{
+				input:    `type P=*map[string]int`,
+				expected: `type P = *map[string]int`,
+			},
+			{
+				input:    `type P /*aa*/*/*xx*/map/*xx*/[/*xx*/string/*xx*/]/*xx*/int// bb`,
+				expected: `type P /*aa*/ *map[string]int // bb`,
+			},
+			{
+				input:    `type P *interface{}`,
+				expected: `type P *interface{}`,
+			},
+			{
+				input:    `type P=*interface{}`,
+				expected: `type P = *interface{}`,
+			},
+			{
+				input:    `type P /*aa*/*/*xx*/interface{}// bb`,
+				expected: `type P /*aa*/ *interface{} // bb`,
+			},
+			{
+				input:    `type P *any`,
+				expected: `type P *any`,
+			},
+			{
+				input:    `type P=*any`,
+				expected: `type P = *any`,
+			},
+			{
+				input:    `type P *map[int][2]int`,
+				expected: `type P *map[int][2]int`,
+			},
+			{
+				input:    `type P=*map[int][2]int`,
+				expected: `type P = *map[int][2]int`,
+			},
+			{
+				input:    `type P /*aa*/*/*xx*/map/*xx*/[/*xx*/int/*xx*/]/*xx*/[/*xx*/2/*xx*/]/*xx*/int// bb`,
+				expected: `type P /*aa*/ *map[int][2]int // bb`,
+			},
+			{
+				input:    `type P *map[[2]int]int`,
+				expected: `type P *map[[2]int]int`,
+			},
+			{
+				input:    `type P=*map[[2]int]int`,
+				expected: `type P = *map[[2]int]int`,
+			},
+			{
+				input:    `type P /*aa*/*/*xx*/map/*xx*/[/*xx*/[/*xx*/2/*xx*/]/*xx*/int/*xx*/]/*xx*/int// bb`,
+				expected: `type P /*aa*/ *map[[2]int]int // bb`,
+			},
+		})
+
+	})
+
+	t.Run("slice", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type S []int`,
+				expected: `type S []int`,
+			},
+			{
+				input:    `type S=[]int`,
+				expected: `type S = []int`,
+			},
+			{
+				input:    `type S	[	]	int	`,
+				expected: `type S []int`,
+			},
+			{
+				input:    `type S	[ /*xx*/	]	/*xx*/ int	`,
+				expected: `type S []int`,
+			},
+			{
+				input:    `type S [][]int`,
+				expected: `type S [][]int`,
+			},
+			{
+				input:    `type S=[][]int`,
+				expected: `type S = [][]int`,
+			},
+			{
+				input:    `type S	[	]	[	]	int`,
+				expected: `type S [][]int`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/[/*xx*/]/*xx*/int`,
+				expected: `type S [][]int`,
+			},
+			{
+				input: `type S [//
+]//
+[//
+]//
+int`,
+				expected: `type S [][]int`,
+			},
+			{
+				input:    `type S []map[string]int`,
+				expected: `type S []map[string]int`,
+			},
+			{
+				input:    `type S=[]map[string]int`,
+				expected: `type S = []map[string]int`,
+			},
+			{
+				input: `type S [	]	
+map	[	string	]	
+int`,
+				expected: `type S []map[string]int`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/map/*xx*/[/*xx*/string/*xx*/]/*xx*/int`,
+				expected: `type S []map[string]int`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+S// dd
+// ff
+/*gg*/[ // hh
+/*xx*/] // ii
+/*xx*/map// jj
+/*xx*/[/*xx*/string/*xx*/]/*xx*/int// mm`,
+				expected: `/*aa*/
+type // bb
+// cc
+S // dd
+// ff
+/*gg*/
+[]map[string]int // mm`,
+			},
+			{
+				input:    `type S []map[[2]int]int`,
+				expected: `type S []map[[2]int]int`,
+			},
+			{
+				input:    `type S=[]map[[2]int]int`,
+				expected: `type S = []map[[2]int]int`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/map/*xx*/[/*xx*/[/*xx*/2/*xx*/]/*xx*/int/*xx*/]/*xx*/int`,
+				expected: `type S []map[[2]int]int`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+/*dd*/S// ee
+// ff
+/*gg*/[//
+/*xx*/]//
+/*xx*/map//
+/*xx*/[//
+/*xx*/[//
+/*xx*/2//
+/*xx*/]//
+/*xx*/int//
+/*xx*/]//
+/*xx*/int // hh`,
+				expected: `/*aa*/
+type // bb
+// cc
+/*dd*/
+S // ee
+// ff
+/*gg*/
+[]map[[2]int]int // hh`,
+			},
+			{
+				input:    `type S []map[[2]int]map[int]string`,
+				expected: `type S []map[[2]int]map[int]string`,
+			},
+			{
+				input:    `type S=[]map[[2]int]map[int]string`,
+				expected: `type S = []map[[2]int]map[int]string`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/map/*xx*/[/*xx*/[/*xx*/2/*xx*/]/*xx*/int/*xx*/]/*xx*/map/*xx*/[/*xx*/int/*xx*/]/*xx*/string`,
+				expected: `type S []map[[2]int]map[int]string`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+/*dd*/S// ee
+/*ff*/[//
+/*xx*/]//
+/*xx*/map
+/*xx*/[//
+/*xx*/[//
+/*xx*/2//
+/*xx*/]//
+/*xx*/int//
+/*xx*/]//
+/*xx*/map//
+/*xx*/[//
+/*xx*/int//
+/*xx*/]//
+/*xx*/string// gg`,
+				expected: `/*aa*/
+type // bb
+// cc
+/*dd*/
+S // ee
+/*ff*/
+[]map[[2]int]map[int]string // gg`,
+			},
+			{
+				input:    `type S []*P`,
+				expected: `type S []*P`,
+			},
+			{
+				input:    `type S=[]*P`,
+				expected: `type S = []*P`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/*/*xx*/P`,
+				expected: `type S []*P`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+/*dd*/S// ee 
+/*ff*/[//
+/*xx*/]//
+/*xx*/*//
+/*xx*/P // gg`,
+				expected: `/*aa*/
+type // bb
+// cc
+/*dd*/
+S // ee
+/*ff*/
+[]*P // gg`,
+			},
+			{
+				input:    `type S []*[]int`,
+				expected: `type S []*[]int`,
+			},
+			{
+				input:    `type S=[]*[]int`,
+				expected: `type S = []*[]int`,
+			},
+			{
+				input:    `type S [/*xx*/]/*xx*/*/*xx*/[/*xx*/]/*xx*/int`,
+				expected: `type S []*[]int`,
+			},
+			{
+				input: `/*aa*/
+type // bb
+// cc
+/*dd*/S// ee
+/*ff*/[//
+/*xx*/]//
+/*xx*/*//
+/*xx*/[//
+/*xx*/]//
+/*xx*/int // gg`,
+				expected: `/*aa*/
+type // bb
+// cc
+/*dd*/
+S // ee
+/*ff*/
+[]*[]int // gg`,
+			},
+		})
+	})
+
+	t.Run("struct", func(t *testing.T) {
+		testRun(t, []formatData{
+			{
+				input:    `type T {}`,
+				expected: `type T {}`,
+			},
+			{
+				input: `type T 	{
+			}	`,
+				expected: `type T {}`,
+			},
+			{
+				input:    `type T={}`,
+				expected: `type T = {}`,
+			},
+			{
+				input:    `type T /*aa*/{/*xx*/}// cc`,
+				expected: `type T /*aa*/ {} // cc`,
+			},
+			{
+				input: `/*aa*/type// bb
+// cc
+/*dd*/T // ee
+/*ff*/{//
+/*xx*/}// cc`,
+				expected: `/*aa*/
+type // bb
+// cc
+/*dd*/
+T // ee
+/*ff*/
+{} // cc`,
+			},
+			{
+				input: `type T {
+			Name string
+			}`,
+				expected: `type T {
+	Name string
+}`,
+			},
+			{
+				input: `type T {
+			Foo
+			}`,
+				expected: `type T {
+	Foo
+}`,
+			},
+			{
+				input: `type T {
+			*Foo
+			}`,
+				expected: `type T {
+	*Foo
+}`,
+			},
+			{
+				input:    testStructLitData,
+				expected: expectedStructLitData,
+				converter: func(s string) string {
+					return strings.ReplaceAll(s, "\t", "    ")
+				},
+			},
+		})
+	})
+}
+
+// Golden-file fixtures for the grouped type declaration tests: the raw
+// unformatted input and the expected formatter output.
+//
+//go:embed testdata/test_type_struct_group.api
+var testStructGroupData string
+
+//go:embed testdata/expected_type_struct_group.api
+var expectedStructgroupData string
+
+// TestFormat_TypeGroupStmt verifies formatting of a grouped type declaration
+// (type ( ... )) against the golden files embedded above. The converter
+// expands tabs to 4 spaces so the result can be compared with the
+// space-indented expected fixture.
+func TestFormat_TypeGroupStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    testStructGroupData,
+			expected: expectedStructgroupData,
+			converter: func(s string) string {
+				return strings.ReplaceAll(s, "\t", "    ")
+			},
+		},
+	})
+}
+
+// TestFormat_AtServerStmt verifies normalization of @server blocks: an empty
+// block is elided entirely, key-value pairs are split one per line with
+// values column-aligned, and comments keep their canonical positions.
+func TestFormat_AtServerStmt(t *testing.T) {
+	testRunStmt(t, []formatData{
+		{
+			input:    `@server()`,
+			expected: ``,
+		},
+		{
+			input: `@server(foo:foo)`,
+			expected: `@server (
+	foo: foo
+)`,
+		},
+		{
+			input: `@server(foo:foo quux:quux)`,
+			expected: `@server (
+	foo:  foo
+	quux: quux
+)`,
+		},
+		{
+			input: `@server(
+foo:
+foo
+quux:
+quux
+)`,
+			expected: `@server (
+	foo:  foo
+	quux: quux
+)`,
+		},
+		{
+			input: `/*aa*/@server/*bb*/(/*cc*/foo:/**/foo /*dd*/quux:/**/quux/*ee*/)`,
+			expected: `/*aa*/
+@server ( /*cc*/
+	foo:  foo /*dd*/
+	quux: quux /*ee*/
+)`,
+		},
+		{
+			input: `/*aa*/
+@server
+/*bb*/(// cc
+/*dd*/foo:/**/foo// ee
+/*ff*/quux:/**/quux// gg
+)`,
+			expected: `/*aa*/
+@server
+/*bb*/
+( // cc
+	/*dd*/
+	foo: foo // ee
+	/*ff*/
+	quux: quux // gg
+)`,
+		},
+	})
+}
+
+// TestFormat_AtDocStmt verifies formatting of @doc statements in both
+// forms: the literal form (@doc "text", elided when the text is empty) and
+// the group form (@doc ( k: v ... ), elided when empty, with values
+// column-aligned and comments preserved).
+func TestFormat_AtDocStmt(t *testing.T) {
+	t.Run("AtDocLiteralStmt", func(t *testing.T) {
+		testRunStmt(t, []formatData{
+			{
+				input:    `@doc ""`,
+				expected: ``,
+			},
+			{
+				input:    `@doc "foo"`,
+				expected: `@doc "foo"`,
+			},
+			{
+				input:    `@doc 		"foo"`,
+				expected: `@doc "foo"`,
+			},
+			{
+				input:    `@doc"foo"`,
+				expected: `@doc "foo"`,
+			},
+			{
+				input: `/*aa*/@doc/**/"foo"// bb`,
+				expected: `/*aa*/
+@doc "foo" // bb`,
+			},
+			{
+				input: `/*aa*/
+/*bb*/@doc // cc
+"foo"// ee`,
+				expected: `/*aa*/
+/*bb*/
+@doc "foo" // ee`,
+			},
+		})
+	})
+	t.Run("AtDocGroupStmt", func(t *testing.T) {
+		testRunStmt(t, []formatData{
+			{
+				input:    `@doc()`,
+				expected: ``,
+			},
+			{
+				input: `@doc(foo:"foo")`,
+				expected: `@doc (
+	foo: "foo"
+)`,
+			},
+			{
+				input: `@doc(foo:"foo" bar:"bar")`,
+				expected: `@doc (
+	foo: "foo"
+	bar: "bar"
+)`,
+			},
+			{
+				input: `@doc(foo:"foo" bar:"bar" quux:"quux")`,
+				expected: `@doc (
+	foo:  "foo"
+	bar:  "bar"
+	quux: "quux"
+)`,
+			},
+			{
+				input: `@doc(foo:"foo"
+bar: "bar")`,
+				expected: `@doc (
+	foo: "foo"
+	bar: "bar"
+)`,
+			},
+			{
+				input: `@doc(foo:"foo"// aa
+bar: "bar"// bb
+)`,
+				expected: `@doc (
+	foo: "foo" // aa
+	bar: "bar" // bb
+)`,
+			},
+			{
+				input: `@doc(// aa
+foo:"foo"// bb
+bar: "bar"// cc
+)`,
+				expected: `@doc ( // aa
+	foo: "foo" // bb
+	bar: "bar" // cc
+)`,
+			},
+			{
+				input: `/*aa*/@doc(// bb
+foo:"foo"// cc
+bar: "bar"// dd
+)`,
+				expected: `/*aa*/
+@doc ( // bb
+	foo: "foo" // cc
+	bar: "bar" // dd
+)`,
+			},
+			{
+				input: `/*aa*/
+@doc(// bb
+foo:"foo"// cc
+bar: "bar"// dd
+)// ee`,
+				expected: `/*aa*/
+@doc ( // bb
+	foo: "foo" // cc
+	bar: "bar" // dd
+) // ee`,
+			},
+			{
+				input: `/*aa*/
+@doc ( // bb
+	/*cc*/foo: "foo" // dd
+	/*ee*/bar: "bar" // ff
+) // gg`,
+				expected: `/*aa*/
+@doc ( // bb
+	/*cc*/
+	foo: "foo" // dd
+	/*ee*/
+	bar: "bar" // ff
+) // gg`,
+			},
+			{
+				input: `/*aa*/
+@doc/*xx*/( // bb
+	/*cc*/foo:/*xx*/ "foo" // dd
+	/*ee*/bar:/*xx*/ "bar" // ff
+) // gg`,
+				expected: `/*aa*/
+@doc ( // bb
+	/*cc*/
+	foo: "foo" // dd
+	/*ee*/
+	bar: "bar" // ff
+) // gg`,
+			},
+		})
+	})
+}
+
+// TestFormat_AtHandlerStmt verifies formatting of @handler statements:
+// redundant whitespace is collapsed to a single space and comments are
+// normalized onto their own lines or the trailing position.
+func TestFormat_AtHandlerStmt(t *testing.T) {
+	testRunStmt(t, []formatData{
+		{
+			input:    `@handler foo`,
+			expected: `@handler foo`,
+		},
+		{
+			input:    `@handler 		foo`,
+			expected: `@handler foo`,
+		},
+		{
+			input: `/*aa*/@handler/**/foo// bb`,
+			expected: `/*aa*/
+@handler foo // bb`,
+		},
+		{
+			input: `/*aa*/
+/*bb*/@handler // cc
+foo// ee`,
+			expected: `/*aa*/
+/*bb*/
+@handler foo // ee`,
+		},
+	})
+}
+
+// Golden-file fixtures for the service statement tests: the raw unformatted
+// input and the expected formatter output.
+//
+//go:embed testdata/test_service.api
+var testServiceData string
+
+//go:embed testdata/expected_service.api
+var expectedServiceData string
+
+// TestFormat_ServiceStmt verifies formatting of whole service blocks:
+// empty @server blocks are elided, routes are indented one tab, empty
+// request/response clauses are dropped, and the embedded golden file covers
+// the full mixed-comment case.
+func TestFormat_ServiceStmt(t *testing.T) {
+	testRun(t, []formatData{
+		{
+			input:    `service foo{}`,
+			expected: `service foo {}`,
+		},
+		{
+			input:    `service foo	{	}`,
+			expected: `service foo {}`,
+		},
+		{
+			input:    `@server()service foo	{	}`,
+			expected: `service foo {}`,
+		},
+		{
+			input: `@server(foo:foo quux:quux)service foo	{	}`,
+			expected: `@server (
+	foo:  foo
+	quux: quux
+)
+service foo {}`,
+		},
+		{
+			input:    `service foo-api	{	}`,
+			expected: `service foo-api {}`,
+		},
+		{
+			input: `service foo-api	{
+@doc "foo"
+@handler foo
+post /ping
+}`,
+			expected: `service foo-api {
+	@doc "foo"
+	@handler foo
+	post /ping
+}`,
+		},
+		{
+			input: `service foo-api	{
+@doc(foo: "foo" bar: "bar")
+@handler foo
+post /ping
+}`,
+			expected: `service foo-api {
+	@doc (
+		foo: "foo"
+		bar: "bar"
+	)
+	@handler foo
+	post /ping
+}`,
+		},
+		{
+			input: `service foo-api	{
+@doc(foo: "foo" bar: "bar"
+quux: "quux"
+)
+@handler 	foo
+post 	/ping
+}`,
+			expected: `service foo-api {
+	@doc (
+		foo:  "foo"
+		bar:  "bar"
+		quux: "quux"
+	)
+	@handler foo
+	post /ping
+}`,
+		},
+		{
+			input: `service
+foo-api
+{
+@doc
+(foo: "foo" bar: "bar"
+quux: "quux"
+)
+@handler
+foo
+post
+/aa/:bb/cc-dd/ee
+
+@handler bar
+get /bar () returns (Bar);
+
+@handler baz
+get /bar (Baz) returns ();
+}`,
+			expected: `service foo-api {
+	@doc (
+		foo:  "foo"
+		bar:  "bar"
+		quux: "quux"
+	)
+	@handler foo
+	post /aa/:bb/cc-dd/ee
+
+	@handler bar
+	get /bar returns (Bar)
+
+	@handler baz
+	get /bar (Baz)
+}`,
+		},
+		{
+			input:    testServiceData,
+			expected: expectedServiceData,
+			converter: func(s string) string {
+				return strings.ReplaceAll(s, "\t", "    ")
+			},
+		},
+	})
+}
+
+// TestFormat_error verifies that Source reports an error for input that is
+// not a valid api file rather than writing garbage output.
+func TestFormat_error(t *testing.T) {
+	err := Source([]byte("aaa"), os.Stdout)
+	assertx.Error(t, err)
+}
+
+func testRun(t *testing.T, testData []formatData) {
+	for _, v := range testData {
+		buffer := bytes.NewBuffer(nil)
+		err := formatForUnitTest([]byte(v.input), buffer)
+		assert.NoError(t, err)
+		var result = buffer.String()
+		if v.converter != nil {
+			result = v.converter(result)
+		}
+		assert.Equal(t, v.expected, result)
+	}
+}
+
+// testRunStmt parses each test input, formats only its first statement, and
+// asserts that the result equals the expected text, applying the optional
+// converter first.
+func testRunStmt(t *testing.T, testData []formatData) {
+	// Mark this function as a helper so assertion failures are reported at
+	// the caller's line, not here.
+	t.Helper()
+	for _, v := range testData {
+		p := parser.New("foo.api", v.input)
+		// Renamed from "ast" to avoid shadowing the common ast package name.
+		parsedAST := p.ParseForUintTest()
+		assert.NoError(t, p.CheckErrors())
+		assert.True(t, len(parsedAST.Stmts) > 0)
+		one := parsedAST.Stmts[0]
+		actual := one.Format()
+		if v.converter != nil {
+			actual = v.converter(actual)
+		}
+		assert.Equal(t, v.expected, actual)
+	}
+}

+ 36 - 0
tools/goctl/pkg/parser/api/format/testdata/expected_service.api

@@ -0,0 +1,36 @@
+// server foo
+@server ( // server
+    // key-value form
+    key1:   value1
+    key2:   value2
+    jwt:    Auth // enable jwt
+    prefix: /v1 // the route prefix
+)
+// service foo
+service foo { // foo
+    // example1
+    @doc "example1"
+    @handler example1 // handler declare
+    get /path/example1 // no body
+
+    @doc ( // doc group
+        key1:   "value1"
+        key11:  "value11"
+        key111: "value111"
+    )
+    @handler example2 // handler example2
+    get /path/example2/:id // path arg
+
+    @doc "example3"
+    @handler example3
+    get /path/example3/:id (Foo) // no response
+
+    @doc "example4"
+    @handler example4
+    post /path/example4/a-b returns (Foo) // no request
+
+    @doc "example5"
+    @handler example5
+    // example5
+    post /path/example5/a-b (Foo) returns (Bar) // do not comment between path and body
+}

+ 37 - 0
tools/goctl/pkg/parser/api/format/testdata/expected_type_struct_group.api

@@ -0,0 +1,37 @@
+/*aa*/
+type (
+    /*bb*/
+    T /*cc*/ { // T.bg
+        // Name head1
+        /*Name head2*/
+        Name string `json:"name"` // name
+        Age  int    `json:"age"` // age
+        Extra
+        Address string
+        Hobby []{
+            Name string // hobby.name
+            Rate string
+        } `json:"hobby"` // hobby
+        Child { // child.bg
+            Name     string `json:"name"` // child.name
+            Gender   int    `json:"gender"` // child.gender
+            Birthday string `json:"birthday"` // child.birthday
+            Desc     string // son.desc
+            Son { // son.bg
+                Name     string `json:"name"` // son.name
+                Gender   int    `json:"gender"` // son.gender
+                Birthday string `json:"birthday"` // son.birthday
+                Desc     string // son.desc
+                Hobby []{
+                    Name        string // hobby.name
+                    Description string
+                    // Map
+                    Map map[string]{
+                        Name string `json:"name"`
+                        Age  string `json:"age"`
+                    } `json:"map"`
+                } `json:"hobby"` // hobby
+            } // son.end
+        } // child.end
+    } // T.end
+)

+ 34 - 0
tools/goctl/pkg/parser/api/format/testdata/expected_type_struct_lit.api

@@ -0,0 +1,34 @@
+/*aa*/
+type /*bb*/ T /*cc*/ { // T.bg
+    // Name head1
+    /*Name head2*/
+    Name string `json:"name"` // name
+    Age  int    `json:"age"` // age
+    Extra
+    Address string
+    Hobby []{
+        Name string // hobby.name
+        Rate string
+    } `json:"hobby"` // hobby
+    Child { // child.bg
+        Name     string `json:"name"` // child.name
+        Gender   int    `json:"gender"` // child.gender
+        Birthday string `json:"birthday"` // child.birthday
+        Desc     string // son.desc
+        Son { // son.bg
+            Name     string `json:"name"` // son.name
+            Gender   int    `json:"gender"` // son.gender
+            Birthday string `json:"birthday"` // son.birthday
+            Desc     string // son.desc
+            Hobby []{
+                Name        string // hobby.name
+                Description string
+                // Map
+                Map map[string]{
+                    Name string `json:"name"`
+                    Age  string `json:"age"`
+                } `json:"map"`
+            } `json:"hobby"` // hobby
+        } // son.end
+    } // child.end
+} // T.end

+ 154 - 0
tools/goctl/pkg/parser/api/format/testdata/test_format.api

@@ -0,0 +1,154 @@
+syntax = "v1"
+
+import "example_base1.api"
+
+import (
+	"example_base2.api"
+)
+
+info (
+	title:   "type title here"
+	desc:    "type desc here"
+	author:  "type author here"
+	email:   "type email here"
+	version: "type version here"
+)
+
+type GetFormReq {
+	Name    string   `form:"name"`
+	Age     int      `form:"age"`
+	Hobbits []string `form:"hobbits"`
+	Any     any      `form:"any"`
+}
+
+type GetFormREsp {
+	Name    string   `json:"name"`
+	Age     int      `json:"age"`
+	Hobbits []string `json:"hobbits"`
+}
+
+type (
+	PostFormReq {
+		Name    string   `form:"name"`
+		Age     int      `form:"age"`
+		Hobbits []string `form:"hobbits"`
+	}
+	PostFormResp {
+		Name    string   `json:"name"`
+		Age     int      `json:"age"`
+		Hobbits []string `json:"hobbits"`
+	}
+)
+
+type (
+	PostJsonReq {
+		Name    string   `json:"name"`
+		Age     int      `json:"age"`
+		Hobbits []string `json:"hobbits"`
+	}
+	PostJsonResp {
+		Name    string            `json:"name"`
+		Age     int               `json:"age"`
+		Hobbits []string          `json:"hobbits"`
+		Extra   map[string]string `json:"extra"`
+		Data    interface{}       `json:"data"`
+	}
+)
+
+type (
+	PostPathReq {
+		Id string `path:"id"`
+	}
+	PostPathResp {
+		Name    string            `json:"name"`
+		Age     int               `json:"age"`
+		Hobbits []string          `json:"hobbits"`
+		Extra   map[string]string `json:"extra"`
+		Data    interface{}       `json:"data"`
+	}
+)
+
+type (
+	DemoOfArrayReq {
+		In string `json:"in"`
+	}
+	DemoOfArrayResp {
+		Out string `json:"out"`
+	}
+)
+
+type (
+	Nest {
+		Name string `json:"name"`
+	}
+	NestDemoReq {
+		Nest *Nest `json:"nest"`
+	}
+	NestDemoResp {
+		Nest []*Nest `json:"nest"`
+	}
+)
+
+@server (
+	group: form
+)
+service example {
+	@handler getForm
+	get /example/form (GetFormReq) returns (GetFormREsp)
+
+	@handler postForm
+	post /example/form (PostFormReq) returns (PostFormResp)
+}
+
+@server (
+	group: json
+	jwt:   Auth
+)
+service example {
+	@doc "json demo"
+	@handler postJson
+	post /example/json (PostJsonReq) returns (PostJsonResp)
+}
+
+@server (
+	group:      path
+	middleware: Path
+	prefix:     /v1/v2
+)
+service example {
+	@doc (
+		desc: "path demo"
+	)
+	@handler postPath
+	post /example/path (PostPathReq) returns (PostPathResp)
+}
+
+@server (
+	group:  array
+	prefix: /array
+)
+service example {
+	@doc (
+		desc: "array response demo"
+	)
+	@handler getArray
+	post /example/array (DemoOfArrayReq) returns ([]DemoOfArrayResp)
+
+	@doc (
+		desc: "array pointer response demo"
+	)
+	@handler getArrayPointer
+	post /example/array/pointer (DemoOfArrayReq) returns ([]*DemoOfArrayResp)
+
+	@doc (
+		desc: "array base response demo"
+	)
+	@handler getArrayBase
+	post /example/array/pointer (DemoOfArrayReq) returns ([]string)
+}
+
+service example {
+	@handler nestDemo
+	post /example/nest (NestDemoReq) returns (NestDemoResp)
+}
+

+ 35 - 0
tools/goctl/pkg/parser/api/format/testdata/test_service.api

@@ -0,0 +1,35 @@
+// server foo
+@server(// server
+// key-value form
+key1:value1
+key2:value2
+jwt:Auth // enable jwt
+prefix: /v1 // the route prefix
+)
+// service foo
+service foo{// foo
+    // example1
+    @doc "example1"
+    @handler example1 // handler declare
+    get /path/example1// no body
+@doc( // doc group
+key1:"value1"
+key11:"value11"
+key111:"value111"
+)
+    @handler example2 // handler example2
+get /path/example2/:id// path arg
+    @doc
+    "example3"
+    @handler
+example3
+    get     /path/example3/:id
+    ( Foo )// no response
+    @doc "example4"
+    @handler example4
+    post /path/example4/a-b returns ( Foo )// no request
+    @doc "example5"
+    @handler example5
+    // example5
+    post        /path/example5/a-b ( Foo ) returns ( Bar ) // do not comment between path and body
+}

+ 34 - 0
tools/goctl/pkg/parser/api/format/testdata/test_type_struct_group.api

@@ -0,0 +1,34 @@
+/*aa*/type (
+/*bb*/T /*cc*/{// T.bg
+// Name head1
+/*Name head2*/Name string `json:"name"`// name
+Age int `json:"age"` // age
+Extra
+Address string
+Hobby []{
+Name string // hobby.name
+Rate string
+} `json:"hobby"` // hobby
+Child {// child.bg
+Name string `json:"name"`// child.name
+Gender int `json:"gender"`// child.gender
+Birthday string `json:"birthday"`// child.birthday
+Desc string // son.desc
+Son {// son.bg
+Name string `json:"name"`// son.name
+Gender int `json:"gender"`// son.gender
+Birthday string `json:"birthday"`// son.birthday
+Desc string // son.desc
+Hobby []{
+    Name string // hobby.name
+    Description string
+    // Map
+    Map map[string]{
+Name string `json:"name"`
+Age string `json:"age"`
+}`json:"map"`
+} `json:"hobby"` // hobby
+}// son.end
+}// child.end
+}// T.end
+)

+ 32 - 0
tools/goctl/pkg/parser/api/format/testdata/test_type_struct_lit.api

@@ -0,0 +1,32 @@
+/*aa*/type /*bb*/T /*cc*/{// T.bg
+// Name head1
+/*Name head2*/Name string `json:"name"`// name
+Age int `json:"age"` // age
+Extra
+Address string
+Hobby []{
+Name string // hobby.name
+Rate string
+} `json:"hobby"` // hobby
+Child {// child.bg
+Name string `json:"name"`// child.name
+Gender int `json:"gender"`// child.gender
+Birthday string `json:"birthday"`// child.birthday
+Desc string // son.desc
+Son {// son.bg
+Name string `json:"name"`// son.name
+Gender int `json:"gender"`// son.gender
+Birthday string `json:"birthday"`// son.birthday
+Desc string // son.desc
+Hobby []{
+    Name string // hobby.name
+    Description string
+    // Map
+    Map map[string]{
+Name string `json:"name"`
+Age string `json:"age"`
+}`json:"map"`
+} `json:"hobby"` // hobby
+}// son.end
+}// child.end
+}// T.end

+ 412 - 0
tools/goctl/pkg/parser/api/parser/analyzer.go

@@ -0,0 +1,412 @@
+package parser
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/api/spec"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+// Analyzer analyzes the ast and converts it to spec.
+type Analyzer struct {
+	api  *API
+	spec *spec.ApiSpec
+}
+
+// astTypeToSpec converts an AST data type into its spec representation.
+// Base type names map to spec.PrimitiveType, unknown names are treated as
+// struct references (resolved later), and container types recurse into
+// their element types. Any-typed and struct-literal types are rejected.
+func (a *Analyzer) astTypeToSpec(in ast.DataType) (spec.Type, error) {
+	// isLiteralType reports whether dt is a bare identifier or "any", i.e.
+	// something usable as a map key.
+	isLiteralType := func(dt ast.DataType) bool {
+		_, ok := dt.(*ast.BaseDataType)
+		if ok {
+			return true
+		}
+		_, ok = dt.(*ast.AnyDataType)
+		return ok
+	}
+	switch v := (in).(type) {
+	case *ast.BaseDataType:
+		raw := v.RawText()
+		if IsBaseType(raw) {
+			return spec.PrimitiveType{
+				RawName: raw,
+			}, nil
+		}
+		// Not a builtin: assume it references a user-defined struct.
+		return spec.DefineStruct{RawName: raw}, nil
+	case *ast.AnyDataType:
+		return nil, ast.SyntaxError(v.Pos(), "unsupported any type")
+	case *ast.StructDataType:
+		// TODO(keson) feature: can be extended
+	case *ast.InterfaceDataType:
+		return spec.InterfaceType{RawName: v.RawText()}, nil
+	case *ast.MapDataType:
+		// Report the offending key type, not the whole map, in diagnostics.
+		if !isLiteralType(v.Key) {
+			return nil, ast.SyntaxError(v.Pos(), "expected literal type, got <%T>", v.Key)
+		}
+		if !v.Key.CanEqual() {
+			return nil, ast.SyntaxError(v.Pos(), "map key <%T> must be equal data type", v.Key)
+		}
+		value, err := a.astTypeToSpec(v.Value)
+		if err != nil {
+			return nil, err
+		}
+		return spec.MapType{
+			RawName: v.RawText(),
+			// Fix: the Key field must carry the key type's raw text, not the
+			// raw text of the entire map type.
+			Key:   v.Key.RawText(),
+			Value: value,
+		}, nil
+	case *ast.PointerDataType:
+		raw := v.DataType.RawText()
+		if IsBaseType(raw) {
+			return spec.PointerType{RawName: v.RawText(), Type: spec.PrimitiveType{RawName: raw}}, nil
+		}
+
+		value, err := a.astTypeToSpec(v.DataType)
+		if err != nil {
+			return nil, err
+		}
+		return spec.PointerType{
+			RawName: v.RawText(),
+			Type:    value,
+		}, nil
+	case *ast.ArrayDataType:
+		// "[...]T" has no fixed length and cannot be represented in the spec.
+		if v.Length.Token.Type == token.ELLIPSIS {
+			return nil, ast.SyntaxError(v.Pos(), "Array: unsupported dynamic length")
+		}
+		value, err := a.astTypeToSpec(v.DataType)
+		if err != nil {
+			return nil, err
+		}
+		return spec.ArrayType{
+			RawName: v.RawText(),
+			Value:   value,
+		}, nil
+	case *ast.SliceDataType:
+		value, err := a.astTypeToSpec(v.DataType)
+		if err != nil {
+			return nil, err
+		}
+		return spec.ArrayType{
+			RawName: v.RawText(),
+			Value:   value,
+		}, nil
+	}
+
+	return nil, ast.SyntaxError(in.Pos(), "unsupported type <%T>", in)
+}
+
+// convert2Spec populates the spec from the parsed api: type declarations
+// first (so service routes can resolve them), then the service blocks.
+func (a *Analyzer) convert2Spec() error {
+	err := a.fillTypes()
+	if err == nil {
+		err = a.fillService()
+	}
+	return err
+}
+
+func (a *Analyzer) convertAtDoc(atDoc ast.AtDocStmt) spec.AtDoc {
+	var ret spec.AtDoc
+	switch val := atDoc.(type) {
+	case *ast.AtDocLiteralStmt:
+		ret.Text = val.Value.Token.Text
+	case *ast.AtDocGroupStmt:
+		ret.Properties = a.convertKV(val.Values)
+	}
+	return ret
+}
+
+func (a *Analyzer) convertKV(kv []*ast.KVExpr) map[string]string {
+	var ret = map[string]string{}
+	for _, v := range kv {
+		key := strings.TrimSuffix(v.Key.Token.Text, ":")
+		ret[key] = v.Value.Token.Text
+	}
+	return ret
+}
+
+func (a *Analyzer) fieldToMember(field *ast.ElemExpr) (spec.Member, error) {
+	var name []string
+	for _, v := range field.Name {
+		name = append(name, v.Token.Text)
+	}
+
+	tp, err := a.astTypeToSpec(field.DataType)
+	if err != nil {
+		return spec.Member{}, err
+	}
+
+	head, leading := field.CommentGroup()
+	m := spec.Member{
+		Name:     strings.Join(name, ", "),
+		Type:     tp,
+		Docs:     head.List(),
+		Comment:  leading.String(),
+		IsInline: field.IsAnonymous(),
+	}
+	if field.Tag != nil {
+		m.Tag = field.Tag.Token.Text
+	}
+	return m, nil
+}
+
+func (a *Analyzer) fillRouteType(route *spec.Route) error {
+	if route.RequestType != nil {
+		switch route.RequestType.(type) {
+		case spec.DefineStruct:
+			tp, err := a.findDefinedType(route.RequestType.Name())
+			if err != nil {
+				return err
+			}
+
+			route.RequestType = tp
+		}
+	}
+
+	if route.ResponseType != nil {
+		switch route.ResponseType.(type) {
+		case spec.DefineStruct:
+			tp, err := a.findDefinedType(route.ResponseType.Name())
+			if err != nil {
+				return err
+			}
+
+			route.ResponseType = tp
+		}
+	}
+
+	return nil
+}
+
+// fillService converts every service statement into a spec.Group, converting
+// @server annotations, @doc/@handler metadata and request/response types for
+// each route, and enforcing a single consistent service name.
+func (a *Analyzer) fillService() error {
+	var groups []spec.Group
+	for _, item := range a.api.ServiceStmts {
+		var group spec.Group
+		if item.AtServerStmt != nil {
+			group.Annotation.Properties = a.convertKV(item.AtServerStmt.Values)
+		}
+
+		// Fix: validate the service name once per service block instead of
+		// once per route, so a service with no routes still participates in
+		// the uniqueness check.
+		name := item.Name.Format("")
+		if len(a.spec.Service.Name) > 0 && a.spec.Service.Name != name {
+			return ast.SyntaxError(item.Name.Pos(), "multiple service names defined <%s> and <%s>", name, a.spec.Service.Name)
+		}
+		a.spec.Service.Name = name
+
+		for _, astRoute := range item.Routes {
+			head, leading := astRoute.CommentGroup()
+			route := spec.Route{
+				Method:  astRoute.Route.Method.Token.Text,
+				Path:    astRoute.Route.Path.Format(""),
+				Doc:     head.List(),
+				Comment: leading.List(),
+			}
+			if astRoute.AtDoc != nil {
+				route.AtDoc = a.convertAtDoc(astRoute.AtDoc)
+			}
+			if astRoute.AtHandler != nil {
+				// Fix: removed the duplicated AtDoc conversion that was
+				// re-assigned here; AtDoc is handled above.
+				route.Handler = astRoute.AtHandler.Name.Token.Text
+				head, leading := astRoute.AtHandler.CommentGroup()
+				route.HandlerDoc = head.List()
+				route.HandlerComment = leading.List()
+			}
+
+			if astRoute.Route.Request != nil && astRoute.Route.Request.Body != nil {
+				requestType, err := a.getType(astRoute.Route.Request)
+				if err != nil {
+					return err
+				}
+				route.RequestType = requestType
+			}
+			if astRoute.Route.Response != nil && astRoute.Route.Response.Body != nil {
+				responseType, err := a.getType(astRoute.Route.Response)
+				if err != nil {
+					return err
+				}
+				route.ResponseType = responseType
+			}
+
+			if err := a.fillRouteType(&route); err != nil {
+				return err
+			}
+
+			group.Routes = append(group.Routes, route)
+		}
+		groups = append(groups, group)
+	}
+
+	a.spec.Service.Groups = groups
+	return nil
+}
+
+// fillTypes runs two passes over the type declarations: the first registers
+// every struct into a.spec.Types via fillTypeExpr; the second rewrites
+// members whose types are bare struct references into the full definitions
+// collected by the first pass.
+func (a *Analyzer) fillTypes() error {
+	// Pass 1: register all declared types (literal and grouped forms).
+	for _, item := range a.api.TypeStmt {
+		switch v := (item).(type) {
+		case *ast.TypeLiteralStmt:
+			err := a.fillTypeExpr(v.Expr)
+			if err != nil {
+				return err
+			}
+		case *ast.TypeGroupStmt:
+			for _, expr := range v.ExprList {
+				err := a.fillTypeExpr(expr)
+				if err != nil {
+					return err
+				}
+			}
+		}
+	}
+
+	// Pass 2: resolve member types that reference other defined structs.
+	var types []spec.Type
+	for _, item := range a.spec.Types {
+		switch v := (item).(type) {
+		case spec.DefineStruct:
+			var members []spec.Member
+			for _, member := range v.Members {
+				// NOTE: this inner v intentionally shadows the outer one; it
+				// is the member's type, not the enclosing struct.
+				switch v := member.Type.(type) {
+				case spec.DefineStruct:
+					tp, err := a.findDefinedType(v.RawName)
+					if err != nil {
+						return err
+					}
+
+					member.Type = tp
+				}
+				members = append(members, member)
+			}
+			v.Members = members
+			types = append(types, v)
+		default:
+			return fmt.Errorf("unknown type %+v", v)
+		}
+	}
+	a.spec.Types = types
+
+	return nil
+}
+
+// fillTypeExpr registers one type expression as a spec.DefineStruct; only
+// struct bodies are supported, anything else is a syntax error.
+func (a *Analyzer) fillTypeExpr(expr *ast.TypeExpr) error {
+	head, _ := expr.CommentGroup()
+	st, ok := expr.DataType.(*ast.StructDataType)
+	if !ok {
+		return ast.SyntaxError(expr.Pos(), "expected <struct> expr, got <%T>", expr.DataType)
+	}
+
+	var members []spec.Member
+	for _, field := range st.Elements {
+		m, err := a.fieldToMember(field)
+		if err != nil {
+			return err
+		}
+		members = append(members, m)
+	}
+	a.spec.Types = append(a.spec.Types, spec.DefineStruct{
+		RawName: expr.Name.Token.Text,
+		Members: members,
+		Docs:    head.List(),
+	})
+	return nil
+}
+
+// findDefinedType looks up a previously registered struct type by name and
+// returns an error when no struct with that name was declared.
+func (a *Analyzer) findDefinedType(name string) (spec.Type, error) {
+	for _, tp := range a.spec.Types {
+		_, isStruct := tp.(spec.DefineStruct)
+		if isStruct && tp.Name() == name {
+			return tp, nil
+		}
+	}
+
+	return nil, fmt.Errorf("type %s not defined", name)
+}
+
+// getType converts a route body statement into a spec.Type. The body may be
+// a named type (Foo), a pointer (*Foo), a slice ([]Foo) or a slice of
+// pointers ([]*Foo). Base type names map to spec.PrimitiveType; other names
+// must already be defined by a type statement.
+func (a *Analyzer) getType(expr *ast.BodyStmt) (spec.Type, error) {
+	body := expr.Body
+	var tp spec.Type
+	var err error
+	var rawText = body.Format("")
+	if IsBaseType(body.Value.Token.Text) {
+		tp = spec.PrimitiveType{RawName: body.Value.Token.Text}
+	} else {
+		tp, err = a.findDefinedType(body.Value.Token.Text)
+		if err != nil {
+			return nil, err
+		}
+	}
+	if body.LBrack != nil {
+		// Fix: "[]*Foo" is a slice of pointers, so the array must wrap the
+		// pointer type; previously the slice was dropped and a bare
+		// PointerType was returned.
+		if body.Star != nil {
+			return spec.ArrayType{
+				RawName: rawText,
+				Value: spec.PointerType{
+					RawName: "*" + body.Value.Token.Text,
+					Type:    tp,
+				},
+			}, nil
+		}
+		return spec.ArrayType{
+			RawName: rawText,
+			Value:   tp,
+		}, nil
+	}
+	if body.Star != nil {
+		return spec.PointerType{
+			RawName: rawText,
+			Type:    tp,
+		}, nil
+	}
+	return tp, nil
+}
+
+// Parse parses the given file and returns the parsed spec.
+func Parse(filename string, src interface{}) (*spec.ApiSpec, error) {
+	p := New(filename, src)
+	ast := p.Parse()
+	if err := p.CheckErrors(); err != nil {
+		return nil, err
+	}
+
+	var importManager = make(map[string]placeholder.Type)
+	importManager[ast.Filename] = placeholder.PlaceHolder
+	api, err := convert2API(ast, importManager)
+	if err != nil {
+		return nil, err
+	}
+
+	var result = new(spec.ApiSpec)
+	analyzer := Analyzer{
+		api:  api,
+		spec: result,
+	}
+
+	err = analyzer.convert2Spec()
+	if err != nil {
+		return nil, err
+	}
+
+	return result, nil
+}
+
+// kind is the set of base (primitive) type names recognized by the api
+// language. The placeholder values carry no data; the map is used purely as
+// a membership set.
+var kind = map[string]placeholder.Type{
+	"bool":       placeholder.PlaceHolder,
+	"int":        placeholder.PlaceHolder,
+	"int8":       placeholder.PlaceHolder,
+	"int16":      placeholder.PlaceHolder,
+	"int32":      placeholder.PlaceHolder,
+	"int64":      placeholder.PlaceHolder,
+	"uint":       placeholder.PlaceHolder,
+	"uint8":      placeholder.PlaceHolder,
+	"uint16":     placeholder.PlaceHolder,
+	"uint32":     placeholder.PlaceHolder,
+	"uint64":     placeholder.PlaceHolder,
+	"uintptr":    placeholder.PlaceHolder,
+	"float32":    placeholder.PlaceHolder,
+	"float64":    placeholder.PlaceHolder,
+	"complex64":  placeholder.PlaceHolder,
+	"complex128": placeholder.PlaceHolder,
+	"string":     placeholder.PlaceHolder,
+	"byte":       placeholder.PlaceHolder,
+	"rune":       placeholder.PlaceHolder,
+	"any":        placeholder.PlaceHolder,
+}
+
+// IsBaseType returns true if the given type is a base type.
+func IsBaseType(text string) bool {
+	_, ok := kind[text]
+	return ok
+}

+ 45 - 0
tools/goctl/pkg/parser/api/parser/analyzer_test.go

@@ -0,0 +1,45 @@
+package parser
+
+import (
+	"bytes"
+	"fmt"
+	"io/ioutil"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
+)
+
+// Test_Parse covers the three Parse outcomes: a valid file, a set of
+// deliberately invalid fragments (split out of one fixture on "-----"
+// separators), and a circular-import fixture.
+func Test_Parse(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		_, err := Parse("./testdata/example.api", nil)
+		assert.Nil(t, err)
+	})
+	t.Run("invalid", func(t *testing.T) {
+		content, err := ioutil.ReadFile("./testdata/invalid.api")
+		assert.NoError(t, err)
+		// Fragments that are pure whitespace are skipped; each remaining
+		// fragment becomes its own temp api file.
+		stripper := strings.NewReplacer(" ", "", "\t", "", "\n", "", "\r", "", "\f", "")
+		var files []string
+		for idx, chunk := range bytes.Split(content, []byte("-----")) {
+			if len(stripper.Replace(string(chunk))) == 0 {
+				continue
+			}
+			name := filepath.Join(t.TempDir(), fmt.Sprintf("invalid%d.api", idx))
+			assert.NoError(t, ioutil.WriteFile(name, chunk, 0666))
+			files = append(files, name)
+		}
+		for _, name := range files {
+			_, err := Parse(name, nil)
+			assertx.Error(t, err)
+		}
+	})
+	t.Run("circleImport", func(t *testing.T) {
+		_, err := Parse("./testdata/base.api", nil)
+		assertx.Error(t, err)
+	})
+}

+ 311 - 0
tools/goctl/pkg/parser/api/parser/api.go

@@ -0,0 +1,311 @@
+package parser
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
// API is the parsed api file.
// It is assembled from one file's statements plus, recursively, the type
// and service statements of every imported file (see parseReverse/mergeAPI).
type API struct {
	Filename      string
	Syntax        *ast.SyntaxStmt
	info          *ast.InfoStmt    // Info block does not participate in code generation.
	importStmt    []ast.ImportStmt // ImportStmt block does not participate in code generation.
	TypeStmt      []ast.TypeStmt
	ServiceStmts  []*ast.ServiceStmt
	// importManager records the absolute path of every file visited along the
	// import chain; used by parseImportedAPI to detect import cycles.
	importManager map[string]placeholder.Type
}
+
+func convert2API(a *ast.AST, importManager map[string]placeholder.Type) (*API, error) {
+	var api = new(API)
+	api.importManager = make(map[string]placeholder.Type)
+	api.Filename = a.Filename
+	for k, v := range importManager {
+		api.importManager[k] = v
+	}
+	one := a.Stmts[0]
+	syntax, ok := one.(*ast.SyntaxStmt)
+	if !ok {
+		return nil, ast.SyntaxError(one.Pos(), "expected syntax statement, got <%T>", one)
+	}
+	api.Syntax = syntax
+
+	for i := 1; i < len(a.Stmts); i++ {
+		one := a.Stmts[i]
+		switch val := one.(type) {
+		case *ast.SyntaxStmt:
+			return nil, ast.DuplicateStmtError(val.Pos(), "duplicate syntax statement")
+		case *ast.InfoStmt:
+			if api.info != nil {
+				return nil, ast.DuplicateStmtError(val.Pos(), "duplicate info statement")
+			}
+			api.info = val
+		case ast.ImportStmt:
+			api.importStmt = append(api.importStmt, val)
+		case ast.TypeStmt:
+			api.TypeStmt = append(api.TypeStmt, val)
+		case *ast.ServiceStmt:
+			api.ServiceStmts = append(api.ServiceStmts, val)
+		}
+	}
+
+	if err := api.SelfCheck(); err != nil {
+		return nil, err
+	}
+	return api, nil
+}
+
+func (api *API) checkImportStmt() error {
+	f := newFilter()
+	b := f.addCheckItem("import value expression")
+	for _, v := range api.importStmt {
+		switch val := v.(type) {
+		case *ast.ImportLiteralStmt:
+			b.check(val.Value)
+		case *ast.ImportGroupStmt:
+			b.check(val.Values...)
+		}
+	}
+	return f.error()
+}
+
+func (api *API) checkInfoStmt() error {
+	if api.info == nil {
+		return nil
+	}
+	f := newFilter()
+	b := f.addCheckItem("info key expression")
+	for _, v := range api.info.Values {
+		b.check(v.Key)
+	}
+	return f.error()
+}
+
+func (api *API) checkServiceStmt() error {
+	f := newFilter()
+	serviceNameChecker := f.addCheckItem("service name expression")
+	handlerChecker := f.addCheckItem("handler expression")
+	pathChecker := f.addCheckItem("path expression")
+	var serviceName = map[string]string{}
+	for _, v := range api.ServiceStmts {
+		name := strings.TrimSuffix(v.Name.Format(""), "-api")
+		if sn, ok := serviceName[name]; ok {
+			if sn != name {
+				serviceNameChecker.errorManager.add(ast.SyntaxError(v.Name.Pos(), "multiple service name"))
+			}
+		} else {
+			serviceName[name] = name
+		}
+		var group = api.getAtServerValue(v.AtServerStmt, "prefix")
+		for _, item := range v.Routes {
+			handlerChecker.check(item.AtHandler.Name)
+			path := fmt.Sprintf("[%s]:%s", group, item.Route.Format(""))
+			pathChecker.check(ast.NewTokenNode(token.Token{
+				Text:     path,
+				Position: item.Route.Pos(),
+			}))
+		}
+	}
+	return f.error()
+}
+
+func (api *API) checkTypeStmt() error {
+	f := newFilter()
+	b := f.addCheckItem("type expression")
+	for _, v := range api.TypeStmt {
+		switch val := v.(type) {
+		case *ast.TypeLiteralStmt:
+			b.check(val.Expr.Name)
+		case *ast.TypeGroupStmt:
+			for _, expr := range val.ExprList {
+				b.check(expr.Name)
+			}
+		}
+	}
+	return f.error()
+}
+
+func (api *API) checkTypeDeclareContext() error {
+	var typeMap = map[string]placeholder.Type{}
+	for _, v := range api.TypeStmt {
+		switch tp := v.(type) {
+		case *ast.TypeLiteralStmt:
+			typeMap[tp.Expr.Name.Token.Text] = placeholder.PlaceHolder
+		case *ast.TypeGroupStmt:
+			for _, v := range tp.ExprList {
+				typeMap[v.Name.Token.Text] = placeholder.PlaceHolder
+			}
+		}
+	}
+
+	return api.checkTypeContext(typeMap)
+}
+
+func (api *API) checkTypeContext(declareContext map[string]placeholder.Type) error {
+	var em = newErrorManager()
+	for _, v := range api.TypeStmt {
+		switch tp := v.(type) {
+		case *ast.TypeLiteralStmt:
+			em.add(api.checkTypeExprContext(declareContext, tp.Expr.DataType))
+		case *ast.TypeGroupStmt:
+			for _, v := range tp.ExprList {
+				em.add(api.checkTypeExprContext(declareContext, v.DataType))
+			}
+		}
+	}
+	return em.error()
+}
+
+func (api *API) checkTypeExprContext(declareContext map[string]placeholder.Type, tp ast.DataType) error {
+	switch val := tp.(type) {
+	case *ast.ArrayDataType:
+		return api.checkTypeExprContext(declareContext, val.DataType)
+	case *ast.BaseDataType:
+		if IsBaseType(val.Base.Token.Text) {
+			return nil
+		}
+		_, ok := declareContext[val.Base.Token.Text]
+		if !ok {
+			return ast.SyntaxError(val.Base.Pos(), "unresolved type <%s>", val.Base.Token.Text)
+		}
+		return nil
+	case *ast.MapDataType:
+		var manager = newErrorManager()
+		manager.add(api.checkTypeExprContext(declareContext, val.Key))
+		manager.add(api.checkTypeExprContext(declareContext, val.Value))
+		return manager.error()
+	case *ast.PointerDataType:
+		return api.checkTypeExprContext(declareContext, val.DataType)
+	case *ast.SliceDataType:
+		return api.checkTypeExprContext(declareContext, val.DataType)
+	case *ast.StructDataType:
+		var manager = newErrorManager()
+		for _, e := range val.Elements {
+			manager.add(api.checkTypeExprContext(declareContext, e.DataType))
+		}
+		return manager.error()
+	}
+	return nil
+}
+
+func (api *API) getAtServerValue(atServer *ast.AtServerStmt, key string) string {
+	if atServer == nil {
+		return ""
+	}
+
+	for _, val := range atServer.Values {
+		if val.Key.Token.Text == key {
+			return val.Value.Token.Text
+		}
+	}
+
+	return ""
+}
+
// mergeAPI folds an imported file's API into the receiver: the visited-file
// set is merged, the two syntax versions must match exactly, and only type
// and service statements are carried over (the imported file's info and
// import statements are intentionally not merged).
func (api *API) mergeAPI(in *API) error {
	for k, v := range in.importManager {
		api.importManager[k] = v
	}
	// Both files must declare the same syntax version.
	if api.Syntax.Value.Format() != in.Syntax.Value.Format() {
		return ast.SyntaxError(in.Syntax.Value.Pos(),
			"multiple syntax value expression, expected <%s>, got <%s>",
			api.Syntax.Value.Format(),
			in.Syntax.Value.Format(),
		)
	}
	api.TypeStmt = append(api.TypeStmt, in.TypeStmt...)
	api.ServiceStmts = append(api.ServiceStmts, in.ServiceStmts...)
	return nil
}
+
+func (api *API) parseImportedAPI(imports []ast.ImportStmt) ([]*API, error) {
+	var list []*API
+	if len(imports) == 0 {
+		return list, nil
+	}
+
+	var importValueSet = map[string]token.Token{}
+	for _, imp := range imports {
+		switch val := imp.(type) {
+		case *ast.ImportLiteralStmt:
+			importValueSet[strings.ReplaceAll(val.Value.Token.Text, `"`, "")] = val.Value.Token
+		case *ast.ImportGroupStmt:
+			for _, v := range val.Values {
+				importValueSet[strings.ReplaceAll(v.Token.Text, `"`, "")] = v.Token
+			}
+		}
+	}
+
+	dir := filepath.Dir(api.Filename)
+	for impPath, tok := range importValueSet {
+		if !filepath.IsAbs(impPath) {
+			impPath = filepath.Join(dir, impPath)
+		}
+		// import cycle check
+		if _, ok := api.importManager[impPath]; ok {
+			return nil, ast.SyntaxError(tok.Position, "import circle not allowed")
+		} else {
+			api.importManager[impPath] = placeholder.PlaceHolder
+		}
+
+		p := New(impPath, "")
+		ast := p.Parse()
+		if err := p.CheckErrors(); err != nil {
+			return nil, err
+		}
+
+		nestedApi, err := convert2API(ast, api.importManager)
+		if err != nil {
+			return nil, err
+		}
+
+		if err = nestedApi.parseReverse(); err != nil {
+			return nil, err
+		}
+
+		list = append(list, nestedApi)
+
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return list, nil
+}
+
+func (api *API) parseReverse() error {
+	list, err := api.parseImportedAPI(api.importStmt)
+	if err != nil {
+		return err
+	}
+	for _, e := range list {
+		if err = api.mergeAPI(e); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (api *API) SelfCheck() error {
+	if err := api.parseReverse(); err != nil {
+		return err
+	}
+	if err := api.checkImportStmt(); err != nil {
+		return err
+	}
+	if err := api.checkInfoStmt(); err != nil {
+		return err
+	}
+	if err := api.checkTypeStmt(); err != nil {
+		return err
+	}
+	if err := api.checkServiceStmt(); err != nil {
+		return err
+	}
+	return api.checkTypeDeclareContext()
+}

+ 28 - 0
tools/goctl/pkg/parser/api/parser/error.go

@@ -0,0 +1,28 @@
+package parser
+
+import (
+	"fmt"
+	"strings"
+)
+
+type errorManager struct {
+	errors []string
+}
+
+func newErrorManager() *errorManager {
+	return &errorManager{}
+}
+
+func (e *errorManager) add(err error) {
+	if err == nil {
+		return
+	}
+	e.errors = append(e.errors, err.Error())
+}
+
+func (e *errorManager) error() error {
+	if len(e.errors)==0{
+		return nil
+	}
+	return fmt.Errorf(strings.Join(e.errors, "\n"))
+}

+ 55 - 0
tools/goctl/pkg/parser/api/parser/filter.go

@@ -0,0 +1,55 @@
+package parser
+
+import (
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
+)
+
// filterBuilder performs duplicate detection for one category of
// expressions (e.g. handler names): each checked token text may appear
// only once.
type filterBuilder struct {
	m             map[string]placeholder.Type // set of token texts already seen
	checkExprName string                      // human-readable category used in error messages
	errorManager  *errorManager               // collects duplicate errors
}
+
+func (b *filterBuilder) check(nodes ...*ast.TokenNode) {
+	for _, node := range nodes {
+		if _, ok := b.m[node.Token.Text]; ok {
+			b.errorManager.add(ast.DuplicateStmtError(node.Pos(), "duplicate "+b.checkExprName))
+		} else {
+			b.m[node.Token.Text] = placeholder.PlaceHolder
+		}
+	}
+}
+
// error returns the duplicates collected so far, or nil when there are none.
func (b *filterBuilder) error() error {
	return b.errorManager.error()
}
+
// filter aggregates several filterBuilders so duplicate checks for
// multiple expression categories can be run and reported together.
type filter struct {
	builders []*filterBuilder
}
+
+func newFilter() *filter {
+	return &filter{}
+}
+
+func (f *filter) addCheckItem(checkExprName string) *filterBuilder {
+	b := &filterBuilder{
+		m:             make(map[string]placeholder.Type),
+		checkExprName: checkExprName,
+		errorManager:  newErrorManager(),
+	}
+	f.builders = append(f.builders, b)
+	return b
+}
+
+func (f *filter) error() error {
+	if len(f.builders) == 0 {
+		return nil
+	}
+	var errorManager = newErrorManager()
+	for _, b := range f.builders {
+		errorManager.add(b.error())
+	}
+	return errorManager.error()
+}

+ 1506 - 0
tools/goctl/pkg/parser/api/parser/parser.go

@@ -0,0 +1,1506 @@
+package parser
+
+import (
+	"fmt"
+	"log"
+	"path/filepath"
+	"reflect"
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/scanner"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+const idAPI = "api"
+
// Parser is the parser for api file.
type Parser struct {
	s      *scanner.Scanner // token source
	errors []error          // parse errors accumulated as statements fail

	curTok  token.Token // token currently under examination
	peekTok token.Token // one-token lookahead

	headCommentGroup ast.CommentGroup // comments collected ahead of the next node
	api              *ast.AST         // the AST being built
	node             map[token.Token]*ast.TokenNode // token -> node cache
}
+
// New creates a new parser for filename; src is passed through to the
// scanner unchanged.
//
// NOTE(review): a filepath.Abs failure terminates the whole process via
// log.Fatalln (and MustNewScanner panics by convention) instead of
// returning an error — acceptable for a CLI tool, surprising for library
// use; confirm this is intended.
func New(filename string, src interface{}) *Parser {
	abs, err := filepath.Abs(filename)
	if err != nil {
		log.Fatalln(err)
	}

	p := &Parser{
		s:    scanner.MustNewScanner(abs, src),
		api:  &ast.AST{Filename: abs},
		node: make(map[token.Token]*ast.TokenNode),
	}

	return p
}
+
// Parse parses the api file into an AST. It returns nil as soon as
// initialization or any statement fails; the reasons are recorded on the
// parser's errors field rather than returned.
func (p *Parser) Parse() *ast.AST {
	if !p.init() {
		return nil
	}

	for p.curTokenIsNotEof() {
		stmt := p.parseStmt()
		if isNil(stmt) {
			return nil
		}

		p.appendStmt(stmt)
		if !p.nextToken() {
			return nil
		}
	}

	return p.api
}
+
+func (p *Parser) parseStmt() ast.Stmt {
+	switch p.curTok.Type {
+	case token.IDENT:
+		switch {
+		case p.curTok.Is(token.Syntax):
+			return p.parseSyntaxStmt()
+		case p.curTok.Is(token.Info):
+			return p.parseInfoStmt()
+		case p.curTok.Is(token.Service):
+			return p.parseService()
+		case p.curTok.Is(token.TypeKeyword):
+			return p.parseTypeStmt()
+		case p.curTok.Is(token.ImportKeyword):
+			return p.parseImportStmt()
+		default:
+			p.expectIdentError(p.curTok, token.Syntax, token.Info, token.Service, token.TYPE)
+			return nil
+		}
+	case token.AT_SERVER:
+		return p.parseService()
+	default:
+		p.errors = append(p.errors, fmt.Errorf("%s unexpected token '%s'", p.curTok.Position.String(), p.peekTok.Text))
+		return nil
+	}
+}
+
// parseService parses an optional @server block followed by a
// `service name { ... }` declaration. On entry the current token is
// either @server or the service keyword.
func (p *Parser) parseService() *ast.ServiceStmt {
	var stmt = &ast.ServiceStmt{}
	if p.curTokenIs(token.AT_SERVER) {
		atServerStmt := p.parseAtServerStmt()
		if atServerStmt == nil {
			return nil
		}

		stmt.AtServerStmt = atServerStmt
		if !p.advanceIfPeekTokenIs(token.Service) {
			return nil
		}
	}
	stmt.Service = p.curTokenNode()

	if !p.advanceIfPeekTokenIs(token.IDENT) {
		return nil
	}

	// service name expr
	nameExpr := p.parseServiceNameExpr()
	if nameExpr == nil {
		return nil
	}
	stmt.Name = nameExpr

	// token '{'
	if !p.advanceIfPeekTokenIs(token.LBRACE) {
		return nil
	}
	stmt.LBrace = p.curTokenNode()

	// service item statements
	routes := p.parseServiceItemsStmt()
	if routes == nil {
		return nil
	}
	stmt.Routes = routes

	// token '}'
	if !p.advanceIfPeekTokenIs(token.RBRACE) {
		return nil
	}
	stmt.RBrace = p.curTokenNode()

	return stmt
}
+
// parseServiceItemsStmt parses the items of a service body until the
// closing '}'. After each item the next token must start another item
// (@doc or @handler) or close the block.
func (p *Parser) parseServiceItemsStmt() []*ast.ServiceItemStmt {
	var stmt = make([]*ast.ServiceItemStmt, 0)
	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RBRACE) {
		item := p.parseServiceItemStmt()
		if item == nil {
			return nil
		}

		stmt = append(stmt, item)
		if p.peekTokenIs(token.RBRACE) {
			break
		}
		if p.notExpectPeekToken(token.AT_DOC, token.AT_HANDLER, token.RBRACE) {
			return nil
		}
	}

	return stmt
}
+
// parseServiceItemStmt parses one service item: an optional @doc block,
// a required @handler, and the route line that follows it.
func (p *Parser) parseServiceItemStmt() *ast.ServiceItemStmt {
	var stmt = &ast.ServiceItemStmt{}
	// statement @doc
	if p.peekTokenIs(token.AT_DOC) {
		if !p.nextToken() {
			return nil
		}
		atDocStmt := p.parseAtDocStmt()
		if atDocStmt == nil {
			return nil
		}
		stmt.AtDoc = atDocStmt
	}

	// statement @handler
	if !p.advanceIfPeekTokenIs(token.AT_HANDLER, token.RBRACE) {
		return nil
	}
	// a '}' here means the service body ended without another handler.
	if p.peekTokenIs(token.RBRACE) {
		return stmt
	}
	atHandlerStmt := p.parseAtHandlerStmt()
	if atHandlerStmt == nil {
		return nil
	}
	stmt.AtHandler = atHandlerStmt

	// statement route
	route := p.parseRouteStmt()
	if route == nil {
		return nil
	}
	stmt.Route = route

	return stmt
}
+
// parseRouteStmt parses one route line: an HTTP method token, a path
// expression, an optional request body in parentheses, an optional
// `returns (...)` response body, and an optional trailing ';'.
func (p *Parser) parseRouteStmt() *ast.RouteStmt {
	var stmt = &ast.RouteStmt{}
	// token http method
	if !p.advanceIfPeekTokenIs(token.HttpMethods...) {
		return nil
	}
	stmt.Method = p.curTokenNode()

	// path expression
	pathExpr := p.parsePathExpr()
	if pathExpr == nil {
		return nil
	}
	stmt.Path = pathExpr

	// a following @doc/@handler/'}' means this route has no bodies.
	if p.peekTokenIs(token.AT_DOC, token.AT_HANDLER, token.RBRACE) {
		return stmt
	}
	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
		return stmt
	}

	if p.notExpectPeekToken(token.Returns, token.LPAREN) {
		return nil
	}

	if p.peekTokenIs(token.LPAREN) {
		// request expression
		requestBodyStmt := p.parseBodyStmt()
		if requestBodyStmt == nil {
			return nil
		}
		stmt.Request = requestBodyStmt
	}

	if p.notExpectPeekToken(token.Returns, token.AT_DOC, token.AT_HANDLER, token.RBRACE, token.SEMICOLON) {
		return nil
	}

	// token 'returns'
	if p.peekTokenIs(token.Returns) {
		if !p.nextToken() {
			return nil
		}
		stmt.Returns = p.curTokenNode()

		responseBodyStmt := p.parseBodyStmt()
		if responseBodyStmt == nil {
			return nil
		}

		stmt.Response = responseBodyStmt
	}
	// consume an optional trailing ';'
	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
	}

	return stmt
}
+
// parseBodyStmt parses a parenthesized request/response body:
// '(' [BodyExpr] ')'. An empty '()' is allowed and yields a stmt with no
// Body.
func (p *Parser) parseBodyStmt() *ast.BodyStmt {
	var stmt = &ast.BodyStmt{}
	// token '('
	if !p.advanceIfPeekTokenIs(token.LPAREN) {
		return nil
	}
	stmt.LParen = p.curTokenNode()
	// token ')'
	if p.peekTokenIs(token.RPAREN) {
		if !p.nextToken() {
			return nil
		}
		stmt.RParen = p.curTokenNode()
		return stmt
	}

	expr := p.parseBodyExpr()
	if expr == nil {
		return nil
	}
	stmt.Body = expr

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
+func (p *Parser) parseBodyExpr() *ast.BodyExpr {
+	var expr = &ast.BodyExpr{}
+	switch {
+	case p.peekTokenIs(token.LBRACK): // token '['
+		if !p.nextToken() {
+			return nil
+		}
+		expr.LBrack = p.curTokenNode()
+
+		// token ']'
+		if !p.advanceIfPeekTokenIs(token.RBRACK) {
+			return nil
+		}
+		expr.RBrack = p.curTokenNode()
+
+		switch {
+		case p.peekTokenIs(token.MUL):
+			if !p.nextToken() {
+				return nil
+			}
+			expr.Star = p.curTokenNode()
+			if !p.advanceIfPeekTokenIs(token.IDENT) {
+				return nil
+			}
+			expr.Value = p.curTokenNode()
+
+			return expr
+		case p.peekTokenIs(token.IDENT):
+			if !p.nextToken() {
+				return nil
+			}
+			expr.Value = p.curTokenNode()
+
+			return expr
+		default:
+			p.expectPeekToken(token.MUL, token.IDENT)
+			return nil
+		}
+	case p.peekTokenIs(token.MUL):
+		if !p.nextToken() {
+			return nil
+		}
+		expr.Star = p.curTokenNode()
+		if !p.advanceIfPeekTokenIs(token.IDENT) {
+			return nil
+		}
+		expr.Value = p.curTokenNode()
+
+		return expr
+	case p.peekTokenIs(token.IDENT):
+		if !p.nextToken() {
+			return nil
+		}
+		expr.Value = p.curTokenNode()
+
+		return expr
+	default:
+		p.expectPeekToken(token.LBRACK, token.MUL, token.IDENT)
+		return nil
+	}
+}
+
+func (p *Parser) parsePathExpr() *ast.PathExpr {
+	var expr = &ast.PathExpr{}
+
+	var values []token.Token
+	for p.curTokenIsNotEof() &&
+		p.peekTokenIsNot(token.LPAREN, token.Returns, token.AT_DOC, token.AT_HANDLER, token.SEMICOLON, token.RBRACE) {
+		// token '/'
+		if !p.advanceIfPeekTokenIs(token.QUO) {
+			return nil
+		}
+		values = append(values, p.curTok)
+		if p.notExpectPeekTokenGotComment(p.curTokenNode().PeekFirstLeadingComment(), token.COLON, token.IDENT, token.INT) {
+			return nil
+		}
+
+		// token ':' or IDENT
+		if p.notExpectPeekToken(token.COLON, token.IDENT, token.INT) {
+			return nil
+		}
+
+		if p.notExpectPeekTokenGotComment(p.curTokenNode().PeekFirstLeadingComment(), token.COLON) {
+			return nil
+		}
+
+		// token ':'
+		if p.peekTokenIs(token.COLON) {
+			if !p.nextToken() {
+				return nil
+			}
+
+			values = append(values, p.curTok)
+		}
+
+		// path id tokens
+		pathTokens := p.parsePathItem()
+		if pathTokens == nil {
+			return nil
+		}
+		values = append(values, pathTokens...)
+		if p.notExpectPeekToken(token.QUO, token.LPAREN, token.Returns, token.AT_DOC, token.AT_HANDLER, token.SEMICOLON, token.RBRACE) {
+			return nil
+		}
+	}
+
+	var textList []string
+	for _, v := range values {
+		textList = append(textList, v.Text)
+	}
+
+	node := ast.NewTokenNode(token.Token{
+		Type:     token.PATH,
+		Text:     strings.Join(textList, ""),
+		Position: values[0].Position,
+	})
+	node.SetLeadingCommentGroup(p.curTokenNode().LeadingCommentGroup)
+	expr.Value = node
+
+	return expr
+}
+
+func (p *Parser) parsePathItem() []token.Token {
+	var list []token.Token
+	if !p.advanceIfPeekTokenIs(token.IDENT,token.INT) {
+		return nil
+	}
+	list = append(list, p.curTok)
+
+	for p.curTokenIsNotEof() &&
+		p.peekTokenIsNot(token.QUO, token.LPAREN, token.Returns, token.AT_DOC, token.AT_HANDLER, token.RBRACE, token.SEMICOLON, token.EOF) {
+		if p.peekTokenIs(token.SUB) {
+			if !p.nextToken() {
+				return nil
+			}
+			list = append(list, p.curTok)
+
+			if !p.advanceIfPeekTokenIs(token.IDENT) {
+				return nil
+			}
+			list = append(list, p.curTok)
+		} else {
+			if p.peekTokenIs(token.LPAREN, token.Returns, token.AT_DOC, token.AT_HANDLER, token.SEMICOLON, token.RBRACE) {
+				return list
+			}
+			if !p.advanceIfPeekTokenIs(token.IDENT) {
+				return nil
+			}
+			list = append(list, p.curTok)
+		}
+	}
+
+	return list
+}
+
// parseServiceNameExpr parses the service name; the current token is the
// first identifier. A '-' continuation is accepted only when followed by
// the literal "api" suffix, producing names like "user-api".
func (p *Parser) parseServiceNameExpr() *ast.ServiceNameExpr {
	var expr = &ast.ServiceNameExpr{}
	var text = p.curTok.Text

	pos := p.curTok.Position
	if p.peekTokenIs(token.SUB) {
		if !p.nextToken() {
			return nil
		}

		text += p.curTok.Text
		// only "-api" is allowed after the base name
		if !p.expectPeekToken(idAPI) {
			return nil
		}
		if !p.nextToken() {
			return nil
		}

		text += p.curTok.Text
	}

	// synthesize a single IDENT token for the whole name
	node := ast.NewTokenNode(token.Token{
		Type:     token.IDENT,
		Text:     text,
		Position: pos,
	})
	node.SetLeadingCommentGroup(p.curTokenNode().LeadingCommentGroup)
	expr.Name = node
	return expr
}
+
+func (p *Parser) parseAtDocStmt() ast.AtDocStmt {
+	if p.notExpectPeekToken(token.LPAREN, token.STRING) {
+		return nil
+	}
+	if p.peekTokenIs(token.LPAREN) {
+		return p.parseAtDocGroupStmt()
+	}
+	return p.parseAtDocLiteralStmt()
+}
+
// parseAtDocGroupStmt parses `@doc ( key: value ... )`; the current token
// is @doc.
func (p *Parser) parseAtDocGroupStmt() ast.AtDocStmt {
	var stmt = &ast.AtDocGroupStmt{}
	stmt.AtDoc = p.curTokenNode()

	// token '('
	if !p.advanceIfPeekTokenIs(token.LPAREN) {
		return nil
	}
	stmt.LParen = p.curTokenNode()

	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RPAREN) {
		expr := p.parseKVExpression()
		if expr == nil {
			return nil
		}

		stmt.Values = append(stmt.Values, expr)
		if p.notExpectPeekToken(token.RPAREN, token.KEY) {
			return nil
		}
	}

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
+func (p *Parser) parseAtDocLiteralStmt() ast.AtDocStmt {
+	var stmt = &ast.AtDocLiteralStmt{}
+	stmt.AtDoc = p.curTokenNode()
+
+	if !p.advanceIfPeekTokenIs(token.STRING) {
+		return nil
+	}
+	stmt.Value = p.curTokenNode()
+
+	return stmt
+}
+
+func (p *Parser) parseAtHandlerStmt() *ast.AtHandlerStmt {
+	var stmt = &ast.AtHandlerStmt{}
+	stmt.AtHandler = p.curTokenNode()
+
+	// token IDENT
+	if !p.advanceIfPeekTokenIs(token.IDENT) {
+		return nil
+	}
+	stmt.Name = p.curTokenNode()
+
+	return stmt
+}
+
// parseAtServerStmt parses `@server ( key: value ... )`; the current token
// is @server.
func (p *Parser) parseAtServerStmt() *ast.AtServerStmt {
	var stmt = &ast.AtServerStmt{}
	stmt.AtServer = p.curTokenNode()

	// token '('
	if !p.advanceIfPeekTokenIs(token.LPAREN) {
		return nil
	}
	stmt.LParen = p.curTokenNode()

	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RPAREN) {
		expr := p.parseAtServerKVExpression()
		if expr == nil {
			return nil
		}

		stmt.Values = append(stmt.Values, expr)
		if p.notExpectPeekToken(token.RPAREN, token.KEY) {
			return nil
		}
	}

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
+func (p *Parser) parseTypeStmt() ast.TypeStmt {
+	switch {
+	case p.peekTokenIs(token.LPAREN):
+		return p.parseTypeGroupStmt()
+	case p.peekTokenIs(token.IDENT):
+		return p.parseTypeLiteralStmt()
+	default:
+		p.expectPeekToken(token.LPAREN, token.IDENT)
+		return nil
+	}
+}
+
+func (p *Parser) parseTypeLiteralStmt() ast.TypeStmt {
+	var stmt = &ast.TypeLiteralStmt{}
+	stmt.Type = p.curTokenNode()
+
+	expr := p.parseTypeExpr()
+	if expr == nil {
+		return nil
+	}
+	stmt.Expr = expr
+
+	return stmt
+}
+
// parseTypeGroupStmt parses `type ( ... )`; the current token is the type
// keyword and the lookahead is already known to be '('.
func (p *Parser) parseTypeGroupStmt() ast.TypeStmt {
	var stmt = &ast.TypeGroupStmt{}
	stmt.Type = p.curTokenNode()

	// token '('
	if !p.nextToken() {
		return nil
	}
	stmt.LParen = p.curTokenNode()

	exprList := p.parseTypeExprList()
	if exprList == nil {
		return nil
	}
	stmt.ExprList = exprList

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
+func (p *Parser) parseTypeExprList() []*ast.TypeExpr {
+	if !p.expectPeekToken(token.IDENT, token.RPAREN) {
+		return nil
+	}
+	var exprList = make([]*ast.TypeExpr, 0)
+	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RPAREN, token.EOF) {
+		expr := p.parseTypeExpr()
+		if expr == nil {
+			return nil
+		}
+		exprList = append(exprList, expr)
+		if !p.expectPeekToken(token.IDENT, token.RPAREN) {
+			return nil
+		}
+	}
+	return exprList
+}
+
// parseTypeExpr parses one `Name [=] DataType` declaration. Keywords are
// rejected as type names.
func (p *Parser) parseTypeExpr() *ast.TypeExpr {
	var expr = &ast.TypeExpr{}
	// token IDENT
	if !p.advanceIfPeekTokenIs(token.IDENT) {
		return nil
	}
	if p.curTokenIsKeyword() {
		return nil
	}

	expr.Name = p.curTokenNode()

	// token '=' (optional alias form)
	if p.peekTokenIs(token.ASSIGN) {
		if !p.nextToken() {
			return nil
		}
		expr.Assign = p.curTokenNode()
	}

	dt := p.parseDataType()
	if isNil(dt) {
		return nil
	}
	expr.DataType = dt

	return expr
}
+
// parseDataType dispatches on the lookahead token to the concrete data
// type parsers: any, struct literal, map, base/named type, slice or array,
// interface, and pointer types.
func (p *Parser) parseDataType() ast.DataType {
	switch {
	case p.peekTokenIs(token.Any):
		return p.parseAnyDataType()
	case p.peekTokenIs(token.LBRACE):
		return p.parseStructDataType()
	case p.peekTokenIs(token.IDENT):
		// the map keyword is an identifier token, so the map form is
		// detected inside the IDENT case
		if p.peekTokenIs(token.MapKeyword) {
			return p.parseMapDataType()
		}
		if !p.nextToken() {
			return nil
		}
		if p.curTokenIsKeyword() {
			return nil
		}
		node := p.curTokenNode()
		baseDT := &ast.BaseDataType{Base: node}

		return baseDT
	case p.peekTokenIs(token.LBRACK):
		if !p.nextToken() {
			return nil
		}
		// '[' then ']' is a slice; '[' then INT/'...' is an array
		switch {
		case p.peekTokenIs(token.RBRACK):
			return p.parseSliceDataType()
		case p.peekTokenIs(token.INT, token.ELLIPSIS):
			return p.parseArrayDataType()
		default:
			p.expectPeekToken(token.RBRACK, token.INT, token.ELLIPSIS)
			return nil
		}
	case p.peekTokenIs(token.ANY):
		return p.parseInterfaceDataType()
	case p.peekTokenIs(token.MUL):
		return p.parsePointerDataType()
	default:
		p.expectPeekToken(token.IDENT, token.LBRACK, token.ANY, token.MUL, token.LBRACE)
		return nil
	}
}
// parseStructDataType parses an inline struct literal `{ fields... }`;
// the lookahead is already known to be '{'.
func (p *Parser) parseStructDataType() *ast.StructDataType {
	var tp = &ast.StructDataType{}
	if !p.nextToken() {
		return nil
	}
	tp.LBrace = p.curTokenNode()

	if p.notExpectPeekToken(token.IDENT, token.MUL, token.RBRACE) {
		return nil
	}
	// ElemExprList
	elems := p.parseElemExprList()
	if elems == nil {
		return nil
	}
	tp.Elements = elems

	if !p.advanceIfPeekTokenIs(token.RBRACE) {
		return nil
	}
	tp.RBrace = p.curTokenNode()

	return tp
}
+
+func (p *Parser) parseElemExprList() ast.ElemExprList {
+	var list = make(ast.ElemExprList, 0)
+	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RBRACE, token.EOF) {
+		if p.notExpectPeekToken(token.IDENT, token.MUL, token.RBRACE) {
+			return nil
+		}
+		expr := p.parseElemExpr()
+		if expr == nil {
+			return nil
+		}
+		list = append(list, expr)
+		if p.notExpectPeekToken(token.IDENT, token.MUL, token.RBRACE) {
+			return nil
+		}
+	}
+
+	return list
+}
+
// parseElemExpr parses one struct member: either an embedded/inline type
// (possibly behind '*'), or a `name[, name...] DataType` field. Either
// form may carry a trailing raw-string tag.
func (p *Parser) parseElemExpr() *ast.ElemExpr {
	var expr = &ast.ElemExpr{}
	if !p.advanceIfPeekTokenIs(token.IDENT, token.MUL) {
		return nil
	}
	if p.curTokenIsKeyword() {
		return nil
	}
	identNode := p.curTokenNode()
	if p.curTokenIs(token.MUL) {
		// '*' here means an embedded pointer type, e.g. `*Base`
		star := p.curTokenNode()
		if !p.advanceIfPeekTokenIs(token.IDENT) {
			return nil
		}
		var dt ast.DataType
		if p.curTokenIs(token.Any) {
			dt = &ast.AnyDataType{Any: p.curTokenNode()}
		} else {
			dt = &ast.BaseDataType{Base: p.curTokenNode()}
		}
		expr.DataType = &ast.PointerDataType{
			Star:     star,
			DataType: dt,
		}
	} else if p.peekTok.Line() > identNode.Token.Line() || p.peekTokenIs(token.RAW_STRING) {
		// a line break (or a tag) immediately after the identifier means an
		// embedded type rather than a named field
		if p.curTokenIs(token.Any) {
			expr.DataType = &ast.AnyDataType{Any: identNode}
		} else {
			expr.DataType = &ast.BaseDataType{Base: identNode}
		}
	} else {
		// named field: one or more comma-separated names followed by a type
		expr.Name = append(expr.Name, identNode)
		if p.notExpectPeekToken(token.COMMA, token.IDENT, token.LBRACK, token.ANY, token.MUL, token.LBRACE) {
			return nil
		}

		for p.peekTokenIs(token.COMMA) {
			if !p.nextToken() {
				return nil
			}
			if !p.advanceIfPeekTokenIs(token.IDENT) {
				return nil
			}
			if p.curTokenIsKeyword() {
				return nil
			}
			expr.Name = append(expr.Name, p.curTokenNode())
		}

		dt := p.parseDataType()
		if isNil(dt) {
			return nil
		}
		expr.DataType = dt
	}

	if p.notExpectPeekToken(token.RAW_STRING, token.MUL, token.IDENT, token.RBRACE) {
		return nil
	}

	// optional raw-string tag, e.g. `json:"name"`
	if p.peekTokenIs(token.RAW_STRING) {
		if !p.nextToken() {
			return nil
		}
		expr.Tag = p.curTokenNode()
	}

	return expr
}
+
+func (p *Parser) parseAnyDataType() *ast.AnyDataType {
+	var tp = &ast.AnyDataType{}
+	if !p.nextToken() {
+		return nil
+	}
+	tp.Any = p.curTokenNode()
+
+	return tp
+}
+
+func (p *Parser) parsePointerDataType() *ast.PointerDataType {
+	var tp = &ast.PointerDataType{}
+	if !p.nextToken() {
+		return nil
+	}
+	tp.Star = p.curTokenNode()
+
+	if p.notExpectPeekToken(token.IDENT, token.LBRACK, token.ANY, token.MUL) {
+		return nil
+	}
+	// DataType
+	dt := p.parseDataType()
+	if isNil(dt) {
+		return nil
+	}
+	tp.DataType = dt
+
+	return tp
+}
+
+func (p *Parser) parseInterfaceDataType() *ast.InterfaceDataType {
+	var tp = &ast.InterfaceDataType{}
+	if !p.nextToken() {
+		return nil
+	}
+	tp.Interface = p.curTokenNode()
+
+	return tp
+}
+
// parseMapDataType parses "map[K]V"; the lookahead is the map keyword.
func (p *Parser) parseMapDataType() *ast.MapDataType {
	var tp = &ast.MapDataType{}
	if !p.nextToken() {
		return nil
	}
	tp.Map = p.curTokenNode()

	// token '['
	if !p.advanceIfPeekTokenIs(token.LBRACK) {
		return nil
	}
	tp.LBrack = p.curTokenNode()

	// key DataType
	dt := p.parseDataType()
	if isNil(dt) {
		return nil
	}
	tp.Key = dt

	// token  ']'
	if !p.advanceIfPeekTokenIs(token.RBRACK) {
		return nil
	}
	tp.RBrack = p.curTokenNode()

	// value DataType
	dt = p.parseDataType()
	if isNil(dt) {
		return nil
	}
	tp.Value = dt

	return tp
}
+
+func (p *Parser) parseArrayDataType() *ast.ArrayDataType {
+	var tp = &ast.ArrayDataType{}
+	tp.LBrack = p.curTokenNode()
+
+	// token INT | ELLIPSIS
+	if !p.nextToken() {
+		return nil
+	}
+	tp.Length = p.curTokenNode()
+
+	// token ']'
+	if !p.advanceIfPeekTokenIs(token.RBRACK) {
+		return nil
+	}
+	tp.RBrack = p.curTokenNode()
+
+	// DataType
+	dt := p.parseDataType()
+	if isNil(dt) {
+		return nil
+	}
+	tp.DataType = dt
+
+	return tp
+}
+
+func (p *Parser) parseSliceDataType() *ast.SliceDataType {
+	var tp = &ast.SliceDataType{}
+	tp.LBrack = p.curTokenNode()
+
+	// token ']'
+	if !p.advanceIfPeekTokenIs(token.RBRACK) {
+		return nil
+	}
+	tp.RBrack = p.curTokenNode()
+
+	// DataType
+	dt := p.parseDataType()
+	if isNil(dt) {
+		return nil
+	}
+	tp.DataType = dt
+
+	return tp
+}
+
+func (p *Parser) parseImportStmt() ast.ImportStmt {
+	if p.notExpectPeekToken(token.LPAREN, token.STRING) {
+		return nil
+	}
+
+	if p.peekTokenIs(token.LPAREN) {
+		return p.parseImportGroupStmt()
+	}
+
+	return p.parseImportLiteralStmt()
+}
+
+func (p *Parser) parseImportLiteralStmt() ast.ImportStmt {
+	var stmt = &ast.ImportLiteralStmt{}
+	stmt.Import = p.curTokenNode()
+
+	// token STRING
+	if !p.advanceIfPeekTokenIs(token.STRING) {
+		return nil
+	}
+	stmt.Value = p.curTokenNode()
+
+	return stmt
+}
+
// parseImportGroupStmt parses `import ( "path" ... )`; the current token
// is the import keyword and the lookahead is '('.
func (p *Parser) parseImportGroupStmt() ast.ImportStmt {
	var stmt = &ast.ImportGroupStmt{}
	stmt.Import = p.curTokenNode()

	// token '('
	if !p.advanceIfPeekTokenIs(token.LPAREN) { // assert: dead code
		return nil
	}
	stmt.LParen = p.curTokenNode()

	// token STRING
	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RPAREN) {
		if !p.advanceIfPeekTokenIs(token.STRING) {
			return nil
		}
		stmt.Values = append(stmt.Values, p.curTokenNode())

		if p.notExpectPeekToken(token.RPAREN, token.STRING) {
			return nil
		}
	}

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
// parseInfoStmt parses `info ( key: value ... )`; the current token is the
// info keyword.
func (p *Parser) parseInfoStmt() *ast.InfoStmt {
	var stmt = &ast.InfoStmt{}
	stmt.Info = p.curTokenNode()

	// token '('
	if !p.advanceIfPeekTokenIs(token.LPAREN) {
		return nil
	}
	stmt.LParen = p.curTokenNode()

	for p.curTokenIsNotEof() && p.peekTokenIsNot(token.RPAREN) {
		expr := p.parseKVExpression()
		if expr == nil {
			return nil
		}

		stmt.Values = append(stmt.Values, expr)
		if p.notExpectPeekToken(token.RPAREN, token.KEY) {
			return nil
		}
	}

	// token ')'
	if !p.advanceIfPeekTokenIs(token.RPAREN) {
		return nil
	}
	stmt.RParen = p.curTokenNode()

	return stmt
}
+
+// parseAtServerKVExpression parses a single `key: value` entry inside an
+// @server(...) annotation. The value can take several shapes:
+//   - a slash-led path such as `/v1` or `/v1/v2` (first-token QUO branch),
+//   - a DURATION literal (e.g. `10s`, `1h10m`), returned as-is,
+//   - an INT literal (e.g. `1024`), returned as-is,
+//   - an IDENT, optionally extended by `,IDENT` repetitions (e.g. a
+//     middleware list `M1,M2`) or `/IDENT` repetitions (multi-segment path).
+// Joined multi-token values are merged into one synthetic token re-tagged as
+// token.PATH, keeping the first token's position. Returns nil (error
+// recorded) on any unexpected token.
+func (p *Parser) parseAtServerKVExpression() *ast.KVExpr {
+	var expr = &ast.KVExpr{}
+
+	// token IDENT
+	// NOTE(review): RPAREN is also accepted here — presumably for error
+	// recovery on empty/trailing entries; confirm this is intentional.
+	if !p.advanceIfPeekTokenIs(token.KEY, token.RPAREN) {
+		return nil
+	}
+
+	expr.Key = p.curTokenNode()
+
+	var valueTok token.Token
+	var leadingCommentGroup ast.CommentGroup
+	if p.notExpectPeekToken(token.QUO, token.DURATION, token.IDENT, token.INT) {
+		return nil
+	}
+	if p.peekTokenIs(token.QUO) {
+		// Path value starting with '/': join '/' + IDENT, then fall through
+		// to the trailing loop below to absorb further '/IDENT' segments.
+		if !p.nextToken() {
+			return nil
+		}
+		slashTok := p.curTok
+		if !p.advanceIfPeekTokenIs(token.IDENT) {
+			return nil
+		}
+		idTok := p.curTok
+		valueTok = token.Token{
+			Text:     slashTok.Text + idTok.Text,
+			Position: slashTok.Position,
+		}
+		leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+	} else if p.peekTokenIs(token.DURATION) {
+		// Duration value: single token, returned immediately.
+		if !p.nextToken() {
+			return nil
+		}
+		valueTok = p.curTok
+		leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+		node := ast.NewTokenNode(valueTok)
+		node.SetLeadingCommentGroup(leadingCommentGroup)
+		expr.Value = node
+		return expr
+	} else if p.peekTokenIs(token.INT) {
+		// Integer value: single token, returned immediately.
+		if !p.nextToken() {
+			return nil
+		}
+		valueTok = p.curTok
+		leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+		node := ast.NewTokenNode(valueTok)
+		node.SetLeadingCommentGroup(leadingCommentGroup)
+		expr.Value = node
+		return expr
+	} else {
+		// IDENT value, possibly a comma-joined list (e.g. middleware M1,M2).
+		if !p.advanceIfPeekTokenIs(token.IDENT) {
+			return nil
+		}
+		valueTok = p.curTok
+		leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+		if p.peekTokenIs(token.COMMA) {
+			for {
+				if p.peekTokenIs(token.COMMA) {
+					if !p.nextToken() {
+						return nil
+					}
+					// NOTE(review): the current token here is the ','
+					// separator; the name slashTok is carried over from the
+					// '/' branch and is misleading.
+					slashTok := p.curTok
+					if !p.advanceIfPeekTokenIs(token.IDENT) {
+						return nil
+					}
+					idTok := p.curTok
+					valueTok = token.Token{
+						Text:     valueTok.Text + slashTok.Text + idTok.Text,
+						Position: valueTok.Position,
+					}
+					leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+				} else {
+					break
+				}
+			}
+			// The merged comma list is tagged as a PATH token as well.
+			valueTok.Type = token.PATH
+			node := ast.NewTokenNode(valueTok)
+			node.SetLeadingCommentGroup(leadingCommentGroup)
+			expr.Value = node
+			return expr
+		}
+	}
+
+	// Absorb any further '/IDENT' segments (e.g. `/v1/v2`) into the value.
+	for {
+		if p.peekTokenIs(token.QUO) {
+			if !p.nextToken() {
+				return nil
+			}
+			slashTok := p.curTok
+			if !p.advanceIfPeekTokenIs(token.IDENT) {
+				return nil
+			}
+			idTok := p.curTok
+			valueTok = token.Token{
+				Text:     valueTok.Text + slashTok.Text + idTok.Text,
+				Position: valueTok.Position,
+			}
+			leadingCommentGroup = p.curTokenNode().LeadingCommentGroup
+		} else {
+			break
+		}
+	}
+
+	valueTok.Type = token.PATH
+	node := ast.NewTokenNode(valueTok)
+	node.SetLeadingCommentGroup(leadingCommentGroup)
+	expr.Value = node
+
+	return expr
+}
+
+// parseKVExpression parses a `key: "value"` pair as used inside info(...)
+// blocks: a KEY token followed by a STRING token. Returns nil (error
+// recorded) if either token is missing.
+func (p *Parser) parseKVExpression() *ast.KVExpr {
+	var expr = &ast.KVExpr{}
+
+	// token IDENT
+	if !p.advanceIfPeekTokenIs(token.KEY) {
+		return nil
+	}
+	expr.Key = p.curTokenNode()
+
+	// token STRING
+	if !p.advanceIfPeekTokenIs(token.STRING) {
+		return nil
+	}
+	expr.Value = p.curTokenNode()
+
+	return expr
+}
+
+// parseSyntaxStmt parses a syntax declaration such as `syntax = "v1"`.
+// On entry the current token is the `syntax` keyword; it consumes '=' and a
+// STRING value. Returns nil (error recorded) on malformed input.
+func (p *Parser) parseSyntaxStmt() *ast.SyntaxStmt {
+	var stmt = &ast.SyntaxStmt{}
+	stmt.Syntax = p.curTokenNode()
+
+	// token '='
+	if !p.advanceIfPeekTokenIs(token.ASSIGN) {
+		return nil
+	}
+	stmt.Assign = p.curTokenNode()
+
+	// token STRING
+	if !p.advanceIfPeekTokenIs(token.STRING) {
+		return nil
+	}
+	stmt.Value = p.curTokenNode()
+
+	return stmt
+}
+
+// curTokenIsNotEof reports whether the current token is not EOF.
+func (p *Parser) curTokenIsNotEof() bool {
+	return p.curTokenIsNot(token.EOF)
+}
+
+// curTokenIsNot reports whether the current token's type differs from expected.
+func (p *Parser) curTokenIsNot(expected token.Type) bool {
+	return p.curTok.Type != expected
+}
+
+// curTokenIsKeyword reports whether the current token's text is a reserved
+// keyword; if so it records an "expected IDENT" error for the keyword token.
+func (p *Parser) curTokenIsKeyword() bool {
+	tp, ok := token.LookupKeyword(p.curTok.Text)
+	if ok {
+		// NOTE(review): curTokenIs() with no arguments always returns false
+		// and its result is discarded — this call is a no-op; confirm it can
+		// be removed.
+		p.curTokenIs()
+		p.expectIdentError(p.curTok.Fork(tp), token.IDENT)
+		return true
+	}
+	return false
+}
+
+// curTokenIs reports whether the current token matches any of the expected
+// values; each value may be a token.Type (matched by type) or a string
+// (matched by literal text). With no arguments it returns false.
+func (p *Parser) curTokenIs(expected ...interface{}) bool {
+	for _, v := range expected {
+		switch val := v.(type) {
+		case token.Type:
+			if p.curTok.Type == val {
+				return true
+			}
+		case string:
+			if p.curTok.Text == val {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// advanceIfPeekTokenIs consumes the next token if it matches one of the
+// expected values (type or literal text). On mismatch an error is recorded
+// by expectPeekToken and false is returned; false is also returned when
+// advancing the scanner fails.
+func (p *Parser) advanceIfPeekTokenIs(expected ...interface{}) bool {
+	if p.expectPeekToken(expected...) {
+		if !p.nextToken() {
+			return false
+		}
+		return true
+	}
+
+	return false
+}
+
+// peekTokenIs reports whether the peek token matches any of the expected
+// values; each value may be a token.Type (matched by type) or a string
+// (matched by literal text).
+func (p *Parser) peekTokenIs(expected ...interface{}) bool {
+	for _, v := range expected {
+		switch val := v.(type) {
+		case token.Type:
+			if p.peekTok.Type == val {
+				return true
+			}
+		case string:
+			if p.peekTok.Text == val {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// peekTokenIsNot reports whether the peek token matches none of the expected
+// values (type or literal text). It is the negation of peekTokenIs.
+func (p *Parser) peekTokenIsNot(expected ...interface{}) bool {
+	for _, v := range expected {
+		switch val := v.(type) {
+		case token.Type:
+			if p.peekTok.Type == val {
+				return false
+			}
+		case string:
+			if p.peekTok.Text == val {
+				return false
+			}
+		}
+	}
+	return true
+}
+
+// notExpectPeekToken reports whether the peek token fails to match every
+// expected value; on failure it records a "syntax error: expected ..., got
+// ..." error and returns true. For EOF the reported position is computed
+// past the end of the current token's text.
+// NOTE(review): the error-building body duplicates expectPeekToken almost
+// line for line — consider extracting a shared helper.
+func (p *Parser) notExpectPeekToken(expected ...interface{}) bool {
+	if !p.peekTokenIsNot(expected...) {
+		return false
+	}
+
+	var expectedString []string
+	for _, v := range expected {
+		expectedString = append(expectedString, fmt.Sprintf("'%s'", v))
+	}
+
+	// ILLEGAL tokens carry their raw text; others report their type name.
+	var got string
+	if p.peekTok.Type == token.ILLEGAL {
+		got = p.peekTok.Text
+	} else {
+		got = p.peekTok.Type.String()
+	}
+
+	var err error
+	if p.peekTok.Type == token.EOF {
+		position := p.curTok.Position
+		position.Column = position.Column + len(p.curTok.Text)
+		err = fmt.Errorf(
+			"%s syntax error: expected %s, got '%s'",
+			position,
+			strings.Join(expectedString, " | "),
+			got)
+	} else {
+		err = fmt.Errorf(
+			"%s syntax error: expected %s, got '%s'",
+			p.peekTok.Position,
+			strings.Join(expectedString, " | "),
+			got)
+	}
+	p.errors = append(p.errors, err)
+
+	return true
+}
+
+// notExpectPeekTokenGotComment records a syntax error when a comment
+// statement appears where one of the expected tokens was required. A nil
+// actual means no comment was found and no error is recorded (returns
+// false). Expected values that are token.Token report their text; anything
+// else is formatted directly.
+func (p *Parser) notExpectPeekTokenGotComment(actual *ast.CommentStmt, expected ...interface{}) bool {
+	if actual == nil {
+		return false
+	}
+	var expectedString []string
+	for _, v := range expected {
+		switch val := v.(type) {
+		case token.Token:
+			expectedString = append(expectedString, fmt.Sprintf("'%s'", val.Text))
+		default:
+			expectedString = append(expectedString, fmt.Sprintf("'%s'", v))
+		}
+	}
+
+	got := actual.Comment.Type.String()
+	p.errors = append(p.errors, fmt.Errorf(
+		"%s syntax error: expected %s, got '%s'",
+		p.peekTok.Position,
+		strings.Join(expectedString, " | "),
+		got))
+
+	return true
+}
+
+// expectPeekToken reports whether the peek token matches one of the expected
+// values; on mismatch it records a "syntax error: expected ..., got ..."
+// error and returns false. For EOF the reported position is computed past
+// the end of the current token's text.
+// NOTE(review): shares its error-building body with notExpectPeekToken —
+// consider extracting a shared helper.
+func (p *Parser) expectPeekToken(expected ...interface{}) bool {
+	if p.peekTokenIs(expected...) {
+		return true
+	}
+
+	var expectedString []string
+	for _, v := range expected {
+		expectedString = append(expectedString, fmt.Sprintf("'%s'", v))
+	}
+
+	// ILLEGAL tokens carry their raw text; others report their type name.
+	var got string
+	if p.peekTok.Type == token.ILLEGAL {
+		got = p.peekTok.Text
+	} else {
+		got = p.peekTok.Type.String()
+	}
+
+	var err error
+	if p.peekTok.Type == token.EOF {
+		position := p.curTok.Position
+		position.Column = position.Column + len(p.curTok.Text)
+		err = fmt.Errorf(
+			"%s syntax error: expected %s, got '%s'",
+			position,
+			strings.Join(expectedString, " | "),
+			got)
+	} else {
+		err = fmt.Errorf(
+			"%s syntax error: expected %s, got '%s'",
+			p.peekTok.Position,
+			strings.Join(expectedString, " | "),
+			got)
+	}
+	p.errors = append(p.errors, err)
+
+	return false
+}
+
+// expectIdentError records a syntax error stating that tok's type was found
+// where one of the expected values was required, using tok's position.
+func (p *Parser) expectIdentError(tok token.Token, expected ...interface{}) {
+	var expectedString []string
+	for _, v := range expected {
+		expectedString = append(expectedString, fmt.Sprintf("'%s'", v))
+	}
+
+	p.errors = append(p.errors, fmt.Errorf(
+		"%s syntax error: expected %s, got '%s'",
+		tok.Position,
+		strings.Join(expectedString, " | "),
+		tok.Type.String()))
+}
+
+// init primes the parser by advancing twice so that both curTok and peekTok
+// are populated. Returns false if scanning fails on either advance.
+func (p *Parser) init() bool {
+	if !p.nextToken() {
+		return false
+	}
+	return p.nextToken()
+}
+
+// nextToken shifts peekTok into curTok and scans the next non-comment token
+// into peekTok, recording any scanner error. Along the way it manages
+// comment attachment:
+//   - pending head comments are attached to the new current token's node
+//     (or, at EOF, flushed as standalone statements onto the AST);
+//   - COMMENT/DOCUMENT tokens encountered while scanning are classified by
+//     line: same line as the current token -> leading comment group of the
+//     current token; otherwise -> head comments for a later token.
+// Returns false only when the scanner reports an error.
+func (p *Parser) nextToken() bool {
+	var err error
+	p.curTok = p.peekTok
+	// line tracks the current token's line so trailing comments on the same
+	// line can be recognized below; -1 means "no valid current token".
+	var line = -1
+	if p.curTok.Valid() {
+		if p.curTokenIs(token.EOF) {
+			// At EOF the remaining head comments become top-level statements.
+			for _, v := range p.headCommentGroup {
+				p.appendStmt(v)
+			}
+			p.headCommentGroup = ast.CommentGroup{}
+			return true
+		}
+		node := ast.NewTokenNode(p.curTok)
+		if p.headCommentGroup.Valid() {
+			node.HeadCommentGroup = append(node.HeadCommentGroup, p.headCommentGroup...)
+			p.headCommentGroup = ast.CommentGroup{}
+		}
+		p.node[p.curTok] = node
+		line = p.curTok.Line()
+	}
+	p.peekTok, err = p.s.NextToken()
+	if err != nil {
+		p.errors = append(p.errors, err)
+		return false
+	}
+
+	// Skip over comment tokens, bucketing each as leading (same line as the
+	// current token) or head (belongs to whatever token comes next).
+	var leadingCommentGroup ast.CommentGroup
+	for p.peekTok.Type == token.COMMENT || p.peekTok.Type == token.DOCUMENT {
+		commentStmt := &ast.CommentStmt{Comment: p.peekTok}
+		if p.peekTok.Line() == line && line > -1 {
+			leadingCommentGroup = append(leadingCommentGroup, commentStmt)
+		} else {
+			p.headCommentGroup = append(p.headCommentGroup, commentStmt)
+		}
+		p.peekTok, err = p.s.NextToken()
+		if err != nil {
+			p.errors = append(p.errors, err)
+			return false
+		}
+	}
+
+	if len(leadingCommentGroup) > 0 {
+		p.curTokenNode().SetLeadingCommentGroup(leadingCommentGroup)
+	}
+
+	return true
+}
+
+// curTokenNode returns the AST token node registered for the current token.
+func (p *Parser) curTokenNode() *ast.TokenNode {
+	return p.getNode(p.curTok)
+}
+
+// getNode looks up the token node cached for tok in p.node; nil if absent.
+func (p *Parser) getNode(tok token.Token) *ast.TokenNode {
+	return p.node[tok]
+}
+
+// isNil reports whether v is nil, including the typed-nil-pointer case where
+// an interface holds a nil *T and a plain `v == nil` check would be false.
+func isNil(v interface{}) bool {
+	if v == nil {
+		return true
+	}
+
+	vo := reflect.ValueOf(v)
+	if vo.Kind() == reflect.Ptr {
+		return vo.IsNil()
+	}
+	return false
+}
+
+// CheckErrors check parser errors.
+// It returns nil when no errors were recorded, otherwise a single error
+// whose message is all recorded error messages joined by newlines.
+func (p *Parser) CheckErrors() error {
+	if len(p.errors) == 0 {
+		return nil
+	}
+	var errors []string
+	for _, e := range p.errors {
+		errors = append(errors, e.Error())
+	}
+	// NOTE(review): fmt.Errorf is called with a non-constant format string
+	// (go vet's printf check flags this; literal '%' in a message would be
+	// misinterpreted) — errors.New(strings.Join(...)) would be safer.
+	return fmt.Errorf(strings.Join(errors, "\n"))
+}
+
+// appendStmt appends one or more statements to the parsed API's statement list.
+func (p *Parser) appendStmt(stmt ...ast.Stmt) {
+	p.api.Stmts = append(p.api.Stmts, stmt...)
+}
+
+// hasNoErrors reports whether the parser has recorded no errors so far.
+func (p *Parser) hasNoErrors() bool {
+	return len(p.errors) == 0
+}
+
+/************************EXPERIMENTAL CODE BG************************/
+// The following code blocks are experimental; do not call them outside of unit tests.
+
+// ParseForUintTest parse the source code for unit test.
+func (p *Parser) ParseForUintTest() *ast.AST {
+	api := &ast.AST{}
+	if !p.init() {
+		return nil
+	}
+
+	for p.curTokenIsNotEof() {
+		stmt := p.parseStmtForUniTest()
+		if isNil(stmt) {
+			return nil
+		}
+
+		api.Stmts = append(api.Stmts, stmt)
+		if !p.nextToken() {
+			return nil
+		}
+	}
+
+	return api
+}
+
+// parseStmtForUniTest dispatches on the current token to the @server,
+// @handler or @doc statement parsers; any other token yields nil.
+// NOTE(review): "UniTest" is presumably a typo for "UnitTest".
+func (p *Parser) parseStmtForUniTest() ast.Stmt {
+	switch p.curTok.Type {
+	case token.AT_SERVER:
+		return p.parseAtServerStmt()
+	case token.AT_HANDLER:
+		return p.parseAtHandlerStmt()
+	case token.AT_DOC:
+		return p.parseAtDocStmt()
+	}
+	return nil
+}
+
+/************************EXPERIMENTAL CODE END************************/

+ 1484 - 0
tools/goctl/pkg/parser/api/parser/parser_test.go

@@ -0,0 +1,1484 @@
+package parser
+
+import (
+	_ "embed"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/ast"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+//go:embed testdata/comment_test.api
+var testCommentInput string
+
+// TestParser_init verifies that init returns false for inputs whose first
+// tokens cannot be scanned (illegal characters such as a bare backquote).
+func TestParser_init(t *testing.T) {
+	var testData = []string{
+		"`",
+		"@`",
+		"syntax/**/`",
+	}
+	for _, val := range testData {
+		p := New("test.api", val)
+		// NOTE(review): this `val` shadows the loop variable; a distinct
+		// name (e.g. ok) would be clearer.
+		val := p.init()
+		assert.False(t, val)
+	}
+}
+
+//go:embed testdata/test.api
+var testInput string
+
+// TestParser_Parse checks that a full valid API file parses without errors
+// and that clearly invalid inputs record errors anchored at the offending
+// source text.
+func TestParser_Parse(t *testing.T) {
+	t.Run("valid", func(t *testing.T) { // EXPERIMENTAL: just for testing output formatter.
+		p := New("test.api", testInput)
+		result := p.Parse()
+		assert.NotNil(t, result)
+		assert.True(t, p.hasNoErrors())
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			"foo bar",
+			"@",
+		}
+		for _, val := range testData {
+			p := New("test.api", val)
+			p.Parse()
+			assertx.ErrorOrigin(t, val, p.errors...)
+		}
+	})
+}
+
+// TestParser_Parse_Mode checks that comment statements in the embedded
+// fixture are parsed in order and format back to their original text.
+func TestParser_Parse_Mode(t *testing.T) {
+
+	t.Run("All", func(t *testing.T) {
+		var testData = []string{
+			`// foo`,
+			`// bar`,
+			`/*foo*/`,
+			`/*bar*/`,
+			`//baz`,
+		}
+		p := New("foo.api", testCommentInput)
+		result := p.Parse()
+		for idx, v := range testData {
+			stmt := result.Stmts[idx]
+			c, ok := stmt.(*ast.CommentStmt)
+			assert.True(t, ok)
+			assert.True(t, p.hasNoErrors())
+			assert.Equal(t, v, c.Format(""))
+		}
+	})
+}
+
+// TestParser_Parse_syntaxStmt covers syntax declarations: valid inputs must
+// format to a normalized `syntax = "..."` form, invalid ones must record
+// errors anchored at the input.
+func TestParser_Parse_syntaxStmt(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []struct {
+			input    string
+			expected string
+		}{
+			{
+				input:    `syntax = "v1"`,
+				expected: `syntax = "v1"`,
+			},
+			{
+				input:    `syntax = "foo"`,
+				expected: `syntax = "foo"`,
+			},
+			{
+				input:    `syntax= "bar"`,
+				expected: `syntax = "bar"`,
+			},
+			{
+				input:    ` syntax = "" `,
+				expected: `syntax = ""`,
+			},
+		}
+		for _, v := range testData {
+			// NOTE(review): filename "foo.aoi" looks like a typo for
+			// "foo.api" (harmless — the name is only used in messages).
+			p := New("foo.aoi", v.input)
+			result := p.Parse()
+			assert.True(t, p.hasNoErrors())
+			assert.Equal(t, v.expected, result.Stmts[0].Format(""))
+		}
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`syntax`,
+			`syntax = `,
+			`syntax = ''`,
+			`syntax = @`,
+			`syntax = "v1`,
+			`syntax == "v"`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/info_test.api
+var infoTestAPI string
+
+// TestParser_Parse_infoStmt covers info(...) blocks: key/value extraction
+// from the embedded fixture, the empty `info ()` form, and a battery of
+// malformed inputs that must record errors.
+func TestParser_Parse_infoStmt(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		expected := map[string]string{
+			"title:":   `"type title here"`,
+			"desc:":    `"type desc here"`,
+			"author:":  `"type author here"`,
+			"email:":   `"type email here"`,
+			"version:": `"type version here"`,
+		}
+		p := New("foo.api", infoTestAPI)
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		infoStmt, ok := stmt.(*ast.InfoStmt)
+		assert.True(t, ok)
+		for _, stmt := range infoStmt.Values {
+			actual := stmt.Value.Token.Text
+			expectedValue := expected[stmt.Key.Token.Text]
+			assert.Equal(t, expectedValue, actual)
+		}
+
+	})
+
+	t.Run("empty", func(t *testing.T) {
+		p := New("foo.api", "info ()")
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		infoStmt, ok := stmt.(*ast.InfoStmt)
+		assert.True(t, ok)
+		assert.Equal(t, "info", infoStmt.Info.Token.Text)
+		assert.Equal(t, "(", infoStmt.LParen.Token.Text)
+		assert.Equal(t, ")", infoStmt.RParen.Token.Text)
+		assert.Equal(t, 0, len(infoStmt.Values))
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`info`,
+			`info(`,
+			`info{`,
+			`info(}`,
+			`info( foo`,
+			`info( foo:`,
+			`info( foo:""`,
+			`info( foo:"" bar`,
+			`info( foo:"" bar:`,
+			`info( foo:"" bar:""`,
+			`info( foo:"`,
+			`info foo:""`,
+			`info( foo,""`,
+			`info( foo-bar:"")`,
+			`info(123:"")`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/import_literal_test.api
+var testImportLiteral string
+
+// TestParser_Parse_importLiteral covers the `import "..."` literal form:
+// values from the embedded fixture parse in order, and malformed imports
+// record errors anchored at the input.
+func TestParser_Parse_importLiteral(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []string{
+			`""`,
+			`"foo"`,
+			`"bar"`,
+		}
+		p := New("foo.api", testImportLiteral)
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		for idx, v := range testData {
+			stmt := result.Stmts[idx]
+			importLit, ok := stmt.(*ast.ImportLiteralStmt)
+			assert.True(t, ok)
+			assert.Equal(t, v, importLit.Value.Token.Text)
+		}
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`import`,
+			`import "`,
+			`import "foo`,
+			`import foo`,
+			`import @`,
+			`import $`,
+			`import 好`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/import_group_test.api
+var testImportGroup string
+
+// TestParser_Parse_importGroup covers the `import ( ... )` grouped form:
+// token types and values from the embedded fixture, the empty `import ()`
+// form, and malformed groups that must record errors.
+func TestParser_Parse_importGroup(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []string{
+			`""`,
+			`"foo"`,
+			`"bar"`,
+		}
+		p := New("foo.api", testImportGroup)
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		importGroup, ok := stmt.(*ast.ImportGroupStmt)
+		assert.Equal(t, token.IDENT, importGroup.Import.Token.Type)
+		assert.Equal(t, token.LPAREN, importGroup.LParen.Token.Type)
+		assert.Equal(t, token.RPAREN, importGroup.RParen.Token.Type)
+		for idx, v := range testData {
+			assert.True(t, ok)
+			assert.Equal(t, v, importGroup.Values[idx].Token.Text)
+		}
+	})
+
+	t.Run("empty", func(t *testing.T) {
+		p := New("foo.api", "import ()")
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		importGroup, ok := stmt.(*ast.ImportGroupStmt)
+		assert.True(t, ok)
+		assert.Equal(t, token.IDENT, importGroup.Import.Token.Type)
+		assert.Equal(t, token.LPAREN, importGroup.LParen.Token.Type)
+		assert.Equal(t, token.RPAREN, importGroup.RParen.Token.Type)
+		assert.Equal(t, 0, len(importGroup.Values))
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`import`,
+			`import (`,
+			`import {`,
+			`import ( "`,
+			`import (} "`,
+			`import ( ")`,
+			`import ( ""`,
+			`import ( "" foo)`,
+			`import ( "" 好)`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/atserver_test.api
+var atServerTestAPI string
+
+// TestParser_Parse_atServerStmt covers @server(...) annotations via
+// ParseForUintTest: key/value shapes (idents, paths, middleware lists,
+// durations, ints) from the embedded fixture, the empty `@server()` form,
+// and malformed inputs in both comment-handling modes.
+func TestParser_Parse_atServerStmt(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var expectedData = map[string]string{
+			"foo:":        `bar`,
+			"bar:":        `baz`,
+			"baz:":        `foo`,
+			"qux:":        `/v1`,
+			"quux:":       `/v1/v2`,
+			"middleware:": `M1,M2`,
+			"timeout1:":   "1h",
+			"timeout2:":   "10m",
+			"timeout3:":   "10s",
+			"timeout4:":   "10ms",
+			"timeout5:":   "10µs",
+			"timeout6:":   "10ns",
+			"timeout7:":   "1h10m10s10ms10µs10ns",
+			"maxBytes:":   `1024`,
+		}
+
+		p := New("foo.api", atServerTestAPI)
+		result := p.ParseForUintTest()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		atServerStmt, ok := stmt.(*ast.AtServerStmt)
+		assert.True(t, ok)
+		for _, v := range atServerStmt.Values {
+			expectedValue := expectedData[v.Key.Token.Text]
+			assert.Equal(t, expectedValue, v.Value.Token.Text)
+		}
+	})
+
+	t.Run("empty", func(t *testing.T) {
+		p := New("foo.api", `@server()`)
+		result := p.ParseForUintTest()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		atServerStmt, ok := stmt.(*ast.AtServerStmt)
+		assert.True(t, ok)
+		assert.Equal(t, token.AT_SERVER, atServerStmt.AtServer.Token.Type)
+		assert.Equal(t, token.LPAREN, atServerStmt.LParen.Token.Type)
+		assert.Equal(t, token.RPAREN, atServerStmt.RParen.Token.Type)
+		assert.Equal(t, 0, len(atServerStmt.Values))
+	})
+
+	t.Run("invalidInSkipCommentMode", func(t *testing.T) {
+		var testData = []string{
+			`@server`,
+			`@server{`,
+			`@server(`,
+			`@server(}`,
+			`@server( //foo`,
+			`@server( foo`,
+			`@server( foo:`,
+			`@server( foo:bar bar`,
+			`@server( foo:bar bar,`,
+			`@server( foo:bar bar: 123`,
+			`@server( foo:bar bar: ""`,
+			`@server( foo:bar bar: @`,
+			`@server("":foo)`,
+			`@server(foo:bar,baz)`,
+			`@server(foo:/`,
+			`@server(foo:/v`,
+			`@server(foo:/v1/`,
+			`@server(foo:/v1/v`,
+			`@server(foo:/v1/v2`,
+			`@server(foo: m1,`,
+			`@server(foo: m1,)`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+
+	t.Run("invalidWithNoSkipCommentMode", func(t *testing.T) {
+		var testData = []string{
+			`@server`,
+			`@server //foo`,
+			`@server /*foo*/`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.Error(t, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/athandler_test.api
+var atHandlerTestAPI string
+
+// TestParser_Parse_atHandler covers @handler statements via
+// ParseForUintTest: fixture statements must format back to their source
+// text, and malformed handler names must record errors.
+func TestParser_Parse_atHandler(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []string{
+			`@handler foo`,
+			`@handler foo1`,
+			`@handler _bar`,
+		}
+
+		p := New("foo.api", atHandlerTestAPI)
+		result := p.ParseForUintTest()
+		assert.True(t, p.hasNoErrors())
+		for idx, v := range testData {
+			stmt := result.Stmts[idx]
+			atHandlerStmt, ok := stmt.(*ast.AtHandlerStmt)
+			assert.True(t, ok)
+			assert.Equal(t, v, atHandlerStmt.Format(""))
+		}
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`@handler`,
+			`@handler 1`,
+			`@handler ""`,
+			`@handler @`,
+			`@handler $`,
+			`@handler ()`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.ParseForUintTest()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/atdoc_literal_test.api
+var atDocLiteralTestAPI string
+
+// TestParser_Parse_atDocLiteral covers the `@doc "..."` literal form via
+// ParseForUintTest: fixture values parse in order, malformed docs record
+// errors anchored at the input.
+func TestParser_Parse_atDocLiteral(t *testing.T) {
+	t.Run("validLiteral", func(t *testing.T) {
+		var testData = []string{
+			`""`,
+			`"foo"`,
+			`"bar"`,
+		}
+
+		p := New("foo.api", atDocLiteralTestAPI)
+		result := p.ParseForUintTest()
+		assert.True(t, p.hasNoErrors())
+		for idx, v := range testData {
+			stmt := result.Stmts[idx]
+			atDocLitStmt, ok := stmt.(*ast.AtDocLiteralStmt)
+			assert.True(t, ok)
+			assert.Equal(t, v, atDocLitStmt.Value.Token.Text)
+		}
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`@doc`,
+			`@doc "`,
+			`@doc $`,
+			`@doc 好`,
+			`@doc |`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.ParseForUintTest()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/atdoc_group_test.api
+var atDocGroupTestAPI string
+
+// TestParser_Parse_atDocGroup covers the grouped `@doc ( ... )` form via
+// ParseForUintTest: the fixture must format to the expected multi-line
+// text, malformed groups must record errors.
+func TestParser_Parse_atDocGroup(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = `@doc (
+	foo: "foo"
+	bar: "bar"
+	baz: ""
+)`
+
+		p := New("foo.api", atDocGroupTestAPI)
+		result := p.ParseForUintTest()
+		assert.True(t, p.hasNoErrors())
+		stmt := result.Stmts[0]
+		atDocGroupStmt, _ := stmt.(*ast.AtDocGroupStmt)
+		assert.Equal(t, testData, atDocGroupStmt.Format(""))
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`@doc{`,
+			`@doc(`,
+			`@doc(}`,
+			`@doc( foo`,
+			`@doc( foo:`,
+			`@doc( foo: 123`,
+			`@doc( foo: )`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.ParseForUintTest()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+//go:embed testdata/service_test.api
+var serviceTestAPI string
+
+func TestParser_Parse_service(t *testing.T) {
+	assertEqual := func(t *testing.T, expected, actual *ast.ServiceStmt) {
+		if expected.AtServerStmt == nil {
+			assert.Nil(t, actual.AtServerStmt)
+		}
+		assert.Equal(t, expected.Service.Token.Type, actual.Service.Token.Type)
+		assert.Equal(t, expected.Service.Token.Text, actual.Service.Token.Text)
+		assert.Equal(t, expected.Name.Format(""), actual.Name.Format(""))
+		assert.Equal(t, expected.LBrace.Token.Type, actual.LBrace.Token.Type)
+		assert.Equal(t, expected.RBrace.Token.Text, actual.RBrace.Token.Text)
+		assert.Equal(t, len(expected.Routes), len(actual.Routes))
+		for idx, v := range expected.Routes {
+			actualItem := actual.Routes[idx]
+			if v.AtDoc == nil {
+				assert.Nil(t, actualItem.AtDoc)
+			} else {
+				assert.Equal(t, v.AtDoc.Format(""), actualItem.AtDoc.Format(""))
+			}
+			assert.Equal(t, v.AtHandler.Format(""), actualItem.AtHandler.Format(""))
+			assert.Equal(t, v.Route.Format(""), actualItem.Route.Format(""))
+		}
+	}
+
+	t.Run("valid", func(t *testing.T) {
+		var testData = []*ast.ServiceStmt{
+			{
+				Service: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "service"}),
+				Name: &ast.ServiceNameExpr{
+					Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+				},
+				LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+				RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+				Routes: []*ast.ServiceItemStmt{
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "bar"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{Value: ast.NewTokenNode(token.Token{
+								Type: token.PATH,
+								Text: "/ping",
+							})},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "bar"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{Value: ast.NewTokenNode(token.Token{
+								Type: token.PATH,
+								Text: "/ping",
+							})},
+						},
+					},
+				},
+			},
+			{
+				Service: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "service"}),
+				Name: &ast.ServiceNameExpr{
+					Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "bar"}),
+				},
+				LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+				RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+				Routes: []*ast.ServiceItemStmt{
+					{
+						AtDoc: &ast.AtDocLiteralStmt{
+							AtDoc: ast.NewTokenNode(token.Token{Type: token.AT_DOC, Text: "@doc"}),
+							Value: ast.NewTokenNode(token.Token{Type: token.STRING, Text: `"bar"`}),
+						},
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtDoc: &ast.AtDocLiteralStmt{
+							AtDoc: ast.NewTokenNode(token.Token{Type: token.AT_DOC, Text: "@doc"}),
+							Value: ast.NewTokenNode(token.Token{Type: token.STRING, Text: `"bar"`}),
+						},
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar",
+								}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "get"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+				},
+			},
+			{
+				Service: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "service"}),
+				Name: &ast.ServiceNameExpr{
+					Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "baz-api"}),
+				},
+				LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+				RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+				Routes: []*ast.ServiceItemStmt{
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "post"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar/foo-bar-baz",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Star:  ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "foo"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "post"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo/:bar/foo-bar-baz",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									Star:  ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+									Value: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "bar"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "post"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									LBrack: ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+									RBrack: ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+									Value:  ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									LBrack: ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+									RBrack: ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+									Star:   ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+									Value:  ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+					{
+						AtHandler: &ast.AtHandlerStmt{
+							AtHandler: ast.NewTokenNode(token.Token{Type: token.AT_HANDLER, Text: "@handler"}),
+							Name:      ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "bar"}),
+						},
+						Route: &ast.RouteStmt{
+							Method: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "post"}),
+							Path: &ast.PathExpr{
+								Value: ast.NewTokenNode(token.Token{
+									Type: token.PATH,
+									Text: "/foo",
+								}),
+							},
+							Request: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									LBrack: ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+									RBrack: ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+									Value:  ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+							Returns: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "returns"}),
+							Response: &ast.BodyStmt{
+								LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+								Body: &ast.BodyExpr{
+									LBrack: ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+									RBrack: ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+									Star:   ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+									Value:  ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+								},
+								RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+							},
+						},
+					},
+				},
+			},
+		}
+
+		p := New("foo.api", serviceTestAPI)
+		result := p.Parse()
+		assert.True(t, p.hasNoErrors())
+		for idx, v := range testData {
+			stmt := result.Stmts[idx]
+			serviceStmt, ok := stmt.(*ast.ServiceStmt)
+			assert.True(t, ok)
+			assertEqual(t, v, serviceStmt)
+		}
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`service`,
+			`service foo`,
+			`service -`,
+			`service foo-`,
+			`service foo-api`,
+			`service foo(`,
+			`service foo$`,
+			`service foo好`,
+			`service foo{`,
+			`service foo{ @doc`,
+			`service foo{ @doc $`,
+			`service foo{ @doc ""`,
+			`service foo{ @handler`,
+			`service foo{ @handler foo`,
+			`service foo{ @handler foo bar`,
+			`service foo{ @handler foo get`,
+			`service foo{ @handler foo get /`,
+			`service foo{ @handler foo get \`,
+			`service foo{ @handler foo get /:`,
+			`service foo{ @handler foo get /::`,
+			`service foo{ @handler foo get /:foo-`,
+			`service foo{ @handler foo get /:foo--`,
+			`service foo{ @handler foo get /:foo-bar/-`,
+			`service foo{ @handler foo get /:foo-bar/baz`,
+			`service foo{ @handler foo get /:foo-bar/baz (`,
+			`service foo{ @handler foo get /:foo-bar/baz foo`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo)`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) return`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns `,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns (`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns {`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns (bar`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns (bar}`,
+			`service foo{ @handler foo get /:foo-bar/baz (foo) returns (bar)`,
+			`service foo{ @handler foo get /:foo-bar/baz ([`,
+			`service foo{ @handler foo get /:foo-bar/baz ([@`,
+			`service foo{ @handler foo get /:foo-bar/baz ([]`,
+			`service foo{ @handler foo get /:foo-bar/baz ([]*`,
+			`service foo{ @handler foo get /:foo-bar/baz ([]*Foo`,
+			`service foo{ @handler foo get /:foo-bar/baz (*`,
+			`service foo{ @handler foo get /:foo-bar/baz (*Foo`,
+			`service foo{ @handler foo get /:foo-bar/baz returns`,
+			`service foo{ @handler foo get /:foo-bar/baz returns (`,
+			`service foo{ @handler foo get /:foo-bar/baz returns ([`,
+			`service foo{ @handler foo get /:foo-bar/baz returns ([]`,
+			`service foo{ @handler foo get /:foo-bar/baz returns ([]*`,
+			`service foo{ @handler foo get /:foo-bar/baz returns ([]*Foo`,
+			`service foo{ @handler foo get /:foo-bar/baz returns (*`,
+			`service foo{ @handler foo get /:foo-bar/baz returns (*Foo`,
+			`service foo{ @handler foo get /ping (Foo) returns (Bar)]`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			_ = p.Parse()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+
+	t.Run("invalidBeginWithAtServer", func(t *testing.T) {
+		var testData = []string{
+			`@server(`,
+			`@server() service`,
+			`@server() foo`,
+			`@server() service fo`,
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			p.init()
+			_ = p.parseService()
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+// TestParser_Parse_pathItem verifies parsePathItem for both well-formed path
+// segments and malformed ones that must surface parser errors.
+func TestParser_Parse_pathItem(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []struct {
+			input    string
+			expected string
+		}{
+			{input: "foo", expected: "foo"},
+			{input: "foo2", expected: "foo2"},
+			{input: "foo-bar", expected: "foo-bar"},
+			{input: "foo-bar2", expected: "foo-bar2"},
+			{input: "foo-bar-baz", expected: "foo-bar-baz"},
+			{input: "_foo-bar-baz", expected: "_foo-bar-baz"},
+			{input: "_foo_bar-baz", expected: "_foo_bar-baz"},
+			{input: "_foo_bar_baz", expected: "_foo_bar_baz"},
+			// Tokens that cannot belong to a path item terminate it.
+			{input: "foo/", expected: "foo"},
+			{input: "foo(", expected: "foo"},
+			{input: "foo returns", expected: "foo"},
+			{input: "foo @doc", expected: "foo"},
+			{input: "foo @handler", expected: "foo"},
+			{input: "foo }", expected: "foo"},
+			{input: "1", expected: "1"},
+			{input: "11", expected: "11"},
+		}
+		for _, v := range testData {
+			p := New("foo.api", v.input)
+			ok := p.nextToken()
+			assert.True(t, ok)
+			tokens := p.parsePathItem()
+			// Join the parsed tokens back into the path-item text actually produced.
+			var actual []string
+			for _, tok := range tokens {
+				actual = append(actual, tok.Text)
+			}
+			// testify's assert.Equal takes the expected value first, then the actual.
+			assert.Equal(t, v.expected, strings.Join(actual, ""))
+		}
+	})
+
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			"-foo",
+			"foo-",
+			"foo-2",
+			"2-2",
+			"foo-bar-123",
+			"foo-bar-$",
+			"foo-bar-好",
+			"foo-bar@",
+			"foo-barの",
+		}
+		for _, v := range testData {
+			p := New("foo.api", v)
+			ok := p.nextToken()
+			assert.True(t, ok)
+			p.parsePathItem()
+			// Every reported error must point back at the offending origin text.
+			assertx.ErrorOrigin(t, v, p.errors...)
+		}
+	})
+}
+
+// TestParser_Parse_parseTypeStmt verifies that valid `type` statements — both
+// single literals and parenthesized groups — parse into the expected AST.
+func TestParser_Parse_parseTypeStmt(t *testing.T) {
+	// assertEqual compares statements by formatted output rather than deep
+	// struct equality, so position information is ignored.
+	assertEqual := func(t *testing.T, expected, actual ast.Stmt) {
+		if expected == nil {
+			assert.Nil(t, actual)
+			return
+		}
+		assert.Equal(t, expected.Format(""), actual.Format(""))
+	}
+	// Single `type Name <datatype>` declarations.
+	t.Run("parseTypeLiteralStmt", func(t *testing.T) {
+		var testData = []struct {
+			input    string
+			expected ast.TypeStmt
+		}{
+			{
+				input: "type Int int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+						DataType: &ast.BaseDataType{
+							Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"}),
+						},
+					},
+				},
+			},
+			{
+				input: "type Int interface{}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+						DataType: &ast.InterfaceDataType{
+							Interface: ast.NewTokenNode(token.Token{Type: token.ANY, Text: "interface{}"}),
+						},
+					},
+				},
+			},
+			{
+				input: "type Int any",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+						DataType: &ast.AnyDataType{
+							Any: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "any"}),
+						},
+					},
+				},
+			},
+			{
+				// Type alias form with "=".
+				input: "type Int = int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name:   ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+						Assign: ast.NewTokenNode(token.Token{Type: token.ASSIGN, Text: "="}),
+						DataType: &ast.BaseDataType{
+							Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"}),
+						},
+					},
+				},
+			},
+			{
+				input: "type Array [2]int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Array"}),
+						DataType: &ast.ArrayDataType{
+							LBrack:   ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+							Length:   ast.NewTokenNode(token.Token{Type: token.INT, Text: "2"}),
+							RBrack:   ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+				},
+			},
+			{
+				// Array with ellipsis length.
+				input: "type Array [...]int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Array"}),
+						DataType: &ast.ArrayDataType{
+							LBrack:   ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+							Length:   ast.NewTokenNode(token.Token{Type: token.ELLIPSIS, Text: "..."}),
+							RBrack:   ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+				},
+			},
+			{
+				input: "type Map map[string]int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Map"}),
+						DataType: &ast.MapDataType{
+							Map:    ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "map"}),
+							LBrack: ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+							Key:    &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "string"})},
+							RBrack: ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+							Value:  &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+				},
+			},
+			{
+				input: "type Pointer *int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Pointer"}),
+						DataType: &ast.PointerDataType{
+							Star:     ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+				},
+			},
+			{
+				input: "type Slice []int",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Slice"}),
+						DataType: &ast.SliceDataType{
+							LBrack:   ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+							RBrack:   ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+				},
+			},
+			{
+				// Empty struct body.
+				input: "type Foo {}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				// Embedded (anonymous) fields, by value and by pointer.
+				input: "type Foo {Bar\n*Baz}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+									},
+								},
+								{
+									DataType: &ast.PointerDataType{
+										Star: ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+										DataType: &ast.BaseDataType{
+											Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Baz"}),
+										},
+									},
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				// Embedded fields carrying raw-string struct tags.
+				input: "type Foo {Bar `json:\"bar\"`\n*Baz `json:\"baz\"`}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Bar"}),
+									},
+									Tag: ast.NewTokenNode(token.Token{
+										Type: token.RAW_STRING,
+										Text: "`json:\"bar\"`",
+									}),
+								},
+								{
+									DataType: &ast.PointerDataType{
+										Star: ast.NewTokenNode(token.Token{Type: token.MUL, Text: "*"}),
+										DataType: &ast.BaseDataType{
+											Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Baz"}),
+										},
+									},
+									Tag: ast.NewTokenNode(token.Token{
+										Type: token.RAW_STRING,
+										Text: "`json:\"baz\"`",
+									}),
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				input: "type Foo {Name string}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									Name: []*ast.TokenNode{ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Name"})},
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "string"}),
+									},
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				// Multiple field names sharing a single type.
+				input: "type Foo {Name,Desc string}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									Name: []*ast.TokenNode{
+										{Token: token.Token{Type: token.IDENT, Text: "Name"}},
+										{Token: token.Token{Type: token.IDENT, Text: "Desc"}},
+									},
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "string"}),
+									},
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				input: "type Foo {Name string\n Age int `json:\"age\"`}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									Name: []*ast.TokenNode{
+										{Token: token.Token{Type: token.IDENT, Text: "Name"}},
+									},
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "string"}),
+									},
+								},
+								{
+									Name: []*ast.TokenNode{
+										{Token: token.Token{Type: token.IDENT, Text: "Age"}},
+									},
+									DataType: &ast.BaseDataType{
+										Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"}),
+									},
+									Tag: ast.NewTokenNode(token.Token{Type: token.RAW_STRING, Text: "`json:\"age\"`"}),
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+			{
+				// Nested anonymous struct as a field type.
+				input: "type Foo {Bar {Name string}}",
+				expected: &ast.TypeLiteralStmt{
+					Type: ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					Expr: &ast.TypeExpr{
+						Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Foo"}),
+						DataType: &ast.StructDataType{
+							LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+							Elements: ast.ElemExprList{
+								{
+									Name: []*ast.TokenNode{
+										{Token: token.Token{Type: token.IDENT, Text: "Bar"}},
+									},
+									DataType: &ast.StructDataType{
+										LBrace: ast.NewTokenNode(token.Token{Type: token.LBRACE, Text: "{"}),
+										Elements: ast.ElemExprList{
+											{
+												Name: []*ast.TokenNode{
+													{Token: token.Token{Type: token.IDENT, Text: "Name"}},
+												},
+												DataType: &ast.BaseDataType{
+													Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "string"}),
+												},
+											},
+										},
+										RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+									},
+								},
+							},
+							RBrace: ast.NewTokenNode(token.Token{Type: token.RBRACE, Text: "}"}),
+						},
+					},
+				},
+			},
+		}
+		// Each input must parse cleanly into exactly one statement that
+		// formats identically to the expected AST.
+		for _, val := range testData {
+			p := New("test.api", val.input)
+			result := p.Parse()
+			assert.True(t, p.hasNoErrors())
+			assert.Equal(t, 1, len(result.Stmts))
+			one := result.Stmts[0]
+			assertEqual(t, val.expected, one)
+		}
+	})
+	// Parenthesized `type ( ... )` group declarations.
+	t.Run("parseTypeGroupStmt", func(t *testing.T) {
+		var testData = []struct {
+			input    string
+			expected ast.TypeStmt
+		}{
+			{
+				input: "type (Int int)",
+				expected: &ast.TypeGroupStmt{
+					Type:   ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+					ExprList: []*ast.TypeExpr{
+						{
+							Name:     ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+					RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+				},
+			},
+			{
+				input: "type (Int = int)",
+				expected: &ast.TypeGroupStmt{
+					Type:   ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+					ExprList: []*ast.TypeExpr{
+						{
+							Name:     ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Int"}),
+							Assign:   ast.NewTokenNode(token.Token{Type: token.ASSIGN, Text: "="}),
+							DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+						},
+					},
+					RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+				},
+			},
+			{
+				input: "type (Array [2]int)",
+				expected: &ast.TypeGroupStmt{
+					Type:   ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+					ExprList: []*ast.TypeExpr{
+						{
+							Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Array"}),
+							DataType: &ast.ArrayDataType{
+								LBrack:   ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+								Length:   ast.NewTokenNode(token.Token{Type: token.INT, Text: "2"}),
+								RBrack:   ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+								DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+							},
+						},
+					},
+					RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+				},
+			},
+			{
+				input: "type (Array [...]int)",
+				expected: &ast.TypeGroupStmt{
+					Type:   ast.NewTokenNode(token.Token{Type: token.TYPE, Text: "type"}),
+					LParen: ast.NewTokenNode(token.Token{Type: token.LPAREN, Text: "("}),
+					ExprList: []*ast.TypeExpr{
+						{
+							Name: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "Array"}),
+							DataType: &ast.ArrayDataType{
+								LBrack:   ast.NewTokenNode(token.Token{Type: token.LBRACK, Text: "["}),
+								Length:   ast.NewTokenNode(token.Token{Type: token.ELLIPSIS, Text: "..."}),
+								RBrack:   ast.NewTokenNode(token.Token{Type: token.RBRACK, Text: "]"}),
+								DataType: &ast.BaseDataType{Base: ast.NewTokenNode(token.Token{Type: token.IDENT, Text: "int"})},
+							},
+						},
+					},
+					RParen: ast.NewTokenNode(token.Token{Type: token.RPAREN, Text: ")"}),
+				},
+			},
+		}
+		for _, val := range testData {
+			p := New("test.api", val.input)
+			result := p.Parse()
+			assert.True(t, p.hasNoErrors())
+			assert.Equal(t, 1, len(result.Stmts))
+			one := result.Stmts[0]
+			assertEqual(t, val.expected, one)
+		}
+	})
+}
+
+// TestParser_Parse_parseTypeStmt_invalid feeds malformed `type` statements
+// (truncated, mistyped, or duplicated constructs) to the parser and asserts
+// that each one produces at least one error anchored to the offending input.
+func TestParser_Parse_parseTypeStmt_invalid(t *testing.T) {
+	var testData = []string{
+		/**************** type literal stmt ****************/
+		"type",
+		"type @",
+		"type Foo",
+		"type Foo = ",
+		"type Foo = [",
+		"type Foo = []",
+		"type Foo = [2",
+		"type Foo = [2]",
+		"type Foo = [...",
+		"type Foo = [...]",
+		"type Foo map",
+		"type Foo map[",
+		"type Foo map[]",
+		"type Foo map[string",
+		"type Foo map[123",
+		"type Foo map[string]",
+		"type Foo map[string]@",
+		"type Foo interface",
+		"type Foo interface{",
+		"type Foo *",
+		"type Foo *123",
+		"type Foo *go",
+		"type Foo {",
+		"type Foo { Foo ",
+		"type Foo { Foo int",
+		"type Foo { Foo int `",
+		"type Foo { Foo int ``",
+		"type Foo { Foo,",
+		"type Foo { Foo@",
+		"type Foo { Foo,Bar",
+		"type Foo { Foo,Bar int",
+		"type Foo { Foo,Bar int `baz`",
+		"type Foo { Foo,Bar int `baz`)",
+		"type Foo { Foo,Bar int `baz`@",
+		"type Foo *",
+		"type Foo *{",
+		"type Foo *[",
+		"type Foo *[]",
+		"type Foo *map",
+		"type Foo *map[",
+		"type Foo *map[int",
+		"type Foo *map[int]123",
+		"type Foo *map[int]@",
+		"type Foo *好",
+
+		/**************** type group stmt ****************/
+		"type (@",
+		"type (Foo",
+		"type (Foo = ",
+		"type (Foo = [",
+		"type (Foo = []",
+		"type (Foo = [2",
+		"type (Foo = [2]",
+		"type (Foo = [...",
+		"type (Foo = [...]",
+		"type (Foo map",
+		"type (Foo map[",
+		"type (Foo map[]",
+		"type (Foo map[string",
+		"type (Foo map[123",
+		"type (Foo map[string]",
+		"type (Foo map[string]@",
+		"type (Foo interface",
+		"type (Foo interface{",
+		"type (Foo *",
+		"type (Foo *123",
+		"type (Foo *go",
+		"type (Foo {",
+		"type (Foo { Foo ",
+		"type (Foo { Foo int",
+		"type (Foo { Foo int `",
+		"type (Foo { Foo int ``",
+		"type (Foo { Foo,",
+		"type (Foo { Foo@",
+		"type (Foo { Foo,Bar",
+		"type (Foo { Foo,Bar int",
+		"type (Foo { Foo,Bar int `baz`",
+		"type (Foo { Foo,Bar int `baz`)",
+		"type (Foo { Foo,Bar int `baz`@",
+		"type (Foo *",
+		"type (Foo *{",
+		"type (Foo *[",
+		"type (Foo *[]",
+		"type (Foo *map",
+		"type (Foo *map[",
+		"type (Foo *map[int",
+		"type (Foo *map[int]123",
+		"type (Foo *map[int]@",
+		"type (Foo *好",
+		"type (Foo)",
+		"type (Foo int\nBar)",
+		"type (Foo int\nBar string `)",
+		"type (())",
+		"type go int",
+		"type  A func",
+		"type  A {map[string]int}",
+		"type  A {Name \n string}",
+	}
+
+	// Parse each fixture and require that the recorded errors reference
+	// the original (offending) input text.
+	for _, v := range testData {
+		p := New("test.api", v)
+		p.Parse()
+		assertx.ErrorOrigin(t, v, p.errors...)
+	}
+}

+ 5 - 0
tools/goctl/pkg/parser/api/parser/testdata/atdoc_group_test.api

@@ -0,0 +1,5 @@
+@doc(
+  foo: "foo"
+  bar: "bar"
+  baz: ""
+)

+ 3 - 0
tools/goctl/pkg/parser/api/parser/testdata/atdoc_literal_test.api

@@ -0,0 +1,3 @@
+@doc ""
+@doc "foo"
+@doc "bar"

+ 3 - 0
tools/goctl/pkg/parser/api/parser/testdata/athandler_test.api

@@ -0,0 +1,3 @@
+@handler foo
+@handler foo1
+@handler _bar

+ 16 - 0
tools/goctl/pkg/parser/api/parser/testdata/atserver_test.api

@@ -0,0 +1,16 @@
+@server(
+    foo: bar
+    bar: baz
+    baz: foo
+    qux: /v1
+    quux: /v1/v2
+    middleware: M1,M2
+    timeout1: 1h
+    timeout2: 10m
+    timeout3: 10s
+    timeout4: 10ms
+    timeout5: 10µs
+    timeout6: 10ns
+    timeout7: 1h10m10s10ms10µs10ns
+    maxBytes: 1024
+)

+ 11 - 0
tools/goctl/pkg/parser/api/parser/testdata/base.api

@@ -0,0 +1,11 @@
+syntax = "v1"
+
+import "base1.api"
+info (
+	title:   "type title here"
+	desc:    "type desc here"
+	author:  "type author here"
+	email:   "type email here"
+	version: "type version here"
+)
+

+ 11 - 0
tools/goctl/pkg/parser/api/parser/testdata/base1.api

@@ -0,0 +1,11 @@
+syntax = "v1"
+
+import "base2.api"
+info (
+	title:   "type title here"
+	desc:    "type desc here"
+	author:  "type author here"
+	email:   "type email here"
+	version: "type version here"
+)
+

+ 11 - 0
tools/goctl/pkg/parser/api/parser/testdata/base2.api

@@ -0,0 +1,11 @@
+syntax = "v1"
+
+import "base.api"
+info (
+	title:   "type title here"
+	desc:    "type desc here"
+	author:  "type author here"
+	email:   "type email here"
+	version: "type version here"
+)
+

+ 4 - 0
tools/goctl/pkg/parser/api/parser/testdata/comment_test.api

@@ -0,0 +1,4 @@
+// foo
+// bar
+/*foo*/
+/*bar*/ //baz

+ 167 - 0
tools/goctl/pkg/parser/api/parser/testdata/example.api

@@ -0,0 +1,167 @@
+syntax = "v1"
+
+import "example_base1.api"
+
+import (
+	"example_base2.api"
+)
+
+info (
+	title:   "type title here"
+	desc:    "type desc here"
+	author:  "type author here"
+	email:   "type email here"
+	version: "type version here"
+)
+
+type GetFormReq {
+	Name    string   `form:"name"`
+	Age     int      `form:"age"`
+	Hobbits []string `form:"hobbits"`
+}
+
+type GetFormREsp {
+	Name    string   `json:"name"`
+	Age     int      `json:"age"`
+	Hobbits []string `json:"hobbits"`
+}
+
+type (
+	PostFormReq {
+		Name    string   `form:"name"`
+		Age     int      `form:"age"`
+		Hobbits []string `form:"hobbits"`
+	}
+	PostFormResp {
+		Name    string   `json:"name"`
+		Age     int      `json:"age"`
+		Hobbits []string `json:"hobbits"`
+	}
+)
+
+type (
+	PostJsonReq {
+		Name    string   `json:"name"`
+		Age     int      `json:"age"`
+		Hobbits []string `json:"hobbits"`
+	}
+	PostJsonResp {
+		Name    string            `json:"name"`
+		Age     int               `json:"age"`
+		Hobbits []string          `json:"hobbits"`
+		Extra   map[string]string `json:"extra"`
+		Data    interface{}       `json:"data"`
+	}
+)
+
+type (
+	PostPathReq {
+		Id string `path:"id"`
+	}
+	PostPathResp {
+		Name    string            `json:"name"`
+		Age     int               `json:"age"`
+		Hobbits []string          `json:"hobbits"`
+		Hobbits2 [2]string          `json:"hobbits2"`
+		Extra   map[string]string `json:"extra"`
+		Data    interface{}       `json:"data"`
+	}
+)
+
+type (
+	DemoOfArrayReq {
+		In string `json:"in"`
+	}
+	DemoOfArrayResp {
+		Out string `json:"out"`
+	}
+)
+
+type (
+	Nest {
+		Name string `json:"name"`
+	}
+	NestDemoReq1 {
+		Nest *Nest `json:"nest"`
+	}
+	NestDemoResp1 {
+		Nest []*Nest `json:"nest"`
+	}
+	NestDemoReq2 {
+		*Nest
+	}
+	NestDemoResp2 {
+*Nest `json:"nest"`
+	}
+)
+
+@server (
+	group:   form
+	timeout: 3s
+)
+service example {
+	@handler getForm
+	get /example/form (GetFormReq) returns (GetFormREsp)
+
+	@handler postForm
+	post /example/form (PostFormReq) returns (PostFormResp)
+}
+
+@server (
+	group:   json
+	jwt:     Auth
+	timeout: 3m
+)
+service example {
+	@doc "json demo"
+	@handler postJson
+	post /example/json (PostJsonReq) returns (PostJsonResp)
+}
+
+@server (
+	group:      path
+	middleware: Path
+	prefix:     /v1/v2
+	timeout:    100ms
+)
+service example {
+	@doc (
+		desc: "path demo"
+	)
+	@handler postPath
+	post /example/path (PostPathReq) returns (PostPathResp)
+}
+
+@server (
+	group:    array
+	prefix:   /array
+	maxBytes: 1024
+)
+service example {
+	@doc (
+		desc: "array response demo"
+	)
+	@handler getArray
+	post /example/array (DemoOfArrayReq) returns ([]DemoOfArrayResp)
+
+	@doc (
+		desc: "array pointer response demo"
+	)
+	@handler getArrayPointer
+	post /example/array/pointer (DemoOfArrayReq) returns ([]*DemoOfArrayResp)
+
+	@doc (
+		desc: "array base response demo"
+	)
+	@handler getArrayBase
+	post /example/array/base (DemoOfArrayReq) returns ([]string)
+}
+
+service example {
+	@handler nestDemo1
+	post /example/nest (NestDemoReq1) returns (NestDemoResp1)
+
+	@handler nestDemo2
+	post /example/nest2 (NestDemoReq2) returns (NestDemoResp2)
+}
+

+ 12 - 0
tools/goctl/pkg/parser/api/parser/testdata/example_base1.api

@@ -0,0 +1,12 @@
+syntax = "v1"
+
+info(
+    title: "type title here"
+    desc: "type desc here"
+    author: "type author here"
+    email: "type email here"
+    version: "type version here"
+)
+
+type BaseReq1{}
+type BaseResp1{}

+ 12 - 0
tools/goctl/pkg/parser/api/parser/testdata/example_base2.api

@@ -0,0 +1,12 @@
+syntax = "v1"
+
+info(
+    title: "type title here"
+    desc: "type desc here"
+    author: "type author here"
+    email: "type email here"
+    version: "type version here"
+)
+
+type BaseReq2{}
+type BaseResp2{}

+ 5 - 0
tools/goctl/pkg/parser/api/parser/testdata/import_group_test.api

@@ -0,0 +1,5 @@
+import (
+    ""
+    "foo"
+    "bar"
+)

+ 3 - 0
tools/goctl/pkg/parser/api/parser/testdata/import_literal_test.api

@@ -0,0 +1,3 @@
+import ""
+import "foo"
+import "bar"

+ 7 - 0
tools/goctl/pkg/parser/api/parser/testdata/info_test.api

@@ -0,0 +1,7 @@
+info(
+    title: "type title here"
+    desc: "type desc here"
+    author: "type author here"
+    email: "type email here"
+    version: "type version here"
+)

+ 136 - 0
tools/goctl/pkg/parser/api/parser/testdata/invalid.api

@@ -0,0 +1,136 @@
+// test case: expected syntax statement
+info ()
+
+-----
+// test case: duplicate syntax statement
+syntax = "v1"
+syntax = "v1"
+
+-----
+// test case: duplicate info statement
+syntax = "v1"
+info()
+info()
+
+-----
+// test case: duplicate type
+syntax = "v1"
+type Foo{}
+type Foo{}
+
+-----
+// test case: duplicate type
+syntax = "v1"
+type Baz{}
+type (
+    Baz{}
+)
+
+
+-----
+// test case: multiple service name
+syntax = "v1"
+service foo{
+    @handler foo
+    get /foo
+}
+service bar{
+    @handler foo
+    get /foo
+}
+
+-----
+// test case: duplicate handler
+syntax = "v1"
+service foo{
+    @handler foo
+    get /foo
+    @handler foo
+    get /bar
+}
+
+-----
+// test case: duplicate path
+syntax = "v1"
+service foo{
+    @handler foo
+    get /foo
+    @handler bar
+    get /foo
+    @handler qux
+    get /v1/baz
+}
+
+@server(
+    prefix: /v1
+)
+service foo{
+    @handler qux
+    get /baz
+    @handler quux
+    get /baz
+}
+
+-----
+// test case: type declare context
+syntax = "v1"
+type Foo {
+    Bar Bar `json:"bar"`
+}
+
+-----
+// test case:  map key expected literal type
+syntax = "v1"
+type Foo {
+    Bar map[[]int]string  `json:"bar"`
+}
+
+-----
+// test case:  map key expected literal type
+syntax = "v1"
+type Foo {
+    Bar map[[]int]string  `json:"bar"`
+}
+
+-----
+// test case:  map key expected literal type
+syntax = "v1"
+type Foo {
+    Bar *map[[]int]string  `json:"bar"`
+}
+
+-----
+// test case:  map valu expected literal type
+syntax = "v1"
+type Foo {
+    Bar *map[string]{}  `json:"bar"`
+}
+
+-----
+// test case:  invalid slice
+syntax = "v1"
+type Foo {
+    Bar []map[[]int]string  `json:"bar"`
+}
+
+-----
+// test case:  array
+syntax = "v1"
+type Foo {
+    Bar [...]int  `json:"bar"`
+}
+
+-----
+// test case:  any
+syntax = "v1"
+type Foo {
+    Bar any  `json:"bar"`
+}
+
+-----
+// test case:  unresolved type
+syntax = "v1"
+service example {
+    @handler nestDemo
+    post /example/nest (NestDemoReq) returns (NestDemoResp)
+}

+ 37 - 0
tools/goctl/pkg/parser/api/parser/testdata/service_test.api

@@ -0,0 +1,37 @@
+service foo {
+    @handler bar
+    get /ping
+
+    @handler bar
+    get /ping;
+}
+
+service bar {
+    @doc "bar"
+    @handler foo
+    get /foo/:bar (Foo)
+
+    @doc "bar"
+    @handler foo
+    get /foo/:bar (Foo) returns ();
+
+    @handler foo
+    get /foo/:bar returns (Foo)
+
+    @handler foo
+    get /foo/:bar () returns (Foo);
+}
+
+service baz-api {
+    @handler foo
+    post /foo/:bar/foo-bar-baz (Foo) returns (*Bar)
+
+    @handler foo
+    post /foo/:bar/foo-bar-baz (Foo) returns (*Bar);
+
+    @handler bar
+    post /foo ([]Foo) returns ([]*Bar)
+
+    @handler bar
+    post /foo ([]Foo) returns ([]*Bar);
+}

+ 160 - 0
tools/goctl/pkg/parser/api/parser/testdata/test.api

@@ -0,0 +1,160 @@
+// aaaa
+
+/*bb*/ syntax /*cc*/ = /*dd*/ "v1" /*syntax doc*/ // syntax stmt
+// bbb
+
+info ( // info stmt
+    title:   "type title here" // title expr
+    /*ee*/
+    desc:    "type desc here"
+    author:  "type author here"
+    email:   "type email here"
+    version: "type version here"
+)
+
+type AliasInt int
+type AliasString = string
+type AliasArray [2]int8
+type AliasArray2 [...]int8
+type AliasSlice []int8
+type AliasMap map[string]int
+type Any interface{}
+type AliasMapKeyStruct map[{
+Name string `json:"name"`
+Age  int    `json:"age"`
+        Bar {
+            Name string `json:"name"`
+            Age  int    `json:"age"`
+            Bar {
+                Name string `json:"name"`
+                Age  int    `json:"age"`
+                Bar {
+                    Name string `json:"name"`
+                    Age  int    `json:"age"`
+                }
+            }
+        }
+}]int
+type AliasMapValueStruct map[string]{
+Name string `json:"name"`
+Age  int    `json:"age"`
+        Bar {
+            Name string `json:"name"`
+            Age  int    `json:"age"`
+            Bar {
+                Name string `json:"name"`
+                Age  int    `json:"age"`
+                Bar {
+                    Name string `json:"name"`
+                    Age  int    `json:"age"`
+                }
+            }
+        }
+}
+type Foo {
+    Bar {
+        Name string `json:"name"`
+        Age  int    `json:"age"`
+        Bar {
+            Name string `json:"name"`
+            Age  int    `json:"age"`
+            Bar {
+                Name string `json:"name"`
+                Age  int    `json:"age"`
+            }
+        }
+    }
+}
+
+type Bar {
+    Base      int                 `json:"base"`
+    Array1    [2]int              `json:"array1"`
+    Array2    [...]int            `json:"array2"`
+    Slice     []int               `json:"slice"`
+    Map1      map[string]int      `json:"map1"`
+    Map2      map[string]*int     `json:"map2"`
+    Map3      map[string][]int    `json:"map3"`
+    Map4      map[string][]*int   `json:"map4"`
+    Map5      map[string][2]int   `json:"map5"`
+    Map6      map[string][...]int `json:"map6"`
+    Interface interface{}         `json:"interface"`
+    Any       any                 `json:"any"`
+    Foo       Foo                 `json:"foo"`
+    Baz {
+        F1 int `json:"f1"`
+        F2 int `json:"f2"`
+    } `json:"baz"`
+    Qux  *string `json:"qux"`
+    Quux bool    `json:"quux"`
+}
+
+type (
+GroupAliasInt int
+GroupAliasString = string
+GroupAliasArray [2]int8
+GroupAliasArray2 [...]int8
+GroupAliasSlice []int8
+GroupAliasMap map[string]int
+GroupAny interface{}
+GroupFoo {}
+GroupBar {
+Base      int                 `json:"base"`
+Array1    [2]int              `json:"array1"`
+Array2    [...]int            `json:"array2"`
+Slice     []int               `json:"slice"`
+Map1      map[string]int      `json:"map1"`
+Map2      map[string]*int     `json:"map2"`
+Map3      map[string][]int    `json:"map3"`
+Map4      map[string][]*int   `json:"map4"`
+Map5      map[string][2]int   `json:"map5"`
+Map6      map[string][...]int `json:"map6"`
+Interface interface{}         `json:"interface"`
+Any       any                 `json:"any"`
+Foo       Foo                 `json:"foo"`
+Baz {
+F1 int `json:"f1"`
+F2 int `json:"f2"`
+} `json:"baz"`
+Qux  *string `json:"qux"`
+Quux bool    `json:"quux"`
+}
+)
+
+@server ()
+service test {
+    @handler foo
+    get /test/foo
+}
+
+@server (
+    jwt: Auth
+    group: Group1
+)
+service test {
+    @doc "ping"
+    @handler foo
+    get /test/foo
+    @doc (
+        key1: "value1"
+        key2: "value2"
+    )
+    @handler bar
+    get /test/foo (Foo)
+    @handler baz
+    post /test/foo/baz returns (Bar)
+    @handler qux
+    post /test/foo/baz/:qux (Foo) returns (Bar)
+    @handler quux
+    post /test/foo/baz/:qux/qu-ux (Foo) returns (Bar)
+    @handler foobar
+    post /foo/bar (*Foo) returns ([]Bar)
+    @handler barbaz
+    post /bar/baz ([]*Foo) returns ([]int)
+}
+
+// terminal
+// terminal2
+/*
+kkk
+*/
+

+ 50 - 0
tools/goctl/pkg/parser/api/parser/testdata/test_format.api

@@ -0,0 +1,50 @@
+// format api demo
+syntax ="v1" // dd
+
+info()
+info(foo:"")
+info(foo:""
+bar: ""
+quux: "")
+info(foo:""
+    bar: ""
+    quux: ""
+)
+// info statement
+// info statement
+info (// Info bloack
+    title:   "type title here" // title comment
+    desc:    "type desc here"
+    author:  "type author here"
+/*aaa*/
+/*
+bbb
+*/
+email:   "type email here" // eamil comment
+/*aaa*/version:/*bbb*/ "type version here"// version comment
+)
+
+import ""
+import "aaa"
+import"bb"
+import    "cc"
+import()
+import(
+)
+import (
+
+)
+import ("aa")
+import ("aa" "bb")
+import ("aa"
+"bb"
+)
+import ("aa"
+    "bb")
+import (
+
+    "aa"
+
+    "bb"
+
+)

+ 7 - 0
tools/goctl/pkg/parser/api/placeholder/placeholder.go

@@ -0,0 +1,7 @@
+// Package placeholder provides an empty placeholder type and a single
+// shared instance of it.
+package placeholder
+
+// Type is the placeholder type.
+type Type struct{}
+
+// PlaceHolder is the placeholder.
+var PlaceHolder Type

+ 667 - 0
tools/goctl/pkg/parser/api/scanner/scanner.go

@@ -0,0 +1,667 @@
+package scanner
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"io/ioutil"
+	"log"
+	"path/filepath"
+	"strings"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+// Scanning sub-states used while consuming multi-rune tokens
+// (block documents /*...*/ and quoted strings).
+const (
+	initMode mode = iota
+
+	// document mode bg
+	documentHalfOpen
+	documentOpen
+	documentHalfClose
+	documentClose
+	// document mode end
+
+	// string mode bg
+	stringOpen
+	stringClose
+	// string mode end
+
+)
+
+// missingInput is returned by NewScanner when the resolved input is empty.
+var missingInput = errors.New("missing input")
+
+// mode is the internal scanning sub-state (see the constants above).
+type mode int
+
+// Scanner is a lexical scanner.
+type Scanner struct {
+	filename string
+	size     int // total number of runes in data
+
+	data         []rune
+	position     int // current position in input (points to current char)
+	readPosition int // current reading position in input (after current char)
+	ch           rune // current rune; 0 means end of input
+
+	// lines holds the rune offset of each newline seen so far. It is
+	// seeded with -1 by NewScanner and grown by isWhiteSpace, and is
+	// read by newPosition to derive line/column numbers.
+	lines []int
+}
+
+// NextToken returns the next token.
+//
+// It skips leading whitespace, then dispatches on the current rune:
+// comments/documents, single-rune operators, '...', @-annotations,
+// (raw) strings, identifiers/keys, and int/duration literals. At end
+// of input it returns token.EofToken; an unrecognized rune yields an
+// ILLEGAL token rather than an error.
+func (s *Scanner) NextToken() (token.Token, error) {
+	s.skipWhiteSpace()
+	switch s.ch {
+	case '/':
+		peekOne := s.peekRune()
+		switch peekOne {
+		case '/':
+			return s.scanLineComment(), nil
+		case '*':
+			return s.scanDocument()
+		default:
+			return s.newToken(token.QUO), nil
+		}
+	case '-':
+		return s.newToken(token.SUB), nil
+	case '*':
+		return s.newToken(token.MUL), nil
+	case '(':
+		return s.newToken(token.LPAREN), nil
+	case '[':
+		return s.newToken(token.LBRACK), nil
+	case '{':
+		return s.newToken(token.LBRACE), nil
+	case ',':
+		return s.newToken(token.COMMA), nil
+	case '.':
+		// A single '.' is DOT; three dots form ELLIPSIS ("...").
+		// NOTE(review): for exactly two dots ("..") the first dot has
+		// already been consumed when the second is tokenized, so only
+		// one DOT is emitted for the pair — confirm this is intended.
+		position := s.position
+		peekOne := s.peekRune()
+		if peekOne != '.' {
+			return s.newToken(token.DOT), nil
+		}
+		s.readRune()
+		peekOne = s.peekRune()
+		if peekOne != '.' {
+			return s.newToken(token.DOT), nil
+		}
+		s.readRune()
+		s.readRune()
+		return token.Token{
+			Type:     token.ELLIPSIS,
+			Text:     "...",
+			Position: s.newPosition(position),
+		}, nil
+	case ')':
+		return s.newToken(token.RPAREN), nil
+	case ']':
+		return s.newToken(token.RBRACK), nil
+	case '}':
+		return s.newToken(token.RBRACE), nil
+	case ';':
+		return s.newToken(token.SEMICOLON), nil
+	case ':':
+		return s.newToken(token.COLON), nil
+	case '=':
+		return s.newToken(token.ASSIGN), nil
+	case '@':
+		return s.scanAt()
+	case '"':
+		return s.scanString('"', token.STRING)
+	case '`':
+		return s.scanString('`', token.RAW_STRING)
+	case 0:
+		return token.EofToken, nil
+	default:
+		if s.isIdentifierLetter(s.ch) {
+			return s.scanIdent(), nil
+		}
+		if s.isDigit(s.ch) {
+			return s.scanIntOrDuration(), nil
+		}
+		tok := token.NewIllegalToken(s.ch, s.newPosition(s.position))
+		s.readRune()
+		return tok, nil
+	}
+}
+
+// newToken builds a single-rune token of type tp at the current
+// position, then advances the scanner past that rune.
+func (s *Scanner) newToken(tp token.Type) token.Token {
+	tok := token.Token{
+		Type:     tp,
+		Text:     string(s.ch),
+		Position: s.positionAt(),
+	}
+	s.readRune()
+	return tok
+}
+
+// readRune advances to the next rune; s.ch becomes 0 at end of input.
+func (s *Scanner) readRune() {
+	if s.readPosition >= s.size {
+		s.ch = 0
+	} else {
+		s.ch = s.data[s.readPosition]
+	}
+	s.position = s.readPosition
+	s.readPosition += 1
+}
+
+// peekRune returns the next rune without advancing (0 at end of input).
+func (s *Scanner) peekRune() rune {
+	if s.readPosition >= s.size {
+		return 0
+	}
+	return s.data[s.readPosition]
+}
+
+// scanString scans a string literal delimited by delim (e.g. '"' or
+// '`') and returns a token of type tp. The returned Text includes both
+// delimiters, and no escape sequences are processed. Reaching end of
+// input before the closing delimiter is an error.
+func (s *Scanner) scanString(delim rune, tp token.Type) (token.Token, error) {
+	position := s.position
+	var stringMode = initMode
+	for {
+		switch s.ch {
+		case delim:
+			switch stringMode {
+			case initMode:
+				stringMode = stringOpen
+			case stringOpen:
+				stringMode = stringClose
+				s.readRune()
+				return token.Token{
+					Type:     tp,
+					Text:     string(s.data[position:s.position]),
+					Position: s.newPosition(position),
+				}, nil
+			}
+		case 0:
+			switch stringMode {
+			case initMode: // assert: dead code
+				return token.ErrorToken, s.assertExpected(token.EOF, tp)
+			case stringOpen:
+				return token.ErrorToken, s.assertExpectedString(token.EOF.String(), string(delim))
+			case stringClose: // assert: dead code
+				return token.Token{
+					Type:     tp,
+					Text:     string(s.data[position:s.position]),
+					Position: s.newPosition(position),
+				}, nil
+			}
+		}
+		s.readRune()
+	}
+}
+
+// scanAt scans an '@'-prefixed annotation. Only @doc, @handler and
+// @server are accepted; any other word after '@' is an error, and a
+// bare '@' at end of input becomes an ILLEGAL token.
+func (s *Scanner) scanAt() (token.Token, error) {
+	position := s.position
+	peek := s.peekRune()
+	if !s.isLetter(peek) {
+		if peek == 0 {
+			return token.NewIllegalToken(s.ch, s.positionAt()), nil
+		}
+		return token.ErrorToken, s.assertExpectedString(string(peek), token.IDENT.String())
+	}
+
+	s.readRune()
+	letters := s.scanLetterSet()
+	switch letters {
+	case "handler":
+		return token.Token{
+			Type:     token.AT_HANDLER,
+			Text:     "@handler",
+			Position: s.newPosition(position),
+		}, nil
+	case "server":
+		return token.Token{
+			Type:     token.AT_SERVER,
+			Text:     "@server",
+			Position: s.newPosition(position),
+		}, nil
+	case "doc":
+		return token.Token{
+			Type:     token.AT_DOC,
+			Text:     "@doc",
+			Position: s.newPosition(position),
+		}, nil
+	default:
+
+		return token.ErrorToken, s.assertExpectedString(
+			"@"+letters,
+			token.AT_DOC.String(),
+			token.AT_HANDLER.String(),
+			token.AT_SERVER.String())
+	}
+}
+
+// scanIntOrDuration scans a run of digits; if the rune that follows is
+// a duration unit ('n', 'µ', 'm', 's', 'h') it continues scanning a
+// duration literal, otherwise it returns an INT token.
+func (s *Scanner) scanIntOrDuration() token.Token {
+	position := s.position
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+	switch s.ch {
+	case 'n', 'µ', 'm', 's', 'h':
+		return s.scanDuration(position)
+	default:
+		return token.Token{
+			Type:     token.INT,
+			Text:     string(s.data[position:s.position]),
+			Position: s.newPosition(position),
+		}
+	}
+}
+
+// scanDuration scans a duration literal, for example "1ns", "1µs", "1ms", "1s", "1m", "1h".
+// bgPos is the rune offset where the whole literal began; the digits
+// have already been consumed and s.ch is the unit's first rune.
+func (s *Scanner) scanDuration(bgPos int) token.Token {
+	switch s.ch {
+	case 'n':
+		return s.scanNanosecond(bgPos)
+	case 'µ':
+		return s.scanMicrosecond(bgPos)
+	case 'm':
+		return s.scanMillisecondOrMinute(bgPos)
+	case 's':
+		return s.scanSecond(bgPos)
+	case 'h':
+		return s.scanHour(bgPos)
+	default:
+		return s.illegalToken()
+	}
+}
+
+// scanNanosecond consumes the final "ns" unit; bgPos is the start of
+// the whole literal. Nanoseconds are the smallest unit, so nothing may
+// follow.
+func (s *Scanner) scanNanosecond(bgPos int) token.Token {
+	s.readRune()
+	if s.ch != 's' {
+		return s.illegalToken()
+	}
+	s.readRune()
+	return token.Token{
+		Type:     token.DURATION,
+		Text:     string(s.data[bgPos:s.position]),
+		Position: s.newPosition(bgPos),
+	}
+}
+
+// scanMicrosecond consumes "µs"; if more digits follow, they must form
+// a trailing nanosecond part (e.g. "1µs500ns").
+func (s *Scanner) scanMicrosecond(bgPos int) token.Token {
+	s.readRune()
+	if s.ch != 's' {
+		return s.illegalToken()
+	}
+
+	s.readRune()
+	if !s.isDigit(s.ch) {
+		return token.Token{
+			Type:     token.DURATION,
+			Text:     string(s.data[bgPos:s.position]),
+			Position: s.newPosition(bgPos),
+		}
+	}
+
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	if s.ch != 'n' {
+		return s.illegalToken()
+	}
+
+	return s.scanNanosecond(bgPos)
+
+}
+
+// scanMillisecondOrMinute disambiguates the 'm' unit: "ms" means
+// milliseconds, anything else means minutes (optionally followed by a
+// smaller-unit tail scanned by scanMinute).
+func (s *Scanner) scanMillisecondOrMinute(bgPos int) token.Token {
+	s.readRune()
+	if s.ch != 's' { // minute
+		if s.ch == 0 || !s.isDigit(s.ch) {
+			return token.Token{
+				Type:     token.DURATION,
+				Text:     string(s.data[bgPos:s.position]),
+				Position: s.newPosition(bgPos),
+			}
+		}
+
+		return s.scanMinute(bgPos)
+	}
+
+	return s.scanMillisecond(bgPos)
+}
+
+// scanMillisecond consumes "ms" and, if more digits follow, a trailing
+// "ns" or "µs" part.
+func (s *Scanner) scanMillisecond(bgPos int) token.Token {
+	s.readRune()
+	if !s.isDigit(s.ch) {
+		return token.Token{
+			Type:     token.DURATION,
+			Text:     string(s.data[bgPos:s.position]),
+			Position: s.newPosition(bgPos),
+		}
+	}
+
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	switch s.ch {
+	case 'n':
+		return s.scanNanosecond(bgPos)
+	case 'µ':
+		return s.scanMicrosecond(bgPos)
+	default:
+		return s.illegalToken()
+	}
+}
+
+// scanSecond consumes "s" and, if more digits follow, a smaller-unit
+// tail ("ns", "µs" or "ms").
+func (s *Scanner) scanSecond(bgPos int) token.Token {
+	s.readRune()
+	if !s.isDigit(s.ch) {
+		return token.Token{
+			Type:     token.DURATION,
+			Text:     string(s.data[bgPos:s.position]),
+			Position: s.newPosition(bgPos),
+		}
+	}
+
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	switch s.ch {
+	case 'n':
+		return s.scanNanosecond(bgPos)
+	case 'µ':
+		return s.scanMicrosecond(bgPos)
+	case 'm':
+		// After seconds, a bare 'm' must be "ms" (minutes can't follow).
+		s.readRune()
+		if s.ch != 's' {
+			return s.illegalToken()
+		}
+		return s.scanMillisecond(bgPos)
+	default:
+		return s.illegalToken()
+	}
+}
+
+// scanMinute is entered after "<digits>m" when more digits follow the
+// minute unit; the caller has already consumed the 'm' and s.ch is the
+// first digit of the smaller-unit tail.
+func (s *Scanner) scanMinute(bgPos int) token.Token {
+	if !s.isDigit(s.ch) {
+		return token.Token{
+			Type:     token.DURATION,
+			Text:     string(s.data[bgPos:s.position]),
+			Position: s.newPosition(bgPos),
+		}
+	}
+
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	switch s.ch {
+	case 'n':
+		return s.scanNanosecond(bgPos)
+	case 'µ':
+		return s.scanMicrosecond(bgPos)
+	case 'm':
+		// Only "ms" is valid here; a second minute unit is illegal.
+		s.readRune()
+		if s.ch != 's' {
+			return s.illegalToken()
+		}
+		return s.scanMillisecond(bgPos)
+	case 's':
+		return s.scanSecond(bgPos)
+	default:
+		return s.illegalToken()
+	}
+}
+
+// scanHour consumes "h" and, if more digits follow, a smaller-unit
+// tail (minutes, seconds, milliseconds, microseconds or nanoseconds).
+func (s *Scanner) scanHour(bgPos int) token.Token {
+	s.readRune()
+	if !s.isDigit(s.ch) {
+		return token.Token{
+			Type:     token.DURATION,
+			Text:     string(s.data[bgPos:s.position]),
+			Position: s.newPosition(bgPos),
+		}
+	}
+
+	for s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	switch s.ch {
+	case 'n':
+		return s.scanNanosecond(bgPos)
+	case 'µ':
+		return s.scanMicrosecond(bgPos)
+	case 'm':
+		return s.scanMillisecondOrMinute(bgPos)
+	case 's':
+		return s.scanSecond(bgPos)
+	default:
+		return s.illegalToken()
+	}
+}
+
+// illegalToken builds an ILLEGAL token from the current rune and
+// advances past it.
+func (s *Scanner) illegalToken() token.Token {
+	tok := token.NewIllegalToken(s.ch, s.newPosition(s.position))
+	s.readRune()
+	return tok
+}
+
+// scanIdent scans an identifier made of letters, '_' and digits.
+// An identifier immediately followed by ':' becomes a KEY token whose
+// Text includes the colon; the exact text "interface{}" is collapsed
+// into a single ANY token; everything else is IDENT.
+func (s *Scanner) scanIdent() token.Token {
+	position := s.position
+	for s.isIdentifierLetter(s.ch) || s.isDigit(s.ch) {
+		s.readRune()
+	}
+
+	ident := string(s.data[position:s.position])
+
+	if s.ch == ':' {
+		s.readRune()
+		return token.Token{
+			Type:     token.KEY,
+			Text:     string(s.data[position:s.position]),
+			Position: s.newPosition(position),
+		}
+	}
+
+	if ident == "interface" && s.ch == '{' && s.peekRune() == '}' {
+		s.readRune()
+		s.readRune()
+		return token.Token{
+			Type:     token.ANY,
+			Text:     string(s.data[position:s.position]),
+			Position: s.newPosition(position),
+		}
+	}
+
+	return token.Token{
+		Type:     token.IDENT,
+		Text:     ident,
+		Position: s.newPosition(position),
+	}
+}
+
+// scanLetterSet consumes a run of ASCII letters and returns it.
+func (s *Scanner) scanLetterSet() string {
+	position := s.position
+	for s.isLetter(s.ch) {
+		s.readRune()
+	}
+	return string(s.data[position:s.position])
+}
+
+// scanLineComment scans a //-comment up to (but not including) the
+// newline or end of input.
+func (s *Scanner) scanLineComment() token.Token {
+	position := s.position
+	for s.ch != '\n' && s.ch != 0 {
+		s.readRune()
+	}
+	return token.Token{
+		Type:     token.COMMENT,
+		Text:     string(s.data[position:s.position]),
+		Position: s.newPosition(position),
+	}
+}
+
+// scanDocument scans a block comment "/* ... */" with a small state
+// machine; the closing delimiter tolerates repeated stars ("/*..***/").
+// Reaching end of input before the comment is closed is an error.
+func (s *Scanner) scanDocument() (token.Token, error) {
+	position := s.position
+	var documentMode = initMode
+	for {
+		switch s.ch {
+		case '*':
+			switch documentMode {
+			case documentHalfOpen:
+				documentMode = documentOpen // /*
+			case documentOpen, documentHalfClose:
+				documentMode = documentHalfClose // (?m)\/\*\*+
+			}
+
+		case 0:
+			switch documentMode {
+			case initMode, documentHalfOpen: // assert: dead code
+				return token.ErrorToken, s.assertExpected(token.EOF, token.MUL)
+			case documentOpen:
+				return token.ErrorToken, s.assertExpected(token.EOF, token.MUL)
+			case documentHalfClose:
+				return token.ErrorToken, s.assertExpected(token.EOF, token.QUO)
+			}
+		case '/':
+			switch documentMode {
+			case initMode: // /
+				documentMode = documentHalfOpen
+			case documentHalfOpen: // assert: dead code
+				return token.ErrorToken, s.assertExpected(token.QUO, token.MUL)
+			case documentHalfClose:
+				documentMode = documentClose // /*\*+*/
+				s.readRune()
+				tok := token.Token{
+					Type:     token.DOCUMENT,
+					Text:     string(s.data[position:s.position]),
+					Position: s.newPosition(position),
+				}
+				return tok, nil
+			}
+		}
+		s.readRune()
+	}
+}
+
+// assertExpected formats an "expected X, got Y" error (by token type),
+// prefixed with the scanner's current position.
+func (s *Scanner) assertExpected(actual token.Type, expected ...token.Type) error {
+	var expects []string
+	for _, v := range expected {
+		expects = append(expects, fmt.Sprintf("'%s'", v.String()))
+	}
+
+	text := fmt.Sprint(s.positionAt().String(), " ", fmt.Sprintf(
+		"expected %s, got '%s'",
+		strings.Join(expects, " | "),
+		actual.String(),
+	))
+	return errors.New(text)
+}
+
+// assertExpectedString is assertExpected for raw strings instead of
+// token types.
+func (s *Scanner) assertExpectedString(actual string, expected ...string) error {
+	var expects []string
+	for _, v := range expected {
+		expects = append(expects, fmt.Sprintf("'%s'", v))
+	}
+
+	text := fmt.Sprint(s.positionAt().String(), " ", fmt.Sprintf(
+		"expected %s, got '%s'",
+		strings.Join(expects, " | "),
+		actual,
+	))
+	return errors.New(text)
+}
+
+// positionAt returns the position of the current rune.
+func (s *Scanner) positionAt() token.Position {
+	return s.newPosition(s.position)
+}
+
+// newPosition converts a rune offset into a line/column Position; the
+// column is the offset relative to the last newline recorded in s.lines.
+func (s *Scanner) newPosition(position int) token.Position {
+	line := s.lineCount()
+	return token.Position{
+		Filename: s.filename,
+		Line:     line,
+		Column:   position - s.lines[line-1],
+	}
+}
+
+// lineCount returns the 1-based number of the current line (s.lines is
+// seeded with one sentinel entry for line 1).
+func (s *Scanner) lineCount() int {
+	return len(s.lines)
+}
+
+// skipWhiteSpace advances past spaces, tabs and newlines; newline
+// offsets are recorded as a side effect of isWhiteSpace.
+func (s *Scanner) skipWhiteSpace() {
+	for s.isWhiteSpace(s.ch) {
+		s.readRune()
+	}
+}
+
+// isDigit reports whether b is an ASCII digit.
+func (s *Scanner) isDigit(b rune) bool {
+	return b >= '0' && b <= '9'
+}
+
+// isLetter reports whether b is an ASCII letter.
+func (s *Scanner) isLetter(b rune) bool {
+	return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')
+}
+
+// isIdentifierLetter reports whether b may start/continue an identifier
+// (ASCII letter or underscore).
+func (s *Scanner) isIdentifierLetter(b rune) bool {
+	if s.isLetter(b) {
+		return true
+	}
+	return b == '_'
+}
+
+// isWhiteSpace reports whether b is whitespace.
+// NOTE(review): this predicate has a side effect — it records the
+// offset of each '\n' into s.lines for line/column bookkeeping, so it
+// must only be called with the scanner's current rune (as
+// skipWhiteSpace does).
+func (s *Scanner) isWhiteSpace(b rune) bool {
+	if b == '\n' {
+		s.lines = append(s.lines, s.position)
+	}
+	return b == ' ' || b == '\t' || b == '\r' || b == '\f' || b == '\v' || b == '\n'
+}
+
+// MustNewScanner returns a new scanner for the given filename and data.
+// It terminates the process (log.Fatalln) if the scanner cannot be
+// created.
+func MustNewScanner(filename string, src interface{}) *Scanner {
+	sc, err := NewScanner(filename, src)
+	if err != nil {
+		log.Fatalln(err)
+	}
+	return sc
+}
+
+// NewScanner returns a new scanner for the given filename and data.
+// The input is resolved by readData (file contents win over src);
+// empty input yields missingInput. Only the base name of filename is
+// kept for token positions, and the first rune is pre-loaded.
+func NewScanner(filename string, src interface{}) (*Scanner, error) {
+	data, err := readData(filename, src)
+	if err != nil {
+		return nil, err
+	}
+
+	if len(data) == 0 {
+		return nil, missingInput
+	}
+
+	var runeList []rune
+	for _, r := range string(data) {
+		runeList = append(runeList, r)
+	}
+
+	filename = filepath.Base(filename)
+	s := &Scanner{
+		filename:     filename,
+		size:         len(runeList),
+		data:         runeList,
+		lines:        []int{-1},
+		readPosition: 0,
+	}
+
+	s.readRune()
+	return s, nil
+}
+
+// readData resolves the scanner input: if filename names a readable
+// file its contents are used; otherwise src must be a []byte,
+// *bytes.Buffer or string, and any other type is an error.
+// NOTE(review): io/ioutil is deprecated since Go 1.16 — os.ReadFile is
+// the drop-in replacement here.
+func readData(filename string, src interface{}) ([]byte, error) {
+	data, err := ioutil.ReadFile(filename)
+	if err == nil {
+		return data, nil
+	}
+
+	switch v := src.(type) {
+	case []byte:
+		data = append(data, v...)
+	case *bytes.Buffer:
+		data = v.Bytes()
+	case string:
+		data = []byte(v)
+	default:
+		return nil, fmt.Errorf("unsupported type: %T", src)
+	}
+
+	return data, nil
+}

+ 1490 - 0
tools/goctl/pkg/parser/api/scanner/scanner_test.go

@@ -0,0 +1,1490 @@
+package scanner
+
+import (
+	"bytes"
+	_ "embed"
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/assertx"
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/token"
+)
+
+// Test_readData covers readData's src fallbacks ([]byte, *bytes.Buffer,
+// string, empty input) and the unsupported-type error path. The empty
+// filename guarantees the file-read branch fails first.
+func Test_readData(t *testing.T) {
+	testData := []struct {
+		input    interface{}
+		expected interface{}
+	}{
+		{
+			input:    []byte("foo"),
+			expected: []byte("foo"),
+		},
+		{
+			input:    bytes.NewBufferString("foo"),
+			expected: []byte("foo"),
+		},
+		{
+			input:    "foo",
+			expected: []byte("foo"),
+		},
+		{
+			input:    "",
+			expected: []byte{},
+		},
+		{
+			input:    strings.NewReader("foo"),
+			expected: fmt.Errorf("unsupported type: *strings.Reader"),
+		},
+	}
+	for _, v := range testData {
+		actual, err := readData("", v.input)
+		if err != nil {
+			assert.Equal(t, v.expected.(error).Error(), err.Error())
+		} else {
+			assert.Equal(t, v.expected, actual)
+		}
+	}
+}
+
+// TestNewScanner checks construction with a non-empty source and the
+// missingInput error for an empty one.
+func TestNewScanner(t *testing.T) {
+	testData := []struct {
+		filename string
+		src      interface{}
+		expected interface{}
+	}{
+		{
+			filename: "foo",
+			src:      "foo",
+			expected: "foo",
+		},
+		{
+			filename: "foo",
+			src:      "",
+			expected: missingInput,
+		},
+	}
+	for _, v := range testData {
+		s, err := NewScanner(v.filename, v.src)
+		if err != nil {
+			assert.Equal(t, v.expected.(error).Error(), err.Error())
+		} else {
+			assert.Equal(t, v.expected, s.filename)
+		}
+	}
+}
+
+// TestScanner_NextToken_lineComment checks //-comment tokens (including
+// "///" and "////") and that a lone '/' lexes as QUO.
+func TestScanner_NextToken_lineComment(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.COMMENT,
+			Text: "//",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.COMMENT,
+			Text: "//foo",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.COMMENT,
+			Text: "//bar",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.COMMENT,
+			Text: "///",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     4,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.COMMENT,
+			Text: "////",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     5,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.QUO,
+			Text: "/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     6,
+				Column:   1,
+			},
+		},
+		token.EofToken,
+	}
+	var input = "//\n//foo\n//bar\n///\n////\n/"
+	s, err := NewScanner("foo.api", input)
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+// TestScanner_NextToken_document checks block-comment (/*...*/) tokens,
+// including star- and slash-heavy bodies and a multi-line comment, and
+// that a trailing lone '/' lexes as QUO.
+func TestScanner_NextToken_document(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.DOCUMENT,
+			Text: "/**/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/***/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   6,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*-*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   12,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*/*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   18,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*////*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   24,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*foo*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*---*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.DOCUMENT,
+			Text: "/*\n*/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     4,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.QUO,
+			Text: "/",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     5,
+				Column:   1,
+			},
+		},
+		token.EofToken,
+	}
+	var input = "/**/ /***/ /*-*/ /*/*/ /*////*/  \n/*foo*/\n/*---*/\n/*\n*/\n/"
+	s, err := NewScanner("foo.api", input)
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+// TestScanner_NextToken_invalid_document checks that unterminated block
+// comments produce an error.
+func TestScanner_NextToken_invalid_document(t *testing.T) {
+	var testData = []string{
+		"/*",
+		"/**",
+		"/***",
+		"/*/",
+		"/*/*",
+		"/*/**",
+	}
+	for _, v := range testData {
+		s, err := NewScanner("foo.api", v)
+		assert.NoError(t, err)
+		_, err = s.NextToken()
+		assertx.Error(t, err)
+	}
+}
+
+// TestScanner_NextToken_operator checks the single-rune operator tokens
+// and that "..." lexes as a single ELLIPSIS token.
+func TestScanner_NextToken_operator(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.SUB,
+			Text: "-",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.MUL,
+			Text: "*",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   2,
+			},
+		},
+		{
+			Type: token.LPAREN,
+			Text: "(",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   3,
+			},
+		},
+		{
+			Type: token.LBRACE,
+			Text: "{",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   4,
+			},
+		},
+		{
+			Type: token.COMMA,
+			Text: ",",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.DOT,
+			Text: ".",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   6,
+			},
+		},
+		{
+			Type: token.RPAREN,
+			Text: ")",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   7,
+			},
+		},
+		{
+			Type: token.RBRACE,
+			Text: "}",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   8,
+			},
+		},
+		{
+			Type: token.SEMICOLON,
+			Text: ";",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   9,
+			},
+		},
+		{
+			Type: token.COLON,
+			Text: ":",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   10,
+			},
+		},
+		{
+			Type: token.ASSIGN,
+			Text: "=",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   11,
+			},
+		},
+		{
+			Type: token.ELLIPSIS,
+			Text: "...",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   12,
+			},
+		},
+	}
+	s, err := NewScanner("foo.api", "-*({,.)};:=...")
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+// TestScanner_NextToken_at checks the @doc/@handler/@server annotation
+// tokens, and that any other (or truncated/extended) @-word is rejected
+// with an error.
+func TestScanner_NextToken_at(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []token.Token{
+			{
+				Type: token.AT_DOC,
+				Text: "@doc",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.AT_HANDLER,
+				Text: "@handler",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   5,
+				},
+			},
+			{
+				Type: token.AT_SERVER,
+				Text: "@server",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   13,
+				},
+			},
+			{
+				Type: token.AT_HANDLER,
+				Text: "@handler",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     2,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.AT_SERVER,
+				Text: "@server",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     3,
+					Column:   1,
+				},
+			},
+		}
+		s, err := NewScanner("foo.api", "@doc@handler@server\n@handler\n@server")
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			"@h",
+			"@ha",
+			"@han",
+			"@hand",
+			"@handl",
+			"@handle",
+			"@handlerr",
+			"@hhandler",
+			"@foo",
+			"@sserver",
+			"@serverr",
+			"@d",
+			"@do",
+			"@docc",
+		}
+		for _, v := range testData {
+			s, err := NewScanner("foo.api", v)
+			assert.NoError(t, err)
+			_, err = s.NextToken()
+			assertx.Error(t, err)
+		}
+	})
+}
+
+func TestScanner_NextToken_ident(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.IDENT,
+			Text: "foo",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "bar",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "go",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "func",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   4,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "_",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "_go",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     4,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "info",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     5,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "goo",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     6,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "vvar",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     6,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "imports",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     6,
+				Column:   10,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "go1",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     7,
+				Column:   1,
+			},
+		},
+	}
+	s, err := NewScanner("foo.api", "foo bar\ngo func\n_\n_go\ninfo\ngoo vvar imports\ngo1")
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+func TestScanner_NextToken_Key(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.IDENT,
+			Text: "foo",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.KEY,
+			Text: "foo:",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.KEY,
+			Text: "bar:",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.COLON,
+			Text: ":",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "interface",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     4,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.ANY,
+			Text: "interface{}",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     5,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.LBRACE,
+			Text: "{",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     5,
+				Column:   12,
+			},
+		},
+	}
+	s, err := NewScanner("foo.api", "foo\nfoo:\nbar::\ninterface\ninterface{}{")
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+func TestScanner_NextToken_int(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.INT,
+			Text: `123`,
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.INT,
+			Text: `234`,
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.INT,
+			Text: `123`,
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.INT,
+			Text: `234`,
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+	}
+	s, err := NewScanner("foo.api", "123 234\n123\n234a")
+	assert.NoError(t, err)
+	for _, expected := range testData {
+		actual, err := s.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, expected, actual)
+	}
+}
+
+func TestScanner_NextToken_duration(t *testing.T) {
+	t.Run("ns", func(t *testing.T) {
+		var testData = []token.Token{
+			{
+				Type: token.DURATION,
+				Text: `1ns`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `10ns`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     2,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `100ns`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     3,
+					Column:   1,
+				},
+			},
+		}
+		s, err := NewScanner("foo.api", "1ns\n10ns\n100ns")
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("µs", func(t *testing.T) {
+		var testData = []token.Token{
+			{
+				Type: token.DURATION,
+				Text: `1µs`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `10µs`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     2,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `100µs`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     3,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `1µs1ns`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     4,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.DURATION,
+				Text: `1µs10ns`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     5,
+					Column:   1,
+				},
+			},
+		}
+		s, err := NewScanner("foo.api", "1µs\n10µs\n100µs\n1µs1ns\n1µs10ns")
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("ms", func(t *testing.T) {
+		var testData []token.Token
+		var source interface{} = "1ms\n10ms\n100ms\n1ms1µs\n1ms10µs\n1ms1ns\n1ms10ns\n1ms1µs1ns\n1ms1µs10ns\n1ms10µs1ns\n1ms10µs10ns"
+		for idx, seg := range strings.FieldsFunc(source.(string), func(r rune) bool {
+			return r == '\n'
+		}) {
+			testData = append(testData, token.Token{
+				Type: token.DURATION,
+				Text: seg,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     idx + 1,
+					Column:   1,
+				},
+			})
+		}
+		s, err := NewScanner("foo.api", source)
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("s", func(t *testing.T) {
+		var testData []token.Token
+		var source interface{} = "1s\n10s\n100s\n1s1ms\n1s10ms\n1s100ms\n1s1ms1µs\n1s10ms10µs\n1s100ms100µs\n" +
+			"1s100ms100µs1ns\n1s100ms100µs10ns\n1s100ms100µs100ns\n1s1µs\n1s10µs\n1s100µs\n1s1µs1ns\n1s10µs10ns\n" +
+			"1s100µs100ns\n1s1ms1µs1ns\n1s10ms10µs10ns\n1s100ms100µs100ns\n1s1ns\n1s10ns\n1s100ns"
+		for idx, seg := range strings.FieldsFunc(source.(string), func(r rune) bool {
+			return r == '\n'
+		}) {
+			testData = append(testData, token.Token{
+				Type: token.DURATION,
+				Text: seg,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     idx + 1,
+					Column:   1,
+				},
+			})
+		}
+		s, err := NewScanner("foo.api", source)
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("m", func(t *testing.T) {
+		var testData []token.Token
+		var source interface{} = "1m\n10m\n100m\n1m1s\n1m10s\n1m100s\n1m1s1ms\n1m10s10ms\n1m100s100ms\n" +
+			"1m1s1ms1µs\n1m10s10ms10µs\n1m100s100ms100µs\n1m1s1ms1µs1ns\n1m1s1ms1µs10ns\n1m1s1ms1µs100ns\n" +
+			"1m1s1µs\n1m1ns\n1m10ms10µs100ns"
+		list := strings.FieldsFunc(source.(string), func(r rune) bool {
+			return r == '\n'
+		})
+		for idx, seg := range list {
+			testData = append(testData, token.Token{
+				Type: token.DURATION,
+				Text: seg,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     idx + 1,
+					Column:   1,
+				},
+			})
+		}
+		s, err := NewScanner("foo.api", source)
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("h", func(t *testing.T) {
+		var testData []token.Token
+		var source interface{} = "1h\n10h\n100h\n" +
+			"1h1m\n10h10m\n100h100m\n" +
+			"1h1m1s\n10h10m10s\n100h100m100s\n" +
+			"1h1m1s1ms\n10h10m10s10ms\n100h100m100s100ms\n" +
+			"1h1m1s1ms1µs\n10h10m10s10ms10µs\n100h100m100s100ms100µs\n" +
+			"1h1m1s1ms1µs1ns\n10h10m10s10ms10µs10ns\n100h100m100s100ms100µs100ns\n" +
+			"1h10ns\n1h100µs\n10h10s\n10h10ms\n10h10m10µs"
+		list := strings.FieldsFunc(source.(string), func(r rune) bool {
+			return r == '\n'
+		})
+		for idx, seg := range list {
+			testData = append(testData, token.Token{
+				Type: token.DURATION,
+				Text: seg,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     idx + 1,
+					Column:   1,
+				},
+			})
+		}
+		s, err := NewScanner("foo.api", source)
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+}
+
+func TestScanner_NextToken_string(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []token.Token{
+			{
+				Type: token.STRING,
+				Text: `""`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.STRING,
+				Text: `"foo"`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   3,
+				},
+			},
+			{
+				Type: token.STRING,
+				Text: `"foo\nbar"`,
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   8,
+				},
+			},
+		}
+		s, err := NewScanner("foo.api", `"""foo""foo\nbar"`)
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			`"`,
+			`"foo`,
+			`"
+`,
+		}
+		for _, v := range testData {
+			s, err := NewScanner("foo.api", v)
+			assert.NoError(t, err)
+			_, err = s.NextToken()
+			assertx.Error(t, err)
+		}
+	})
+}
+
+func TestScanner_NextToken_rawString(t *testing.T) {
+	t.Run("valid", func(t *testing.T) {
+		var testData = []token.Token{
+			{
+				Type: token.RAW_STRING,
+				Text: "``",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   1,
+				},
+			},
+			{
+				Type: token.RAW_STRING,
+				Text: "`foo`",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   3,
+				},
+			},
+			{
+				Type: token.RAW_STRING,
+				Text: "`foo bar`",
+				Position: token.Position{
+					Filename: "foo.api",
+					Line:     1,
+					Column:   8,
+				},
+			},
+		}
+		s, err := NewScanner("foo.api", "```foo``foo bar`")
+		assert.NoError(t, err)
+		for _, expected := range testData {
+			actual, err := s.NextToken()
+			assert.NoError(t, err)
+			assert.Equal(t, expected, actual)
+		}
+	})
+	t.Run("invalid", func(t *testing.T) {
+		var testData = []string{
+			"`",
+			"`foo",
+			"`    ",
+		}
+		for _, v := range testData {
+			s, err := NewScanner("foo.api", v)
+			assert.NoError(t, err)
+			_, err = s.NextToken()
+			assertx.Error(t, err)
+		}
+	})
+}
+
+func TestScanner_NextToken_anyCase(t *testing.T) {
+	t.Run("case1", func(t *testing.T) {
+		var testData = []string{
+			"#",
+			"$",
+			"^",
+			"好",
+			"|",
+		}
+		for _, v := range testData {
+			s, err := NewScanner("foo.api", v)
+			assert.NoError(t, err)
+			tok, err := s.NextToken()
+			assert.NoError(t, err)
+			fmt.Println(tok.String())
+			assert.Equal(t, token.ILLEGAL, tok.Type)
+		}
+	})
+
+	t.Run("case2", func(t *testing.T) {
+		s, err := NewScanner("foo.api", `好の`)
+		assert.NoError(t, err)
+		for {
+			tok, err := s.NextToken()
+			if tok.Type == token.EOF {
+				break
+			}
+			assert.NoError(t, err)
+			fmt.Println(tok)
+		}
+	})
+
+	t.Run("case3", func(t *testing.T) {
+		s, err := NewScanner("foo.api", `foo`)
+		assert.NoError(t, err)
+		for {
+			tok, err := s.NextToken()
+			if tok.Type == token.EOF {
+				break
+			}
+			assert.NoError(t, err)
+			fmt.Println(tok)
+		}
+	})
+}
+
// testInput is the scanner fixture embedded from test.api; the token
// positions asserted in TestScanner_NextToken are pinned to its exact layout.
//
//go:embed test.api
var testInput string
+
+func TestScanner_NextToken(t *testing.T) {
+	position := func(line, column int) token.Position {
+		return token.Position{
+			Filename: "test.api",
+			Line:     line,
+			Column:   column,
+		}
+	}
+	var testData = []token.Token{
+		{
+			Type:     token.IDENT,
+			Text:     "syntax",
+			Position: position(1, 1),
+		},
+		{
+			Type:     token.ASSIGN,
+			Text:     "=",
+			Position: position(1, 8),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"v1"`,
+			Position: position(1, 10),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `info`,
+			Position: position(3, 1),
+		},
+		{
+			Type:     token.LPAREN,
+			Text:     `(`,
+			Position: position(3, 5),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `title:`,
+			Position: position(4, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"type title here"`,
+			Position: position(4, 12),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `desc:`,
+			Position: position(5, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"type desc here"`,
+			Position: position(5, 11),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `author:`,
+			Position: position(6, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"type author here"`,
+			Position: position(6, 13),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `email:`,
+			Position: position(7, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"type email here"`,
+			Position: position(7, 12),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `version:`,
+			Position: position(8, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"type version here"`,
+			Position: position(8, 14),
+		},
+		{
+			Type:     token.RPAREN,
+			Text:     `)`,
+			Position: position(9, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `type`,
+			Position: position(12, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `request`,
+			Position: position(12, 6),
+		},
+		{
+			Type:     token.LBRACE,
+			Text:     `{`,
+			Position: position(12, 14),
+		},
+		{
+			Type:     token.COMMENT,
+			Text:     `// TODO: add members here and delete this comment`,
+			Position: position(13, 5),
+		},
+		{
+			Type:     token.RBRACE,
+			Text:     `}`,
+			Position: position(14, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `type`,
+			Position: position(16, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `response`,
+			Position: position(16, 6),
+		},
+		{
+			Type:     token.LBRACE,
+			Text:     `{`,
+			Position: position(16, 15),
+		},
+		{
+			Type:     token.COMMENT,
+			Text:     `// TODO: add members here and delete this comment`,
+			Position: position(17, 5),
+		},
+		{
+			Type:     token.RBRACE,
+			Text:     `}`,
+			Position: position(18, 1),
+		},
+		{
+			Type:     token.AT_SERVER,
+			Text:     `@server`,
+			Position: position(20, 1),
+		},
+		{
+			Type:     token.LPAREN,
+			Text:     `(`,
+			Position: position(20, 8),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `jwt:`,
+			Position: position(21, 5),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `Auth`,
+			Position: position(21, 10),
+		},
+		{
+			Type:     token.KEY,
+			Text:     `group:`,
+			Position: position(22, 5),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `template`,
+			Position: position(22, 12),
+		},
+		{
+			Type:     token.RPAREN,
+			Text:     `)`,
+			Position: position(23, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `service`,
+			Position: position(24, 1),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `template`,
+			Position: position(24, 9),
+		},
+		{
+			Type:     token.LBRACE,
+			Text:     `{`,
+			Position: position(24, 18),
+		},
+		{
+			Type:     token.AT_DOC,
+			Text:     `@doc`,
+			Position: position(25, 5),
+		},
+		{
+			Type:     token.STRING,
+			Text:     `"foo"`,
+			Position: position(25, 10),
+		},
+		{
+			Type:     token.DOCUMENT,
+			Text:     `/*foo*/`,
+			Position: position(25, 16),
+		},
+		{
+			Type:     token.AT_HANDLER,
+			Text:     `@handler`,
+			Position: position(26, 5),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `handlerName`,
+			Position: position(26, 14),
+		},
+		{
+			Type:     token.COMMENT,
+			Text:     `// TODO: replace handler name and delete this comment`,
+			Position: position(26, 26),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `get`,
+			Position: position(27, 5),
+		},
+		{
+			Type:     token.QUO,
+			Text:     `/`,
+			Position: position(27, 9),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `users`,
+			Position: position(27, 10),
+		},
+		{
+			Type:     token.QUO,
+			Text:     `/`,
+			Position: position(27, 15),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `id`,
+			Position: position(27, 16),
+		},
+		{
+			Type:     token.QUO,
+			Text:     `/`,
+			Position: position(27, 18),
+		},
+		{
+			Type:     token.COLON,
+			Text:     `:`,
+			Position: position(27, 19),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `userId`,
+			Position: position(27, 20),
+		},
+		{
+			Type:     token.LPAREN,
+			Text:     `(`,
+			Position: position(27, 27),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `request`,
+			Position: position(27, 28),
+		},
+		{
+			Type:     token.RPAREN,
+			Text:     `)`,
+			Position: position(27, 35),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `returns`,
+			Position: position(27, 37),
+		},
+		{
+			Type:     token.LPAREN,
+			Text:     `(`,
+			Position: position(27, 45),
+		},
+		{
+			Type:     token.IDENT,
+			Text:     `response`,
+			Position: position(27, 46),
+		},
+		{
+			Type:     token.RPAREN,
+			Text:     `)`,
+			Position: position(27, 54),
+		},
+	}
+	scanner, err := NewScanner("test.api", testInput)
+	assert.NoError(t, err)
+	for _, v := range testData {
+		actual, err := scanner.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, v, actual)
+	}
+}
+
+func TestScanner_NextToken_type(t *testing.T) {
+	var testData = []token.Token{
+		{
+			Type: token.IDENT,
+			Text: "foo",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "string",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.RAW_STRING,
+			Text: "`json:\"foo\"`",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     1,
+				Column:   12,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "bar",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.LBRACK,
+			Text: "[",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.RBRACK,
+			Text: "]",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   6,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "int",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     2,
+				Column:   7,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "baz",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   1,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "map",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   5,
+			},
+		},
+		{
+			Type: token.LBRACK,
+			Text: "[",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   8,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "string",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   9,
+			},
+		},
+		{
+			Type: token.RBRACK,
+			Text: "]",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   15,
+			},
+		},
+		{
+			Type: token.IDENT,
+			Text: "int",
+			Position: token.Position{
+				Filename: "foo.api",
+				Line:     3,
+				Column:   16,
+			},
+		},
+	}
+	var input = "foo string `json:\"foo\"`\nbar []int\nbaz map[string]int"
+	scanner, err := NewScanner("foo.api", input)
+	assert.NoError(t, err)
+	for _, v := range testData {
+		actual, err := scanner.NextToken()
+		assert.NoError(t, err)
+		assert.Equal(t, v, actual)
+	}
+}

+ 28 - 0
tools/goctl/pkg/parser/api/scanner/test.api

@@ -0,0 +1,28 @@
+syntax = "v1"
+
+info(
+    title: "type title here"
+    desc: "type desc here"
+    author: "type author here"
+    email: "type email here"
+    version: "type version here"
+)
+
+
+type request {
+    // TODO: add members here and delete this comment
+}
+
+type response {
+    // TODO: add members here and delete this comment
+}
+
+@server(
+    jwt: Auth
+    group: template
+)
+service template {
+    @doc "foo" /*foo*/
+    @handler handlerName // TODO: replace handler name and delete this comment
+    get /users/id/:userId (request) returns (response)
+}

+ 21 - 0
tools/goctl/pkg/parser/api/token/position.go

@@ -0,0 +1,21 @@
+package token
+
+import "fmt"
+
// IllegalPosition is the zero Position, used where no valid position exists.
var IllegalPosition = Position{}

// Position represents a rune position in the source code; Line and Column
// are 1-based as produced by the scanner.
type Position struct {
	Filename string
	Line     int
	Column   int
}

// String renders the position as "line:column", prefixed with the file name
// (space-separated) when one is set, e.g. "foo.api 3:4".
func (p Position) String() string {
	if p.Filename == "" {
		return fmt.Sprintf("%d:%d", p.Line, p.Column)
	}
	return fmt.Sprintf("%s %d:%d", p.Filename, p.Line, p.Column)
}

+ 357 - 0
tools/goctl/pkg/parser/api/token/token.go

@@ -0,0 +1,357 @@
+package token
+
+import (
+	"fmt"
+
+	"github.com/zeromicro/go-zero/tools/goctl/pkg/parser/api/placeholder"
+	"github.com/zeromicro/go-zero/tools/goctl/util"
+)
+
// Keyword texts of the api description language, kept as plain string
// constants so callers can compare against a token's Text.
const (
	Syntax        = "syntax"
	Info          = "info"
	Service       = "service"
	Returns       = "returns"
	Any           = "any"
	TypeKeyword   = "type"
	MapKeyword    = "map"
	ImportKeyword = "import"
)
+
+// Type is the type of token.
+type Type int
+
+// EofToken is the end of file token.
+var EofToken = Token{Type: EOF}
+
+// ErrorToken is the error token.
+var ErrorToken = Token{Type: error}
+
+// Token is the token of a rune.
+type Token struct {
+	Type     Type
+	Text     string
+	Position Position
+}
+
+// Fork forks token for a given Type.
+func (t Token) Fork(tp Type) Token {
+	return Token{
+		Type:     tp,
+		Text:     t.Text,
+		Position: t.Position,
+	}
+}
+
+// IsEmptyString returns true if the token is empty string.
+func (t Token) IsEmptyString() bool {
+	if t.Type != STRING && t.Type != RAW_STRING {
+		return false
+	}
+	text := util.TrimWhiteSpace(t.Text)
+	return text == `""` || text == "``"
+}
+
+// IsComment returns true if the token is comment.
+func (t Token) IsComment() bool {
+	return t.IsType(COMMENT)
+}
+
+// IsDocument returns true if the token is document.
+func (t Token) IsDocument() bool {
+	return t.IsType(DOCUMENT)
+}
+
+// IsType returns true if the token is the given type.
+func (t Token) IsType(tp Type) bool {
+	return t.Type == tp
+}
+
+// Line returns the line number of the token.
+func (t Token) Line() int {
+	return t.Position.Line
+}
+
+// String returns the string of the token.
+func (t Token) String() string {
+	if t == ErrorToken {
+		return t.Type.String()
+	}
+	return fmt.Sprintf("%s %s %s", t.Position.String(), t.Type.String(), t.Text)
+}
+
+// Valid returns true if the token is valid.
+func (t Token) Valid() bool {
+	return t.Type != token_bg
+}
+
+// IsKeyword returns true if the token is keyword.
+func (t Token) IsKeyword() bool {
+	return golang_keyword_beg < t.Type && t.Type < golang_keyword_end
+}
+
+// IsBaseType returns true if the token is base type.
+func (t Token) IsBaseType() bool {
+	_, ok := baseDataType[t.Text]
+	return ok
+}
+
+// IsHttpMethod returns true if the token is http method.
+func (t Token) IsHttpMethod() bool {
+	_, ok := httpMethod[t.Text]
+	return ok
+}
+
+// Is returns true if the token text is one of the given list.
+func (t Token) Is(text ...string) bool {
+	for _, v := range text {
+		if t.Text == v {
+			return true
+		}
+	}
+	return false
+}
+
+const (
+	token_bg Type = iota
+	error
+	ILLEGAL
+	EOF
+	COMMENT
+	DOCUMENT
+
+	literal_beg
+	IDENT      // main
+	INT        // 123
+	DURATION   // 3s,3ms
+	STRING     // "abc"
+	RAW_STRING // `abc`
+	PATH       // `abc`
+	KEY        // `abc:`
+	literal_end
+
+	operator_beg
+	SUB    // -
+	MUL    // *
+	QUO    // /
+	ASSIGN // =
+
+	LPAREN // (
+	LBRACK // [
+	LBRACE // {
+	COMMA  // ,
+	DOT    // .
+
+	RPAREN    // )
+	RBRACE    // }
+	RBRACK    // ]
+	SEMICOLON // ;
+	COLON     // :
+	ELLIPSIS
+	operator_end
+
+	golang_keyword_beg
+	BREAK
+	CASE
+	CHAN
+	CONST
+	CONTINUE
+
+	DEFAULT
+	DEFER
+	ELSE
+	FALLTHROUGH
+	FOR
+
+	FUNC
+	GO
+	GOTO
+	IF
+	IMPORT
+
+	INTERFACE
+	MAP
+	PACKAGE
+	RANGE
+	RETURN
+
+	SELECT
+	STRUCT
+	SWITCH
+	TYPE
+	VAR
+	golang_keyword_end
+
+	api_keyword_bg
+	AT_DOC
+	AT_HANDLER
+	AT_SERVER
+	ANY
+
+	api_keyword_end
+	token_end
+)
+
+// String returns the string of the token type.
+func (t Type) String() string {
+	if t >= token_bg && t < token_end {
+		return tokens[t]
+	}
+	return ""
+}
+
// tokens maps each token Type to its display text; entries left unset (the
// sentinel markers and errorKind) render as the empty string.
var tokens = [...]string{
	ILLEGAL: "ILLEGAL",

	EOF:      "EOF",
	COMMENT:  "COMMENT",
	DOCUMENT: "DOCUMENT",

	IDENT:      "IDENT",
	INT:        "INT",
	DURATION:   "DURATION",
	STRING:     "STRING",
	RAW_STRING: "RAW_STRING",
	PATH:       "PATH",
	KEY:        "KEY",

	SUB:    "-",
	MUL:    "*",
	QUO:    "/",
	ASSIGN: "=",

	LPAREN: "(",
	LBRACK: "[",
	LBRACE: "{",
	COMMA:  ",",
	DOT:    ".",

	RPAREN:    ")",
	RBRACK:    "]",
	RBRACE:    "}",
	SEMICOLON: ";",
	COLON:     ":",
	ELLIPSIS:  "...",

	BREAK:    "break",
	CASE:     "case",
	CHAN:     "chan",
	CONST:    "const",
	CONTINUE: "continue",

	DEFAULT:     "default",
	DEFER:       "defer",
	ELSE:        "else",
	FALLTHROUGH: "fallthrough",
	FOR:         "for",

	FUNC:   "func",
	GO:     "go",
	GOTO:   "goto",
	IF:     "if",
	IMPORT: "import",

	INTERFACE: "interface",
	MAP:       "map",
	PACKAGE:   "package",
	RANGE:     "range",
	RETURN:    "return",

	SELECT: "select",
	STRUCT: "struct",
	SWITCH: "switch",
	TYPE:   "type",
	VAR:    "var",

	AT_DOC:     "@doc",
	AT_HANDLER: "@handler",
	AT_SERVER:  "@server",
	ANY:        "interface{}",
}

// HttpMethods returns the http methods.
var HttpMethods = []interface{}{"get", "head", "post", "put", "patch", "delete", "connect", "options", "trace"}

// httpMethod is the set of lowercase http method names; the placeholder
// values carry no data, the map is used for membership tests only.
var httpMethod = map[string]placeholder.Type{
	"get":     placeholder.PlaceHolder,
	"head":    placeholder.PlaceHolder,
	"post":    placeholder.PlaceHolder,
	"put":     placeholder.PlaceHolder,
	"patch":   placeholder.PlaceHolder,
	"delete":  placeholder.PlaceHolder,
	"connect": placeholder.PlaceHolder,
	"options": placeholder.PlaceHolder,
	"trace":   placeholder.PlaceHolder,
}

// keywords maps Go keyword text to its token Type, used by LookupKeyword.
var keywords = map[string]Type{
	// golang_keyword_bg
	"break":    BREAK,
	"case":     CASE,
	"chan":     CHAN,
	"const":    CONST,
	"continue": CONTINUE,

	"default":     DEFAULT,
	"defer":       DEFER,
	"else":        ELSE,
	"fallthrough": FALLTHROUGH,
	"for":         FOR,

	"func":   FUNC,
	"go":     GO,
	"goto":   GOTO,
	"if":     IF,
	"import": IMPORT,

	"interface": INTERFACE,
	"map":       MAP,
	"package":   PACKAGE,
	"range":     RANGE,
	"return":    RETURN,

	"select": SELECT,
	"struct": STRUCT,
	"switch": SWITCH,
	"type":   TYPE,
	"var":    VAR,
	// golang_keyword_end
}

// baseDataType is the set of Go builtin basic type names recognized by
// Token.IsBaseType; membership-only, values are placeholders.
var baseDataType = map[string]placeholder.Type{
	"bool":       placeholder.PlaceHolder,
	"uint8":      placeholder.PlaceHolder,
	"uint16":     placeholder.PlaceHolder,
	"uint32":     placeholder.PlaceHolder,
	"uint64":     placeholder.PlaceHolder,
	"int8":       placeholder.PlaceHolder,
	"int16":      placeholder.PlaceHolder,
	"int32":      placeholder.PlaceHolder,
	"int64":      placeholder.PlaceHolder,
	"float32":    placeholder.PlaceHolder,
	"float64":    placeholder.PlaceHolder,
	"complex64":  placeholder.PlaceHolder,
	"complex128": placeholder.PlaceHolder,
	"string":     placeholder.PlaceHolder,
	"int":        placeholder.PlaceHolder,
	"uint":       placeholder.PlaceHolder,
	"uintptr":    placeholder.PlaceHolder,
	"byte":       placeholder.PlaceHolder,
	"rune":       placeholder.PlaceHolder,
	"any":        placeholder.PlaceHolder,
}
+
+// LookupKeyword returns the keyword type if the given ident is keyword.
+func LookupKeyword(ident string) (Type, bool) {
+	tp, ok := keywords[ident]
+	return tp, ok
+}
+
+// NewIllegalToken returns a new illegal token.
+func NewIllegalToken(b rune, pos Position) Token {
+	return Token{
+		Type:     ILLEGAL,
+		Text:     string(b),
+		Position: pos,
+	}
+}

+ 10 - 0
tools/goctl/util/string.go

@@ -111,3 +111,13 @@ func isGolangKeyword(s string) bool {
 	_, ok := goKeyword[s]
 	return ok
 }
+
// TrimWhiteSpace removes every space, tab, newline, form-feed and
// carriage-return character from s, including interior ones.
//
// It uses a single strings.Map pass instead of constructing a
// strings.NewReplacer on every call as the original did.
func TrimWhiteSpace(s string) string {
	return strings.Map(func(r rune) rune {
		switch r {
		case ' ', '\t', '\n', '\f', '\r':
			return -1 // negative result drops the rune
		}
		return r
	}, s)
}

// IsEmptyStringOrWhiteSpace reports whether s is empty or consists solely of
// the whitespace characters handled by TrimWhiteSpace.
func IsEmptyStringOrWhiteSpace(s string) bool {
	return len(TrimWhiteSpace(s)) == 0
}