Source file src/pkg/go/parser/parser.go
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 // Package parser implements a parser for Go source files. Input may be
6 // provided in a variety of forms (see the various Parse* functions); the
7 // output is an abstract syntax tree (AST) representing the Go source. The
8 // parser is invoked through one of the Parse* functions.
9 //
10 package parser
11
12 import (
13 "fmt"
14 "go/ast"
15 "go/scanner"
16 "go/token"
17 "strconv"
18 "strings"
19 "unicode"
20 )
21
22 // The parser structure holds the parser's internal state.
23 type parser struct {
24 file *token.File
25 errors scanner.ErrorList
26 scanner scanner.Scanner
27
28 // Tracing/debugging
29 mode Mode // parsing mode
30 trace bool // == (mode & Trace != 0)
31 indent uint // indentation used for tracing output
32
33 // Comments
34 comments []*ast.CommentGroup
35 leadComment *ast.CommentGroup // last lead comment
36 lineComment *ast.CommentGroup // last line comment
37
38 // Next token
39 pos token.Pos // token position
40 tok token.Token // one token look-ahead
41 lit string // token literal
42
43 // Error recovery
44 // (used to limit the number of calls to syncXXX functions
45 // w/o making scanning progress - avoids potential endless
46 // loops across multiple parser functions during error recovery)
47 syncPos token.Pos // last synchronization position
48 syncCnt int // number of calls to syncXXX without progress
49
50 // Non-syntactic parser control
51 exprLev int // < 0: in control clause, >= 0: in expression
52
53 // Ordinary identifier scopes
54 pkgScope *ast.Scope // pkgScope.Outer == nil
55 topScope *ast.Scope // top-most scope; may be pkgScope
56 unresolved []*ast.Ident // unresolved identifiers
57 imports []*ast.ImportSpec // list of imports
58
59 // Label scope
60 // (maintained by open/close LabelScope)
61 labelScope *ast.Scope // label scope for current function
62 targetStack [][]*ast.Ident // stack of unresolved labels
63 }
64
65 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
66 p.file = fset.AddFile(filename, fset.Base(), len(src))
67 var m scanner.Mode
68 if mode&ParseComments != 0 {
69 m = scanner.ScanComments
70 }
71 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
72 p.scanner.Init(p.file, src, eh, m)
73
74 p.mode = mode
75 p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
76
77 p.next()
78
79 // set up the pkgScope here (as opposed to in parseFile) because
80 // there are other parser entry points (ParseExpr, etc.)
81 p.openScope()
82 p.pkgScope = p.topScope
83
84 // for the same reason, set up a label scope
85 p.openLabelScope()
86 }
87
88 // ----------------------------------------------------------------------------
89 // Scoping support
90
91 func (p *parser) openScope() {
92 p.topScope = ast.NewScope(p.topScope)
93 }
94
95 func (p *parser) closeScope() {
96 p.topScope = p.topScope.Outer
97 }
98
99 func (p *parser) openLabelScope() {
100 p.labelScope = ast.NewScope(p.labelScope)
101 p.targetStack = append(p.targetStack, nil)
102 }
103
104 func (p *parser) closeLabelScope() {
105 // resolve labels
106 n := len(p.targetStack) - 1
107 scope := p.labelScope
108 for _, ident := range p.targetStack[n] {
109 ident.Obj = scope.Lookup(ident.Name)
110 if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
111 p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
112 }
113 }
114 // pop label scope
115 p.targetStack = p.targetStack[0:n]
116 p.labelScope = p.labelScope.Outer
117 }
118
119 func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
120 for _, ident := range idents {
121 assert(ident.Obj == nil, "identifier already declared or resolved")
122 obj := ast.NewObj(kind, ident.Name)
123 // remember the corresponding declaration for redeclaration
124 // errors and global variable resolution/typechecking phase
125 obj.Decl = decl
126 obj.Data = data
127 ident.Obj = obj
128 if ident.Name != "_" {
129 if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
130 prevDecl := ""
131 if pos := alt.Pos(); pos.IsValid() {
132 prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
133 }
134 p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
135 }
136 }
137 }
138 }
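
// Informal example of the diagnostic produced above: redeclaring x in the
// same scope (with the DeclarationErrors mode bit set) yields an error of
// the form
//
//	x redeclared in this block
//		previous declaration at foo.go:3:6
//
// where the position comes from p.file.Position(alt.Pos()).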
139
140 func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
141 // Go spec: A short variable declaration may redeclare variables
142 // provided they were originally declared in the same block with
143 // the same type, and at least one of the non-blank variables is new.
144 n := 0 // number of new variables
145 for _, x := range list {
146 if ident, isIdent := x.(*ast.Ident); isIdent {
147 assert(ident.Obj == nil, "identifier already declared or resolved")
148 obj := ast.NewObj(ast.Var, ident.Name)
149 // remember corresponding assignment for other tools
150 obj.Decl = decl
151 ident.Obj = obj
152 if ident.Name != "_" {
153 if alt := p.topScope.Insert(obj); alt != nil {
154 ident.Obj = alt // redeclaration
155 } else {
156 n++ // new declaration
157 }
158 }
159 } else {
160 p.errorExpected(x.Pos(), "identifier")
161 }
162 }
163 if n == 0 && p.mode&DeclarationErrors != 0 {
164 p.error(list[0].Pos(), "no new variables on left side of :=")
165 }
166 }
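
// Informal illustration of the redeclaration rule above: in one block,
//
//	a, b := f() // declares a and b
//	a, c := g() // redeclares a, declares c: at least one new variable, OK
//	a, b := h() // error: no new variables on left side of :=
//
// shortVarDecl accepts the first two statements and reports the third
// (when the DeclarationErrors mode bit is set).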
167
168 // The unresolved object is a sentinel to mark identifiers that have been added
169 // to the list of unresolved identifiers. The sentinel is only used for verifying
170 // internal consistency.
171 var unresolved = new(ast.Object)
172
173 func (p *parser) resolve(x ast.Expr) {
174 // nothing to do if x is not an identifier or the blank identifier
175 ident, _ := x.(*ast.Ident)
176 if ident == nil {
177 return
178 }
179 assert(ident.Obj == nil, "identifier already declared or resolved")
180 if ident.Name == "_" {
181 return
182 }
183 // try to resolve the identifier
184 for s := p.topScope; s != nil; s = s.Outer {
185 if obj := s.Lookup(ident.Name); obj != nil {
186 ident.Obj = obj
187 return
188 }
189 }
190 // all local scopes are known, so any unresolved identifier
191 // must be found either in the file scope, package scope
192 // (perhaps in another file), or universe scope --- collect
193 // them so that they can be resolved later
194 ident.Obj = unresolved
195 p.unresolved = append(p.unresolved, ident)
196 }
197
198 // ----------------------------------------------------------------------------
199 // Parsing support
200
201 func (p *parser) printTrace(a ...interface{}) {
202 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " +
203 ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
204 const n = uint(len(dots))
205 pos := p.file.Position(p.pos)
206 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
207 i := 2 * p.indent
208 for ; i > n; i -= n {
209 fmt.Print(dots)
210 }
211 fmt.Print(dots[0:i])
212 fmt.Println(a...)
213 }
214
215 func trace(p *parser, msg string) *parser {
216 p.printTrace(msg, "(")
217 p.indent++
218 return p
219 }
220
221 // Usage pattern: defer un(trace(p, "..."));
222 func un(p *parser) {
223 p.indent--
224 p.printTrace(")")
225 }
226
227 // Advance to the next token.
228 func (p *parser) next0() {
229 // Because of one-token look-ahead, print the previous token
230 // when tracing as it provides a more readable output. The
231 // very first token (!p.pos.IsValid()) is not initialized
232 // (it is token.ILLEGAL), so don't print it.
233 if p.trace && p.pos.IsValid() {
234 s := p.tok.String()
235 switch {
236 case p.tok.IsLiteral():
237 p.printTrace(s, p.lit)
238 case p.tok.IsOperator(), p.tok.IsKeyword():
239 p.printTrace("\"" + s + "\"")
240 default:
241 p.printTrace(s)
242 }
243 }
244
245 p.pos, p.tok, p.lit = p.scanner.Scan()
246 }
247
248 // Consume a comment and return it and the line on which it ends.
249 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
250 // /*-style comments may end on a different line than where they start.
251 // Scan the comment for '\n' chars and adjust endline accordingly.
252 endline = p.file.Line(p.pos)
253 if p.lit[1] == '*' {
254 // don't use range here - no need to decode Unicode code points
255 for i := 0; i < len(p.lit); i++ {
256 if p.lit[i] == '\n' {
257 endline++
258 }
259 }
260 }
261
262 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
263 p.next0()
264
265 return
266 }
267
268 // Consume a group of adjacent comments, add it to the parser's
269 // comments list, and return it together with the line at which
270 // the last comment in the group ends. An empty line or non-comment
271 // token terminates a comment group.
272 //
273 func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int) {
274 var list []*ast.Comment
275 endline = p.file.Line(p.pos)
276 for p.tok == token.COMMENT && endline+1 >= p.file.Line(p.pos) {
277 var comment *ast.Comment
278 comment, endline = p.consumeComment()
279 list = append(list, comment)
280 }
281
282 // add comment group to the comments list
283 comments = &ast.CommentGroup{List: list}
284 p.comments = append(p.comments, comments)
285
286 return
287 }
288
289 // Advance to the next non-comment token. In the process, collect
290 // any comment groups encountered, and remember the last lead and
291 // line comments.
292 //
293 // A lead comment is a comment group that starts and ends in a
294 // line without any other tokens and that is followed by a non-comment
295 // token on the line immediately after the comment group.
296 //
297 // A line comment is a comment group that follows a non-comment
298 // token on the same line, and that has no tokens after it on the line
299 // where it ends.
300 //
301 // Lead and line comments may be considered documentation that is
302 // stored in the AST.
303 //
304 func (p *parser) next() {
305 p.leadComment = nil
306 p.lineComment = nil
307 line := p.file.Line(p.pos) // current line
308 p.next0()
309
310 if p.tok == token.COMMENT {
311 var comment *ast.CommentGroup
312 var endline int
313
314 if p.file.Line(p.pos) == line {
315 // The comment is on same line as the previous token; it
316 // cannot be a lead comment but may be a line comment.
317 comment, endline = p.consumeCommentGroup()
318 if p.file.Line(p.pos) != endline {
319 // The next token is on a different line, thus
320 // the last comment group is a line comment.
321 p.lineComment = comment
322 }
323 }
324
325 // consume successor comments, if any
326 endline = -1
327 for p.tok == token.COMMENT {
328 comment, endline = p.consumeCommentGroup()
329 }
330
331 if endline+1 == p.file.Line(p.pos) {
332 // The next token is following on the line immediately after the
333 // comment group, thus the last comment group is a lead comment.
334 p.leadComment = comment
335 }
336 }
337 }
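
// Informal illustration of the classification above:
//
//	// doc for foo (lead comment: alone on its lines, with foo
//	// on the line immediately after the group)
//	var foo int // trailing note (line comment: follows foo on the same line)
//
// With ParseComments set, such groups are collected in p.comments and,
// via p.leadComment and p.lineComment, typically end up as the Doc and
// Comment fields of the surrounding declaration.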
338
339 func (p *parser) error(pos token.Pos, msg string) {
340 p.errors.Add(p.file.Position(pos), msg)
341 }
342
343 func (p *parser) errorExpected(pos token.Pos, msg string) {
344 msg = "expected " + msg
345 if pos == p.pos {
346 // the error happened at the current position;
347 // make the error message more specific
348 if p.tok == token.SEMICOLON && p.lit == "\n" {
349 msg += ", found newline"
350 } else {
351 msg += ", found '" + p.tok.String() + "'"
352 if p.tok.IsLiteral() {
353 msg += " " + p.lit
354 }
355 }
356 }
357 p.error(pos, msg)
358 }
359
360 func (p *parser) expect(tok token.Token) token.Pos {
361 pos := p.pos
362 if p.tok != tok {
363 p.errorExpected(pos, "'"+tok.String()+"'")
364 }
365 p.next() // make progress
366 return pos
367 }
368
369 // expectClosing is like expect but provides a better error message
370 // for the common case of a missing comma before a newline.
371 //
372 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
373 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
374 p.error(p.pos, "missing ',' before newline in "+context)
375 p.next()
376 }
377 return p.expect(tok)
378 }
379
380 func (p *parser) expectSemi() {
381 // semicolon is optional before a closing ')' or '}'
382 if p.tok != token.RPAREN && p.tok != token.RBRACE {
383 if p.tok == token.SEMICOLON {
384 p.next()
385 } else {
386 p.errorExpected(p.pos, "';'")
387 syncStmt(p)
388 }
389 }
390 }
391
392 func (p *parser) atComma(context string) bool {
393 if p.tok == token.COMMA {
394 return true
395 }
396 if p.tok == token.SEMICOLON && p.lit == "\n" {
397 p.error(p.pos, "missing ',' before newline in "+context)
398 return true // "insert" the comma and continue
399
400 }
401 return false
402 }
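
// Informal example of the case handled by atComma and expectClosing:
//
//	x := []int{
//		1,
//		2 // error: missing ',' before newline in composite literal
//	}
//
// The comma is "inserted" so parsing of the literal can continue.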
403
404 func assert(cond bool, msg string) {
405 if !cond {
406 panic("go/parser internal error: " + msg)
407 }
408 }
409
410 // syncStmt advances to the next statement.
411 // Used for synchronization after an error.
412 //
413 func syncStmt(p *parser) {
414 for {
415 switch p.tok {
416 case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
417 token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
418 token.IF, token.RETURN, token.SELECT, token.SWITCH,
419 token.TYPE, token.VAR:
420 // Return only if parser made some progress since last
421 // sync or if it has not reached 10 sync calls without
422 // progress. Otherwise consume at least one token to
423 // avoid an endless parser loop (it is possible that
424 // both parseOperand and parseStmt call syncStmt and
425 // correctly do not advance, thus the need for the
426 // invocation limit p.syncCnt).
427 if p.pos == p.syncPos && p.syncCnt < 10 {
428 p.syncCnt++
429 return
430 }
431 if p.pos > p.syncPos {
432 p.syncPos = p.pos
433 p.syncCnt = 0
434 return
435 }
436 // Reaching here indicates a parser bug, likely an
437 // incorrect token list in this function, but it only
438 // leads to skipping of possibly correct code if a
439 // previous error is present, and thus is preferred
440 // over a non-terminating parse.
441 case token.EOF:
442 return
443 }
444 p.next()
445 }
446 }
447
448 // syncDecl advances to the next declaration.
449 // Used for synchronization after an error.
450 //
451 func syncDecl(p *parser) {
452 for {
453 switch p.tok {
454 case token.CONST, token.TYPE, token.VAR:
455 // see comments in syncStmt
456 if p.pos == p.syncPos && p.syncCnt < 10 {
457 p.syncCnt++
458 return
459 }
460 if p.pos > p.syncPos {
461 p.syncPos = p.pos
462 p.syncCnt = 0
463 return
464 }
465 case token.EOF:
466 return
467 }
468 p.next()
469 }
470 }
471
472 // ----------------------------------------------------------------------------
473 // Identifiers
474
475 func (p *parser) parseIdent() *ast.Ident {
476 pos := p.pos
477 name := "_"
478 if p.tok == token.IDENT {
479 name = p.lit
480 p.next()
481 } else {
482 p.expect(token.IDENT) // use expect() error handling
483 }
484 return &ast.Ident{NamePos: pos, Name: name}
485 }
486
487 func (p *parser) parseIdentList() (list []*ast.Ident) {
488 if p.trace {
489 defer un(trace(p, "IdentList"))
490 }
491
492 list = append(list, p.parseIdent())
493 for p.tok == token.COMMA {
494 p.next()
495 list = append(list, p.parseIdent())
496 }
497
498 return
499 }
500
501 // ----------------------------------------------------------------------------
502 // Common productions
503
504 // If lhs is set, result list elements which are identifiers are not resolved.
505 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
506 if p.trace {
507 defer un(trace(p, "ExpressionList"))
508 }
509
510 list = append(list, p.checkExpr(p.parseExpr(lhs)))
511 for p.tok == token.COMMA {
512 p.next()
513 list = append(list, p.checkExpr(p.parseExpr(lhs)))
514 }
515
516 return
517 }
518
519 func (p *parser) parseLhsList() []ast.Expr {
520 list := p.parseExprList(true)
521 switch p.tok {
522 case token.DEFINE:
523 // lhs of a short variable declaration
524 // but doesn't enter scope until later:
525 // caller must call p.shortVarDecl(decl, list) once the
526 // assignment statement (decl) has been constructed (see parseSimpleStmt).
527 case token.COLON:
528 // lhs of a label declaration or a communication clause of a select
529 // statement (parseLhsList is not called when parsing the case clause
530 // of a switch statement):
531 // - labels are declared by the caller of parseLhsList
532 // - for communication clauses, if there is a stand-alone identifier
533 // followed by a colon, we have a syntax error; there is no need
534 // to resolve the identifier in that case
535 default:
536 // identifiers must be declared elsewhere
537 for _, x := range list {
538 p.resolve(x)
539 }
540 }
541 return list
542 }
543
544 func (p *parser) parseRhsList() []ast.Expr {
545 return p.parseExprList(false)
546 }
547
548 // ----------------------------------------------------------------------------
549 // Types
550
551 func (p *parser) parseType() ast.Expr {
552 if p.trace {
553 defer un(trace(p, "Type"))
554 }
555
556 typ := p.tryType()
557
558 if typ == nil {
559 pos := p.pos
560 p.errorExpected(pos, "type")
561 p.next() // make progress
562 return &ast.BadExpr{From: pos, To: p.pos}
563 }
564
565 return typ
566 }
567
568 // If the result is an identifier, it is not resolved.
569 func (p *parser) parseTypeName() ast.Expr {
570 if p.trace {
571 defer un(trace(p, "TypeName"))
572 }
573
574 ident := p.parseIdent()
575 // don't resolve ident yet - it may be a parameter or field name
576
577 if p.tok == token.PERIOD {
578 // ident is a package name
579 p.next()
580 p.resolve(ident)
581 sel := p.parseIdent()
582 return &ast.SelectorExpr{X: ident, Sel: sel}
583 }
584
585 return ident
586 }
587
588 func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
589 if p.trace {
590 defer un(trace(p, "ArrayType"))
591 }
592
593 lbrack := p.expect(token.LBRACK)
594 var len ast.Expr
595 if ellipsisOk && p.tok == token.ELLIPSIS {
596 len = &ast.Ellipsis{Ellipsis: p.pos}
597 p.next()
598 } else if p.tok != token.RBRACK {
599 len = p.parseRhs()
600 }
601 p.expect(token.RBRACK)
602 elt := p.parseType()
603
604 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
605 }
606
607 func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
608 idents := make([]*ast.Ident, len(list))
609 for i, x := range list {
610 ident, isIdent := x.(*ast.Ident)
611 if !isIdent {
612 if _, isBad := x.(*ast.BadExpr); !isBad {
613 // only report error if it's a new one
614 p.errorExpected(x.Pos(), "identifier")
615 }
616 ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
617 }
618 idents[i] = ident
619 }
620 return idents
621 }
622
623 func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
624 if p.trace {
625 defer un(trace(p, "FieldDecl"))
626 }
627
628 doc := p.leadComment
629
630 // fields
631 list, typ := p.parseVarList(false)
632
633 // optional tag
634 var tag *ast.BasicLit
635 if p.tok == token.STRING {
636 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
637 p.next()
638 }
639
640 // analyze case
641 var idents []*ast.Ident
642 if typ != nil {
643 // IdentifierList Type
644 idents = p.makeIdentList(list)
645 } else {
646 // ["*"] TypeName (AnonymousField)
647 typ = list[0] // we always have at least one element
648 p.resolve(typ)
649 if n := len(list); n > 1 || !isTypeName(deref(typ)) {
650 pos := typ.Pos()
651 p.errorExpected(pos, "anonymous field")
652 typ = &ast.BadExpr{From: pos, To: list[n-1].End()}
653 }
654 }
655
656 p.expectSemi() // call before accessing p.lineComment
657
658 field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
659 p.declare(field, nil, scope, ast.Var, idents...)
660
661 return field
662 }
663
664 func (p *parser) parseStructType() *ast.StructType {
665 if p.trace {
666 defer un(trace(p, "StructType"))
667 }
668
669 pos := p.expect(token.STRUCT)
670 lbrace := p.expect(token.LBRACE)
671 scope := ast.NewScope(nil) // struct scope
672 var list []*ast.Field
673 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
674 // a field declaration cannot start with a '(' but we accept
675 // it here for more robust parsing and better error messages
676 // (parseFieldDecl will check and complain if necessary)
677 list = append(list, p.parseFieldDecl(scope))
678 }
679 rbrace := p.expect(token.RBRACE)
680
681 return &ast.StructType{
682 Struct: pos,
683 Fields: &ast.FieldList{
684 Opening: lbrace,
685 List: list,
686 Closing: rbrace,
687 },
688 }
689 }
690
691 func (p *parser) parsePointerType() *ast.StarExpr {
692 if p.trace {
693 defer un(trace(p, "PointerType"))
694 }
695
696 star := p.expect(token.MUL)
697 base := p.parseType()
698
699 return &ast.StarExpr{Star: star, X: base}
700 }
701
702 func (p *parser) tryVarType(isParam bool) ast.Expr {
703 if isParam && p.tok == token.ELLIPSIS {
704 pos := p.pos
705 p.next()
706 typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
707 if typ == nil {
708 p.error(pos, "'...' parameter is missing type")
709 typ = &ast.BadExpr{From: pos, To: p.pos}
710 }
711 return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
712 }
713 return p.tryIdentOrType(false)
714 }
715
716 func (p *parser) parseVarType(isParam bool) ast.Expr {
717 typ := p.tryVarType(isParam)
718 if typ == nil {
719 pos := p.pos
720 p.errorExpected(pos, "type")
721 p.next() // make progress
722 typ = &ast.BadExpr{From: pos, To: p.pos}
723 }
724 return typ
725 }
726
727 func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
728 if p.trace {
729 defer un(trace(p, "VarList"))
730 }
731
732 // a list of identifiers looks like a list of type names
733 //
734 // parse/tryVarType accepts any type (including parenthesized
735 // ones) even though the syntax does not permit them here: we
736 // accept them all for more robust parsing and complain later
737 for typ := p.parseVarType(isParam); typ != nil; {
738 list = append(list, typ)
739 if p.tok != token.COMMA {
740 break
741 }
742 p.next()
743 typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
744 }
745
746 // if we had a list of identifiers, it must be followed by a type
747 if typ = p.tryVarType(isParam); typ != nil {
748 p.resolve(typ)
749 }
750
751 return
752 }
753
754 func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
755 if p.trace {
756 defer un(trace(p, "ParameterList"))
757 }
758
759 list, typ := p.parseVarList(ellipsisOk)
760 if typ != nil {
761 // IdentifierList Type
762 idents := p.makeIdentList(list)
763 field := &ast.Field{Names: idents, Type: typ}
764 params = append(params, field)
765 // Go spec: The scope of an identifier denoting a function
766 // parameter or result variable is the function body.
767 p.declare(field, nil, scope, ast.Var, idents...)
768 if p.tok == token.COMMA {
769 p.next()
770 }
771
772 for p.tok != token.RPAREN && p.tok != token.EOF {
773 idents := p.parseIdentList()
774 typ := p.parseVarType(ellipsisOk)
775 field := &ast.Field{Names: idents, Type: typ}
776 params = append(params, field)
777 // Go spec: The scope of an identifier denoting a function
778 // parameter or result variable is the function body.
779 p.declare(field, nil, scope, ast.Var, idents...)
780 if !p.atComma("parameter list") {
781 break
782 }
783 p.next()
784 }
785
786 } else {
787 // Type { "," Type } (anonymous parameters)
788 params = make([]*ast.Field, len(list))
789 for i, x := range list {
790 p.resolve(x)
791 params[i] = &ast.Field{Type: x}
792 }
793 }
794
795 return
796 }
797
798 func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
799 if p.trace {
800 defer un(trace(p, "Parameters"))
801 }
802
803 var params []*ast.Field
804 lparen := p.expect(token.LPAREN)
805 if p.tok != token.RPAREN {
806 params = p.parseParameterList(scope, ellipsisOk)
807 }
808 rparen := p.expect(token.RPAREN)
809
810 return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
811 }
812
813 func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
814 if p.trace {
815 defer un(trace(p, "Result"))
816 }
817
818 if p.tok == token.LPAREN {
819 return p.parseParameters(scope, false)
820 }
821
822 typ := p.tryType()
823 if typ != nil {
824 list := make([]*ast.Field, 1)
825 list[0] = &ast.Field{Type: typ}
826 return &ast.FieldList{List: list}
827 }
828
829 return nil
830 }
831
832 func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
833 if p.trace {
834 defer un(trace(p, "Signature"))
835 }
836
837 params = p.parseParameters(scope, true)
838 results = p.parseResult(scope)
839
840 return
841 }
842
843 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
844 if p.trace {
845 defer un(trace(p, "FuncType"))
846 }
847
848 pos := p.expect(token.FUNC)
849 scope := ast.NewScope(p.topScope) // function scope
850 params, results := p.parseSignature(scope)
851
852 return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
853 }
854
855 func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
856 if p.trace {
857 defer un(trace(p, "MethodSpec"))
858 }
859
860 doc := p.leadComment
861 var idents []*ast.Ident
862 var typ ast.Expr
863 x := p.parseTypeName()
864 if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
865 // method
866 idents = []*ast.Ident{ident}
867 scope := ast.NewScope(nil) // method scope
868 params, results := p.parseSignature(scope)
869 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
870 } else {
871 // embedded interface
872 typ = x
873 p.resolve(typ)
874 }
875 p.expectSemi() // call before accessing p.linecomment
876
877 spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
878 p.declare(spec, nil, scope, ast.Fun, idents...)
879
880 return spec
881 }
882
883 func (p *parser) parseInterfaceType() *ast.InterfaceType {
884 if p.trace {
885 defer un(trace(p, "InterfaceType"))
886 }
887
888 pos := p.expect(token.INTERFACE)
889 lbrace := p.expect(token.LBRACE)
890 scope := ast.NewScope(nil) // interface scope
891 var list []*ast.Field
892 for p.tok == token.IDENT {
893 list = append(list, p.parseMethodSpec(scope))
894 }
895 rbrace := p.expect(token.RBRACE)
896
897 return &ast.InterfaceType{
898 Interface: pos,
899 Methods: &ast.FieldList{
900 Opening: lbrace,
901 List: list,
902 Closing: rbrace,
903 },
904 }
905 }
906
907 func (p *parser) parseMapType() *ast.MapType {
908 if p.trace {
909 defer un(trace(p, "MapType"))
910 }
911
912 pos := p.expect(token.MAP)
913 p.expect(token.LBRACK)
914 key := p.parseType()
915 p.expect(token.RBRACK)
916 value := p.parseType()
917
918 return &ast.MapType{Map: pos, Key: key, Value: value}
919 }
920
921 func (p *parser) parseChanType() *ast.ChanType {
922 if p.trace {
923 defer un(trace(p, "ChanType"))
924 }
925
926 pos := p.pos
927 dir := ast.SEND | ast.RECV
928 if p.tok == token.CHAN {
929 p.next()
930 if p.tok == token.ARROW {
931 p.next()
932 dir = ast.SEND
933 }
934 } else {
935 p.expect(token.ARROW)
936 p.expect(token.CHAN)
937 dir = ast.RECV
938 }
939 value := p.parseType()
940
941 return &ast.ChanType{Begin: pos, Dir: dir, Value: value}
942 }
943
944 // If the result is an identifier, it is not resolved.
945 func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
946 switch p.tok {
947 case token.IDENT:
948 return p.parseTypeName()
949 case token.LBRACK:
950 return p.parseArrayType(ellipsisOk)
951 case token.STRUCT:
952 return p.parseStructType()
953 case token.MUL:
954 return p.parsePointerType()
955 case token.FUNC:
956 typ, _ := p.parseFuncType()
957 return typ
958 case token.INTERFACE:
959 return p.parseInterfaceType()
960 case token.MAP:
961 return p.parseMapType()
962 case token.CHAN, token.ARROW:
963 return p.parseChanType()
964 case token.LPAREN:
965 lparen := p.pos
966 p.next()
967 typ := p.parseType()
968 rparen := p.expect(token.RPAREN)
969 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
970 }
971
972 // no type found
973 return nil
974 }
975
976 func (p *parser) tryType() ast.Expr {
977 typ := p.tryIdentOrType(false)
978 if typ != nil {
979 p.resolve(typ)
980 }
981 return typ
982 }
983
984 // ----------------------------------------------------------------------------
985 // Blocks
986
987 func (p *parser) parseStmtList() (list []ast.Stmt) {
988 if p.trace {
989 defer un(trace(p, "StatementList"))
990 }
991
992 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
993 list = append(list, p.parseStmt())
994 }
995
996 return
997 }
998
999 func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
1000 if p.trace {
1001 defer un(trace(p, "Body"))
1002 }
1003
1004 lbrace := p.expect(token.LBRACE)
1005 p.topScope = scope // open function scope
1006 p.openLabelScope()
1007 list := p.parseStmtList()
1008 p.closeLabelScope()
1009 p.closeScope()
1010 rbrace := p.expect(token.RBRACE)
1011
1012 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1013 }
1014
1015 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1016 if p.trace {
1017 defer un(trace(p, "BlockStmt"))
1018 }
1019
1020 lbrace := p.expect(token.LBRACE)
1021 p.openScope()
1022 list := p.parseStmtList()
1023 p.closeScope()
1024 rbrace := p.expect(token.RBRACE)
1025
1026 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1027 }
1028
1029 // ----------------------------------------------------------------------------
1030 // Expressions
1031
1032 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1033 if p.trace {
1034 defer un(trace(p, "FuncTypeOrLit"))
1035 }
1036
1037 typ, scope := p.parseFuncType()
1038 if p.tok != token.LBRACE {
1039 // function type only
1040 return typ
1041 }
1042
1043 p.exprLev++
1044 body := p.parseBody(scope)
1045 p.exprLev--
1046
1047 return &ast.FuncLit{Type: typ, Body: body}
1048 }
1049
1050 // parseOperand may return an expression or a raw type (incl. array
1051 // types of the form [...]T). Callers must verify the result.
1052 // If lhs is set and the result is an identifier, it is not resolved.
1053 //
1054 func (p *parser) parseOperand(lhs bool) ast.Expr {
1055 if p.trace {
1056 defer un(trace(p, "Operand"))
1057 }
1058
1059 switch p.tok {
1060 case token.IDENT:
1061 x := p.parseIdent()
1062 if !lhs {
1063 p.resolve(x)
1064 }
1065 return x
1066
1067 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1068 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1069 p.next()
1070 return x
1071
1072 case token.LPAREN:
1073 lparen := p.pos
1074 p.next()
1075 p.exprLev++
1076 x := p.parseRhsOrType() // types may be parenthesized: (some type)
1077 p.exprLev--
1078 rparen := p.expect(token.RPAREN)
1079 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1080
1081 case token.FUNC:
1082 return p.parseFuncTypeOrLit()
1083 }
1084
1085 if typ := p.tryIdentOrType(true); typ != nil {
1086 // could be type for composite literal or conversion
1087 _, isIdent := typ.(*ast.Ident)
1088 assert(!isIdent, "type cannot be identifier")
1089 return typ
1090 }
1091
1092 // we have an error
1093 pos := p.pos
1094 p.errorExpected(pos, "operand")
1095 syncStmt(p)
1096 return &ast.BadExpr{From: pos, To: p.pos}
1097 }
1098
1099 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1100 if p.trace {
1101 defer un(trace(p, "Selector"))
1102 }
1103
1104 sel := p.parseIdent()
1105
1106 return &ast.SelectorExpr{X: x, Sel: sel}
1107 }
1108
1109 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1110 if p.trace {
1111 defer un(trace(p, "TypeAssertion"))
1112 }
1113
1114 p.expect(token.LPAREN)
1115 var typ ast.Expr
1116 if p.tok == token.TYPE {
1117 // type switch: typ == nil
1118 p.next()
1119 } else {
1120 typ = p.parseType()
1121 }
1122 p.expect(token.RPAREN)
1123
1124 return &ast.TypeAssertExpr{X: x, Type: typ}
1125 }
1126
1127 func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
1128 if p.trace {
1129 defer un(trace(p, "IndexOrSlice"))
1130 }
1131
1132 lbrack := p.expect(token.LBRACK)
1133 p.exprLev++
1134 var low, high ast.Expr
1135 isSlice := false
1136 if p.tok != token.COLON {
1137 low = p.parseRhs()
1138 }
1139 if p.tok == token.COLON {
1140 isSlice = true
1141 p.next()
1142 if p.tok != token.RBRACK {
1143 high = p.parseRhs()
1144 }
1145 }
1146 p.exprLev--
1147 rbrack := p.expect(token.RBRACK)
1148
1149 if isSlice {
1150 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: low, High: high, Rbrack: rbrack}
1151 }
1152 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: low, Rbrack: rbrack}
1153 }
1154
1155 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1156 if p.trace {
1157 defer un(trace(p, "CallOrConversion"))
1158 }
1159
1160 lparen := p.expect(token.LPAREN)
1161 p.exprLev++
1162 var list []ast.Expr
1163 var ellipsis token.Pos
1164 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1165 list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
1166 if p.tok == token.ELLIPSIS {
1167 ellipsis = p.pos
1168 p.next()
1169 }
1170 if !p.atComma("argument list") {
1171 break
1172 }
1173 p.next()
1174 }
1175 p.exprLev--
1176 rparen := p.expectClosing(token.RPAREN, "argument list")
1177
1178 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1179 }
1180
1181 func (p *parser) parseElement(keyOk bool) ast.Expr {
1182 if p.trace {
1183 defer un(trace(p, "Element"))
1184 }
1185
1186 if p.tok == token.LBRACE {
1187 return p.parseLiteralValue(nil)
1188 }
1189
1190 x := p.checkExpr(p.parseExpr(keyOk)) // don't resolve if map key
1191 if keyOk {
1192 if p.tok == token.COLON {
1193 colon := p.pos
1194 p.next()
1195 return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
1196 }
1197 p.resolve(x) // not a map key
1198 }
1199
1200 return x
1201 }
1202
1203 func (p *parser) parseElementList() (list []ast.Expr) {
1204 if p.trace {
1205 defer un(trace(p, "ElementList"))
1206 }
1207
1208 for p.tok != token.RBRACE && p.tok != token.EOF {
1209 list = append(list, p.parseElement(true))
1210 if !p.atComma("composite literal") {
1211 break
1212 }
1213 p.next()
1214 }
1215
1216 return
1217 }
1218
1219 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1220 if p.trace {
1221 defer un(trace(p, "LiteralValue"))
1222 }
1223
1224 lbrace := p.expect(token.LBRACE)
1225 var elts []ast.Expr
1226 p.exprLev++
1227 if p.tok != token.RBRACE {
1228 elts = p.parseElementList()
1229 }
1230 p.exprLev--
1231 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1232 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1233 }
1234
1235 // checkExpr checks that x is an expression (and not a type).
1236 func (p *parser) checkExpr(x ast.Expr) ast.Expr {
1237 switch unparen(x).(type) {
1238 case *ast.BadExpr:
1239 case *ast.Ident:
1240 case *ast.BasicLit:
1241 case *ast.FuncLit:
1242 case *ast.CompositeLit:
1243 case *ast.ParenExpr:
1244 panic("unreachable")
1245 case *ast.SelectorExpr:
1246 case *ast.IndexExpr:
1247 case *ast.SliceExpr:
1248 case *ast.TypeAssertExpr:
1249 // If t.Type == nil we have a type assertion of the form
1250 // y.(type), which is only allowed in type switch expressions.
1251 // It's hard to exclude those but for the case where we are in
1252 // a type switch. Instead be lenient and test this in the type
1253 // checker.
1254 case *ast.CallExpr:
1255 case *ast.StarExpr:
1256 case *ast.UnaryExpr:
1257 case *ast.BinaryExpr:
1258 default:
1259 // all other nodes are not proper expressions
1260 p.errorExpected(x.Pos(), "expression")
1261 x = &ast.BadExpr{From: x.Pos(), To: x.End()}
1262 }
1263 return x
1264 }
1265
1266 // isTypeName returns true iff x is a (qualified) TypeName.
1267 func isTypeName(x ast.Expr) bool {
1268 switch t := x.(type) {
1269 case *ast.BadExpr:
1270 case *ast.Ident:
1271 case *ast.SelectorExpr:
1272 _, isIdent := t.X.(*ast.Ident)
1273 return isIdent
1274 default:
1275 return false // all other nodes are not type names
1276 }
1277 return true
1278 }
1279
1280 // isLiteralType returns true iff x is a legal composite literal type.
1281 func isLiteralType(x ast.Expr) bool {
1282 switch t := x.(type) {
1283 case *ast.BadExpr:
1284 case *ast.Ident:
1285 case *ast.SelectorExpr:
1286 _, isIdent := t.X.(*ast.Ident)
1287 return isIdent
1288 case *ast.ArrayType:
1289 case *ast.StructType:
1290 case *ast.MapType:
1291 default:
1292 return false // all other nodes are not legal composite literal types
1293 }
1294 return true
1295 }
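
// Informal examples for isLiteralType:
//
//	T{}        // *ast.Ident: legal composite literal type
//	pkg.T{}    // *ast.SelectorExpr with package qualifier: legal
//	[3]int{}   // *ast.ArrayType: legal
//	(*T){}     // *ast.ParenExpr: not a legal composite literal type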
1296
1297 // If x is of the form *T, deref returns T, otherwise it returns x.
1298 func deref(x ast.Expr) ast.Expr {
1299 if p, isPtr := x.(*ast.StarExpr); isPtr {
1300 x = p.X
1301 }
1302 return x
1303 }
1304
1305 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
1306 func unparen(x ast.Expr) ast.Expr {
1307 if p, isParen := x.(*ast.ParenExpr); isParen {
1308 x = unparen(p.X)
1309 }
1310 return x
1311 }
1312
1313 // checkExprOrType checks that x is an expression or a type
1314 // (and not a raw type such as [...]T).
1315 //
1316 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
1317 switch t := unparen(x).(type) {
1318 case *ast.ParenExpr:
1319 panic("unreachable")
1320 case *ast.UnaryExpr:
1321 case *ast.ArrayType:
1322 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
1323 p.error(len.Pos(), "expected array length, found '...'")
1324 x = &ast.BadExpr{From: x.Pos(), To: x.End()}
1325 }
1326 }
1327
1328 // all other nodes are expressions or types
1329 return x
1330 }
1331
1332 // If lhs is set and the result is an identifier, it is not resolved.
1333 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
1334 if p.trace {
1335 defer un(trace(p, "PrimaryExpr"))
1336 }
1337
1338 x := p.parseOperand(lhs)
1339 L:
1340 for {
1341 switch p.tok {
1342 case token.PERIOD:
1343 p.next()
1344 if lhs {
1345 p.resolve(x)
1346 }
1347 switch p.tok {
1348 case token.IDENT:
1349 x = p.parseSelector(p.checkExpr(x))
1350 case token.LPAREN:
1351 x = p.parseTypeAssertion(p.checkExpr(x))
1352 default:
1353 pos := p.pos
1354 p.errorExpected(pos, "selector or type assertion")
1355 p.next() // make progress
1356 x = &ast.BadExpr{From: pos, To: p.pos}
1357 }
1358 case token.LBRACK:
1359 if lhs {
1360 p.resolve(x)
1361 }
1362 x = p.parseIndexOrSlice(p.checkExpr(x))
1363 case token.LPAREN:
1364 if lhs {
1365 p.resolve(x)
1366 }
1367 x = p.parseCallOrConversion(p.checkExprOrType(x))
1368 case token.LBRACE:
1369 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
1370 if lhs {
1371 p.resolve(x)
1372 }
1373 x = p.parseLiteralValue(x)
1374 } else {
1375 break L
1376 }
1377 default:
1378 break L
1379 }
1380 lhs = false // no need to try to resolve again
1381 }
1382
1383 return x
1384 }
1385
1386 // If lhs is set and the result is an identifier, it is not resolved.
1387 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
1388 if p.trace {
1389 defer un(trace(p, "UnaryExpr"))
1390 }
1391
1392 switch p.tok {
1393 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
1394 pos, op := p.pos, p.tok
1395 p.next()
1396 x := p.parseUnaryExpr(false)
1397 return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
1398
1399 case token.ARROW:
1400 // channel type or receive expression
1401 pos := p.pos
1402 p.next()
1403 if p.tok == token.CHAN {
1404 p.next()
1405 value := p.parseType()
1406 return &ast.ChanType{Begin: pos, Dir: ast.RECV, Value: value}
1407 }
1408
1409 x := p.parseUnaryExpr(false)
1410 return &ast.UnaryExpr{OpPos: pos, Op: token.ARROW, X: p.checkExpr(x)}
1411
1412 case token.MUL:
1413 // pointer type or unary "*" expression
1414 pos := p.pos
1415 p.next()
1416 x := p.parseUnaryExpr(false)
1417 return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
1418 }
1419
1420 return p.parsePrimaryExpr(lhs)
1421 }
1422
1423 // If lhs is set and the result is an identifier, it is not resolved.
1424 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
1425 if p.trace {
1426 defer un(trace(p, "BinaryExpr"))
1427 }
1428
1429 x := p.parseUnaryExpr(lhs)
1430 for prec := p.tok.Precedence(); prec >= prec1; prec-- {
1431 for p.tok.Precedence() == prec {
1432 pos, op := p.pos, p.tok
1433 p.next()
1434 if lhs {
1435 p.resolve(x)
1436 lhs = false
1437 }
1438 y := p.parseBinaryExpr(false, prec+1)
1439 x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
1440 }
1441 }
1442
1443 return x
1444 }
1445
1446 // If lhs is set and the result is an identifier, it is not resolved.
1447 // The result may be a type or even a raw type ([...]int). Callers must
1448 // check the result (using checkExpr or checkExprOrType), depending on
1449 // context.
1450 func (p *parser) parseExpr(lhs bool) ast.Expr {
1451 if p.trace {
1452 defer un(trace(p, "Expression"))
1453 }
1454
1455 return p.parseBinaryExpr(lhs, token.LowestPrec+1)
1456 }
1457
1458 func (p *parser) parseRhs() ast.Expr {
1459 return p.checkExpr(p.parseExpr(false))
1460 }
1461
1462 func (p *parser) parseRhsOrType() ast.Expr {
1463 return p.checkExprOrType(p.parseExpr(false))
1464 }
1465
1466 // ----------------------------------------------------------------------------
1467 // Statements
1468
1469 // Parsing modes for parseSimpleStmt.
1470 const (
1471 basic = iota
1472 labelOk
1473 rangeOk
1474 )
1475
1476 // parseSimpleStmt returns true as 2nd result if it parsed the assignment
1477 // of a range clause (with mode == rangeOk). The returned statement is an
1478 // assignment with a right-hand side that is a single unary expression of
1479 // the form "range x". No guarantees are given for the left-hand side.
1480 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1481 if p.trace {
1482 defer un(trace(p, "SimpleStmt"))
1483 }
1484
1485 x := p.parseLhsList()
1486
1487 switch p.tok {
1488 case
1489 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1490 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1491 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1492 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1493 // assignment statement, possibly part of a range clause
1494 pos, tok := p.pos, p.tok
1495 p.next()
1496 var y []ast.Expr
1497 isRange := false
1498 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1499 pos := p.pos
1500 p.next()
1501 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1502 isRange = true
1503 } else {
1504 y = p.parseRhsList()
1505 }
1506 as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
1507 if tok == token.DEFINE {
1508 p.shortVarDecl(as, x)
1509 }
1510 return as, isRange
1511 }
1512
1513 if len(x) > 1 {
1514 p.errorExpected(x[0].Pos(), "1 expression")
1515 // continue with first expression
1516 }
1517
1518 switch p.tok {
1519 case token.COLON:
1520 // labeled statement
1521 colon := p.pos
1522 p.next()
1523 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1524 // Go spec: The scope of a label is the body of the function
1525 // in which it is declared and excludes the body of any nested
1526 // function.
1527 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1528 p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
1529 return stmt, false
1530 }
1531 // The label declaration typically starts at x[0].Pos(), but the label
1532 // declaration may be erroneous due to a token after that position (and
1533 // before the ':'). If SpuriousErrors is not set, the (only) error re-
1534 // ported for the line is the illegal label error instead of the token
1535 // before the ':' that caused the problem. Thus, use the (latest) colon
1536 // position for error reporting.
1537 p.error(colon, "illegal label declaration")
1538 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1539
1540 case token.ARROW:
1541 // send statement
1542 arrow := p.pos
1543 p.next()
1544 y := p.parseRhs()
1545 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1546
1547 case token.INC, token.DEC:
1548 // increment or decrement
1549 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1550 p.next()
1551 return s, false
1552 }
1553
1554 // expression
1555 return &ast.ExprStmt{X: x[0]}, false
1556 }
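
// Informal sketch of the range-clause representation described above: for
//
//	for k, v := range m { ... }
//
// parseSimpleStmt (called with mode == rangeOk) returns an *ast.AssignStmt
// with Lhs == [k, v], Tok == token.DEFINE, and
// Rhs == [&ast.UnaryExpr{Op: token.RANGE, X: m}]; parseForStmt later
// rewrites that assignment into an *ast.RangeStmt.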
1557
1558 func (p *parser) parseCallExpr() *ast.CallExpr {
1559 x := p.parseRhsOrType() // could be a conversion: (some type)(x)
1560 if call, isCall := x.(*ast.CallExpr); isCall {
1561 return call
1562 }
1563 if _, isBad := x.(*ast.BadExpr); !isBad {
1564 // only report error if it's a new one
1565 p.errorExpected(x.Pos(), "function/method call")
1566 }
1567 return nil
1568 }
1569
1570 func (p *parser) parseGoStmt() ast.Stmt {
1571 if p.trace {
1572 defer un(trace(p, "GoStmt"))
1573 }
1574
1575 pos := p.expect(token.GO)
1576 call := p.parseCallExpr()
1577 p.expectSemi()
1578 if call == nil {
1579 return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
1580 }
1581
1582 return &ast.GoStmt{Go: pos, Call: call}
1583 }
1584
1585 func (p *parser) parseDeferStmt() ast.Stmt {
1586 if p.trace {
1587 defer un(trace(p, "DeferStmt"))
1588 }
1589
1590 pos := p.expect(token.DEFER)
1591 call := p.parseCallExpr()
1592 p.expectSemi()
1593 if call == nil {
1594 return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
1595 }
1596
1597 return &ast.DeferStmt{Defer: pos, Call: call}
1598 }
1599
1600 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
1601 if p.trace {
1602 defer un(trace(p, "ReturnStmt"))
1603 }
1604
1605 pos := p.pos
1606 p.expect(token.RETURN)
1607 var x []ast.Expr
1608 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
1609 x = p.parseRhsList()
1610 }
1611 p.expectSemi()
1612
1613 return &ast.ReturnStmt{Return: pos, Results: x}
1614 }
1615
1616 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
1617 if p.trace {
1618 defer un(trace(p, "BranchStmt"))
1619 }
1620
1621 pos := p.expect(tok)
1622 var label *ast.Ident
1623 if tok != token.FALLTHROUGH && p.tok == token.IDENT {
1624 label = p.parseIdent()
1625 // add to list of unresolved targets
1626 n := len(p.targetStack) - 1
1627 p.targetStack[n] = append(p.targetStack[n], label)
1628 }
1629 p.expectSemi()
1630
1631 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
1632 }
1633
1634 func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
1635 if s == nil {
1636 return nil
1637 }
1638 if es, isExpr := s.(*ast.ExprStmt); isExpr {
1639 return p.checkExpr(es.X)
1640 }
1641 p.error(s.Pos(), "expected condition, found simple statement")
1642 return &ast.BadExpr{From: s.Pos(), To: s.End()}
1643 }
1644
1645 func (p *parser) parseIfStmt() *ast.IfStmt {
1646 if p.trace {
1647 defer un(trace(p, "IfStmt"))
1648 }
1649
1650 pos := p.expect(token.IF)
1651 p.openScope()
1652 defer p.closeScope()
1653
1654 var s ast.Stmt
1655 var x ast.Expr
1656 {
1657 prevLev := p.exprLev
1658 p.exprLev = -1
1659 if p.tok == token.SEMICOLON {
1660 p.next()
1661 x = p.parseRhs()
1662 } else {
1663 s, _ = p.parseSimpleStmt(basic)
1664 if p.tok == token.SEMICOLON {
1665 p.next()
1666 x = p.parseRhs()
1667 } else {
1668 x = p.makeExpr(s)
1669 s = nil
1670 }
1671 }
1672 p.exprLev = prevLev
1673 }
1674
1675 body := p.parseBlockStmt()
1676 var else_ ast.Stmt
1677 if p.tok == token.ELSE {
1678 p.next()
1679 else_ = p.parseStmt()
1680 } else {
1681 p.expectSemi()
1682 }
1683
1684 return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
1685 }
1686
1687 func (p *parser) parseTypeList() (list []ast.Expr) {
1688 if p.trace {
1689 defer un(trace(p, "TypeList"))
1690 }
1691
1692 list = append(list, p.parseType())
1693 for p.tok == token.COMMA {
1694 p.next()
1695 list = append(list, p.parseType())
1696 }
1697
1698 return
1699 }
1700
1701 func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
1702 if p.trace {
1703 defer un(trace(p, "CaseClause"))
1704 }
1705
1706 pos := p.pos
1707 var list []ast.Expr
1708 if p.tok == token.CASE {
1709 p.next()
1710 if typeSwitch {
1711 list = p.parseTypeList()
1712 } else {
1713 list = p.parseRhsList()
1714 }
1715 } else {
1716 p.expect(token.DEFAULT)
1717 }
1718
1719 colon := p.expect(token.COLON)
1720 p.openScope()
1721 body := p.parseStmtList()
1722 p.closeScope()
1723
1724 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
1725 }
1726
1727 func isTypeSwitchAssert(x ast.Expr) bool {
1728 a, ok := x.(*ast.TypeAssertExpr)
1729 return ok && a.Type == nil
1730 }
1731
1732 func isTypeSwitchGuard(s ast.Stmt) bool {
1733 switch t := s.(type) {
1734 case *ast.ExprStmt:
1735 // x.(nil)
1736 return isTypeSwitchAssert(t.X)
1737 case *ast.AssignStmt:
1738 // v := x.(nil)
1739 return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
1740 }
1741 return false
1742 }
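
// Informal example: in
//
//	switch v := x.(type) { ... }
//
// the guard "v := x.(type)" is an *ast.AssignStmt whose single right-hand
// side is an *ast.TypeAssertExpr with Type == nil, which is exactly what
// isTypeSwitchAssert and isTypeSwitchGuard detect.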
1743
1744 func (p *parser) parseSwitchStmt() ast.Stmt {
1745 if p.trace {
1746 defer un(trace(p, "SwitchStmt"))
1747 }
1748
1749 pos := p.expect(token.SWITCH)
1750 p.openScope()
1751 defer p.closeScope()
1752
1753 var s1, s2 ast.Stmt
1754 if p.tok != token.LBRACE {
1755 prevLev := p.exprLev
1756 p.exprLev = -1
1757 if p.tok != token.SEMICOLON {
1758 s2, _ = p.parseSimpleStmt(basic)
1759 }
1760 if p.tok == token.SEMICOLON {
1761 p.next()
1762 s1 = s2
1763 s2 = nil
1764 if p.tok != token.LBRACE {
1765 // A TypeSwitchGuard may declare a variable in addition
1766 // to the variable declared in the initial SimpleStmt.
1767 // Introduce extra scope to avoid redeclaration errors:
1768 //
1769 // switch t := 0; t := x.(T) { ... }
1770 //
1771 // (this code is not valid Go because the first t
1772 // cannot be accessed and thus is never used; the extra
1773 // scope is needed for the correct error message).
1774 //
1775 // If we don't have a type switch, s2 must be an expression.
1776 // Having the extra nested but empty scope won't affect it.
1777 p.openScope()
1778 defer p.closeScope()
1779 s2, _ = p.parseSimpleStmt(basic)
1780 }
1781 }
1782 p.exprLev = prevLev
1783 }
1784
1785 typeSwitch := isTypeSwitchGuard(s2)
1786 lbrace := p.expect(token.LBRACE)
1787 var list []ast.Stmt
1788 for p.tok == token.CASE || p.tok == token.DEFAULT {
1789 list = append(list, p.parseCaseClause(typeSwitch))
1790 }
1791 rbrace := p.expect(token.RBRACE)
1792 p.expectSemi()
1793 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1794
1795 if typeSwitch {
1796 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
1797 }
1798
1799 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2), Body: body}
1800 }
1801
1802 func (p *parser) parseCommClause() *ast.CommClause {
1803 if p.trace {
1804 defer un(trace(p, "CommClause"))
1805 }
1806
1807 p.openScope()
1808 pos := p.pos
1809 var comm ast.Stmt
1810 if p.tok == token.CASE {
1811 p.next()
1812 lhs := p.parseLhsList()
1813 if p.tok == token.ARROW {
1814 // SendStmt
1815 if len(lhs) > 1 {
1816 p.errorExpected(lhs[0].Pos(), "1 expression")
1817 // continue with first expression
1818 }
1819 arrow := p.pos
1820 p.next()
1821 rhs := p.parseRhs()
1822 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
1823 } else {
1824 // RecvStmt
1825 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
1826 // RecvStmt with assignment
1827 if len(lhs) > 2 {
1828 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
1829 // continue with first two expressions
1830 lhs = lhs[0:2]
1831 }
1832 pos := p.pos
1833 p.next()
1834 rhs := p.parseRhs()
1835 as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
1836 if tok == token.DEFINE {
1837 p.shortVarDecl(as, lhs)
1838 }
1839 comm = as
1840 } else {
1841 // lhs must be single receive operation
1842 if len(lhs) > 1 {
1843 p.errorExpected(lhs[0].Pos(), "1 expression")
1844 // continue with first expression
1845 }
1846 comm = &ast.ExprStmt{X: lhs[0]}
1847 }
1848 }
1849 } else {
1850 p.expect(token.DEFAULT)
1851 }
1852
1853 colon := p.expect(token.COLON)
1854 body := p.parseStmtList()
1855 p.closeScope()
1856
1857 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
1858 }
1859
1860 func (p *parser) parseSelectStmt() *ast.SelectStmt {
1861 if p.trace {
1862 defer un(trace(p, "SelectStmt"))
1863 }
1864
1865 pos := p.expect(token.SELECT)
1866 lbrace := p.expect(token.LBRACE)
1867 var list []ast.Stmt
1868 for p.tok == token.CASE || p.tok == token.DEFAULT {
1869 list = append(list, p.parseCommClause())
1870 }
1871 rbrace := p.expect(token.RBRACE)
1872 p.expectSemi()
1873 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1874
1875 return &ast.SelectStmt{Select: pos, Body: body}
1876 }
1877
1878 func (p *parser) parseForStmt() ast.Stmt {
1879 if p.trace {
1880 defer un(trace(p, "ForStmt"))
1881 }
1882
1883 pos := p.expect(token.FOR)
1884 p.openScope()
1885 defer p.closeScope()
1886
1887 var s1, s2, s3 ast.Stmt
1888 var isRange bool
1889 if p.tok != token.LBRACE {
1890 prevLev := p.exprLev
1891 p.exprLev = -1
1892 if p.tok != token.SEMICOLON {
1893 s2, isRange = p.parseSimpleStmt(rangeOk)
1894 }
1895 if !isRange && p.tok == token.SEMICOLON {
1896 p.next()
1897 s1 = s2
1898 s2 = nil
1899 if p.tok != token.SEMICOLON {
1900 s2, _ = p.parseSimpleStmt(basic)
1901 }
1902 p.expectSemi()
1903 if p.tok != token.LBRACE {
1904 s3, _ = p.parseSimpleStmt(basic)
1905 }
1906 }
1907 p.exprLev = prevLev
1908 }
1909
1910 body := p.parseBlockStmt()
1911 p.expectSemi()
1912
1913 if isRange {
1914 as := s2.(*ast.AssignStmt)
1915 // check lhs
1916 var key, value ast.Expr
1917 switch len(as.Lhs) {
1918 case 2:
1919 key, value = as.Lhs[0], as.Lhs[1]
1920 case 1:
1921 key = as.Lhs[0]
1922 default:
1923 p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
1924 return &ast.BadStmt{From: pos, To: body.End()}
1925 }
1926 // parseSimpleStmt returned a right-hand side that
1927 // is a single unary expression of the form "range x"
1928 x := as.Rhs[0].(*ast.UnaryExpr).X
1929 return &ast.RangeStmt{
1930 For: pos,
1931 Key: key,
1932 Value: value,
1933 TokPos: as.TokPos,
1934 Tok: as.Tok,
1935 X: x,
1936 Body: body,
1937 }
1938 }
1939
1940 // regular for statement
1941 return &ast.ForStmt{
1942 For: pos,
1943 Init: s1,
1944 Cond: p.makeExpr(s2),
1945 Post: s3,
1946 Body: body,
1947 }
1948 }
1949
1950 func (p *parser) parseStmt() (s ast.Stmt) {
1951 if p.trace {
1952 defer un(trace(p, "Statement"))
1953 }
1954
1955 switch p.tok {
1956 case token.CONST, token.TYPE, token.VAR:
1957 s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
1958 case
1959 // tokens that may start an expression
1960 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
1961 token.LBRACK, token.STRUCT, // composite types
1962 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
1963 s, _ = p.parseSimpleStmt(labelOk)
1964 // because of the required look-ahead, labeled statements are
1965 // parsed by parseSimpleStmt - don't expect a semicolon after
1966 // them
1967 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
1968 p.expectSemi()
1969 }
1970 case token.GO:
1971 s = p.parseGoStmt()
1972 case token.DEFER:
1973 s = p.parseDeferStmt()
1974 case token.RETURN:
1975 s = p.parseReturnStmt()
1976 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
1977 s = p.parseBranchStmt(p.tok)
1978 case token.LBRACE:
1979 s = p.parseBlockStmt()
1980 p.expectSemi()
1981 case token.IF:
1982 s = p.parseIfStmt()
1983 case token.SWITCH:
1984 s = p.parseSwitchStmt()
1985 case token.SELECT:
1986 s = p.parseSelectStmt()
1987 case token.FOR:
1988 s = p.parseForStmt()
1989 case token.SEMICOLON:
1990 s = &ast.EmptyStmt{Semicolon: p.pos}
1991 p.next()
1992 case token.RBRACE:
1993 // a semicolon may be omitted before a closing "}"
1994 s = &ast.EmptyStmt{Semicolon: p.pos}
1995 default:
1996 // no statement found
1997 pos := p.pos
1998 p.errorExpected(pos, "statement")
1999 syncStmt(p)
2000 s = &ast.BadStmt{From: pos, To: p.pos}
2001 }
2002
2003 return
2004 }
2005
2006 // ----------------------------------------------------------------------------
2007 // Declarations
2008
2009 type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec
2010
2011 func isValidImport(lit string) bool {
2012 const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
2013 s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
2014 for _, r := range s {
2015 if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
2016 return false
2017 }
2018 }
2019 return s != ""
2020 }
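
// Informal examples for isValidImport (only the quoted string literal is
// inspected; the surrounding import syntax is handled elsewhere):
//
//	"fmt"      // valid
//	"foo/bar"  // valid
//	""         // invalid: empty path
//	"a b"      // invalid: import paths must not contain spaces
//	"x|y"      // invalid: '|' is in illegalChars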
2021
2022 func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
2023 if p.trace {
2024 defer un(trace(p, "ImportSpec"))
2025 }
2026
2027 var ident *ast.Ident
2028 switch p.tok {
2029 case token.PERIOD:
2030 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2031 p.next()
2032 case token.IDENT:
2033 ident = p.parseIdent()
2034 }
2035
2036 var path *ast.BasicLit
2037 if p.tok == token.STRING {
2038 if !isValidImport(p.lit) {
2039 p.error(p.pos, "invalid import path: "+p.lit)
2040 }
2041 path = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
2042 p.next()
2043 } else {
2044 p.expect(token.STRING) // use expect() error handling
2045 }
2046 p.expectSemi() // call before accessing p.lineComment
2047
2048 // collect imports
2049 spec := &ast.ImportSpec{
2050 Doc: doc,
2051 Name: ident,
2052 Path: path,
2053 Comment: p.lineComment,
2054 }
2055 p.imports = append(p.imports, spec)
2056
2057 return spec
2058 }
2059
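// parseConstSpec parses a single constant spec within a const declaration.
// The first spec of a group (iota == 0) must carry an initializer; later
// specs may omit both type and values, in which case the previous spec's
// expression list is repeated implicitly (per the Go spec).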
2060 func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
2061 if p.trace {
2062 defer un(trace(p, "ConstSpec"))
2063 }
2064
2065 idents := p.parseIdentList()
2066 typ := p.tryType()
2067 var values []ast.Expr
2068 if typ != nil || p.tok == token.ASSIGN || iota == 0 {
2069 p.expect(token.ASSIGN)
2070 values = p.parseRhsList()
2071 }
2072 p.expectSemi() // call before accessing p.lineComment
2073
2074 // Go spec: The scope of a constant or variable identifier declared inside
2075 // a function begins at the end of the ConstSpec or VarSpec and ends at
2076 // the end of the innermost containing block.
2077 // (Global identifiers are resolved in a separate phase after parsing.)
2078 spec := &ast.ValueSpec{
2079 Doc: doc,
2080 Names: idents,
2081 Type: typ,
2082 Values: values,
2083 Comment: p.lineComment,
2084 }
2085 p.declare(spec, iota, p.topScope, ast.Con, idents...)
2086
2087 return spec
2088 }
2089
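// parseTypeSpec parses a single type spec ("name Type"). The type name is
// declared in the current scope before its type is parsed, so the type may
// refer to itself, e.g. `type List struct { next *List }` (illustrative).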
2090 func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
2091 if p.trace {
2092 defer un(trace(p, "TypeSpec"))
2093 }
2094
2095 ident := p.parseIdent()
2096
2097 // Go spec: The scope of a type identifier declared inside a function begins
2098 // at the identifier in the TypeSpec and ends at the end of the innermost
2099 // containing block.
2100 // (Global identifiers are resolved in a separate phase after parsing.)
2101 spec := &ast.TypeSpec{Doc: doc, Name: ident}
2102 p.declare(spec, nil, p.topScope, ast.Typ, ident)
2103
2104 spec.Type = p.parseType()
2105 p.expectSemi() // call before accessing p.lineComment
2106 spec.Comment = p.lineComment
2107
2108 return spec
2109 }
2110
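// parseVarSpec parses a single variable spec. A VarSpec must carry a type,
// initialization values, or both, hence the "=" is required whenever no
// type is present.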
2111 func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
2112 if p.trace {
2113 defer un(trace(p, "VarSpec"))
2114 }
2115
2116 idents := p.parseIdentList()
2117 typ := p.tryType()
2118 var values []ast.Expr
2119 if typ == nil || p.tok == token.ASSIGN {
2120 p.expect(token.ASSIGN)
2121 values = p.parseRhsList()
2122 }
2123 p.expectSemi() // call before accessing p.lineComment
2124
2125 // Go spec: The scope of a constant or variable identifier declared inside
2126 // a function begins at the end of the ConstSpec or VarSpec and ends at
2127 // the end of the innermost containing block.
2128 // (Global identifiers are resolved in a separate phase after parsing.)
2129 spec := &ast.ValueSpec{
2130 Doc: doc,
2131 Names: idents,
2132 Type: typ,
2133 Values: values,
2134 Comment: p.lineComment,
2135 }
2136 p.declare(spec, nil, p.topScope, ast.Var, idents...)
2137
2138 return spec
2139 }
2140
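// parseGenDecl parses a general declaration introduced by keyword (import,
// const, type, or var): either a single spec, or a parenthesized group of
// specs with f called once per spec. Illustrative forms:
//
//	var x int                  // single spec
//	const ( A = iota; B; C )   // grouped specs; iota counts 0, 1, 2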
2141 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2142 if p.trace {
2143 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2144 }
2145
2146 doc := p.leadComment
2147 pos := p.expect(keyword)
2148 var lparen, rparen token.Pos
2149 var list []ast.Spec
2150 if p.tok == token.LPAREN {
2151 lparen = p.pos
2152 p.next()
2153 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2154 list = append(list, f(p, p.leadComment, iota))
2155 }
2156 rparen = p.expect(token.RPAREN)
2157 p.expectSemi()
2158 } else {
2159 list = append(list, f(p, nil, 0))
2160 }
2161
2162 return &ast.GenDecl{
2163 Doc: doc,
2164 TokPos: pos,
2165 Tok: keyword,
2166 Lparen: lparen,
2167 Specs: list,
2168 Rparen: rparen,
2169 }
2170 }
2171
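// parseReceiver parses a method receiver parameter list and checks that it
// declares exactly one receiver whose type has the form ["*"] identifier;
// otherwise the list is replaced by a single field with an *ast.BadExpr type.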
2172 func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
2173 if p.trace {
2174 defer un(trace(p, "Receiver"))
2175 }
2176
2177 par := p.parseParameters(scope, false)
2178
2179 // must have exactly one receiver
2180 if par.NumFields() != 1 {
2181 p.errorExpected(par.Opening, "exactly one receiver")
2182 par.List = []*ast.Field{{Type: &ast.BadExpr{From: par.Opening, To: par.Closing + 1}}}
2183 return par
2184 }
2185
2186 // recv type must be of the form ["*"] identifier
2187 recv := par.List[0]
2188 base := deref(recv.Type)
2189 if _, isIdent := base.(*ast.Ident); !isIdent {
2190 if _, isBad := base.(*ast.BadExpr); !isBad {
2191 // only report error if it's a new one
2192 p.errorExpected(base.Pos(), "(unqualified) identifier")
2193 }
2194 par.List = []*ast.Field{
2195 {Type: &ast.BadExpr{From: recv.Pos(), To: recv.End()}},
2196 }
2197 }
2198
2199 return par
2200 }
2201
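// parseFuncDecl parses a function or method declaration, e.g. (illustrative):
//
//	func f(x int) int { return x }
//	func (t *T) String() string { return "" }
//
// Top-level functions other than init (but not methods) are entered into
// the package scope.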
2202 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2203 if p.trace {
2204 defer un(trace(p, "FunctionDecl"))
2205 }
2206
2207 doc := p.leadComment
2208 pos := p.expect(token.FUNC)
2209 scope := ast.NewScope(p.topScope) // function scope
2210
2211 var recv *ast.FieldList
2212 if p.tok == token.LPAREN {
2213 recv = p.parseReceiver(scope)
2214 }
2215
2216 ident := p.parseIdent()
2217
2218 params, results := p.parseSignature(scope)
2219
2220 var body *ast.BlockStmt
2221 if p.tok == token.LBRACE {
2222 body = p.parseBody(scope)
2223 }
2224 p.expectSemi()
2225
2226 decl := &ast.FuncDecl{
2227 Doc: doc,
2228 Recv: recv,
2229 Name: ident,
2230 Type: &ast.FuncType{
2231 Func: pos,
2232 Params: params,
2233 Results: results,
2234 },
2235 Body: body,
2236 }
2237 if recv == nil {
2238 // Go spec: The scope of an identifier denoting a constant, type,
2239 // variable, or function (but not method) declared at top level
2240 // (outside any function) is the package block.
2241 //
2242 // init() functions cannot be referred to and there may
2243 // be more than one - don't put them in the pkgScope
2244 if ident.Name != "init" {
2245 p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
2246 }
2247 }
2248
2249 return decl
2250 }
2251
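// parseDecl parses a single top-level declaration (const, type, var, or
// func). For any other token it reports an error, synchronizes via the
// provided sync function, and returns an *ast.BadDecl.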
2252 func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
2253 if p.trace {
2254 defer un(trace(p, "Declaration"))
2255 }
2256
2257 var f parseSpecFunction
2258 switch p.tok {
2259 case token.CONST:
2260 f = parseConstSpec
2261
2262 case token.TYPE:
2263 f = parseTypeSpec
2264
2265 case token.VAR:
2266 f = parseVarSpec
2267
2268 case token.FUNC:
2269 return p.parseFuncDecl()
2270
2271 default:
2272 pos := p.pos
2273 p.errorExpected(pos, "declaration")
2274 sync(p)
2275 return &ast.BadDecl{From: pos, To: p.pos}
2276 }
2277
2278 return p.parseGenDecl(p.tok, f)
2279 }
2280
2281 // ----------------------------------------------------------------------------
2282 // Source files
2283
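// parseFile parses a complete source file: the package clause, any import
// declarations, and the remaining top-level declarations, subject to the
// PackageClauseOnly and ImportsOnly modes. Identifiers that cannot be
// resolved in the file's package scope are left in the returned file's
// Unresolved list for a later, package-level resolution phase.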
2284 func (p *parser) parseFile() *ast.File {
2285 if p.trace {
2286 defer un(trace(p, "File"))
2287 }
2288
2289 // package clause
2290 doc := p.leadComment
2291 pos := p.expect(token.PACKAGE)
2292 // Go spec: The package clause is not a declaration;
2293 // the package name does not appear in any scope.
2294 ident := p.parseIdent()
2295 if ident.Name == "_" {
2296 p.error(p.pos, "invalid package name _")
2297 }
2298 p.expectSemi()
2299
2300 var decls []ast.Decl
2301
2302 // Don't bother parsing the rest if we had errors already.
2303 // Likely not a Go source file at all.
2304
2305 if p.errors.Len() == 0 && p.mode&PackageClauseOnly == 0 {
2306 // import decls
2307 for p.tok == token.IMPORT {
2308 decls = append(decls, p.parseGenDecl(token.IMPORT, parseImportSpec))
2309 }
2310
2311 if p.mode&ImportsOnly == 0 {
2312 // rest of package body
2313 for p.tok != token.EOF {
2314 decls = append(decls, p.parseDecl(syncDecl))
2315 }
2316 }
2317 }
2318
2319 assert(p.topScope == p.pkgScope, "imbalanced scopes")
2320
2321 // resolve global identifiers within the same file
2322 i := 0
2323 for _, ident := range p.unresolved {
2324 // i <= index of the current ident; still-unresolved identifiers are compacted into p.unresolved[0:i]
2325 assert(ident.Obj == unresolved, "object already resolved")
2326 ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
2327 if ident.Obj == nil {
2328 p.unresolved[i] = ident
2329 i++
2330 }
2331 }
2332
2333 return &ast.File{
2334 Doc: doc,
2335 Package: pos,
2336 Name: ident,
2337 Decls: decls,
2338 Scope: p.pkgScope,
2339 Imports: p.imports,
2340 Unresolved: p.unresolved[0:i],
2341 Comments: p.comments,
2342 }
2343 }