// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

// The recursive-descent parser is built around a slightly modified grammar
// of Go to accommodate the constraints imposed by strict one-token look-
// ahead, and to allow for better error handling. Subsequent checks of the
// constructed syntax tree restrict the language accepted by the compiler
// to proper Go.
//
// Semicolons are inserted by the lexer. The parser uses one-token look-ahead
// to handle optional commas and semicolons before a closing ) or } .
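//
// For instance, in a grouped import declaration the final ImportSpec may or
// may not be followed by a semicolon before the closing ")"; import_ below
// accepts either form by calling p.osemi(')') after each spec.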

import (
	"bufio"
	"fmt"
	"strconv"
	"strings"
)

const trace = false // if set, parse tracing can be enabled with -x

// parse_import parses the export data of a package that is imported.
func parse_import(bin *bufio.Reader, indent []byte) {
	newparser(bin, indent).import_package()
}

// parse_file parses a single Go source file.
func parse_file(bin *bufio.Reader) {
	newparser(bin, nil).file()
}

type parser struct {
	lexer
	fnest  int    // function nesting level (for error handling)
	xnest  int    // expression nesting level (for complit ambiguity resolution)
	indent []byte // tracing support

	// TODO(gri) remove this once we switch to binary export format
	structpkg *Pkg // for verification in addmethod only
}

// newparser returns a new parser ready to parse from src.
// indent is the initial indentation for tracing output.
func newparser(src *bufio.Reader, indent []byte) *parser {
	var p parser
	p.bin = src
	p.indent = indent
	p.next()
	return &p
}

func (p *parser) got(tok int32) bool {
	if p.tok == tok {
		p.next()
		return true
	}
	return false
}

func (p *parser) want(tok int32) {
	if !p.got(tok) {
		p.syntax_error("expecting " + tokstring(tok))
		p.advance()
	}
}
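
// The two helpers above are used throughout: optional tokens are tested (and
// consumed) with p.got, mandatory tokens are required with p.want, which
// reports a syntax error and resynchronizes if the token is missing.
// For example, in if_stmt below:
//
//	p.want(LIF)       // an 'if' keyword is required here
//	...
//	if p.got(LELSE) { // an else part is optional
//		...
//	}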

// ----------------------------------------------------------------------------
// Syntax error handling

func (p *parser) syntax_error(msg string) {
	if trace && Debug['x'] != 0 {
		defer p.trace("syntax_error (" + msg + ")")()
	}

	if p.tok == EOF && nerrors > 0 {
		return // avoid meaningless follow-up errors
	}

	// add punctuation etc. as needed to msg
	switch {
	case msg == "":
		// nothing to do
	case strings.HasPrefix(msg, "in"), strings.HasPrefix(msg, "at"), strings.HasPrefix(msg, "after"):
		msg = " " + msg
	case strings.HasPrefix(msg, "expecting"):
		msg = ", " + msg
	default:
		// plain error - we don't care about current token
		Yyerror("syntax error: %s", msg)
		return
	}

	// determine token string
	var tok string
	switch p.tok {
	case LNAME:
		if p.sym_ != nil && p.sym_.Name != "" {
			tok = p.sym_.Name
		} else {
			tok = "name"
		}
	case LLITERAL:
		tok = litbuf
	case LOPER:
		tok = goopnames[p.op]
	case LASOP:
		tok = goopnames[p.op] + "="
	case LINCOP:
		tok = goopnames[p.op] + goopnames[p.op]
	default:
		tok = tokstring(p.tok)
	}

	Yyerror("syntax error: unexpected %s", tok+msg)
}

// Like syntax_error, but reports error at given line rather than current lexer line.
func (p *parser) syntax_error_at(lno int32, msg string) {
	defer func(lno int32) {
		lineno = lno
	}(lineno)
	lineno = lno
	p.syntax_error(msg)
}

// The stoplist contains keywords that start a statement.
// They are good synchronization points in case of syntax
// errors and (usually) shouldn't be skipped over.
var stoplist = map[int32]bool{
	LBREAK:    true,
	LCONST:    true,
	LCONTINUE: true,
	LDEFER:    true,
	LFALL:     true,
	LFOR:      true,
	LFUNC:     true,
	LGO:       true,
	LGOTO:     true,
	LIF:       true,
	LRETURN:   true,
	LSELECT:   true,
	LSWITCH:   true,
	LTYPE:     true,
	LVAR:      true,
}

// Advance consumes tokens until it finds a token of the stop- or followlist.
// The stoplist is only considered if we are inside a function (p.fnest > 0).
// The followlist is the list of valid tokens that can follow a production;
// if it is empty, exactly one token is consumed to ensure progress.
func (p *parser) advance(followlist ...int32) {
	if len(followlist) == 0 {
		p.next()
		return
	}
	for p.tok != EOF {
		if p.fnest > 0 && stoplist[p.tok] {
			return
		}
		for _, follow := range followlist {
			if p.tok == follow {
				return
			}
		}
		p.next()
	}
}
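
// A typical recovery sequence pairs syntax_error with advance, passing the
// tokens that may legitimately follow the current production. For example,
// importdcl below uses
//
//	p.syntax_error("missing import path; require quoted string")
//	p.advance(';', ')')
//
// to skip ahead to the next import spec or to the closing ")" of the group.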

func tokstring(tok int32) string {
	switch tok {
	case EOF:
		return "EOF"
	case ',':
		return "comma"
	case ';':
		return "semicolon or newline"
	}
	if 0 <= tok && tok < 128 {
		// get invisibles properly backslashed
		s := strconv.QuoteRune(tok)
		if n := len(s); n > 0 && s[0] == '\'' && s[n-1] == '\'' {
			s = s[1 : n-1]
		}
		return s
	}
	if s := tokstrings[tok]; s != "" {
		return s
	}
	// catchall
	return fmt.Sprintf("tok-%v", tok)
}
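
// For example, tokstring(',') yields "comma", tokstring('+') yields "+",
// and keyword tokens such as LIF yield their spelling ("if") via the
// tokstrings table below.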

var tokstrings = map[int32]string{
	LNAME:    "NAME",
	LLITERAL: "LITERAL",

	LOPER:  "op",
	LASOP:  "op=",
	LINCOP: "opop",

	LCOLAS: ":=",
	LCOMM:  "<-",
	LDDD:   "...",

	LBREAK:     "break",
	LCASE:      "case",
	LCHAN:      "chan",
	LCONST:     "const",
	LCONTINUE:  "continue",
	LDEFAULT:   "default",
	LDEFER:     "defer",
	LELSE:      "else",
	LFALL:      "fallthrough",
	LFOR:       "for",
	LFUNC:      "func",
	LGO:        "go",
	LGOTO:      "goto",
	LIF:        "if",
	LIMPORT:    "import",
	LINTERFACE: "interface",
	LMAP:       "map",
	LPACKAGE:   "package",
	LRANGE:     "range",
	LRETURN:    "return",
	LSELECT:    "select",
	LSTRUCT:    "struct",
	LSWITCH:    "switch",
	LTYPE:      "type",
	LVAR:       "var",
}

// usage: defer p.trace(msg)()
func (p *parser) trace(msg string) func() {
	fmt.Printf("%5d: %s%s (\n", lineno, p.indent, msg)
	const tab = ". "
	p.indent = append(p.indent, tab...)
	return func() {
		p.indent = p.indent[:len(p.indent)-len(tab)]
		if x := recover(); x != nil {
			panic(x) // skip print_trace
		}
		fmt.Printf("%5d: %s)\n", lineno, p.indent)
	}
}
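
// When trace is set and -x is given, the parse methods produce indented
// output roughly of the form
//
//	   12: file (
//	   12: . package_ (
//	   12: . )
//	   ...
//	   42: )
//
// where each ". " corresponds to one level of nested parse methods.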

// ----------------------------------------------------------------------------
// Parsing package files
//
// Parse methods are annotated with matching Go productions as appropriate.
// The annotations are intended as guidelines only since a single Go grammar
// rule may be covered by multiple parse methods and vice versa.

// SourceFile = PackageClause ";" { ImportDecl ";" } { TopLevelDecl ";" } .
func (p *parser) file() {
	if trace && Debug['x'] != 0 {
		defer p.trace("file")()
	}

	p.package_()
	p.want(';')

	for p.tok == LIMPORT {
		p.import_()
		p.want(';')
	}

	xtop = append(xtop, p.xdcl_list()...)

	p.want(EOF)
}

// PackageClause = "package" PackageName .
// PackageName = identifier .
func (p *parser) package_() {
	if trace && Debug['x'] != 0 {
		defer p.trace("package_")()
	}

	if !p.got(LPACKAGE) {
		p.syntax_error("package statement must be first")
		errorexit()
	}
	mkpackage(p.sym().Name)
}

// ImportDecl = "import" ( ImportSpec | "(" { ImportSpec ";" } ")" ) .
func (p *parser) import_() {
	if trace && Debug['x'] != 0 {
		defer p.trace("import_")()
	}

	p.want(LIMPORT)
	if p.got('(') {
		for p.tok != EOF && p.tok != ')' {
			p.importdcl()
			if !p.osemi(')') {
				break
			}
		}
		p.want(')')
	} else {
		p.importdcl()
	}
}

// ImportSpec = [ "." | PackageName ] ImportPath .
// ImportPath = string_lit .
func (p *parser) importdcl() {
	if trace && Debug['x'] != 0 {
		defer p.trace("importdcl")()
	}

	var my *Sym
	switch p.tok {
	case LNAME, '@', '?':
		// import with given name
		my = p.sym()

	case '.':
		// import into my name space
		my = Lookup(".")
		p.next()
	}

	if p.tok != LLITERAL {
		p.syntax_error("missing import path; require quoted string")
		p.advance(';', ')')
		return
	}

	line := lineno

	// We need to clear importpkg before calling p.next(),
	// otherwise it will affect lexlineno.
	// TODO(mdempsky): Fix this clumsy API.
	importfile(&p.val, p.indent)
	ipkg := importpkg
	importpkg = nil

	p.next()
	if ipkg == nil {
		if nerrors == 0 {
			Fatalf("phase error in import")
		}
		return
	}

	ipkg.Direct = true

	if my == nil {
		my = Lookup(ipkg.Name)
	}

	pack := Nod(OPACK, nil, nil)
	pack.Sym = my
	pack.Name.Pkg = ipkg
	pack.Lineno = line

	if strings.HasPrefix(my.Name, ".") {
		importdot(ipkg, pack)
		return
	}
	if my.Name == "init" {
		lineno = line
		Yyerror("cannot import package as init - init must be a func")
		return
	}
	if my.Name == "_" {
		return
	}
	if my.Def != nil {
		lineno = line
		redeclare(my, "as imported package name")
	}
	my.Def = pack
	my.Lastlineno = line
	my.Block = 1 // at top level
}

// import_package parses the header of an imported package as exported
// in textual format from another package.
func (p *parser) import_package() {
	if trace && Debug['x'] != 0 {
		defer p.trace("import_package")()
	}

	p.want(LPACKAGE)
	var name string
	if p.tok == LNAME {
		name = p.sym_.Name
		p.next()
	} else {
		p.import_error()
	}

	importsafe := false
	if p.tok == LNAME {
		if p.sym_.Name == "safe" {
			importsafe = true
		}
		p.next()
	}
	p.want(';')

	if importpkg.Name == "" {
		importpkg.Name = name
		numImport[name]++
	} else if importpkg.Name != name {
		Yyerror("conflicting names %s and %s for package %q", importpkg.Name, name, importpkg.Path)
	}
	importpkg.Safe = importsafe

	typecheckok = true
	defercheckwidth()

	p.hidden_import_list()
	p.want('$')
	// don't read past 2nd '$'
	if p.tok != '$' {
		p.import_error()
	}

	resumecheckwidth()
	typecheckok = false
}
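
// In other words, the textual export data accepted here begins with a
// package clause of the form
//
//	package NAME [safe]
//
// followed by the exported declarations read by p.hidden_import_list,
// and is terminated by the "$$" marker (only the first '$' is consumed
// above).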

// Declaration = ConstDecl | TypeDecl | VarDecl .
// ConstDecl = "const" ( ConstSpec | "(" { ConstSpec ";" } ")" ) .
// TypeDecl = "type" ( TypeSpec | "(" { TypeSpec ";" } ")" ) .
// VarDecl = "var" ( VarSpec | "(" { VarSpec ";" } ")" ) .
func (p *parser) common_dcl() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("common_dcl")()
	}

	var dcl func() []*Node
	switch p.tok {
	case LVAR:
		dcl = p.vardcl

	case LCONST:
		iota_ = 0
		dcl = p.constdcl

	case LTYPE:
		dcl = p.typedcl

	default:
		panic("unreachable")
	}

	p.next()
	var s []*Node
	if p.got('(') {
		for p.tok != EOF && p.tok != ')' {
			s = append(s, dcl()...)
			if !p.osemi(')') {
				break
			}
		}
		p.want(')')
	} else {
		s = dcl()
	}

	iota_ = -100000
	lastconst = nil

	return s
}

// VarSpec = IdentifierList ( Type [ "=" ExpressionList ] | "=" ExpressionList ) .
func (p *parser) vardcl() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("vardcl")()
	}

	names := p.dcl_name_list()
	var typ *Node
	var exprs []*Node
	if p.got('=') {
		exprs = p.expr_list()
	} else {
		typ = p.ntype()
		if p.got('=') {
			exprs = p.expr_list()
		}
	}

	return variter(names, typ, exprs)
}

// ConstSpec = IdentifierList [ [ Type ] "=" ExpressionList ] .
func (p *parser) constdcl() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("constdcl")()
	}

	names := p.dcl_name_list()
	var typ *Node
	var exprs []*Node
	if p.tok != EOF && p.tok != ';' && p.tok != ')' {
		typ = p.try_ntype()
		if p.got('=') {
			exprs = p.expr_list()
		}
	}

	return constiter(names, typ, exprs)
}

// TypeSpec = identifier Type .
func (p *parser) typedcl() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("typedcl")()
	}

	name := typedcl0(p.sym())

	typ := p.try_ntype()
	// handle case where type is missing
	if typ == nil {
		p.syntax_error("in type declaration")
		p.advance(';', ')')
	}

	return []*Node{typedcl1(name, typ, true)}
}

// SimpleStmt = EmptyStmt | ExpressionStmt | SendStmt | IncDecStmt | Assignment | ShortVarDecl .
//
// simple_stmt may return missing_stmt if labelOk is set.
func (p *parser) simple_stmt(labelOk, rangeOk bool) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("simple_stmt")()
	}

	if rangeOk && p.got(LRANGE) {
		// LRANGE expr
		r := Nod(ORANGE, nil, p.expr())
		r.Etype = 0 // := flag
		return r
	}

	lhs := p.expr_list()

	if len(lhs) == 1 && p.tok != '=' && p.tok != LCOLAS && p.tok != LRANGE {
		// expr
		lhs := lhs[0]
		switch p.tok {
		case LASOP:
			// expr LASOP expr
			op := p.op
			p.next()
			rhs := p.expr()

			stmt := Nod(OASOP, lhs, rhs)
			stmt.Etype = EType(op) // rathole to pass opcode
			return stmt

		case LINCOP:
			// expr LINCOP
			p.next()

			stmt := Nod(OASOP, lhs, Nodintconst(1))
			stmt.Implicit = true
			stmt.Etype = EType(p.op)
			return stmt

		case ':':
			// labelname ':' stmt
			if labelOk {
				// If we have a labelname, it was parsed by operand
				// (calling p.name()) and given an ONAME, ONONAME, OTYPE, OPACK, or OLITERAL node.
				// We only have a labelname if there is a symbol (was issue 14006).
				switch lhs.Op {
				case ONAME, ONONAME, OTYPE, OPACK, OLITERAL:
					if lhs.Sym != nil {
						lhs = newname(lhs.Sym)
						break
					}
					fallthrough
				default:
					p.syntax_error("expecting semicolon or newline or }")
					// we already progressed, no need to advance
				}
				lhs := Nod(OLABEL, lhs, nil)
				lhs.Sym = dclstack // context, for goto restrictions
				p.next()           // consume ':' after making label node for correct lineno
				return p.labeled_stmt(lhs)
			}
			fallthrough

		default:
			// expr
			// Since a bare name used as an expression is an error,
			// introduce a wrapper node where necessary to give the
			// correct line.
			return wrapname(lhs)
		}
	}

	// expr_list
	switch p.tok {
	case '=':
		p.next()
		if rangeOk && p.got(LRANGE) {
			// expr_list '=' LRANGE expr
			r := Nod(ORANGE, nil, p.expr())
			r.List.Set(lhs)
			r.Etype = 0 // := flag
			return r
		}

		// expr_list '=' expr_list
		rhs := p.expr_list()

		if len(lhs) == 1 && len(rhs) == 1 {
			// simple
			return Nod(OAS, lhs[0], rhs[0])
		}
		// multiple
		stmt := Nod(OAS2, nil, nil)
		stmt.List.Set(lhs)
		stmt.Rlist.Set(rhs)
		return stmt

	case LCOLAS:
		lno := lineno
		p.next()

		if rangeOk && p.got(LRANGE) {
			// expr_list LCOLAS LRANGE expr
			r := Nod(ORANGE, nil, p.expr())
			r.List.Set(lhs)
			r.Colas = true
			colasdefn(r.List, r)
			return r
		}

		// expr_list LCOLAS expr_list
		rhs := p.expr_list()

		if rhs[0].Op == OTYPESW {
			ts := Nod(OTYPESW, nil, rhs[0].Right)
			if len(rhs) > 1 {
				Yyerror("expr.(type) must be alone in list")
			}
			if len(lhs) > 1 {
				Yyerror("argument count mismatch: %d = %d", len(lhs), 1)
			} else if (lhs[0].Op != ONAME && lhs[0].Op != OTYPE && lhs[0].Op != ONONAME && (lhs[0].Op != OLITERAL || lhs[0].Name == nil)) || isblank(lhs[0]) {
				Yyerror("invalid variable name %s in type switch", lhs[0])
			} else {
				ts.Left = dclname(lhs[0].Sym)
			} // it's a colas, so must not re-use an oldname
			return ts
		}
		return colas(lhs, rhs, lno)

	default:
		p.syntax_error("expecting := or = or comma")
		p.advance(';', '}')
		return nil
	}
}

// LabeledStmt = Label ":" Statement .
// Label = identifier .
func (p *parser) labeled_stmt(label *Node) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("labeled_stmt")()
	}

	var ls *Node // labeled statement
	if p.tok != '}' && p.tok != EOF {
		ls = p.stmt()
		if ls == missing_stmt {
			// report error at line of ':' token
			p.syntax_error_at(label.Lineno, "missing statement after label")
			// we are already at the end of the labeled statement - no need to advance
			return missing_stmt
		}
	}

	label.Name.Defn = ls
	l := []*Node{label}
	if ls != nil {
		if ls.Op == OBLOCK && ls.Ninit.Len() == 0 {
			l = append(l, ls.List.Slice()...)
		} else {
			l = append(l, ls)
		}
	}
	return liststmt(l)
}

// case_ parses a superset of switch and select statement cases.
// Later checks restrict the syntax to valid forms.
//
// ExprSwitchCase = "case" ExpressionList | "default" .
// TypeSwitchCase = "case" TypeList | "default" .
// TypeList = Type { "," Type } .
// CommCase = "case" ( SendStmt | RecvStmt ) | "default" .
// RecvStmt = [ ExpressionList "=" | IdentifierList ":=" ] RecvExpr .
// RecvExpr = Expression .
func (p *parser) case_(tswitch *Node) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("case_")()
	}

	switch p.tok {
	case LCASE:
		p.next()
		cases := p.expr_list() // expr_or_type_list
		switch p.tok {
		case ':':
			// LCASE expr_or_type_list ':'

			// will be converted to OCASE
			// right will point to next case
			// done in casebody()
			markdcl() // matching popdcl in caseblock
			stmt := Nod(OXCASE, nil, nil)
			stmt.List.Set(cases)
			if tswitch != nil {
				if n := tswitch.Left; n != nil {
					// type switch - declare variable
					nn := newname(n.Sym)
					declare(nn, dclcontext)
					stmt.Rlist.Set1(nn)

					// keep track of the instances for reporting unused
					nn.Name.Defn = tswitch
				}
			}

			p.next() // consume ':' after declaring type switch var for correct lineno
			return stmt

		case '=':
			// LCASE expr_or_type_list '=' expr ':'
			p.next()
			rhs := p.expr()

			// will be converted to OCASE
			// right will point to next case
			// done in casebody()
			markdcl() // matching popdcl in caseblock
			stmt := Nod(OXCASE, nil, nil)
			var n *Node
			if len(cases) == 1 {
				n = Nod(OAS, cases[0], rhs)
			} else {
				n = Nod(OAS2, nil, nil)
				n.List.Set(cases)
				n.Rlist.Set1(rhs)
			}
			stmt.List.Set1(n)

			p.want(':') // consume ':' after declaring select cases for correct lineno
			return stmt

		case LCOLAS:
			// LCASE expr_or_type_list LCOLAS expr ':'
			lno := lineno
			p.next()
			rhs := p.expr()

			// will be converted to OCASE
			// right will point to next case
			// done in casebody()
			markdcl() // matching popdcl in caseblock
			stmt := Nod(OXCASE, nil, nil)
			stmt.List.Set1(colas(cases, []*Node{rhs}, lno))

			p.want(':') // consume ':' after declaring select cases for correct lineno
			return stmt

		default:
			markdcl()                     // for matching popdcl in caseblock
			stmt := Nod(OXCASE, nil, nil) // don't return nil
			p.syntax_error("expecting := or = or : or comma")
			p.advance(LCASE, LDEFAULT, '}')
			return stmt
		}

	case LDEFAULT:
		// LDEFAULT ':'
		p.next()

		markdcl() // matching popdcl in caseblock
		stmt := Nod(OXCASE, nil, nil)
		if tswitch != nil {
			if n := tswitch.Left; n != nil {
				// type switch - declare variable
				nn := newname(n.Sym)
				declare(nn, dclcontext)
				stmt.Rlist.Set1(nn)

				// keep track of the instances for reporting unused
				nn.Name.Defn = tswitch
			}
		}

		p.want(':') // consume ':' after declaring type switch var for correct lineno
		return stmt

	default:
		markdcl()                     // matching popdcl in caseblock
		stmt := Nod(OXCASE, nil, nil) // don't return nil
		p.syntax_error("expecting case or default or }")
		p.advance(LCASE, LDEFAULT, '}')
		return stmt
	}
}

// Block = "{" StatementList "}" .
// StatementList = { Statement ";" } .
func (p *parser) compound_stmt() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("compound_stmt")()
	}

	markdcl()
	p.want('{')
	l := p.stmt_list()
	p.want('}')
	popdcl()

	if len(l) == 0 {
		return Nod(OEMPTY, nil, nil)
	}
	return liststmt(l)
}

// caseblock parses a superset of switch and select clauses.
//
// ExprCaseClause = ExprSwitchCase ":" StatementList .
// TypeCaseClause = TypeSwitchCase ":" StatementList .
// CommClause = CommCase ":" StatementList .
func (p *parser) caseblock(tswitch *Node) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("caseblock")()
	}

	stmt := p.case_(tswitch) // does markdcl
	stmt.Xoffset = int64(block)
	stmt.Nbody.Set(p.stmt_list())

	popdcl()

	return stmt
}

// caseblock_list parses a superset of switch and select clause lists.
func (p *parser) caseblock_list(tswitch *Node) (l []*Node) {
	if trace && Debug['x'] != 0 {
		defer p.trace("caseblock_list")()
	}

	if !p.got('{') {
		p.syntax_error("missing { after switch clause")
		p.advance(LCASE, LDEFAULT, '}')
	}

	for p.tok != EOF && p.tok != '}' {
		l = append(l, p.caseblock(tswitch))
	}
	p.want('}')
	return
}

// loop_body parses if and for statement bodies.
func (p *parser) loop_body(context string) []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("loop_body")()
	}

	markdcl()
	if !p.got('{') {
		p.syntax_error("missing { after " + context)
		p.advance(LNAME, '}')
	}

	body := p.stmt_list()
	popdcl()
	p.want('}')

	return body
}

// for_header parses the header portion of a for statement.
//
// ForStmt = "for" [ Condition | ForClause | RangeClause ] Block .
// Condition = Expression .
func (p *parser) for_header() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("for_header")()
	}

	init, cond, post := p.header(true)

	if init != nil || post != nil {
		// init ; test ; incr
		if post != nil && post.Colas {
			Yyerror("cannot declare in the for-increment")
		}
		h := Nod(OFOR, nil, nil)
		if init != nil {
			h.Ninit.Set1(init)
		}
		h.Left = cond
		h.Right = post
		return h
	}

	if cond != nil && cond.Op == ORANGE {
		// range_stmt - handled by pexpr
		return cond
	}

	// normal test
	h := Nod(OFOR, nil, nil)
	h.Left = cond
	return h
}

func (p *parser) for_body() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("for_body")()
	}

	stmt := p.for_header()
	body := p.loop_body("for clause")

	stmt.Nbody.Append(body...)
	return stmt
}

// ForStmt = "for" [ Condition | ForClause | RangeClause ] Block .
func (p *parser) for_stmt() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("for_stmt")()
	}

	p.want(LFOR)
	markdcl()
	body := p.for_body()
	popdcl()

	return body
}

// header parses a combination of if, switch, and for statement headers:
//
// Header = [ InitStmt ";" ] [ Expression ] .
// Header = [ InitStmt ] ";" [ Condition ] ";" [ PostStmt ] . // for_stmt only
// InitStmt = SimpleStmt .
// PostStmt = SimpleStmt .
func (p *parser) header(for_stmt bool) (init, cond, post *Node) {
	if p.tok == '{' {
		return
	}

	outer := p.xnest
	p.xnest = -1

	if p.tok != ';' {
		// accept potential vardcl but complain
		// (for test/syntax/forvar.go)
		if for_stmt && p.tok == LVAR {
			Yyerror("var declaration not allowed in for initializer")
			p.next()
		}
		init = p.simple_stmt(false, for_stmt)
		// If we have a range clause, we are done.
		if for_stmt && init.Op == ORANGE {
			cond = init
			init = nil

			p.xnest = outer
			return
		}
	}
	if p.got(';') {
		if for_stmt {
			if p.tok != ';' {
				cond = p.simple_stmt(false, false)
			}
			p.want(';')
			if p.tok != '{' {
				post = p.simple_stmt(false, false)
			}
		} else if p.tok != '{' {
			cond = p.simple_stmt(false, false)
		}
	} else {
		cond = init
		init = nil
	}

	p.xnest = outer
	return
}
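
// For example, "if x := f(); x > 0 {" yields init and cond, a full
// "for i := 0; i < n; i++ {" clause additionally yields post, and a bare
// "for x > 0 {" or "switch {" leaves init and post nil. Setting p.xnest
// to -1 while the header is parsed is what keeps a composite literal such
// as T{} from being recognized there (see the complit_ok check in pexpr).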

func (p *parser) if_header() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("if_header")()
	}

	init, cond, _ := p.header(false)
	h := Nod(OIF, nil, nil)
	if init != nil {
		h.Ninit.Set1(init)
	}
	h.Left = cond
	return h
}

// IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] .
func (p *parser) if_stmt() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("if_stmt")()
	}

	p.want(LIF)

	markdcl()

	stmt := p.if_header()
	if stmt.Left == nil {
		Yyerror("missing condition in if statement")
	}

	stmt.Nbody.Set(p.loop_body("if clause"))

	if p.got(LELSE) {
		switch p.tok {
		case LIF:
			stmt.Rlist.Set1(p.if_stmt())
		case '{':
			cs := p.compound_stmt()
			if cs.Op == OBLOCK && cs.Ninit.Len() == 0 {
				stmt.Rlist.Set(cs.List.Slice())
			} else {
				stmt.Rlist.Set1(cs)
			}
		default:
			p.syntax_error("else must be followed by if or statement block")
			p.advance(LNAME, '}')
		}
	}

	popdcl()
	return stmt
}

// switch_stmt parses both expression and type switch statements.
//
// SwitchStmt = ExprSwitchStmt | TypeSwitchStmt .
// ExprSwitchStmt = "switch" [ SimpleStmt ";" ] [ Expression ] "{" { ExprCaseClause } "}" .
// TypeSwitchStmt = "switch" [ SimpleStmt ";" ] TypeSwitchGuard "{" { TypeCaseClause } "}" .
func (p *parser) switch_stmt() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("switch_stmt")()
	}

	p.want(LSWITCH)
	markdcl()

	hdr := p.if_header()
	hdr.Op = OSWITCH

	tswitch := hdr.Left
	if tswitch != nil && tswitch.Op != OTYPESW {
		tswitch = nil
	}

	hdr.List.Set(p.caseblock_list(tswitch))
	popdcl()

	return hdr
}

// SelectStmt = "select" "{" { CommClause } "}" .
func (p *parser) select_stmt() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("select_stmt")()
	}

	p.want(LSELECT)
	hdr := Nod(OSELECT, nil, nil)
	hdr.List.Set(p.caseblock_list(nil))
	return hdr
}

// Expression = UnaryExpr | Expression binary_op Expression .
func (p *parser) bexpr(prec OpPrec) *Node {
	// don't trace bexpr - only leads to overly nested trace output

	// prec is precedence of the prior/enclosing binary operator (if any),
	// so we only want to parse tokens of greater precedence.

	x := p.uexpr()
	for p.prec > prec {
		op, prec1 := p.op, p.prec
		p.next()
		x = Nod(op, x, p.bexpr(prec1))
	}
	return x
}
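
// bexpr implements the usual precedence-climbing scheme: parsing a + b*c,
// the outer bexpr(0) consumes a, sees +, and calls bexpr with the
// precedence of + for the right operand; that inner call also consumes
// b*c because * binds more tightly, yielding a + (b*c). Since the loop
// requires strictly greater precedence, operators of equal precedence
// associate to the left: a - b - c parses as (a-b) - c.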

func (p *parser) expr() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("expr")()
	}

	return p.bexpr(0)
}

func unparen(x *Node) *Node {
	for x.Op == OPAREN {
		x = x.Left
	}
	return x
}

// UnaryExpr = PrimaryExpr | unary_op UnaryExpr .
func (p *parser) uexpr() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("uexpr")()
	}

	var op Op
	switch p.tok {
	case '*':
		op = OIND

	case '&':
		p.next()
		// uexpr may have returned a parenthesized composite literal
		// (see comment in operand) - remove parentheses if any
		x := unparen(p.uexpr())
		if x.Op == OCOMPLIT {
			// Special case for &T{...}: turn into (*T){...}.
			x.Right = Nod(OIND, x.Right, nil)
			x.Right.Implicit = true
		} else {
			x = Nod(OADDR, x, nil)
		}
		return x

	case '+':
		op = OPLUS

	case '-':
		op = OMINUS

	case '!':
		op = ONOT

	case '~':
		// TODO(gri) do this in the lexer instead (issue 13244)
		p.next()
		x := p.uexpr()
		Yyerror("the bitwise complement operator is ^")
		return Nod(OCOM, x, nil)

	case '^':
		op = OCOM

	case LCOMM:
		// receive op (<-x) or receive-only channel (<-chan E)
		p.next()

		// If the next token is LCHAN we still don't know if it is
		// a channel (<-chan int) or a receive op (<-chan int(ch)).
		// We only know once we have found the end of the uexpr.

		x := p.uexpr()

		// There are two cases:
		//
		//   <-chan...  => <-x is a channel type
		//   <-x        => <-x is a receive operation
		//
		// In the first case, <- must be re-associated with
		// the channel type parsed already:
		//
		//   <-(chan E)   =>  (<-chan E)
		//   <-(chan<-E)  =>  (<-chan (<-E))

		if x.Op == OTCHAN {
			// x is a channel type => re-associate <-
			dir := EType(Csend)
			t := x
			for ; t.Op == OTCHAN && dir == Csend; t = t.Left {
				dir = t.Etype
				if dir == Crecv {
					// t is type <-chan E but <-<-chan E is not permitted
					// (report same error as for "type _ <-<-chan E")
					p.syntax_error("unexpected <-, expecting chan")
					// already progressed, no need to advance
				}
				t.Etype = Crecv
			}
			if dir == Csend {
				// channel dir is <- but channel element E is not a channel
				// (report same error as for "type _ <-chan<-E")
				p.syntax_error(fmt.Sprintf("unexpected %v, expecting chan", t))
				// already progressed, no need to advance
			}
			return x
		}

		// x is not a channel type => we have a receive op
		return Nod(ORECV, x, nil)

	default:
		return p.pexpr(false)
	}

	// simple uexpr
	p.next()
	return Nod(op, p.uexpr(), nil)
}

// pseudocall parses call-like statements that can be preceded by 'defer' and 'go'.
func (p *parser) pseudocall() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("pseudocall")()
	}

	x := p.pexpr(p.tok == '(') // keep_parens so we can report error below
	switch x.Op {
	case OCALL:
		return x
	case OPAREN:
		Yyerror("expression in go/defer must not be parenthesized")
		// already progressed, no need to advance
	default:
		Yyerror("expression in go/defer must be function call")
		// already progressed, no need to advance
	}
	return nil
}

// Operand = Literal | OperandName | MethodExpr | "(" Expression ")" .
// Literal = BasicLit | CompositeLit | FunctionLit .
// BasicLit = int_lit | float_lit | imaginary_lit | rune_lit | string_lit .
// OperandName = identifier | QualifiedIdent.
func (p *parser) operand(keep_parens bool) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("operand")()
	}

	switch p.tok {
	case LLITERAL:
		x := nodlit(p.val)
		p.next()
		return x

	case LNAME, '@', '?':
		return p.name()

	case '(':
		p.next()
		p.xnest++
		x := p.expr() // expr_or_type
		p.xnest--
		p.want(')')

		// Optimization: Record presence of ()'s only where needed
		// for error reporting. Don't bother in other cases; it is
		// just a waste of memory and time.

		// Parentheses are not permitted on lhs of := .
		switch x.Op {
		case ONAME, ONONAME, OPACK, OTYPE, OLITERAL, OTYPESW:
			keep_parens = true
		}

		// Parentheses are not permitted around T in a composite
		// literal T{}. If the next token is a {, assume x is a
		// composite literal type T (it may not be, { could be
		// the opening brace of a block, but we don't know yet).
		if p.tok == '{' {
			keep_parens = true
		}

		// Parentheses are also not permitted around the expression
		// in a go/defer statement. In that case, operand is called
		// with keep_parens set.
		if keep_parens {
			x = Nod(OPAREN, x, nil)
		}
		return x

	case LFUNC:
		t := p.ntype() // fntype
		if p.tok == '{' {
			// fnlitdcl
			closurehdr(t)
			// fnliteral
			p.next() // consume '{'
			p.fnest++
			p.xnest++
			body := p.stmt_list()
			p.xnest--
			p.fnest--
			p.want('}')
			return closurebody(body)
		}
		return t

	case '[', LCHAN, LMAP, LSTRUCT, LINTERFACE:
		return p.ntype() // othertype

	case '{':
		// common case: p.header is missing simple_stmt before { in if, for, switch
		p.syntax_error("missing operand")
		// '{' will be consumed in pexpr - no need to consume it here
		return nil

	default:
		p.syntax_error("expecting expression")
		p.advance()
		return nil
	}

	// Syntactically, composite literals are operands. Because a complit
	// type may be a qualified identifier which is handled by pexpr
	// (together with selector expressions), complits are parsed there
	// as well (operand is only called from pexpr).
}

// PrimaryExpr =
//	Operand |
//	Conversion |
//	PrimaryExpr Selector |
//	PrimaryExpr Index |
//	PrimaryExpr Slice |
//	PrimaryExpr TypeAssertion |
//	PrimaryExpr Arguments .
//
// Selector       = "." identifier .
// Index          = "[" Expression "]" .
// Slice          = "[" ( [ Expression ] ":" [ Expression ] ) |
//                      ( [ Expression ] ":" Expression ":" Expression )
//                  "]" .
// TypeAssertion  = "." "(" Type ")" .
// Arguments      = "(" [ ( ExpressionList | Type [ "," ExpressionList ] ) [ "..." ] [ "," ] ] ")" .
func (p *parser) pexpr(keep_parens bool) *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("pexpr")()
	}

	x := p.operand(keep_parens)

loop:
	for {
		switch p.tok {
		case '.':
			p.next()
			switch p.tok {
			case LNAME, '@', '?':
				// pexpr '.' sym
				x = p.new_dotname(x)

			case '(':
				p.next()
				switch p.tok {
				default:
					// pexpr '.' '(' expr_or_type ')'
					t := p.expr() // expr_or_type
					p.want(')')
					x = Nod(ODOTTYPE, x, t)

				case LTYPE:
					// pexpr '.' '(' LTYPE ')'
					p.next()
					p.want(')')
					x = Nod(OTYPESW, nil, x)
				}

			default:
				p.syntax_error("expecting name or (")
				p.advance(';', '}')
			}

		case '[':
			p.next()
			p.xnest++
			var index [3]*Node
			if p.tok != ':' {
				index[0] = p.expr()
			}
			ncol := 0
			for ncol < len(index)-1 && p.got(':') {
				ncol++
				if p.tok != EOF && p.tok != ':' && p.tok != ']' {
					index[ncol] = p.expr()
				}
			}
			p.xnest--
			p.want(']')

			switch ncol {
			case 0:
				i := index[0]
				if i == nil {
					Yyerror("missing index in index expression")
				}
				x = Nod(OINDEX, x, i)
			case 1:
				i := index[0]
				j := index[1]
				x = Nod(OSLICE, x, Nod(OKEY, i, j))
			case 2:
				i := index[0]
				j := index[1]
				k := index[2]
				if j == nil {
					Yyerror("middle index required in 3-index slice")
				}
				if k == nil {
					Yyerror("final index required in 3-index slice")
				}
				x = Nod(OSLICE3, x, Nod(OKEY, i, Nod(OKEY, j, k)))

			default:
				panic("unreachable")
			}

		case '(':
			// convtype '(' expr ocomma ')'
			args, ddd := p.arg_list()

			// call or conversion
			x = Nod(OCALL, x, nil)
			x.List.Set(args)
			x.Isddd = ddd

		case '{':
			// operand may have returned a parenthesized complit
			// type; accept it but complain if we have a complit
			t := unparen(x)
			// determine if '{' belongs to a complit or a compound_stmt
			complit_ok := false
			switch t.Op {
			case ONAME, ONONAME, OTYPE, OPACK, OXDOT, ODOT:
				if p.xnest >= 0 {
					// x is considered a comptype
					complit_ok = true
				}
			case OTARRAY, OTSTRUCT, OTMAP:
				// x is a comptype
				complit_ok = true
			}
			if !complit_ok {
				break loop
			}
			if t != x {
				p.syntax_error("cannot parenthesize type in composite literal")
				// already progressed, no need to advance
			}
			n := p.complitexpr()
			n.Right = x
			x = n

		default:
			break loop
		}
	}

	return x
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// KeyedElement = [ Key ":" ] Element .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) keyval() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("keyval")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-18 14:16:28 -08:00
|
|
|
// A composite literal commonly spans several lines,
|
|
|
|
|
// so the line number on errors may be misleading.
|
|
|
|
|
// Wrap values (but not keys!) that don't carry line
|
|
|
|
|
// numbers.
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
x := p.bare_complitexpr()
|
2015-11-18 14:16:28 -08:00
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.got(':') {
|
2015-11-18 14:16:28 -08:00
|
|
|
// key ':' value
|
|
|
|
|
return Nod(OKEY, x, wrapname(p.bare_complitexpr()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// value
|
|
|
|
|
return wrapname(x)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
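// wrapname wraps x in an implicit OPAREN node if x's Op does not carry a
// line number of its own; the wrapper supplies the current line so error
// messages inside composite literals point at the use, not the declaration.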
func wrapname(x *Node) *Node {
|
|
|
|
|
// These nodes do not carry line numbers.
|
|
|
|
|
// Introduce a wrapper node to give the correct line.
|
|
|
|
|
switch x.Op {
|
|
|
|
|
case ONAME, ONONAME, OTYPE, OPACK, OLITERAL:
|
|
|
|
|
x = Nod(OPAREN, x, nil)
|
|
|
|
|
x.Implicit = true
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
return x
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Element = Expression | LiteralValue .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) bare_complitexpr() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("bare_complitexpr")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if p.tok == '{' {
|
|
|
|
|
// '{' start_complit braced_keyval_list '}'
|
|
|
|
|
return p.complitexpr()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-18 14:16:28 -08:00
|
|
|
return p.expr()
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// LiteralValue = "{" [ ElementList [ "," ] ] "}" .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) complitexpr() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("complitexpr")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// make node early so we get the right line number
|
|
|
|
|
n := Nod(OCOMPLIT, nil, nil)
|
|
|
|
|
|
|
|
|
|
p.want('{')
|
2015-11-19 16:11:09 -08:00
|
|
|
p.xnest++
|
2015-11-04 09:21:49 -08:00
|
|
|
|
2016-03-04 15:26:38 -08:00
|
|
|
var l []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.tok != EOF && p.tok != '}' {
|
2016-03-04 15:26:38 -08:00
|
|
|
l = append(l, p.keyval())
|
2015-11-19 16:11:09 -08:00
|
|
|
if !p.ocomma('}') {
|
|
|
|
|
break
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-19 16:11:09 -08:00
|
|
|
p.xnest--
|
2015-11-04 09:21:49 -08:00
|
|
|
p.want('}')
|
|
|
|
|
|
2016-03-08 15:10:26 -08:00
|
|
|
n.List.Set(l)
|
2015-11-04 09:21:49 -08:00
|
|
|
return n
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// names and types
|
|
|
|
|
// newname is used before a name has been declared,
// oldname after it has been declared.
|
|
|
|
|
func (p *parser) new_name(sym *Sym) *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("new_name")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if sym != nil {
|
|
|
|
|
return newname(sym)
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
func (p *parser) dcl_name() *Node {
|
2015-10-04 20:33:02 +00:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("dcl_name")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
symlineno := lineno
|
|
|
|
|
sym := p.sym()
|
2015-10-04 20:33:02 +00:00
|
|
|
if sym == nil {
|
2016-03-02 11:01:25 -08:00
|
|
|
yyerrorl(symlineno, "invalid declaration")
|
2015-10-04 20:33:02 +00:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
return dclname(sym)
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
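// onew_name parses an optional name; it returns nil if no name is present.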
func (p *parser) onew_name() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("onew_name")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME, '@', '?':
|
|
|
|
|
return p.new_name(p.sym())
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
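// sym parses a symbol: an identifier, an @-qualified symbol from export
// data, or the '?' placeholder (which yields a nil *Sym). On a syntax
// error it advances and returns an empty, non-nil *Sym.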
func (p *parser) sym() *Sym {
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME:
|
|
|
|
|
s := p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
// during imports, unqualified non-exported identifiers are from builtinpkg
|
|
|
|
|
if importpkg != nil && !exportname(s.Name) {
|
|
|
|
|
s = Pkglookup(s.Name, builtinpkg)
|
|
|
|
|
}
|
|
|
|
|
return s
|
|
|
|
|
|
|
|
|
|
case '@':
|
|
|
|
|
return p.hidden_importsym()
|
|
|
|
|
|
|
|
|
|
case '?':
|
|
|
|
|
p.next()
|
|
|
|
|
return nil
|
|
|
|
|
|
|
|
|
|
default:
|
2015-11-16 11:24:43 -08:00
|
|
|
p.syntax_error("expecting name")
|
2015-11-13 14:04:40 -08:00
|
|
|
p.advance()
|
2015-11-04 09:21:49 -08:00
|
|
|
return new(Sym)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
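// mkname returns a node for the already-declared name sym, marking the
// package import it came through (if any) as used.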
func mkname(sym *Sym) *Node {
|
|
|
|
|
n := oldname(sym)
|
|
|
|
|
if n.Name != nil && n.Name.Pack != nil {
|
|
|
|
|
n.Name.Pack.Used = true
|
|
|
|
|
}
|
|
|
|
|
return n
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) name() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("name")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return mkname(p.sym())
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// [ "..." ] Type
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) dotdotdot() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("dotdotdot")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LDDD)
|
2015-11-20 17:31:33 -08:00
|
|
|
if typ := p.try_ntype(); typ != nil {
|
|
|
|
|
return Nod(ODDD, typ, nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Yyerror("final argument in variadic function missing type")
|
|
|
|
|
return Nod(ODDD, typenod(typ(TINTER)), nil)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) ntype() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("ntype")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-20 17:31:33 -08:00
|
|
|
if typ := p.try_ntype(); typ != nil {
|
|
|
|
|
return typ
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.syntax_error("")
|
|
|
|
|
p.advance()
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// try_ntype is like ntype but it returns nil if there was no type
|
|
|
|
|
// instead of reporting an error.
|
2015-11-24 14:49:10 -08:00
|
|
|
//
|
|
|
|
|
// Type = TypeName | TypeLit | "(" Type ")" .
|
|
|
|
|
// TypeName = identifier | QualifiedIdent .
|
|
|
|
|
// TypeLit = ArrayType | StructType | PointerType | FunctionType | InterfaceType |
|
|
|
|
|
// SliceType | MapType | ChannelType .
|
2015-11-20 17:31:33 -08:00
|
|
|
func (p *parser) try_ntype() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("try_ntype")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
switch p.tok {
|
|
|
|
|
case LCOMM:
|
2015-11-13 15:35:26 -08:00
|
|
|
// recvchantype
|
|
|
|
|
p.next()
|
|
|
|
|
p.want(LCHAN)
|
|
|
|
|
t := Nod(OTCHAN, p.chan_elem(), nil)
|
|
|
|
|
t.Etype = Crecv
|
|
|
|
|
return t
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LFUNC:
|
2015-11-13 15:35:26 -08:00
|
|
|
// fntype
|
|
|
|
|
p.next()
|
|
|
|
|
params := p.param_list()
|
|
|
|
|
result := p.fnres()
|
|
|
|
|
params = checkarglist(params, 1)
|
|
|
|
|
t := Nod(OTFUNC, nil, nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
t.List.Set(params)
|
|
|
|
|
t.Rlist.Set(result)
|
2015-11-13 15:35:26 -08:00
|
|
|
return t
|
2015-11-04 09:21:49 -08:00
|
|
|
|
2015-11-13 15:35:26 -08:00
|
|
|
case '[':
|
|
|
|
|
// '[' oexpr ']' ntype
|
|
|
|
|
// '[' LDDD ']' ntype
|
|
|
|
|
p.next()
|
2015-11-19 16:11:09 -08:00
|
|
|
p.xnest++
|
2015-11-13 15:35:26 -08:00
|
|
|
var len *Node
|
|
|
|
|
if p.tok != ']' {
|
|
|
|
|
if p.got(LDDD) {
|
|
|
|
|
len = Nod(ODDD, nil, nil)
|
|
|
|
|
} else {
|
|
|
|
|
len = p.expr()
|
|
|
|
|
}
|
|
|
|
|
}
|
2015-11-19 16:11:09 -08:00
|
|
|
p.xnest--
|
2015-11-13 15:35:26 -08:00
|
|
|
p.want(']')
|
|
|
|
|
return Nod(OTARRAY, len, p.ntype())
|
|
|
|
|
|
|
|
|
|
case LCHAN:
|
|
|
|
|
// LCHAN non_recvchantype
|
|
|
|
|
// LCHAN LCOMM ntype
|
|
|
|
|
p.next()
|
|
|
|
|
var dir EType = Cboth
|
|
|
|
|
if p.got(LCOMM) {
|
|
|
|
|
dir = Csend
|
|
|
|
|
}
|
|
|
|
|
t := Nod(OTCHAN, p.chan_elem(), nil)
|
|
|
|
|
t.Etype = dir
|
|
|
|
|
return t
|
|
|
|
|
|
|
|
|
|
case LMAP:
|
|
|
|
|
// LMAP '[' ntype ']' ntype
|
|
|
|
|
p.next()
|
|
|
|
|
p.want('[')
|
|
|
|
|
key := p.ntype()
|
|
|
|
|
p.want(']')
|
|
|
|
|
val := p.ntype()
|
|
|
|
|
return Nod(OTMAP, key, val)
|
|
|
|
|
|
|
|
|
|
case LSTRUCT:
|
|
|
|
|
return p.structtype()
|
|
|
|
|
|
|
|
|
|
case LINTERFACE:
|
|
|
|
|
return p.interfacetype()
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case '*':
|
2015-11-13 15:35:26 -08:00
|
|
|
// ptrtype
|
|
|
|
|
p.next()
|
|
|
|
|
return Nod(OIND, p.ntype(), nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LNAME, '@', '?':
|
|
|
|
|
return p.dotname()
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
p.next()
|
|
|
|
|
t := p.ntype()
|
|
|
|
|
p.want(')')
|
|
|
|
|
return t
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
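// chan_elem parses the element type of a channel type; if the type is
// missing it reports an error and returns nil.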
func (p *parser) chan_elem() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("chan_elem")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-20 17:31:33 -08:00
|
|
|
if typ := p.try_ntype(); typ != nil {
|
|
|
|
|
return typ
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-20 17:31:33 -08:00
|
|
|
|
|
|
|
|
p.syntax_error("missing channel element type")
|
|
|
|
|
// assume element type is simply absent - don't advance
|
|
|
|
|
return nil
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-10-22 18:56:45 -07:00
|
|
|
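// new_dotname parses the selector following an already-consumed '.' and
// combines it with obj: a package-qualified name is resolved immediately,
// anything else becomes an OXDOT node.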
func (p *parser) new_dotname(obj *Node) *Node {
|
2015-11-13 16:00:46 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("new_dotname")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
sel := p.sym()
|
2015-10-22 18:56:45 -07:00
|
|
|
if obj.Op == OPACK {
|
|
|
|
|
s := restrictlookup(sel.Name, obj.Name.Pkg)
|
|
|
|
|
obj.Used = true
|
2015-11-13 16:00:46 -08:00
|
|
|
return oldname(s)
|
|
|
|
|
}
|
2015-10-22 18:56:45 -07:00
|
|
|
return Nod(OXDOT, obj, newname(sel))
|
2015-11-13 16:00:46 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
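// dotname parses a name, optionally followed by a '.' selector.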
func (p *parser) dotname() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("dotname")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-13 15:35:26 -08:00
|
|
|
name := p.name()
|
2015-11-13 16:00:46 -08:00
|
|
|
if p.got('.') {
|
|
|
|
|
return p.new_dotname(name)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-13 16:00:46 -08:00
|
|
|
return name
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// StructType = "struct" "{" { FieldDecl ";" } "}" .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) structtype() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("structtype")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LSTRUCT)
|
|
|
|
|
p.want('{')
|
2016-03-09 20:29:21 -08:00
|
|
|
var l []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.tok != EOF && p.tok != '}' {
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.structdcl()...)
|
2015-11-19 16:11:09 -08:00
|
|
|
if !p.osemi('}') {
|
|
|
|
|
break
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
p.want('}')
|
|
|
|
|
|
|
|
|
|
t := Nod(OTSTRUCT, nil, nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
t.List.Set(l)
|
2015-11-04 09:21:49 -08:00
|
|
|
return t
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// InterfaceType = "interface" "{" { MethodSpec ";" } "}" .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) interfacetype() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("interfacetype")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LINTERFACE)
|
|
|
|
|
p.want('{')
|
2016-03-04 15:26:38 -08:00
|
|
|
var l []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.tok != EOF && p.tok != '}' {
|
2016-03-04 15:26:38 -08:00
|
|
|
l = append(l, p.interfacedcl())
|
2015-11-19 16:11:09 -08:00
|
|
|
if !p.osemi('}') {
|
|
|
|
|
break
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
p.want('}')
|
|
|
|
|
|
|
|
|
|
t := Nod(OTINTER, nil, nil)
|
2016-03-08 15:10:26 -08:00
|
|
|
t.List.Set(l)
|
2015-11-04 09:21:49 -08:00
|
|
|
return t
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Function stuff.
|
|
|
|
|
// All in one place to show how crappy it all is.
|
2015-11-24 14:49:10 -08:00
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) xfndcl() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("xfndcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LFUNC)
|
2016-02-26 13:32:28 -08:00
|
|
|
f := p.fndcl(p.pragma&Nointerface != 0)
|
2015-11-04 09:21:49 -08:00
|
|
|
body := p.fnbody()
|
|
|
|
|
|
|
|
|
|
if f == nil {
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
f.Nbody.Set(body)
|
2016-02-26 13:32:28 -08:00
|
|
|
f.Noescape = p.pragma&Noescape != 0
|
2016-03-09 20:29:21 -08:00
|
|
|
if f.Noescape && len(body) != 0 {
|
2016-02-26 13:32:28 -08:00
|
|
|
Yyerror("can only use //go:noescape with external func implementations")
|
|
|
|
|
}
|
|
|
|
|
f.Func.Pragma = p.pragma
|
2015-11-04 09:21:49 -08:00
|
|
|
f.Func.Endlineno = lineno
|
2016-02-26 13:32:28 -08:00
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
funcbody(f)
|
|
|
|
|
|
|
|
|
|
return f
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// FunctionDecl = "func" FunctionName ( Function | Signature ) .
|
|
|
|
|
// FunctionName = identifier .
|
|
|
|
|
// Function = Signature FunctionBody .
|
|
|
|
|
// MethodDecl = "func" Receiver MethodName ( Function | Signature ) .
|
|
|
|
|
// Receiver = Parameters .
|
2016-02-26 13:32:28 -08:00
|
|
|
func (p *parser) fndcl(nointerface bool) *Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("fndcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME, '@', '?':
|
|
|
|
|
// sym '(' oarg_type_list_ocomma ')' fnres
|
|
|
|
|
name := p.sym()
|
|
|
|
|
params := p.param_list()
|
|
|
|
|
result := p.fnres()
|
|
|
|
|
|
|
|
|
|
params = checkarglist(params, 1)
|
|
|
|
|
|
|
|
|
|
if name.Name == "init" {
|
|
|
|
|
name = renameinit()
|
2016-03-09 20:29:21 -08:00
|
|
|
if len(params) != 0 || len(result) != 0 {
|
2015-11-04 09:21:49 -08:00
|
|
|
Yyerror("func init must have no arguments and no return values")
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if localpkg.Name == "main" && name.Name == "main" {
|
2016-03-09 20:29:21 -08:00
|
|
|
if len(params) != 0 || len(result) != 0 {
|
2015-11-04 09:21:49 -08:00
|
|
|
Yyerror("func main must have no arguments and no return values")
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
t := Nod(OTFUNC, nil, nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
t.List.Set(params)
|
|
|
|
|
t.Rlist.Set(result)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
f := Nod(ODCLFUNC, nil, nil)
|
|
|
|
|
f.Func.Nname = newfuncname(name)
|
|
|
|
|
f.Func.Nname.Name.Defn = f
|
|
|
|
|
f.Func.Nname.Name.Param.Ntype = t // TODO: check if nname already has an ntype
|
|
|
|
|
declare(f.Func.Nname, PFUNC)
|
|
|
|
|
|
|
|
|
|
funchdr(f)
|
|
|
|
|
return f
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
// '(' oarg_type_list_ocomma ')' sym '(' oarg_type_list_ocomma ')' fnres
|
|
|
|
|
rparam := p.param_list()
|
|
|
|
|
name := p.sym()
|
|
|
|
|
params := p.param_list()
|
|
|
|
|
result := p.fnres()
|
|
|
|
|
|
|
|
|
|
rparam = checkarglist(rparam, 0)
|
|
|
|
|
params = checkarglist(params, 1)
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
if len(rparam) == 0 {
|
2015-11-04 09:21:49 -08:00
|
|
|
Yyerror("method has no receiver")
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
if len(rparam) > 1 {
|
2015-11-04 09:21:49 -08:00
|
|
|
Yyerror("method has multiple receivers")
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
rcvr := rparam[0]
|
2015-11-04 09:21:49 -08:00
|
|
|
if rcvr.Op != ODCLFIELD {
|
|
|
|
|
Yyerror("bad receiver in method")
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
t := Nod(OTFUNC, rcvr, nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
t.List.Set(params)
|
|
|
|
|
t.Rlist.Set(result)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
f := Nod(ODCLFUNC, nil, nil)
|
|
|
|
|
f.Func.Shortname = newfuncname(name)
|
|
|
|
|
f.Func.Nname = methodname1(f.Func.Shortname, rcvr.Right)
|
|
|
|
|
f.Func.Nname.Name.Defn = f
|
|
|
|
|
f.Func.Nname.Name.Param.Ntype = t
|
|
|
|
|
f.Func.Nname.Nointerface = nointerface
|
|
|
|
|
declare(f.Func.Nname, PFUNC)
|
|
|
|
|
|
|
|
|
|
funchdr(f)
|
|
|
|
|
return f
|
|
|
|
|
|
|
|
|
|
default:
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("expecting name or (")
|
|
|
|
|
p.advance('{', ';')
|
2015-11-04 09:21:49 -08:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_fndcl() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_fndcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
default:
|
|
|
|
|
// hidden_pkg_importsym '(' ohidden_funarg_list ')' ohidden_funres
|
|
|
|
|
s1 := p.hidden_pkg_importsym()
|
|
|
|
|
p.want('(')
|
|
|
|
|
s3 := p.ohidden_funarg_list()
|
|
|
|
|
p.want(')')
|
|
|
|
|
s5 := p.ohidden_funres()
|
|
|
|
|
|
|
|
|
|
s := s1
|
2016-03-09 20:29:21 -08:00
|
|
|
t := functype(nil, s3, s5)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
importsym(s, ONAME)
|
|
|
|
|
if s.Def != nil && s.Def.Op == ONAME {
|
|
|
|
|
if Eqtype(t, s.Def.Type) {
|
|
|
|
|
dclcontext = PDISCARD // since we skip funchdr below
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
Yyerror("inconsistent definition for func %v during import\n\t%v\n\t%v", s, s.Def.Type, t)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ss := newfuncname(s)
|
|
|
|
|
ss.Type = t
|
|
|
|
|
declare(ss, PFUNC)
|
|
|
|
|
|
|
|
|
|
funchdr(ss)
|
|
|
|
|
return ss
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
// '(' hidden_funarg_list ')' sym '(' ohidden_funarg_list ')' ohidden_funres
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_funarg_list()
|
|
|
|
|
p.want(')')
|
|
|
|
|
s4 := p.sym()
|
|
|
|
|
p.want('(')
|
|
|
|
|
s6 := p.ohidden_funarg_list()
|
|
|
|
|
p.want(')')
|
|
|
|
|
s8 := p.ohidden_funres()
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
ss := methodname1(newname(s4), s2[0].Right)
|
|
|
|
|
ss.Type = functype(s2[0], s6, s8)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
checkwidth(ss.Type)
|
2016-03-11 17:12:31 -08:00
|
|
|
addmethod(s4, ss.Type, p.structpkg, false, false)
|
2015-11-04 09:21:49 -08:00
|
|
|
funchdr(ss)
|
|
|
|
|
|
|
|
|
|
// inl.C's inlnode, when invoked on a dotmeth node, expects to find the
// inlineable body as (dotmeth's type).Nname.Inl, and dotmeth's type has
// been pulled out by typecheck's lookdot as this $$.ttype. So by providing
// this back link here we avoid special casing there.
|
|
|
|
|
ss.Type.Nname = ss
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// FunctionBody = Block .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) fnbody() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("fnbody")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if p.got('{') {
|
2015-11-19 16:11:09 -08:00
|
|
|
p.fnest++
|
2015-11-04 09:21:49 -08:00
|
|
|
body := p.stmt_list()
|
2015-11-19 16:11:09 -08:00
|
|
|
p.fnest--
|
2015-11-04 09:21:49 -08:00
|
|
|
p.want('}')
|
|
|
|
|
if body == nil {
|
2016-03-09 20:29:21 -08:00
|
|
|
body = []*Node{Nod(OEMPTY, nil, nil)}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
return body
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Result = Parameters | Type .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) fnres() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("fnres")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-20 17:31:33 -08:00
|
|
|
if p.tok == '(' {
|
2015-11-04 09:21:49 -08:00
|
|
|
result := p.param_list()
|
|
|
|
|
return checkarglist(result, 0)
|
|
|
|
|
}
|
2015-11-20 17:31:33 -08:00
|
|
|
|
|
|
|
|
if result := p.try_ntype(); result != nil {
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{Nod(ODCLFIELD, nil, result)}
|
2015-11-20 17:31:33 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return nil
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Declaration = ConstDecl | TypeDecl | VarDecl .
|
|
|
|
|
// TopLevelDecl = Declaration | FunctionDecl | MethodDecl .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) xdcl_list() (l []*Node) {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("xdcl_list")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for p.tok != EOF {
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LVAR, LCONST, LTYPE:
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.common_dcl()...)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LFUNC:
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.xfndcl())
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
default:
|
2016-03-09 20:29:21 -08:00
|
|
|
if p.tok == '{' && len(l) != 0 && l[len(l)-1].Op == ODCLFUNC && l[len(l)-1].Nbody.Len() == 0 {
|
2015-11-04 09:21:49 -08:00
|
|
|
// opening { of function declaration on next line
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("unexpected semicolon or newline before {")
|
2015-11-04 09:21:49 -08:00
|
|
|
} else {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("non-declaration statement outside function body")
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-13 14:04:40 -08:00
|
|
|
p.advance(LVAR, LCONST, LTYPE, LFUNC)
|
2016-03-10 12:29:31 -08:00
|
|
|
continue
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if nsyntaxerrors == 0 {
|
|
|
|
|
testdclstack()
|
|
|
|
|
}
|
|
|
|
|
|
2016-02-26 13:32:28 -08:00
|
|
|
// Reset p.pragma BEFORE advancing to the next token (consuming ';')
|
|
|
|
|
// since comments before may set pragmas for the next function decl.
|
|
|
|
|
p.pragma = 0
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.tok != EOF && !p.got(';') {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("after top level declaration")
|
|
|
|
|
p.advance(LVAR, LCONST, LTYPE, LFUNC)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// FieldDecl = (IdentifierList Type | AnonymousField) [ Tag ] .
|
|
|
|
|
// AnonymousField = [ "*" ] TypeName .
|
|
|
|
|
// Tag = string_lit .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) structdcl() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("structdcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var sym *Sym
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME:
|
|
|
|
|
sym = p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
if sym == nil {
|
|
|
|
|
panic("unreachable") // we must have a sym for LNAME
|
|
|
|
|
}
|
|
|
|
|
if p.tok == '.' || p.tok == LLITERAL || p.tok == ';' || p.tok == '}' {
|
|
|
|
|
// embed oliteral
|
|
|
|
|
field := p.embed(sym)
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
|
|
|
|
field.SetVal(tag)
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{field}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// LNAME belongs to first *Sym of new_name_list
|
|
|
|
|
//
|
|
|
|
|
// during imports, unqualified non-exported identifiers are from builtinpkg
|
|
|
|
|
if importpkg != nil && !exportname(sym.Name) {
|
|
|
|
|
sym = Pkglookup(sym.Name, builtinpkg)
|
|
|
|
|
if sym == nil {
|
|
|
|
|
p.import_error()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
fallthrough
|
|
|
|
|
|
|
|
|
|
case '@', '?':
|
|
|
|
|
// new_name_list ntype oliteral
|
|
|
|
|
fields := p.new_name_list(sym)
|
|
|
|
|
typ := p.ntype()
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
2016-03-10 15:07:08 -08:00
|
|
|
if len(fields) == 0 || fields[0].Sym.Name == "?" {
|
|
|
|
|
// ? symbol, during import
|
2015-11-04 09:21:49 -08:00
|
|
|
n := typ
|
|
|
|
|
if n.Op == OIND {
|
|
|
|
|
n = n.Left
|
|
|
|
|
}
|
|
|
|
|
n = embedded(n.Sym, importpkg)
|
|
|
|
|
n.Right = typ
|
|
|
|
|
n.SetVal(tag)
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{n}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
for i, n := range fields {
|
|
|
|
|
fields[i] = Nod(ODCLFIELD, n, typ)
|
|
|
|
|
fields[i].SetVal(tag)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
return fields
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
p.next()
|
|
|
|
|
if p.got('*') {
|
|
|
|
|
// '(' '*' embed ')' oliteral
|
|
|
|
|
field := p.embed(nil)
|
|
|
|
|
p.want(')')
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
|
|
|
|
field.Right = Nod(OIND, field.Right, nil)
|
|
|
|
|
field.SetVal(tag)
|
|
|
|
|
Yyerror("cannot parenthesize embedded type")
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{field}
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
} else {
|
|
|
|
|
// '(' embed ')' oliteral
|
|
|
|
|
field := p.embed(nil)
|
|
|
|
|
p.want(')')
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
|
|
|
|
field.SetVal(tag)
|
|
|
|
|
Yyerror("cannot parenthesize embedded type")
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{field}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case '*':
|
|
|
|
|
p.next()
|
|
|
|
|
if p.got('(') {
|
|
|
|
|
// '*' '(' embed ')' oliteral
|
|
|
|
|
field := p.embed(nil)
|
|
|
|
|
p.want(')')
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
|
|
|
|
field.Right = Nod(OIND, field.Right, nil)
|
|
|
|
|
field.SetVal(tag)
|
|
|
|
|
Yyerror("cannot parenthesize embedded type")
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{field}
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
} else {
|
|
|
|
|
// '*' embed oliteral
|
|
|
|
|
field := p.embed(nil)
|
|
|
|
|
tag := p.oliteral()
|
|
|
|
|
|
|
|
|
|
field.Right = Nod(OIND, field.Right, nil)
|
|
|
|
|
field.SetVal(tag)
|
2016-03-09 20:29:21 -08:00
|
|
|
return []*Node{field}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
default:
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("expecting field name or embedded type")
|
|
|
|
|
p.advance(';', '}')
|
2015-11-04 09:21:49 -08:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
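// oliteral parses an optional literal; it returns the zero Val if the
// next token is not a literal.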
func (p *parser) oliteral() (v Val) {
|
|
|
|
|
if p.tok == LLITERAL {
|
|
|
|
|
v = p.val
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
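// packname parses a possibly package-qualified name and returns its *Sym.
// If name is non-nil, the leading identifier has already been consumed by
// the caller.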
func (p *parser) packname(name *Sym) *Sym {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("embed")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if name != nil {
|
|
|
|
|
// LNAME was already consumed and is coming in as name
|
|
|
|
|
} else if p.tok == LNAME {
|
|
|
|
|
name = p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("expecting name")
|
|
|
|
|
p.advance('.', ';', '}')
|
2015-11-04 09:21:49 -08:00
|
|
|
name = new(Sym)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if p.got('.') {
|
|
|
|
|
// LNAME '.' sym
|
|
|
|
|
s := p.sym()
|
|
|
|
|
|
|
|
|
|
var pkg *Pkg
|
|
|
|
|
if name.Def == nil || name.Def.Op != OPACK {
|
|
|
|
|
Yyerror("%v is not a package", name)
|
|
|
|
|
pkg = localpkg
|
|
|
|
|
} else {
|
|
|
|
|
name.Def.Used = true
|
|
|
|
|
pkg = name.Def.Name.Pkg
|
|
|
|
|
}
|
|
|
|
|
return restrictlookup(s.Name, pkg)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// LNAME
|
|
|
|
|
if n := oldname(name); n.Name != nil && n.Name.Pack != nil {
|
|
|
|
|
n.Name.Pack.Used = true
|
|
|
|
|
}
|
|
|
|
|
return name
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) embed(sym *Sym) *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("embed")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pkgname := p.packname(sym)
|
|
|
|
|
return embedded(pkgname, localpkg)
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// MethodSpec = MethodName Signature | InterfaceTypeName .
|
|
|
|
|
// MethodName = identifier .
|
|
|
|
|
// InterfaceTypeName = TypeName .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) interfacedcl() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("interfacedcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME:
|
|
|
|
|
sym := p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
|
|
|
|
|
// accept potential name list but complain
|
|
|
|
|
hasNameList := false
|
|
|
|
|
for p.got(',') {
|
|
|
|
|
p.sym()
|
|
|
|
|
hasNameList = true
|
|
|
|
|
}
|
|
|
|
|
if hasNameList {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("name list not allowed in interface type")
|
|
|
|
|
// already progressed, no need to advance
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if p.tok != '(' {
|
|
|
|
|
// packname
|
|
|
|
|
pname := p.packname(sym)
|
|
|
|
|
return Nod(ODCLFIELD, nil, oldname(pname))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// newname indcl
|
|
|
|
|
mname := newname(sym)
|
|
|
|
|
sig := p.indcl()
|
|
|
|
|
|
|
|
|
|
meth := Nod(ODCLFIELD, mname, sig)
|
|
|
|
|
ifacedcl(meth)
|
2016-01-30 14:29:02 -08:00
|
|
|
return meth
|
|
|
|
|
|
|
|
|
|
case '@', '?':
|
|
|
|
|
// newname indcl
|
|
|
|
|
// We arrive here when parsing an interface type declared inside
|
|
|
|
|
// an exported and inlineable function and the interface declares
|
|
|
|
|
// unexported methods (which are then package-qualified).
|
|
|
|
|
//
|
|
|
|
|
// Since the compiler always flattens embedded interfaces, we
|
|
|
|
|
// will never see an embedded package-qualified interface in export
|
|
|
|
|
// data; i.e., when we reach here we know it must be a method.
|
|
|
|
|
//
|
|
|
|
|
// See also issue 14164.
|
|
|
|
|
mname := newname(p.sym())
|
|
|
|
|
sig := p.indcl()
|
|
|
|
|
|
|
|
|
|
meth := Nod(ODCLFIELD, mname, sig)
|
|
|
|
|
ifacedcl(meth)
|
2015-11-04 09:21:49 -08:00
|
|
|
return meth
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
p.next()
|
|
|
|
|
pname := p.packname(nil)
|
|
|
|
|
p.want(')')
|
|
|
|
|
n := Nod(ODCLFIELD, nil, oldname(pname))
|
|
|
|
|
Yyerror("cannot parenthesize embedded type")
|
|
|
|
|
return n
|
|
|
|
|
|
|
|
|
|
default:
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("")
|
|
|
|
|
p.advance(';', '}')
|
2015-11-04 09:21:49 -08:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// MethodSpec = MethodName Signature .
|
|
|
|
|
// MethodName = identifier .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) indcl() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("indcl")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
params := p.param_list()
|
|
|
|
|
result := p.fnres()
|
|
|
|
|
|
|
|
|
|
// without func keyword
|
|
|
|
|
params = checkarglist(params, 1)
|
|
|
|
|
t := Nod(OTFUNC, fakethis(), nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
t.List.Set(params)
|
|
|
|
|
t.Rlist.Set(result)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
return t
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// ParameterDecl = [ IdentifierList ] [ "..." ] Type .
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) arg_type() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("arg_type")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME, '@', '?':
|
2015-11-13 15:35:26 -08:00
|
|
|
name := p.sym()
|
2015-11-04 09:21:49 -08:00
|
|
|
switch p.tok {
|
|
|
|
|
case LCOMM, LFUNC, '[', LCHAN, LMAP, LSTRUCT, LINTERFACE, '*', LNAME, '@', '?', '(':
|
|
|
|
|
// sym name_or_type
|
2015-11-13 15:35:26 -08:00
|
|
|
typ := p.ntype()
|
|
|
|
|
nn := Nod(ONONAME, nil, nil)
|
|
|
|
|
nn.Sym = name
|
|
|
|
|
return Nod(OKEY, nn, typ)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LDDD:
|
|
|
|
|
// sym dotdotdot
|
2015-11-13 15:35:26 -08:00
|
|
|
typ := p.dotdotdot()
|
|
|
|
|
nn := Nod(ONONAME, nil, nil)
|
|
|
|
|
nn.Sym = name
|
|
|
|
|
return Nod(OKEY, nn, typ)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
// name_or_type
|
2015-11-13 15:35:26 -08:00
|
|
|
name := mkname(name)
|
2015-11-04 09:21:49 -08:00
|
|
|
// from dotname
|
|
|
|
|
if p.got('.') {
|
2015-11-13 16:00:46 -08:00
|
|
|
return p.new_dotname(name)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-13 15:35:26 -08:00
|
|
|
return name
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case LDDD:
|
|
|
|
|
// dotdotdot
|
|
|
|
|
return p.dotdotdot()
|
|
|
|
|
|
|
|
|
|
case LCOMM, LFUNC, '[', LCHAN, LMAP, LSTRUCT, LINTERFACE, '*', '(':
|
|
|
|
|
// name_or_type
|
|
|
|
|
return p.ntype()
|
|
|
|
|
|
|
|
|
|
default:
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("expecting )")
|
|
|
|
|
p.advance(',', ')')
|
2015-11-04 09:21:49 -08:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Parameters = "(" [ ParameterList [ "," ] ] ")" .
|
|
|
|
|
// ParameterList = ParameterDecl { "," ParameterDecl } .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) param_list() (l []*Node) {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("param_list")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want('(')
|
2015-11-19 16:11:09 -08:00
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.tok != EOF && p.tok != ')' {
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.arg_type())
|
2015-11-19 16:11:09 -08:00
|
|
|
if !p.ocomma(')') {
|
|
|
|
|
break
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-19 16:11:09 -08:00
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
p.want(')')
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var missing_stmt = Nod(OXXX, nil, nil)
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Statement =
|
|
|
|
|
// Declaration | LabeledStmt | SimpleStmt |
|
|
|
|
|
// GoStmt | ReturnStmt | BreakStmt | ContinueStmt | GotoStmt |
|
|
|
|
|
// FallthroughStmt | Block | IfStmt | SwitchStmt | SelectStmt | ForStmt |
|
|
|
|
|
// DeferStmt .
|
|
|
|
|
//
|
|
|
|
|
// stmt may return missing_stmt.
|
2015-11-04 09:21:49 -08:00
|
|
|
func (p *parser) stmt() *Node {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("stmt")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case '{':
|
2016-03-10 14:18:37 -08:00
|
|
|
return p.compound_stmt()
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LVAR, LCONST, LTYPE:
|
2016-03-09 20:29:21 -08:00
|
|
|
return liststmt(p.common_dcl())
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LNAME, '@', '?', LLITERAL, LFUNC, '(', // operands
|
|
|
|
|
'[', LSTRUCT, LMAP, LCHAN, LINTERFACE, // composite types
|
|
|
|
|
'+', '-', '*', '&', '^', '~', LCOMM, '!': // unary operators
|
|
|
|
|
return p.simple_stmt(true, false)
|
|
|
|
|
|
|
|
|
|
case LFOR:
|
|
|
|
|
return p.for_stmt()
|
|
|
|
|
|
|
|
|
|
case LSWITCH:
|
|
|
|
|
return p.switch_stmt()
|
|
|
|
|
|
|
|
|
|
case LSELECT:
|
|
|
|
|
return p.select_stmt()
|
|
|
|
|
|
|
|
|
|
case LIF:
|
|
|
|
|
return p.if_stmt()
|
|
|
|
|
|
|
|
|
|
case LFALL:
|
|
|
|
|
p.next()
|
|
|
|
|
// will be converted to OFALL
|
2015-11-13 14:31:31 -08:00
|
|
|
stmt := Nod(OXFALL, nil, nil)
|
|
|
|
|
stmt.Xoffset = int64(block)
|
|
|
|
|
return stmt
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LBREAK:
|
|
|
|
|
p.next()
|
2015-11-13 14:31:31 -08:00
|
|
|
return Nod(OBREAK, p.onew_name(), nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LCONTINUE:
|
|
|
|
|
p.next()
|
2015-11-13 14:31:31 -08:00
|
|
|
return Nod(OCONTINUE, p.onew_name(), nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LGO:
|
|
|
|
|
p.next()
|
2015-11-13 14:31:31 -08:00
|
|
|
return Nod(OPROC, p.pseudocall(), nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LDEFER:
|
|
|
|
|
p.next()
|
2015-11-13 14:31:31 -08:00
|
|
|
return Nod(ODEFER, p.pseudocall(), nil)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LGOTO:
|
|
|
|
|
p.next()
|
2015-11-13 14:31:31 -08:00
|
|
|
stmt := Nod(OGOTO, p.new_name(p.sym()), nil)
|
|
|
|
|
stmt.Sym = dclstack // context, for goto restrictions
|
|
|
|
|
return stmt
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LRETURN:
|
|
|
|
|
p.next()
|
2016-03-09 20:29:21 -08:00
|
|
|
var results []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.tok != ';' && p.tok != '}' {
|
2015-11-13 14:31:31 -08:00
|
|
|
results = p.expr_list()
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-13 14:31:31 -08:00
|
|
|
stmt := Nod(ORETURN, nil, nil)
|
2016-03-09 20:29:21 -08:00
|
|
|
stmt.List.Set(results)
|
2016-03-08 15:10:26 -08:00
|
|
|
if stmt.List.Len() == 0 && Curfn != nil {
|
2016-02-25 10:35:19 -08:00
|
|
|
for _, ln := range Curfn.Func.Dcl {
|
|
|
|
|
if ln.Class == PPARAM {
|
2015-11-04 09:21:49 -08:00
|
|
|
continue
|
|
|
|
|
}
|
2016-02-25 10:35:19 -08:00
|
|
|
if ln.Class != PPARAMOUT {
|
2015-11-04 09:21:49 -08:00
|
|
|
break
|
|
|
|
|
}
|
2016-02-25 10:35:19 -08:00
|
|
|
if ln.Sym.Def != ln {
|
|
|
|
|
Yyerror("%s is shadowed during return", ln.Sym.Name)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-13 14:31:31 -08:00
|
|
|
return stmt
|
|
|
|
|
|
|
|
|
|
case ';':
|
|
|
|
|
return nil
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
default:
|
2015-11-13 14:31:31 -08:00
|
|
|
return missing_stmt
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// StatementList = { Statement ";" } .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) stmt_list() (l []*Node) {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("stmt_list")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for p.tok != EOF && p.tok != '}' && p.tok != LCASE && p.tok != LDEFAULT {
|
|
|
|
|
s := p.stmt()
|
|
|
|
|
if s == missing_stmt {
|
|
|
|
|
break
|
|
|
|
|
}
|
2016-03-09 20:29:21 -08:00
|
|
|
if s == nil {
|
|
|
|
|
} else if s.Op == OBLOCK && s.Ninit.Len() == 0 {
|
|
|
|
|
l = append(l, s.List.Slice()...)
|
2016-03-05 16:25:58 -08:00
|
|
|
} else {
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, s)
|
2016-03-05 16:25:58 -08:00
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
// customized version of osemi:
|
|
|
|
|
// ';' is optional before a closing ')' or '}'
|
|
|
|
|
if p.tok == ')' || p.tok == '}' {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
if !p.got(';') {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("at end of statement")
|
|
|
|
|
p.advance(';', '}')
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// IdentifierList = identifier { "," identifier } .
|
|
|
|
|
//
|
|
|
|
|
// If first != nil we have the first symbol already.
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) new_name_list(first *Sym) []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("new_name_list")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if first == nil {
|
|
|
|
|
first = p.sym() // may still be nil
|
|
|
|
|
}
|
2016-03-09 20:29:21 -08:00
|
|
|
var l []*Node
|
|
|
|
|
n := p.new_name(first)
|
|
|
|
|
if n != nil {
|
|
|
|
|
l = append(l, n)
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.got(',') {
|
2016-03-09 20:29:21 -08:00
|
|
|
n = p.new_name(p.sym())
|
|
|
|
|
if n != nil {
|
|
|
|
|
l = append(l, n)
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
return l
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// IdentifierList = identifier { "," identifier } .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) dcl_name_list() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("dcl_name_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
s := []*Node{p.dcl_name()}
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.got(',') {
|
2016-03-09 20:29:21 -08:00
|
|
|
s = append(s, p.dcl_name())
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2016-03-09 20:29:21 -08:00
|
|
|
return s
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// ExpressionList = Expression { "," Expression } .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) expr_list() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("expr_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
l := []*Node{p.expr()}
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.got(',') {
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.expr())
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
return l
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// Arguments = "(" [ ( ExpressionList | Type [ "," ExpressionList ] ) [ "..." ] [ "," ] ] ")" .
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) arg_list() (l []*Node, ddd bool) {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("arg_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-19 16:11:09 -08:00
|
|
|
p.want('(')
|
|
|
|
|
p.xnest++
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
for p.tok != EOF && p.tok != ')' && !ddd {
|
2016-03-09 20:29:21 -08:00
|
|
|
l = append(l, p.expr()) // expr_or_type
|
2015-11-04 09:21:49 -08:00
|
|
|
ddd = p.got(LDDD)
|
2015-11-19 16:11:09 -08:00
|
|
|
if !p.ocomma(')') {
|
|
|
|
|
break
|
|
|
|
|
}
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-19 16:11:09 -08:00
|
|
|
p.xnest--
|
|
|
|
|
p.want(')')
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// osemi parses an optional semicolon.
|
2015-11-19 16:11:09 -08:00
|
|
|
func (p *parser) osemi(follow int32) bool {
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case ';':
|
|
|
|
|
p.next()
|
|
|
|
|
return true
|
|
|
|
|
|
|
|
|
|
case ')', '}':
|
|
|
|
|
// semicolon is optional before ) or }
|
|
|
|
|
return true
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-19 16:11:09 -08:00
|
|
|
|
|
|
|
|
p.syntax_error("expecting semicolon, newline, or " + tokstring(follow))
|
|
|
|
|
p.advance(follow)
|
|
|
|
|
return false
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// ocomma parses an optional comma.
|
2015-11-19 16:11:09 -08:00
|
|
|
func (p *parser) ocomma(follow int32) bool {
|
2015-11-04 09:21:49 -08:00
|
|
|
switch p.tok {
|
2015-11-19 16:11:09 -08:00
|
|
|
case ',':
|
|
|
|
|
p.next()
|
|
|
|
|
return true
|
|
|
|
|
|
2015-11-04 09:21:49 -08:00
|
|
|
case ')', '}':
|
2015-11-19 16:11:09 -08:00
|
|
|
// comma is optional before ) or }
|
|
|
|
|
return true
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
2015-11-19 16:11:09 -08:00
|
|
|
|
|
|
|
|
p.syntax_error("expecting comma or " + tokstring(follow))
|
|
|
|
|
p.advance(follow)
|
|
|
|
|
return false
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
|
// Importing packages
|
|
|
|
|
|
|
|
|
|
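// import_error reports a syntax error in the export data of an imported
// package and skips the offending token.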
func (p *parser) import_error() {
|
2015-11-13 14:04:40 -08:00
|
|
|
p.syntax_error("in export data of imported package")
|
|
|
|
|
p.next()
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
2015-11-24 14:49:10 -08:00
|
|
|
// The methods below reflect a 1:1 translation of the original (and now defunct)
|
|
|
|
|
// go.y yacc productions. They could be simplified significantly and also use better
|
2015-11-04 09:21:49 -08:00
|
|
|
// variable names. However, we will be able to delete them once we enable the
|
2015-11-24 14:49:10 -08:00
|
|
|
// new export format by default, so it's not worth the effort (issue 13241).
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
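// hidden_importsym parses a package-qualified symbol from export data,
// written as '@' "pkgpath" '.' name .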
func (p *parser) hidden_importsym() *Sym {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_importsym")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want('@')
|
|
|
|
|
var s2 Val
|
|
|
|
|
if p.tok == LLITERAL {
|
|
|
|
|
s2 = p.val
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
|
|
|
|
p.import_error()
|
|
|
|
|
}
|
|
|
|
|
p.want('.')
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LNAME:
|
|
|
|
|
s4 := p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
|
|
|
|
|
var p *Pkg
|
|
|
|
|
|
|
|
|
|
if s2.U.(string) == "" {
|
|
|
|
|
p = importpkg
|
|
|
|
|
} else {
|
|
|
|
|
if isbadimport(s2.U.(string)) {
|
|
|
|
|
errorexit()
|
|
|
|
|
}
|
|
|
|
|
p = mkpkg(s2.U.(string))
|
|
|
|
|
}
|
|
|
|
|
return Pkglookup(s4.Name, p)
|
|
|
|
|
|
|
|
|
|
case '?':
|
|
|
|
|
p.next()
|
|
|
|
|
|
|
|
|
|
var p *Pkg
|
|
|
|
|
|
|
|
|
|
if s2.U.(string) == "" {
|
|
|
|
|
p = importpkg
|
|
|
|
|
} else {
|
|
|
|
|
if isbadimport(s2.U.(string)) {
|
|
|
|
|
errorexit()
|
|
|
|
|
}
|
|
|
|
|
p = mkpkg(s2.U.(string))
|
|
|
|
|
}
|
|
|
|
|
return Pkglookup("?", p)
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
p.import_error()
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) ohidden_funarg_list() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("ohidden_funarg_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
var ss []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.tok != ')' {
|
|
|
|
|
ss = p.hidden_funarg_list()
|
|
|
|
|
}
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) ohidden_structdcl_list() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("ohidden_structdcl_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
var ss []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.tok != '}' {
|
|
|
|
|
ss = p.hidden_structdcl_list()
|
|
|
|
|
}
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
func (p *parser) ohidden_interfacedcl_list() []*Node {
|
2015-11-04 09:21:49 -08:00
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("ohidden_interfacedcl_list")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
var ss []*Node
|
2015-11-04 09:21:49 -08:00
|
|
|
if p.tok != '}' {
|
|
|
|
|
ss = p.hidden_interfacedcl_list()
|
|
|
|
|
}
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// import syntax from package header
|
|
|
|
|
func (p *parser) hidden_import() {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_import")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case LIMPORT:
|
|
|
|
|
// LIMPORT LNAME LLITERAL ';'
|
|
|
|
|
p.next()
|
|
|
|
|
var s2 *Sym
|
|
|
|
|
if p.tok == LNAME {
|
|
|
|
|
s2 = p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
|
|
|
|
p.import_error()
|
|
|
|
|
}
|
|
|
|
|
var s3 Val
|
|
|
|
|
if p.tok == LLITERAL {
|
|
|
|
|
s3 = p.val
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
|
|
|
|
p.import_error()
|
|
|
|
|
}
|
|
|
|
|
p.want(';')
|
|
|
|
|
|
|
|
|
|
importimport(s2, s3.U.(string))
|
|
|
|
|
|
|
|
|
|
case LVAR:
|
|
|
|
|
// LVAR hidden_pkg_importsym hidden_type ';'
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_pkg_importsym()
|
|
|
|
|
s3 := p.hidden_type()
|
|
|
|
|
p.want(';')
|
|
|
|
|
|
|
|
|
|
importvar(s2, s3)
|
|
|
|
|
|
|
|
|
|
case LCONST:
|
|
|
|
|
// LCONST hidden_pkg_importsym '=' hidden_constant ';'
|
|
|
|
|
// LCONST hidden_pkg_importsym hidden_type '=' hidden_constant ';'
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_pkg_importsym()
|
|
|
|
|
var s3 *Type = Types[TIDEAL]
|
|
|
|
|
if p.tok != '=' {
|
|
|
|
|
s3 = p.hidden_type()
|
|
|
|
|
}
|
|
|
|
|
p.want('=')
|
|
|
|
|
s4 := p.hidden_constant()
|
|
|
|
|
p.want(';')
|
|
|
|
|
|
|
|
|
|
importconst(s2, s3, s4)
|
|
|
|
|
|
|
|
|
|
case LTYPE:
|
|
|
|
|
// LTYPE hidden_pkgtype hidden_type ';'
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_pkgtype()
|
|
|
|
|
s3 := p.hidden_type()
|
|
|
|
|
p.want(';')
|
|
|
|
|
|
|
|
|
|
importtype(s2, s3)
|
|
|
|
|
|
|
|
|
|
case LFUNC:
|
|
|
|
|
// LFUNC hidden_fndcl fnbody ';'
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_fndcl()
|
|
|
|
|
s3 := p.fnbody()
|
|
|
|
|
p.want(';')
|
|
|
|
|
|
|
|
|
|
if s2 == nil {
|
|
|
|
|
dclcontext = PEXTERN // since we skip the funcbody below
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
s2.Func.Inl.Set(s3)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
funcbody(s2)
|
|
|
|
|
importlist = append(importlist, s2)
|
|
|
|
|
|
|
|
|
|
if Debug['E'] > 0 {
|
|
|
|
|
fmt.Printf("import [%q] func %v \n", importpkg.Path, s2)
|
2016-02-27 14:31:33 -08:00
|
|
|
if Debug['m'] > 2 && len(s2.Func.Inl.Slice()) != 0 {
|
2015-11-04 09:21:49 -08:00
|
|
|
fmt.Printf("inl body:%v\n", s2.Func.Inl)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
p.import_error()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
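// hidden_pkg_importsym parses an import symbol and records its package in
// p.structpkg for later verification in addmethod.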
func (p *parser) hidden_pkg_importsym() *Sym {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_pkg_importsym")()
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-11 17:12:31 -08:00
|
|
|
s := p.hidden_importsym()
|
|
|
|
|
p.structpkg = s.Pkg
|
|
|
|
|
return s
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_pkgtype() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_pkgtype")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
s1 := p.hidden_pkg_importsym()
|
|
|
|
|
|
|
|
|
|
ss := pkgtype(s1)
|
|
|
|
|
importsym(s1, OTYPE)
|
|
|
|
|
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ----------------------------------------------------------------------------
|
|
|
|
|
// Importing types
|
|
|
|
|
|
|
|
|
|
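// hidden_type parses a type in export data.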
func (p *parser) hidden_type() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_type")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
default:
|
|
|
|
|
return p.hidden_type_misc()
|
|
|
|
|
case LCOMM:
|
|
|
|
|
return p.hidden_type_recv_chan()
|
|
|
|
|
case LFUNC:
|
|
|
|
|
return p.hidden_type_func()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_type_non_recv_chan() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_type_non_recv_chan")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
default:
|
|
|
|
|
return p.hidden_type_misc()
|
|
|
|
|
case LFUNC:
|
|
|
|
|
return p.hidden_type_func()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_type_misc() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_type_misc")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch p.tok {
|
|
|
|
|
case '@':
|
|
|
|
|
// hidden_importsym
|
|
|
|
|
s1 := p.hidden_importsym()
|
|
|
|
|
return pkgtype(s1)
|
|
|
|
|
|
|
|
|
|
case LNAME:
|
|
|
|
|
// LNAME
|
|
|
|
|
s1 := p.sym_
|
|
|
|
|
p.next()
|
|
|
|
|
|
|
|
|
|
// predefined name like uint8
|
|
|
|
|
s1 = Pkglookup(s1.Name, builtinpkg)
|
|
|
|
|
if s1.Def == nil || s1.Def.Op != OTYPE {
|
|
|
|
|
Yyerror("%s is not a type", s1.Name)
|
|
|
|
|
return nil
|
|
|
|
|
} else {
|
|
|
|
|
return s1.Def.Type
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case '[':
|
|
|
|
|
// '[' ']' hidden_type
|
|
|
|
|
// '[' LLITERAL ']' hidden_type
|
|
|
|
|
p.next()
|
|
|
|
|
var s2 *Node
|
|
|
|
|
if p.tok == LLITERAL {
|
|
|
|
|
s2 = nodlit(p.val)
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
|
|
|
|
p.want(']')
|
|
|
|
|
s4 := p.hidden_type()
|
|
|
|
|
|
|
|
|
|
return aindex(s2, s4)
|
|
|
|
|
|
|
|
|
|
case LMAP:
|
|
|
|
|
// LMAP '[' hidden_type ']' hidden_type
|
|
|
|
|
p.next()
|
|
|
|
|
p.want('[')
|
|
|
|
|
s3 := p.hidden_type()
|
|
|
|
|
p.want(']')
|
|
|
|
|
s5 := p.hidden_type()
|
|
|
|
|
|
|
|
|
|
return maptype(s3, s5)
|
|
|
|
|
|
|
|
|
|
case LSTRUCT:
|
|
|
|
|
// LSTRUCT '{' ohidden_structdcl_list '}'
|
|
|
|
|
p.next()
|
|
|
|
|
p.want('{')
|
|
|
|
|
s3 := p.ohidden_structdcl_list()
|
|
|
|
|
p.want('}')
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
return tostruct(s3)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case LINTERFACE:
|
|
|
|
|
// LINTERFACE '{' ohidden_interfacedcl_list '}'
|
|
|
|
|
p.next()
|
|
|
|
|
p.want('{')
|
|
|
|
|
s3 := p.ohidden_interfacedcl_list()
|
|
|
|
|
p.want('}')
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
return tointerface(s3)
|
2015-11-04 09:21:49 -08:00
|
|
|
|
|
|
|
|
case '*':
|
|
|
|
|
// '*' hidden_type
|
|
|
|
|
p.next()
|
|
|
|
|
s2 := p.hidden_type()
|
|
|
|
|
return Ptrto(s2)
|
|
|
|
|
|
|
|
|
|
case LCHAN:
|
|
|
|
|
p.next()
|
|
|
|
|
switch p.tok {
|
|
|
|
|
default:
|
|
|
|
|
// LCHAN hidden_type_non_recv_chan
|
|
|
|
|
s2 := p.hidden_type_non_recv_chan()
|
|
|
|
|
ss := typ(TCHAN)
|
|
|
|
|
ss.Type = s2
|
|
|
|
|
ss.Chan = Cboth
|
|
|
|
|
return ss
|
|
|
|
|
|
|
|
|
|
case '(':
|
|
|
|
|
// LCHAN '(' hidden_type_recv_chan ')'
|
|
|
|
|
p.next()
|
|
|
|
|
s3 := p.hidden_type_recv_chan()
|
|
|
|
|
p.want(')')
|
|
|
|
|
ss := typ(TCHAN)
|
|
|
|
|
ss.Type = s3
|
|
|
|
|
ss.Chan = Cboth
|
|
|
|
|
return ss
|
|
|
|
|
|
|
|
|
|
case LCOMM:
|
|
|
|
|
// LCHAN hidden_type
|
|
|
|
|
p.next()
|
|
|
|
|
s3 := p.hidden_type()
|
|
|
|
|
ss := typ(TCHAN)
|
|
|
|
|
ss.Type = s3
|
|
|
|
|
ss.Chan = Csend
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
p.import_error()
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_type_recv_chan() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_type_recv_chan")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LCOMM)
|
|
|
|
|
p.want(LCHAN)
|
|
|
|
|
s3 := p.hidden_type()
|
|
|
|
|
|
|
|
|
|
ss := typ(TCHAN)
|
|
|
|
|
ss.Type = s3
|
|
|
|
|
ss.Chan = Crecv
|
|
|
|
|
return ss
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (p *parser) hidden_type_func() *Type {
|
|
|
|
|
if trace && Debug['x'] != 0 {
|
|
|
|
|
defer p.trace("hidden_type_func")()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p.want(LFUNC)
|
|
|
|
|
p.want('(')
|
|
|
|
|
s3 := p.ohidden_funarg_list()
|
|
|
|
|
p.want(')')
|
|
|
|
|
s5 := p.ohidden_funres()
|
|
|
|
|
|
2016-03-09 20:29:21 -08:00
|
|
|
return functype(nil, s3, s5)
|
2015-11-04 09:21:49 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
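// hidden_funarg parses a single function parameter from export data,
// including a final variadic ("...") parameter.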
func (p *parser) hidden_funarg() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_funarg")()
	}

	s1 := p.sym()
	switch p.tok {
	default:
		s2 := p.hidden_type()
		s3 := p.oliteral()

		ss := Nod(ODCLFIELD, nil, typenod(s2))
		if s1 != nil {
			ss.Left = newname(s1)
		}
		ss.SetVal(s3)
		return ss

	case LDDD:
		p.next()
		s3 := p.hidden_type()
		s4 := p.oliteral()

		t := typ(TARRAY)
		t.Bound = -1
		t.Type = s3

		ss := Nod(ODCLFIELD, nil, typenod(t))
		if s1 != nil {
			ss.Left = newname(s1)
		}
		ss.Isddd = true
		ss.SetVal(s4)

		return ss
	}
}

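// hidden_structdcl parses a single struct field: sym hidden_type oliteral.
// A missing or "?" field name denotes an embedded field.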
func (p *parser) hidden_structdcl() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_structdcl")()
	}

	s1 := p.sym()
	s2 := p.hidden_type()
	s3 := p.oliteral()

	var ss *Node
	if s1 != nil && s1.Name != "?" {
		ss = Nod(ODCLFIELD, newname(s1), typenod(s2))
		ss.SetVal(s3)
	} else {
		s := s2.Sym
		if s == nil && Isptr[s2.Etype] {
			s = s2.Type.Sym
		}
		pkg := importpkg
		if s1 != nil {
			pkg = s1.Pkg
		}
		ss = embedded(s, pkg)
		ss.Right = typenod(s2)
		ss.SetVal(s3)
	}

	return ss
}

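// hidden_interfacedcl parses a single method declaration of an interface
// type in export data.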
func (p *parser) hidden_interfacedcl() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_interfacedcl")()
	}

	// The original (now defunct) grammar in go.y accepted either a method
	// or an (embedded) type:
	//
	// hidden_interfacedcl:
	//	sym '(' ohidden_funarg_list ')' ohidden_funres
	//	{
	//		$$ = Nod(ODCLFIELD, newname($1), typenod(functype(fakethis(), $3, $5)));
	//	}
	// |	hidden_type
	//	{
	//		$$ = Nod(ODCLFIELD, nil, typenod($1));
	//	}
	//
	// But the current textual export code only exports (inlined) methods,
	// even if the methods came from embedded interfaces. Furthermore, in
	// the original grammar, hidden_type may also start with a sym (LNAME
	// or '@'), complicating matters further. Since we never have embedded
	// types, only parse methods here.

	s1 := p.sym()
	p.want('(')
	s3 := p.ohidden_funarg_list()
	p.want(')')
	s5 := p.ohidden_funres()

	return Nod(ODCLFIELD, newname(s1), typenod(functype(fakethis(), s3, s5)))
}

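// ohidden_funres parses an optional function result list; it returns nil
// if the current token cannot start a result.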
func (p *parser) ohidden_funres() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("ohidden_funres")()
	}

	switch p.tok {
	default:
		return nil

	case '(', '@', LNAME, '[', LMAP, LSTRUCT, LINTERFACE, '*', LCHAN, LCOMM, LFUNC:
		return p.hidden_funres()
	}
}

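// hidden_funres parses a function result: either a parenthesized
// ohidden_funarg_list or a single unnamed hidden_type.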
func (p *parser) hidden_funres() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_funres")()
	}

	switch p.tok {
	case '(':
		p.next()
		s2 := p.ohidden_funarg_list()
		p.want(')')
		return s2

	default:
		s1 := p.hidden_type()
		return []*Node{Nod(ODCLFIELD, nil, typenod(s1))}
	}
}

// ----------------------------------------------------------------------------
// Importing constants

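// hidden_literal parses a basic literal, a negated literal, or a reference
// to a predeclared constant in the builtin package.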
func (p *parser) hidden_literal() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_literal")()
	}

	switch p.tok {
	case LLITERAL:
		ss := nodlit(p.val)
		p.next()
		return ss

	case '-':
		p.next()
		if p.tok == LLITERAL {
			ss := nodlit(p.val)
			p.next()
			switch ss.Val().Ctype() {
			case CTINT, CTRUNE:
				mpnegfix(ss.Val().U.(*Mpint))
			case CTFLT:
				mpnegflt(ss.Val().U.(*Mpflt))
			case CTCPLX:
				mpnegflt(&ss.Val().U.(*Mpcplx).Real)
				mpnegflt(&ss.Val().U.(*Mpcplx).Imag)
			default:
				Yyerror("bad negated constant")
			}
			return ss
		} else {
			p.import_error()
			return nil
		}

	case LNAME, '@', '?':
		s1 := p.sym()
		ss := oldname(Pkglookup(s1.Name, builtinpkg))
		if ss.Op != OLITERAL {
			Yyerror("bad constant %v", ss.Sym)
		}
		return ss

	default:
		p.import_error()
		return nil
	}
}

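// hidden_constant parses a constant in export data: either a single
// hidden_literal or a sum '(' hidden_literal '+' hidden_literal ')'
// as written for rune and complex constants.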
func (p *parser) hidden_constant() *Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_constant")()
	}

	switch p.tok {
	default:
		return p.hidden_literal()

	case '(':
		p.next()
		s2 := p.hidden_literal()
		p.want('+')
		s4 := p.hidden_literal()
		p.want(')')

		if s2.Val().Ctype() == CTRUNE && s4.Val().Ctype() == CTINT {
			ss := s2
			mpaddfixfix(s2.Val().U.(*Mpint), s4.Val().U.(*Mpint), 0)
			return ss
		}
		s4.Val().U.(*Mpcplx).Real = s4.Val().U.(*Mpcplx).Imag
		Mpmovecflt(&s4.Val().U.(*Mpcplx).Imag, 0.0)
		return nodcplxlit(s2.Val(), s4.Val())
	}
}

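// hidden_import_list parses the list of hidden imports, stopping at the
// terminating '$'.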
func (p *parser) hidden_import_list() {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_import_list")()
	}

	for p.tok != '$' {
		p.hidden_import()
	}
}

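// hidden_funarg_list parses a comma-separated list of function parameters.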
func (p *parser) hidden_funarg_list() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_funarg_list")()
	}

	s1 := p.hidden_funarg()
	ss := []*Node{s1}
	for p.got(',') {
		s3 := p.hidden_funarg()
		ss = append(ss, s3)
	}
	return ss
}

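// hidden_structdcl_list parses a semicolon-separated list of struct fields.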
func (p *parser) hidden_structdcl_list() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_structdcl_list")()
	}

	s1 := p.hidden_structdcl()
	ss := []*Node{s1}
	for p.got(';') {
		s3 := p.hidden_structdcl()
		ss = append(ss, s3)
	}
	return ss
}

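// hidden_interfacedcl_list parses a semicolon-separated list of interface
// method declarations.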
func (p *parser) hidden_interfacedcl_list() []*Node {
	if trace && Debug['x'] != 0 {
		defer p.trace("hidden_interfacedcl_list")()
	}

	s1 := p.hidden_interfacedcl()
	ss := []*Node{s1}
	for p.got(';') {
		s3 := p.hidden_interfacedcl()
		ss = append(ss, s3)
	}
	return ss
}