
cmd/compile: remove support for textual export format

Fixes #15323.

Change-Id: I50e996e6fde6b24327cb45dd84da31deef4dcc56
Reviewed-on: https://go-review.googlesource.com/27171
Run-TryBot: Robert Griesemer <gri@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
Robert Griesemer 2016-08-16 12:55:17 -07:00
parent e492d9f018
commit b4e9f70412
10 changed files with 86 additions and 2125 deletions

View File

@@ -174,8 +174,8 @@ const exportInlined = true // default: true
// errors.
// If disabled, only named types are tracked, possibly leading to slightly
// less efficient encoding in rare cases. It also prevents the export of
// some corner-case type declarations (but those are not handled correctly
// with with the textual export format either).
// some corner-case type declarations (but those were not handled correctly
// with the former textual export format either).
// TODO(gri) enable and remove once issues caused by it are fixed
const trackAllTypes = false

View File

@@ -80,7 +80,6 @@ func Import(in *bufio.Reader) {
p.pkg()
// defer some type-checking until all types are read in completely
// (parser.go:import_package)
tcok := typecheckok
typecheckok = true
defercheckwidth()

View File

@@ -9,14 +9,12 @@ import (
"bytes"
"cmd/internal/bio"
"fmt"
"sort"
"unicode"
"unicode/utf8"
)
var (
newexport bool // if set, use new export format
Debug_export int // if set, print debugging information about export data
Debug_export int // if set, print debugging information about export data
exportsize int
)
@@ -94,18 +92,6 @@ func autoexport(n *Node, ctxt Class) {
}
}
func dumppkg(p *Pkg) {
if p == nil || p == localpkg || p.Exported || p == builtinpkg {
return
}
p.Exported = true
suffix := ""
if !p.Direct {
suffix = " // indirect"
}
exportf("\timport %s %q%s\n", p.Name, p.Path, suffix)
}
// Look for anything we need for the inline body
func reexportdeplist(ll Nodes) {
for _, n := range ll.Slice() {
@@ -224,53 +210,6 @@ func reexportdep(n *Node) {
reexportdeplist(n.Nbody)
}
func dumpexportconst(s *Sym) {
n := typecheck(s.Def, Erv)
if n == nil || n.Op != OLITERAL {
Fatalf("dumpexportconst: oconst nil: %v", s)
}
t := n.Type // may or may not be specified
dumpexporttype(t)
if t != nil && !t.IsUntyped() {
exportf("\tconst %v %v = %v\n", sconv(s, FmtSharp), Tconv(t, FmtSharp), vconv(n.Val(), FmtSharp))
} else {
exportf("\tconst %v = %v\n", sconv(s, FmtSharp), vconv(n.Val(), FmtSharp))
}
}
func dumpexportvar(s *Sym) {
n := s.Def
n = typecheck(n, Erv|Ecall)
if n == nil || n.Type == nil {
Yyerror("variable exported but not defined: %v", s)
return
}
t := n.Type
dumpexporttype(t)
if t.Etype == TFUNC && n.Class == PFUNC {
if n.Func != nil && n.Func.Inl.Len() != 0 {
// when lazily typechecking inlined bodies, some re-exported ones may not have been typechecked yet.
// currently that can leave unresolved ONONAMEs in import-dot-ed packages in the wrong package
if Debug['l'] < 2 {
typecheckinl(n)
}
// NOTE: The space after %#S here is necessary for ld's export data parser.
exportf("\tfunc %v %v { %v }\n", sconv(s, FmtSharp), Tconv(t, FmtShort|FmtSharp), hconv(n.Func.Inl, FmtSharp|FmtBody))
reexportdeplist(n.Func.Inl)
} else {
exportf("\tfunc %v %v\n", sconv(s, FmtSharp), Tconv(t, FmtShort|FmtSharp))
}
} else {
exportf("\tvar %v %v\n", sconv(s, FmtSharp), Tconv(t, FmtSharp))
}
}
// methodbyname sorts types by symbol name.
type methodbyname []*Field
@@ -278,167 +217,44 @@ func (x methodbyname) Len() int { return len(x) }
func (x methodbyname) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x methodbyname) Less(i, j int) bool { return x[i].Sym.Name < x[j].Sym.Name }
func dumpexporttype(t *Type) {
if t == nil {
return
}
if t.Printed || t == Types[t.Etype] || t == bytetype || t == runetype || t == errortype {
return
}
t.Printed = true
if t.Sym != nil {
dumppkg(t.Sym.Pkg)
}
switch t.Etype {
case TSTRUCT, TINTER:
for _, f := range t.Fields().Slice() {
dumpexporttype(f.Type)
}
case TFUNC:
dumpexporttype(t.Recvs())
dumpexporttype(t.Results())
dumpexporttype(t.Params())
case TMAP:
dumpexporttype(t.Val())
dumpexporttype(t.Key())
case TARRAY, TCHAN, TPTR32, TPTR64, TSLICE:
dumpexporttype(t.Elem())
}
if t.Sym == nil {
return
}
var m []*Field
for _, f := range t.Methods().Slice() {
dumpexporttype(f.Type)
m = append(m, f)
}
sort.Sort(methodbyname(m))
exportf("\ttype %v %v\n", sconv(t.Sym, FmtSharp), Tconv(t, FmtSharp|FmtLong))
for _, f := range m {
if f.Nointerface {
exportf("\t//go:nointerface\n")
}
if f.Type.Nname() != nil && f.Type.Nname().Func.Inl.Len() != 0 { // nname was set by caninl
// when lazily typechecking inlined bodies, some re-exported ones may not have been typechecked yet.
// currently that can leave unresolved ONONAMEs in import-dot-ed packages in the wrong package
if Debug['l'] < 2 {
typecheckinl(f.Type.Nname())
}
exportf("\tfunc %v %v %v { %v }\n", Tconv(f.Type.Recvs(), FmtSharp), sconv(f.Sym, FmtShort|FmtByte|FmtSharp), Tconv(f.Type, FmtShort|FmtSharp), hconv(f.Type.Nname().Func.Inl, FmtSharp|FmtBody))
reexportdeplist(f.Type.Nname().Func.Inl)
} else {
exportf("\tfunc %v %v %v\n", Tconv(f.Type.Recvs(), FmtSharp), sconv(f.Sym, FmtShort|FmtByte|FmtSharp), Tconv(f.Type, FmtShort|FmtSharp))
}
}
}
func dumpsym(s *Sym) {
if s.Flags&SymExported != 0 {
return
}
s.Flags |= SymExported
if s.Def == nil {
Yyerror("unknown export symbol: %v", s)
return
}
// print("dumpsym %O %+S\n", s->def->op, s);
dumppkg(s.Pkg)
switch s.Def.Op {
default:
Yyerror("unexpected export symbol: %v %v", s.Def.Op, s)
case OLITERAL:
dumpexportconst(s)
case OTYPE:
if s.Def.Type.Etype == TFORW {
Yyerror("export of incomplete type %v", s)
} else {
dumpexporttype(s.Def.Type)
}
case ONAME:
dumpexportvar(s)
}
}
func dumpexport() {
if buildid != "" {
exportf("build id %q\n", buildid)
}
size := 0 // size of export section without enclosing markers
if newexport {
// binary export
// The linker also looks for the $$ marker - use char after $$ to distinguish format.
exportf("\n$$B\n") // indicate binary format
if debugFormat {
// save a copy of the export data
var copy bytes.Buffer
bcopy := bufio.NewWriter(&copy)
size = export(bcopy, Debug_export != 0)
bcopy.Flush() // flushing to bytes.Buffer cannot fail
if n, err := bout.Write(copy.Bytes()); n != size || err != nil {
Fatalf("error writing export data: got %d bytes, want %d bytes, err = %v", n, size, err)
}
// export data must contain no '$' so that we can find the end by searching for "$$"
if bytes.IndexByte(copy.Bytes(), '$') >= 0 {
Fatalf("export data contains $")
}
// verify that we can read the copied export data back in
// (use empty package map to avoid collisions)
savedPkgMap := pkgMap
savedPkgs := pkgs
pkgMap = make(map[string]*Pkg)
pkgs = nil
importpkg = mkpkg("")
Import(bufio.NewReader(&copy)) // must not die
importpkg = nil
pkgs = savedPkgs
pkgMap = savedPkgMap
} else {
size = export(bout.Writer, Debug_export != 0)
// The linker also looks for the $$ marker - use char after $$ to distinguish format.
exportf("\n$$B\n") // indicate binary export format
if debugFormat {
// save a copy of the export data
var copy bytes.Buffer
bcopy := bufio.NewWriter(&copy)
size = export(bcopy, Debug_export != 0)
bcopy.Flush() // flushing to bytes.Buffer cannot fail
if n, err := bout.Write(copy.Bytes()); n != size || err != nil {
Fatalf("error writing export data: got %d bytes, want %d bytes, err = %v", n, size, err)
}
exportf("\n$$\n")
// export data must contain no '$' so that we can find the end by searching for "$$"
// TODO(gri) is this still needed?
if bytes.IndexByte(copy.Bytes(), '$') >= 0 {
Fatalf("export data contains $")
}
// verify that we can read the copied export data back in
// (use empty package map to avoid collisions)
savedPkgMap := pkgMap
savedPkgs := pkgs
pkgMap = make(map[string]*Pkg)
pkgs = nil
importpkg = mkpkg("")
Import(bufio.NewReader(&copy)) // must not die
importpkg = nil
pkgs = savedPkgs
pkgMap = savedPkgMap
} else {
// textual export
lno := lineno
exportf("\n$$\n") // indicate textual format
exportsize = 0
exportf("package %s", localpkg.Name)
if safemode {
exportf(" safe")
}
exportf("\n")
for _, p := range pkgs {
if p.Direct {
dumppkg(p)
}
}
// exportlist grows during iteration - cannot use range
for i := 0; i < len(exportlist); i++ {
n := exportlist[i]
lineno = n.Lineno
dumpsym(n.Sym)
}
size = exportsize
exportf("\n$$\n")
lineno = lno
size = export(bout.Writer, Debug_export != 0)
}
exportf("\n$$\n")
if Debug_export != 0 {
fmt.Printf("export data size = %d bytes\n", size)

View File

@@ -73,7 +73,7 @@ const (
const (
FErr = iota
FDbg
FExp
_ // formerly FExp - leave gap for now just in case there's some hard-wired dependency on the const value
FTypeId
)
@@ -113,7 +113,8 @@ func setfmode(flags *FmtFlag) (fm int, fb bool) {
if *flags&FmtSign != 0 {
fmtmode = FDbg
} else if *flags&FmtSharp != 0 {
fmtmode = FExp
// for textual export format - no longer supported
Fatalf("textual export format request")
} else if *flags&FmtLeft != 0 {
fmtmode = FTypeId
}
@@ -340,7 +341,7 @@ func vconv(v Val, flag FmtFlag) string {
switch u := v.U.(type) {
case *Mpint:
if !u.Rune {
if (flag&FmtSharp != 0) || fmtmode == FExp {
if flag&FmtSharp != 0 {
return bconv(u, FmtSharp)
}
return bconv(u, 0)
@@ -359,13 +360,13 @@ func vconv(v Val, flag FmtFlag) string {
return fmt.Sprintf("('\\x00' + %v)", u)
case *Mpflt:
if (flag&FmtSharp != 0) || fmtmode == FExp {
if flag&FmtSharp != 0 {
return fconv(u, 0)
}
return fconv(u, FmtSharp)
case *Mpcplx:
if (flag&FmtSharp != 0) || fmtmode == FExp {
if flag&FmtSharp != 0 {
return fmt.Sprintf("(%v+%vi)", &u.Real, &u.Imag)
}
if v.U.(*Mpcplx).Real.CmpFloat64(0) == 0 {
@@ -474,14 +475,6 @@ func symfmt(s *Sym, flag FmtFlag) string {
return s.Pkg.Name + "." + s.Name // dcommontype, typehash
}
return s.Pkg.Prefix + "." + s.Name // (methodsym), typesym, weaksym
case FExp:
if s.Name != "" && s.Name[0] == '.' {
Fatalf("exporting synthetic symbol %s", s.Name)
}
if s.Pkg != builtinpkg {
return fmt.Sprintf("@%q.%s", s.Pkg.Path, s.Name)
}
}
}
@@ -493,8 +486,7 @@ func symfmt(s *Sym, flag FmtFlag) string {
p = s.Name[i+1:]
}
// exportname needs to see the name without the prefix too.
if (fmtmode == FExp && !exportname(p)) || fmtmode == FDbg {
if fmtmode == FDbg {
return fmt.Sprintf("@%q.%s", s.Pkg.Path, p)
}
@@ -559,9 +551,7 @@ func typefmt(t *Type, flag FmtFlag) string {
if flag&FmtUnsigned != 0 {
return sconv(t.Sym, FmtUnsigned)
}
fallthrough
case FExp:
if t.Sym.Pkg == localpkg && t.Vargen != 0 {
return fmt.Sprintf("%v·%d", t.Sym, t.Vargen)
}
@@ -660,15 +650,11 @@ func typefmt(t *Type, flag FmtFlag) string {
switch t.Results().NumFields() {
case 0:
break
// nothing to do
case 1:
if fmtmode != FExp {
buf.WriteString(" ")
buf.WriteString(Tconv(t.Results().Field(0).Type, 0)) // struct->field->field's type
break
}
fallthrough
buf.WriteString(" ")
buf.WriteString(Tconv(t.Results().Field(0).Type, 0)) // struct->field->field's type
default:
buf.WriteString(" ")
@@ -733,25 +719,15 @@ func typefmt(t *Type, flag FmtFlag) string {
return "undefined"
case TUNSAFEPTR:
if fmtmode == FExp {
return "@\"unsafe\".Pointer"
}
return "unsafe.Pointer"
case TDDDFIELD:
if fmtmode == FExp {
Fatalf("cannot use TDDDFIELD with old exporter")
}
return fmt.Sprintf("%v <%v> %v", t.Etype, t.Sym, t.DDDField())
case Txxx:
return "Txxx"
}
if fmtmode == FExp {
Fatalf("missing %v case during export", t.Etype)
}
// Don't know how to handle - fall back to detailed prints.
return fmt.Sprintf("%v <%v> %v", t.Etype, t.Sym, t.Elem())
}
@@ -793,14 +769,6 @@ func stmtfmt(n *Node) string {
switch n.Op {
case ODCL:
if fmtmode == FExp {
switch n.Left.Class {
case PPARAM, PPARAMOUT, PAUTO, PAUTOHEAP:
f += fmt.Sprintf("var %v %v", n.Left, n.Left.Type)
goto ret
}
}
f += fmt.Sprintf("var %v %v", n.Left.Sym, n.Left.Type)
case ODCLFIELD:
@@ -814,10 +782,6 @@ func stmtfmt(n *Node) string {
// preceded by the DCL which will be re-parsed and typechecked to reproduce
// the "v = <N>" again.
case OAS, OASWB:
if fmtmode == FExp && n.Right == nil {
break
}
if n.Colas && !complexinit {
f += fmt.Sprintf("%v := %v", n.Left, n.Right)
} else {
@@ -947,7 +911,6 @@ func stmtfmt(n *Node) string {
f += fmt.Sprintf("%v: ", n.Left)
}
ret:
if extrablock {
f += "}"
}
@@ -1124,23 +1087,9 @@ func exprfmt(n *Node, prec int) string {
// Special case: name used as local variable in export.
// _ becomes ~b%d internally; print as _ for export
case ONAME:
if (fmtmode == FExp || fmtmode == FErr) && n.Sym != nil && n.Sym.Name[0] == '~' && n.Sym.Name[1] == 'b' {
if fmtmode == FErr && n.Sym != nil && n.Sym.Name[0] == '~' && n.Sym.Name[1] == 'b' {
return "_"
}
if fmtmode == FExp && n.Sym != nil && !isblank(n) && n.Name.Vargen > 0 {
return fmt.Sprintf("%v·%d", n.Sym, n.Name.Vargen)
}
// Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
// but for export, this should be rendered as (*pkg.T).meth.
// These nodes have the special property that they are names with a left OTYPE and a right ONAME.
if fmtmode == FExp && n.Left != nil && n.Left.Op == OTYPE && n.Right != nil && n.Right.Op == ONAME {
if n.Left.Type.IsPtr() {
return fmt.Sprintf("(%v).%v", n.Left.Type, sconv(n.Right.Sym, FmtShort|FmtByte))
} else {
return fmt.Sprintf("%v.%v", n.Left.Type, sconv(n.Right.Sym, FmtShort|FmtByte))
}
}
fallthrough
case OPACK, ONONAME:
@@ -1209,63 +1158,20 @@ func exprfmt(n *Node, prec int) string {
return "composite literal"
}
if fmtmode == FExp && ptrlit {
// typecheck has overwritten OIND by OTYPE with pointer type.
return fmt.Sprintf("(&%v{ %v })", n.Right.Type.Elem(), hconv(n.List, FmtComma))
}
return fmt.Sprintf("(%v{ %v })", n.Right, hconv(n.List, FmtComma))
case OPTRLIT:
if fmtmode == FExp && n.Left.Implicit {
return Nconv(n.Left, 0)
}
return fmt.Sprintf("&%v", n.Left)
case OSTRUCTLIT:
if fmtmode == FExp { // requires special handling of field names
var f string
if n.Implicit {
f += "{"
} else {
f += fmt.Sprintf("(%v{", n.Type)
}
for i1, n1 := range n.List.Slice() {
f += fmt.Sprintf(" %v:%v", sconv(n1.Left.Sym, FmtShort|FmtByte), n1.Right)
if i1+1 < n.List.Len() {
f += ","
} else {
f += " "
}
}
if !n.Implicit {
f += "})"
return f
}
f += "}"
return f
}
fallthrough
case OARRAYLIT, OMAPLIT:
case OSTRUCTLIT, OARRAYLIT, OMAPLIT:
if fmtmode == FErr {
return fmt.Sprintf("%v literal", n.Type)
}
if fmtmode == FExp && n.Implicit {
return fmt.Sprintf("{ %v }", hconv(n.List, FmtComma))
}
return fmt.Sprintf("(%v{ %v })", n.Type, hconv(n.List, FmtComma))
case OKEY:
if n.Left != nil && n.Right != nil {
if fmtmode == FExp && n.Left.Type == structkey {
// requires special handling of field names
return fmt.Sprintf("%v:%v", sconv(n.Left.Sym, FmtShort|FmtByte), n.Right)
} else {
return fmt.Sprintf("%v:%v", n.Left, n.Right)
}
return fmt.Sprintf("%v:%v", n.Left, n.Right)
}
if n.Left == nil && n.Right != nil {
@@ -1473,7 +1379,7 @@ func nodefmt(n *Node, flag FmtFlag) string {
// we almost always want the original, except in export mode for literals
// this saves the importer some work, and avoids us having to redo some
// special casing for package unsafe
if (fmtmode != FExp || n.Op != OLITERAL) && n.Orig != nil {
if n.Op != OLITERAL && n.Orig != nil {
n = n.Orig
}
@@ -1643,7 +1549,7 @@ func Fldconv(f *Field, flag FmtFlag) string {
// Take the name from the original, lest we substituted it with ~r%d or ~b%d.
// ~r%d is a (formerly) unnamed result.
if (fmtmode == FErr || fmtmode == FExp) && f.Nname != nil {
if fmtmode == FErr && f.Nname != nil {
if f.Nname.Orig != nil {
s = f.Nname.Orig.Sym
if s != nil && s.Name[0] == '~' {
@@ -1666,12 +1572,6 @@ func Fldconv(f *Field, flag FmtFlag) string {
} else {
name = sconv(s, 0)
}
} else if fmtmode == FExp {
if f.Embedded != 0 && s.Pkg != nil && len(s.Pkg.Path) > 0 {
name = fmt.Sprintf("@%q.?", s.Pkg.Path)
} else {
name = "?"
}
}
}
@@ -1759,7 +1659,7 @@ func Nconv(n *Node, flag FmtFlag) string {
var str string
switch fmtmode {
case FErr, FExp:
case FErr:
str = nodefmt(n, flag)
case FDbg:

View File

@@ -469,12 +469,6 @@ l0:
l.nlsemi = true
goto lx
case '#', '$', '?', '@', '\\':
if importpkg != nil {
goto lx
}
fallthrough
default:
// anything else is illegal
Yyerror("syntax error: illegal character %#U", c)
@@ -536,7 +530,7 @@ func (l *lexer) ident(c rune) {
// general case
for {
if c >= utf8.RuneSelf {
if unicode.IsLetter(c) || c == '_' || unicode.IsDigit(c) || importpkg != nil && c == 0xb7 {
if unicode.IsLetter(c) || c == '_' || unicode.IsDigit(c) {
if cp.Len() == 0 && unicode.IsDigit(c) {
Yyerror("identifier cannot begin with digit %#U", c)
}
@@ -672,18 +666,10 @@ func (l *lexer) number(c rune) {
cp.WriteByte(byte(c))
c = l.getr()
}
// Falling through to exponent parsing here permits invalid
// floating-point numbers with fractional mantissa and base-2
// (p or P) exponent. We don't care because base-2 exponents
// can only show up in machine-generated textual export data
// which will use correct formatting.
}
// exponent
// base-2 exponent (p or P) is only allowed in export data (see #9036)
// TODO(gri) Once we switch to binary import data, importpkg will
// always be nil in this function. Simplify the code accordingly.
if c == 'e' || c == 'E' || importpkg != nil && (c == 'p' || c == 'P') {
if c == 'e' || c == 'E' {
isInt = false
cp.WriteByte(byte(c))
c = l.getr()
@@ -1124,9 +1110,7 @@ redo:
case 0:
yyerrorl(lexlineno, "illegal NUL byte")
case '\n':
if importpkg == nil {
lexlineno++
}
lexlineno++
case utf8.RuneError:
if w == 1 {
yyerrorl(lexlineno, "illegal UTF-8 sequence")

View File

@@ -182,7 +182,6 @@ func Main() {
obj.Flagcount("live", "debug liveness analysis", &debuglive)
obj.Flagcount("m", "print optimization decisions", &Debug['m'])
flag.BoolVar(&flag_msan, "msan", false, "build code compatible with C/C++ memory sanitizer")
flag.BoolVar(&newexport, "newexport", true, "use new export format") // TODO(gri) remove eventually (issue 15323)
flag.BoolVar(&nolocalimports, "nolocalimports", false, "reject local (relative) imports")
flag.StringVar(&outfile, "o", "", "write output to `file`")
flag.StringVar(&myimportpath, "p", "", "set expected package import `path`")
@@ -644,24 +643,10 @@ func loadsys() {
iota_ = -1000000
incannedimport = 1
// The first byte in the binary export format is a 'c' or 'd'
// specifying the encoding format. We could just check that
// byte, but this is a perhaps more robust. Also, it is not
// speed-critical.
// TODO(gri) simplify once textual export format has gone
if strings.HasPrefix(runtimeimport, "package") {
// textual export format
importpkg = Runtimepkg
parse_import(bufio.NewReader(strings.NewReader(runtimeimport)), nil)
importpkg = unsafepkg
parse_import(bufio.NewReader(strings.NewReader(unsafeimport)), nil)
} else {
// binary export format
importpkg = Runtimepkg
Import(bufio.NewReader(strings.NewReader(runtimeimport)))
importpkg = unsafepkg
Import(bufio.NewReader(strings.NewReader(unsafeimport)))
}
importpkg = Runtimepkg
Import(bufio.NewReader(strings.NewReader(runtimeimport)))
importpkg = unsafepkg
Import(bufio.NewReader(strings.NewReader(unsafeimport)))
importpkg = nil
incannedimport = 0
@@ -804,8 +789,8 @@ func importfile(f *Val, indent []byte) {
linehistpragma(file[len(file)-len(path_)-2:]) // acts as #pragma lib
// In the importfile, if we find:
// $$\n (old format): position the input right after $$\n and return
// $$B\n (new format): import directly, then feed the lexer a dummy statement
// $$\n (textual format): not supported anymore
// $$B\n (binary format) : import directly, then feed the lexer a dummy statement
// look for $$
var c byte
@@ -829,11 +814,9 @@ func importfile(f *Val, indent []byte) {
switch c {
case '\n':
// old export format
parse_import(imp, indent)
Yyerror("cannot import %s: old export format no longer supported (recompile library)", path_)
case 'B':
// new export format
if Debug_export != 0 {
fmt.Printf("importing %s (%s)\n", path_, file)
}
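To illustrate the dispatch above (this is not code from the commit): after the importer scans forward to the "$$" marker, the next byte decides the format: 'B' introduces binary export data, while a bare newline meant the old textual format, which the compiler now rejects. A minimal sketch with a hypothetical helper name:

	package sketch

	import (
		"bufio"
		"fmt"
	)

	// exportFormatIsBinary reads the byte following the "$$" marker and reports
	// whether binary export data ("$$B\n") follows; a plain newline ("$$\n")
	// marked the removed textual format.
	func exportFormatIsBinary(r *bufio.Reader) (bool, error) {
		c, err := r.ReadByte()
		if err != nil {
			return false, err
		}
		switch c {
		case 'B':
			return true, nil // binary export data follows
		case '\n':
			return false, nil // old textual format, no longer supported
		default:
			return false, fmt.Errorf("unexpected byte %q after $$", c)
		}
	}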

View File

@@ -66,16 +66,9 @@ func mkbuiltin(w io.Writer, name string) {
}
// Look for $$B that introduces binary export data.
textual := false // TODO(gri) remove once we switched to binary export format
i := bytes.Index(b, []byte("\n$$B\n"))
if i < 0 {
// Look for $$ that introduces textual export data.
i = bytes.Index(b, []byte("\n$$\n"))
if i < 0 {
log.Fatal("did not find beginning of export data")
}
textual = true
i-- // textual data doesn't have B
log.Fatal("did not find beginning of export data")
}
b = b[i+5:]
@@ -87,27 +80,15 @@ func mkbuiltin(w io.Writer, name string) {
b = b[:i+4]
// Process and reformat export data.
const n = 40 // number of bytes per line
fmt.Fprintf(w, "\nconst %simport = \"\"", name)
if textual {
for _, p := range bytes.SplitAfter(b, []byte("\n")) {
// Chop leading white space.
p = bytes.TrimLeft(p, " \t")
if len(p) == 0 {
continue
}
fmt.Fprintf(w, " +\n\t%q", p)
}
} else {
const n = 40 // number of bytes per line
for len(b) > 0 {
i := len(b)
if i > n {
i = n
}
fmt.Fprintf(w, " +\n\t%q", b[:i])
b = b[i:]
for len(b) > 0 {
i := len(b)
if i > n {
i = n
}
fmt.Fprintf(w, " +\n\t%q", b[:i])
b = b[i:]
}
fmt.Fprintf(w, "\n")
}

View File

@@ -21,11 +21,6 @@ import (
const trace = false // if set, parse tracing can be enabled with -x
// parse_import parses the export data of a package that is imported.
func parse_import(bin *bufio.Reader, indent []byte) {
newparser(bin, indent).import_package()
}
// parse_file parses a single Go source file.
func parse_file(bin *bufio.Reader) {
newparser(bin, nil).file()
@@ -36,9 +31,6 @@ type parser struct {
fnest int // function nesting level (for error handling)
xnest int // expression nesting level (for complit ambiguity resolution)
indent []byte // tracing support
// TODO(gri) remove this once we switch to binary export format
structpkg *Pkg // for verification in addmethod only
}
// newparser returns a new parser ready to parse from src.
@@ -317,7 +309,7 @@ func (p *parser) importdcl() {
var my *Sym
switch p.tok {
case LNAME, '@', '?':
case LNAME:
// import with given name
my = p.sym()
@@ -382,49 +374,6 @@ func (p *parser) importdcl() {
my.Block = 1 // at top level
}
// import_package parses the header of an imported package as exported
// in textual format from another package.
func (p *parser) import_package() {
if trace && Debug['x'] != 0 {
defer p.trace("import_package")()
}
p.want(LPACKAGE)
var name string
if p.tok == LNAME {
name = p.sym_.Name
p.next()
} else {
p.import_error()
}
// read but skip "safe" bit (see issue #15772)
if p.tok == LNAME {
p.next()
}
p.want(';')
if importpkg.Name == "" {
importpkg.Name = name
numImport[name]++
} else if importpkg.Name != name {
Yyerror("conflicting names %s and %s for package %q", importpkg.Name, name, importpkg.Path)
}
typecheckok = true
defercheckwidth()
p.hidden_import_list()
p.want('$')
// don't read past 2nd '$'
if p.tok != '$' {
p.import_error()
}
resumecheckwidth()
typecheckok = false
}
// Declaration = ConstDecl | TypeDecl | VarDecl .
// ConstDecl = "const" ( ConstSpec | "(" { ConstSpec ";" } ")" ) .
// TypeDecl = "type" ( TypeSpec | "(" { TypeSpec ";" } ")" ) .
@@ -1254,7 +1203,7 @@ func (p *parser) operand(keep_parens bool) *Node {
p.next()
return x
case LNAME, '@', '?':
case LNAME:
return p.name()
case '(':
@@ -1357,7 +1306,7 @@ loop:
case '.':
p.next()
switch p.tok {
case LNAME, '@', '?':
case LNAME:
// pexpr '.' sym
x = p.new_dotname(x)
@@ -1574,36 +1523,22 @@ func (p *parser) onew_name() *Node {
defer p.trace("onew_name")()
}
switch p.tok {
case LNAME, '@', '?':
if p.tok == LNAME {
return p.new_name(p.sym())
}
return nil
}
func (p *parser) sym() *Sym {
switch p.tok {
case LNAME:
if p.tok == LNAME {
s := p.sym_ // from localpkg
p.next()
// during imports, unqualified non-exported identifiers are from builtinpkg
if importpkg != nil && !exportname(s.Name) {
s = Pkglookup(s.Name, builtinpkg)
}
return s
case '@':
return p.hidden_importsym()
case '?':
p.next()
return nil
default:
p.syntax_error("expecting name")
p.advance()
return new(Sym)
}
p.syntax_error("expecting name")
p.advance()
return new(Sym)
}
func mkname(sym *Sym) *Node {
@@ -1750,7 +1685,7 @@ func (p *parser) try_ntype() *Node {
p.next()
return Nod(OIND, p.ntype(), nil)
case LNAME, '@', '?':
case LNAME:
return p.dotname()
case '(':
@@ -1888,7 +1823,7 @@ func (p *parser) fndcl() *Node {
}
switch p.tok {
case LNAME, '@', '?':
case LNAME:
// FunctionName Signature
name := p.sym()
t := p.signature(nil)
@@ -1958,67 +1893,6 @@ func (p *parser) fndcl() *Node {
}
}
func (p *parser) hidden_fndcl() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_fndcl")()
}
switch p.tok {
default:
// hidden_pkg_importsym '(' ohidden_funarg_list ')' ohidden_funres
s1 := p.hidden_pkg_importsym()
p.want('(')
s3 := p.ohidden_funarg_list()
p.want(')')
s5 := p.ohidden_funres()
s := s1
t := functype(nil, s3, s5)
importsym(s, ONAME)
if s.Def != nil && s.Def.Op == ONAME {
if Eqtype(t, s.Def.Type) {
dclcontext = PDISCARD // since we skip funchdr below
return nil
}
Yyerror("inconsistent definition for func %v during import\n\t%v\n\t%v", s, s.Def.Type, t)
}
ss := newfuncname(s)
ss.Type = t
declare(ss, PFUNC)
funchdr(ss)
return ss
case '(':
// '(' hidden_funarg_list ')' sym '(' ohidden_funarg_list ')' ohidden_funres
p.next()
s2 := p.hidden_funarg_list()
p.want(')')
s4 := p.sym()
p.want('(')
s6 := p.ohidden_funarg_list()
p.want(')')
s8 := p.ohidden_funres()
ss := methodname1(newname(s4), s2[0].Right)
ss.Type = functype(s2[0], s6, s8)
checkwidth(ss.Type)
addmethod(s4, ss.Type, p.structpkg, false, p.pragma&Nointerface != 0)
p.pragma = 0
funchdr(ss)
// inl.C's inlnode in on a dotmeth node expects to find the inlineable body as
// (dotmeth's type).Nname.Inl, and dotmeth's type has been pulled
// out by typecheck's lookdot as this $$.ttype. So by providing
// this back link here we avoid special casing there.
ss.Type.SetNname(ss)
return ss
}
}
// FunctionBody = Block .
func (p *parser) fnbody() []*Node {
if trace && Debug['x'] != 0 {
@@ -2106,18 +1980,6 @@ func (p *parser) structdcl() []*Node {
return []*Node{field}
}
// LNAME belongs to first *Sym of new_name_list
//
// during imports, unqualified non-exported identifiers are from builtinpkg
if importpkg != nil && !exportname(sym.Name) {
sym = Pkglookup(sym.Name, builtinpkg)
if sym == nil {
p.import_error()
}
}
fallthrough
case '@', '?':
// new_name_list ntype oliteral
fields := p.new_name_list(sym)
typ := p.ntype()
@@ -2288,25 +2150,6 @@ func (p *parser) interfacedcl() *Node {
ifacedcl(meth)
return meth
case '@', '?':
// MethodName Signature
//
// We arrive here when parsing an interface type declared inside
// an exported and inlineable function and the interface declares
// unexported methods (which are then package-qualified).
//
// Since the compiler always flattens embedded interfaces, we
// will never see an embedded package-qualified interface in export
// data; i.e., when we reach here we know it must be a method.
//
// See also issue 14164.
mname := newname(p.sym())
sig := p.signature(fakethis())
meth := Nod(ODCLFIELD, mname, sig)
ifacedcl(meth)
return meth
case '(':
p.next()
pname := p.packname(nil)
@@ -2334,10 +2177,10 @@ func (p *parser) param() (name *Sym, typ *Node) {
}
switch p.tok {
case LNAME, '@', '?':
name = p.sym() // nil if p.tok == '?' (importing only)
case LNAME:
name = p.sym()
switch p.tok {
case LCOMM, LFUNC, '[', LCHAN, LMAP, LSTRUCT, LINTERFACE, '*', LNAME, '@', '?', '(':
case LCOMM, LFUNC, '[', LCHAN, LMAP, LSTRUCT, LINTERFACE, '*', LNAME, '(':
// sym name_or_type
typ = p.ntype()
@@ -2423,13 +2266,7 @@ func (p *parser) param_list(dddOk bool) []*Node {
// explicit type: use type for earlier parameters
T = t
// an explicitly typed entry must have a name
// TODO(gri) remove extra importpkg == nil check below
// after switch to binary eport format
// Exported inlined function bodies containing function
// literals may print parameter names as '?' resulting
// in nil *Sym and thus nil names. Don't report an error
// in this case.
if p.name == nil && importpkg == nil {
if p.name == nil {
T = nil // error
}
} else {
@@ -2504,7 +2341,7 @@ func (p *parser) stmt() *Node {
case LVAR, LCONST, LTYPE:
return liststmt(p.common_dcl())
case LNAME, '@', '?', LLITERAL, LFUNC, '(', // operands
case LNAME, LLITERAL, LFUNC, '(', // operands
'[', LSTRUCT, LMAP, LCHAN, LINTERFACE, // composite types
'+', '-', '*', '&', '^', LCOMM, '!': // unary operators
return p.simple_stmt(true, false)
@@ -2720,634 +2557,3 @@ func (p *parser) ocomma(follow int32) bool {
p.advance(follow)
return false
}
// ----------------------------------------------------------------------------
// Importing packages
func (p *parser) import_error() {
p.syntax_error("in export data of imported package")
p.next()
}
// The methods below reflect a 1:1 translation of the original (and now defunct)
// go.y yacc productions. They could be simplified significantly and also use better
// variable names. However, we will be able to delete them once we enable the
// new export format by default, so it's not worth the effort (issue 13241).
func (p *parser) hidden_importsym() *Sym {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_importsym")()
}
p.want('@')
var s2 Val
if p.tok == LLITERAL {
s2 = p.val
p.next()
} else {
p.import_error()
}
p.want('.')
switch p.tok {
case LNAME:
s4 := p.sym_
p.next()
var p *Pkg
if s2.U.(string) == "" {
p = importpkg
} else {
if isbadimport(s2.U.(string)) {
errorexit()
}
p = mkpkg(s2.U.(string))
}
return Pkglookup(s4.Name, p)
case '?':
p.next()
var p *Pkg
if s2.U.(string) == "" {
p = importpkg
} else {
if isbadimport(s2.U.(string)) {
errorexit()
}
p = mkpkg(s2.U.(string))
}
return Pkglookup("?", p)
default:
p.import_error()
return nil
}
}
func (p *parser) ohidden_funarg_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("ohidden_funarg_list")()
}
var ss []*Node
if p.tok != ')' {
ss = p.hidden_funarg_list()
}
return ss
}
func (p *parser) ohidden_structdcl_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("ohidden_structdcl_list")()
}
var ss []*Node
if p.tok != '}' {
ss = p.hidden_structdcl_list()
}
return ss
}
func (p *parser) ohidden_interfacedcl_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("ohidden_interfacedcl_list")()
}
var ss []*Node
if p.tok != '}' {
ss = p.hidden_interfacedcl_list()
}
return ss
}
// import syntax from package header
func (p *parser) hidden_import() {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_import")()
}
switch p.tok {
case LIMPORT:
// LIMPORT LNAME LLITERAL ';'
p.next()
var s2 *Sym
if p.tok == LNAME {
s2 = p.sym_
p.next()
} else {
p.import_error()
}
var s3 Val
if p.tok == LLITERAL {
s3 = p.val
p.next()
} else {
p.import_error()
}
p.want(';')
importimport(s2, s3.U.(string))
case LVAR:
// LVAR hidden_pkg_importsym hidden_type ';'
p.next()
s2 := p.hidden_pkg_importsym()
s3 := p.hidden_type()
p.want(';')
importvar(s2, s3)
case LCONST:
// LCONST hidden_pkg_importsym '=' hidden_constant ';'
// LCONST hidden_pkg_importsym hidden_type '=' hidden_constant ';'
p.next()
s2 := p.hidden_pkg_importsym()
var s3 *Type = Types[TIDEAL]
if p.tok != '=' {
s3 = p.hidden_type()
}
p.want('=')
s4 := p.hidden_constant()
p.want(';')
importconst(s2, s3, s4)
case LTYPE:
// LTYPE hidden_pkgtype hidden_type ';'
p.next()
s2 := p.hidden_pkgtype()
s3 := p.hidden_type()
p.want(';')
importtype(s2, s3)
case LFUNC:
// LFUNC hidden_fndcl fnbody ';'
p.next()
s2 := p.hidden_fndcl()
s3 := p.fnbody()
p.want(';')
if s2 == nil {
dclcontext = PEXTERN // since we skip the funcbody below
return
}
s2.Func.Inl.Set(s3)
funcbody(s2)
importlist = append(importlist, s2)
if Debug['E'] > 0 {
fmt.Printf("import [%q] func %v \n", importpkg.Path, s2)
if Debug['m'] > 2 && s2.Func.Inl.Len() != 0 {
fmt.Printf("inl body:%v\n", s2.Func.Inl)
}
}
default:
p.import_error()
}
}
func (p *parser) hidden_pkg_importsym() *Sym {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_pkg_importsym")()
}
s := p.hidden_importsym()
p.structpkg = s.Pkg
return s
}
func (p *parser) hidden_pkgtype() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_pkgtype")()
}
return pkgtype(p.hidden_pkg_importsym())
}
// ----------------------------------------------------------------------------
// Importing types
func (p *parser) hidden_type() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_type")()
}
switch p.tok {
default:
return p.hidden_type_misc()
case LCOMM:
return p.hidden_type_recv_chan()
case LFUNC:
return p.hidden_type_func()
}
}
func (p *parser) hidden_type_non_recv_chan() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_type_non_recv_chan")()
}
switch p.tok {
default:
return p.hidden_type_misc()
case LFUNC:
return p.hidden_type_func()
}
}
func (p *parser) hidden_type_misc() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_type_misc")()
}
switch p.tok {
case '@':
// hidden_importsym
s1 := p.hidden_importsym()
return pkgtype(s1)
case LNAME:
// LNAME
s1 := p.sym_
p.next()
// predefined name like uint8
s1 = Pkglookup(s1.Name, builtinpkg)
if s1.Def == nil || s1.Def.Op != OTYPE {
Yyerror("%s is not a type", s1.Name)
return nil
} else {
return s1.Def.Type
}
case '[':
// '[' ']' hidden_type
// '[' LLITERAL ']' hidden_type
p.next()
var s2 *Node
if p.tok == LLITERAL {
s2 = nodlit(p.val)
p.next()
}
p.want(']')
s4 := p.hidden_type()
return aindex(s2, s4)
case LMAP:
// LMAP '[' hidden_type ']' hidden_type
p.next()
p.want('[')
s3 := p.hidden_type()
p.want(']')
s5 := p.hidden_type()
return typMap(s3, s5)
case LSTRUCT:
// LSTRUCT '{' ohidden_structdcl_list '}'
p.next()
p.want('{')
s3 := p.ohidden_structdcl_list()
p.want('}')
return tostruct(s3)
case LINTERFACE:
// LINTERFACE '{' ohidden_interfacedcl_list '}'
p.next()
p.want('{')
s3 := p.ohidden_interfacedcl_list()
p.want('}')
return tointerface(s3)
case '*':
// '*' hidden_type
p.next()
s2 := p.hidden_type()
return Ptrto(s2)
case LCHAN:
p.next()
switch p.tok {
default:
// LCHAN hidden_type_non_recv_chan
s2 := p.hidden_type_non_recv_chan()
ss := typChan(s2, Cboth)
return ss
case '(':
// LCHAN '(' hidden_type_recv_chan ')'
p.next()
s3 := p.hidden_type_recv_chan()
p.want(')')
ss := typChan(s3, Cboth)
return ss
case LCOMM:
// LCHAN hidden_type
p.next()
s3 := p.hidden_type()
ss := typChan(s3, Csend)
return ss
}
default:
p.import_error()
return nil
}
}
func (p *parser) hidden_type_recv_chan() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_type_recv_chan")()
}
p.want(LCOMM)
p.want(LCHAN)
s3 := p.hidden_type()
ss := typChan(s3, Crecv)
return ss
}
func (p *parser) hidden_type_func() *Type {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_type_func")()
}
p.want(LFUNC)
p.want('(')
s3 := p.ohidden_funarg_list()
p.want(')')
s5 := p.ohidden_funres()
return functype(nil, s3, s5)
}
func (p *parser) hidden_funarg() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_funarg")()
}
s1 := p.sym()
switch p.tok {
default:
s2 := p.hidden_type()
s3 := p.oliteral()
ss := Nod(ODCLFIELD, nil, typenod(s2))
if s1 != nil {
ss.Left = newname(s1)
}
ss.SetVal(s3)
return ss
case LDDD:
p.next()
s3 := p.hidden_type()
s4 := p.oliteral()
t := typSlice(s3)
ss := Nod(ODCLFIELD, nil, typenod(t))
if s1 != nil {
ss.Left = newname(s1)
}
ss.Isddd = true
ss.SetVal(s4)
return ss
}
}
func (p *parser) hidden_structdcl() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_structdcl")()
}
s1 := p.sym()
s2 := p.hidden_type()
s3 := p.oliteral()
var ss *Node
if s1 != nil && s1.Name != "?" {
ss = Nod(ODCLFIELD, newname(s1), typenod(s2))
ss.SetVal(s3)
} else {
s := s2.Sym
if s == nil && s2.IsPtr() {
s = s2.Elem().Sym
}
pkg := importpkg
if s1 != nil {
pkg = s1.Pkg
}
ss = embedded(s, pkg)
ss.Right = typenod(s2)
ss.SetVal(s3)
}
return ss
}
func (p *parser) hidden_interfacedcl() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_interfacedcl")()
}
// The original (now defunct) grammar in go.y accepted both a method
// or an (embedded) type:
//
// hidden_interfacedcl:
// sym '(' ohidden_funarg_list ')' ohidden_funres
// {
// $$ = Nod(ODCLFIELD, newname($1), typenod(functype(fakethis(), $3, $5)));
// }
// | hidden_type
// {
// $$ = Nod(ODCLFIELD, nil, typenod($1));
// }
//
// But the current textual export code only exports (inlined) methods,
// even if the methods came from embedded interfaces. Furthermore, in
// the original grammar, hidden_type may also start with a sym (LNAME
// or '@'), complicating matters further. Since we never have embedded
// types, only parse methods here.
s1 := p.sym()
p.want('(')
s3 := p.ohidden_funarg_list()
p.want(')')
s5 := p.ohidden_funres()
return Nod(ODCLFIELD, newname(s1), typenod(functype(fakethis(), s3, s5)))
}
func (p *parser) ohidden_funres() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("ohidden_funres")()
}
switch p.tok {
default:
return nil
case '(', '@', LNAME, '[', LMAP, LSTRUCT, LINTERFACE, '*', LCHAN, LCOMM, LFUNC:
return p.hidden_funres()
}
}
func (p *parser) hidden_funres() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_funres")()
}
switch p.tok {
case '(':
p.next()
s2 := p.ohidden_funarg_list()
p.want(')')
return s2
default:
s1 := p.hidden_type()
return []*Node{Nod(ODCLFIELD, nil, typenod(s1))}
}
}
// ----------------------------------------------------------------------------
// Importing constants
func (p *parser) hidden_literal() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_literal")()
}
switch p.tok {
case LLITERAL:
ss := nodlit(p.val)
p.next()
return ss
case '-':
p.next()
if p.tok == LLITERAL {
ss := nodlit(p.val)
p.next()
switch u := ss.Val().U.(type) {
case *Mpint:
u.Neg()
case *Mpflt:
u.Neg()
case *Mpcplx:
u.Real.Neg()
u.Imag.Neg()
default:
Yyerror("bad negated constant")
}
return ss
} else {
p.import_error()
return nil
}
case LNAME, '@', '?':
s1 := p.sym()
ss := oldname(Pkglookup(s1.Name, builtinpkg))
if ss.Op != OLITERAL {
Yyerror("bad constant %v", ss.Sym)
}
return ss
default:
p.import_error()
return nil
}
}
func (p *parser) hidden_constant() *Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_constant")()
}
switch p.tok {
default:
return p.hidden_literal()
case '(':
p.next()
s2 := p.hidden_literal()
p.want('+')
s4 := p.hidden_literal()
p.want(')')
if s2.Val().Ctype() == CTRUNE && s4.Val().Ctype() == CTINT {
ss := s2
s2.Val().U.(*Mpint).Add(s4.Val().U.(*Mpint))
return ss
}
s4.Val().U.(*Mpcplx).Real = s4.Val().U.(*Mpcplx).Imag
s4.Val().U.(*Mpcplx).Imag.SetFloat64(0.0)
return nodcplxlit(s2.Val(), s4.Val())
}
}
func (p *parser) hidden_import_list() {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_import_list")()
}
for p.tok != '$' {
p.hidden_import()
}
}
func (p *parser) hidden_funarg_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_funarg_list")()
}
s1 := p.hidden_funarg()
ss := []*Node{s1}
for p.got(',') {
s3 := p.hidden_funarg()
ss = append(ss, s3)
}
return ss
}
func (p *parser) hidden_structdcl_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_structdcl_list")()
}
s1 := p.hidden_structdcl()
ss := []*Node{s1}
for p.got(';') {
s3 := p.hidden_structdcl()
ss = append(ss, s3)
}
return ss
}
func (p *parser) hidden_interfacedcl_list() []*Node {
if trace && Debug['x'] != 0 {
defer p.trace("hidden_interfacedcl_list")()
}
s1 := p.hidden_interfacedcl()
ss := []*Node{s1}
for p.got(';') {
s3 := p.hidden_interfacedcl()
ss = append(ss, s3)
}
return ss
}

View File

@@ -7,21 +7,13 @@ package gcimporter // import "go/internal/gcimporter"
import (
"bufio"
"errors"
"fmt"
"go/build"
"go/token"
"io"
"go/types"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"text/scanner"
exact "go/constant"
"go/types"
)
// debugging/development support
@@ -86,38 +78,6 @@ func FindPkg(path, srcDir string) (filename, id string) {
return
}
// ImportData imports a package by reading the gc-generated export data,
// adds the corresponding package object to the packages map indexed by id,
// and returns the object.
//
// The packages map must contains all packages already imported. The data
// reader position must be the beginning of the export data section. The
// filename is only used in error messages.
//
// If packages[id] contains the completely imported package, that package
// can be used directly, and there is no need to call this function (but
// there is also no harm but for extra time used).
//
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
// support for parser error handling
defer func() {
switch r := recover().(type) {
case nil:
// nothing to do
case importError:
err = r
default:
panic(r) // internal error
}
}()
var p parser
p.init(filename, id, data, packages)
pkg = p.parseExport()
return
}
// Import imports a gc-generated package given its import path and srcDir, adds
// the corresponding package object to the packages map, and returns the object.
// The packages map must contain all packages already imported.
@@ -158,7 +118,7 @@ func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types
switch hdr {
case "$$\n":
return ImportData(packages, filename, id, buf)
err = fmt.Errorf("cannot import %s: old export format no longer supported (recompile library)", path)
case "$$B\n":
var data []byte
data, err = ioutil.ReadAll(buf)
@@ -173,312 +133,6 @@ func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types
return
}
// ----------------------------------------------------------------------------
// Parser
// TODO(gri) Imported objects don't have position information.
// Ideally use the debug table line info; alternatively
// create some fake position (or the position of the
// import). That way error messages referring to imported
// objects can print meaningful information.
// parser parses the exports inside a gc compiler-produced
// object/archive file and populates its scope with the results.
type parser struct {
scanner scanner.Scanner
tok rune // current token
lit string // literal string; only valid for Ident, Int, String tokens
id string // package id of imported package
sharedPkgs map[string]*types.Package // package id -> package object (across importer)
localPkgs map[string]*types.Package // package id -> package object (just this package)
}
func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
p.scanner.Init(src)
p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
p.scanner.Whitespace = 1<<'\t' | 1<<' '
p.scanner.Filename = filename // for good error messages
p.next()
p.id = id
p.sharedPkgs = packages
if debug {
// check consistency of packages map
for _, pkg := range packages {
if pkg.Name() == "" {
fmt.Printf("no package name for %s\n", pkg.Path())
}
}
}
}
func (p *parser) next() {
p.tok = p.scanner.Scan()
switch p.tok {
case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
p.lit = p.scanner.TokenText()
default:
p.lit = ""
}
if debug {
fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
}
}
func declTypeName(pkg *types.Package, name string) *types.TypeName {
scope := pkg.Scope()
if obj := scope.Lookup(name); obj != nil {
return obj.(*types.TypeName)
}
obj := types.NewTypeName(token.NoPos, pkg, name, nil)
// a named type may be referred to before the underlying type
// is known - set it up
types.NewNamed(obj, nil, nil)
scope.Insert(obj)
return obj
}
// ----------------------------------------------------------------------------
// Error handling
// Internal errors are boxed as importErrors.
type importError struct {
pos scanner.Position
err error
}
func (e importError) Error() string {
return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
}
func (p *parser) error(err interface{}) {
if s, ok := err.(string); ok {
err = errors.New(s)
}
// panic with a runtime.Error if err is not an error
panic(importError{p.scanner.Pos(), err.(error)})
}
func (p *parser) errorf(format string, args ...interface{}) {
p.error(fmt.Sprintf(format, args...))
}
func (p *parser) expect(tok rune) string {
lit := p.lit
if p.tok != tok {
p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
}
p.next()
return lit
}
func (p *parser) expectSpecial(tok string) {
sep := 'x' // not white space
i := 0
for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
p.next()
i++
}
if i < len(tok) {
p.errorf("expected %q, got %q", tok, tok[0:i])
}
}
func (p *parser) expectKeyword(keyword string) {
lit := p.expect(scanner.Ident)
if lit != keyword {
p.errorf("expected keyword %s, got %q", keyword, lit)
}
}
// ----------------------------------------------------------------------------
// Qualified and unqualified names
// PackageId = string_lit .
//
func (p *parser) parsePackageId() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
p.error(err)
}
// id == "" stands for the imported package id
// (only known at time of package installation)
if id == "" {
id = p.id
}
return id
}
// PackageName = ident .
//
func (p *parser) parsePackageName() string {
return p.expect(scanner.Ident)
}
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
func (p *parser) parseDotIdent() string {
ident := ""
if p.tok != scanner.Int {
sep := 'x' // not white space
for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
ident += p.lit
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
p.next()
}
}
if ident == "" {
p.expect(scanner.Ident) // use expect() for error handling
}
return ident
}
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
//
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
id = p.parsePackageId()
p.expect('.')
// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
if p.tok == '?' {
p.next()
} else {
name = p.parseDotIdent()
}
return
}
// getPkg returns the package for a given id. If the package is
// not found, create the package and add it to the p.localPkgs
// and p.sharedPkgs maps. name is the (expected) name of the
// package. If name == "", the package name is expected to be
// set later via an import clause in the export data.
//
// id identifies a package, usually by a canonical package path like
// "encoding/json" but possibly by a non-canonical import path like
// "./json".
//
func (p *parser) getPkg(id, name string) *types.Package {
// package unsafe is not in the packages maps - handle explicitly
if id == "unsafe" {
return types.Unsafe
}
pkg := p.localPkgs[id]
if pkg == nil {
// first import of id from this package
pkg = p.sharedPkgs[id]
if pkg == nil {
// first import of id by this importer;
// add (possibly unnamed) pkg to shared packages
pkg = types.NewPackage(id, name)
p.sharedPkgs[id] = pkg
}
// add (possibly unnamed) pkg to local packages
if p.localPkgs == nil {
p.localPkgs = make(map[string]*types.Package)
}
p.localPkgs[id] = pkg
} else if name != "" {
// package exists already and we have an expected package name;
// make sure names match or set package name if necessary
if pname := pkg.Name(); pname == "" {
pkg.SetName(name)
} else if pname != name {
p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
}
}
return pkg
}
// parseExportedName is like parseQualifiedName, but
// the package id is resolved to an imported *types.Package.
//
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
id, name := p.parseQualifiedName()
pkg = p.getPkg(id, "")
return
}
// ----------------------------------------------------------------------------
// Types
// BasicType = identifier .
//
func (p *parser) parseBasicType() types.Type {
id := p.expect(scanner.Ident)
obj := types.Universe.Lookup(id)
if obj, ok := obj.(*types.TypeName); ok {
return obj.Type()
}
p.errorf("not a basic type: %s", id)
return nil
}
// ArrayType = "[" int_lit "]" Type .
//
func (p *parser) parseArrayType(parent *types.Package) types.Type {
// "[" already consumed and lookahead known not to be "]"
lit := p.expect(scanner.Int)
p.expect(']')
elem := p.parseType(parent)
n, err := strconv.ParseInt(lit, 10, 64)
if err != nil {
p.error(err)
}
return types.NewArray(elem, n)
}
// MapType = "map" "[" Type "]" Type .
//
func (p *parser) parseMapType(parent *types.Package) types.Type {
p.expectKeyword("map")
p.expect('[')
key := p.parseType(parent)
p.expect(']')
elem := p.parseType(parent)
return types.NewMap(key, elem)
}
// Name = identifier | "?" | QualifiedName .
//
// For unqualified and anonymous names, the returned package is the parent
// package unless parent == nil, in which case the returned package is the
// package being imported. (The parent package is not nil if the the name
// is an unqualified struct field or interface method name belonging to a
// type declared in another package.)
//
// For qualified names, the returned package is nil (and not created if
// it doesn't exist yet) unless materializePkg is set (which creates an
// unnamed package with valid package path). In the latter case, a
// subsequent import clause is expected to provide a name for the package.
//
func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
pkg = parent
if pkg == nil {
pkg = p.sharedPkgs[p.id]
}
switch p.tok {
case scanner.Ident:
name = p.lit
p.next()
case '?':
// anonymous
p.next()
case '@':
// exported name prefixed with package path
pkg = nil
var id string
id, name = p.parseQualifiedName()
if materializePkg {
pkg = p.getPkg(id, "")
}
default:
p.error("name expected")
}
return
}
func deref(typ types.Type) types.Type {
if p, _ := typ.(*types.Pointer); p != nil {
return p.Elem()
@@ -486,531 +140,6 @@ func deref(typ types.Type) types.Type {
return typ
}
// Field = Name Type [ string_lit ] .
//
func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
pkg, name := p.parseName(parent, true)
typ := p.parseType(parent)
anonymous := false
if name == "" {
// anonymous field - typ must be T or *T and T must be a type name
switch typ := deref(typ).(type) {
case *types.Basic: // basic types are named types
pkg = nil // objects defined in Universe scope have no package
name = typ.Name()
case *types.Named:
name = typ.Obj().Name()
default:
p.errorf("anonymous field expected")
}
anonymous = true
}
tag := ""
if p.tok == scanner.String {
s := p.expect(scanner.String)
var err error
tag, err = strconv.Unquote(s)
if err != nil {
p.errorf("invalid struct tag %s: %s", s, err)
}
}
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
}
// StructType = "struct" "{" [ FieldList ] "}" .
// FieldList = Field { ";" Field } .
//
func (p *parser) parseStructType(parent *types.Package) types.Type {
var fields []*types.Var
var tags []string
p.expectKeyword("struct")
p.expect('{')
for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
if i > 0 {
p.expect(';')
}
fld, tag := p.parseField(parent)
if tag != "" && tags == nil {
tags = make([]string, i)
}
if tags != nil {
tags = append(tags, tag)
}
fields = append(fields, fld)
}
p.expect('}')
return types.NewStruct(fields, tags)
}
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
//
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
_, name := p.parseName(nil, false)
// remove gc-specific parameter numbering
if i := strings.Index(name, "·"); i >= 0 {
name = name[:i]
}
if p.tok == '.' {
p.expectSpecial("...")
isVariadic = true
}
typ := p.parseType(nil)
if isVariadic {
typ = types.NewSlice(typ)
}
// ignore argument tag (e.g. "noescape")
if p.tok == scanner.String {
p.next()
}
// TODO(gri) should we provide a package?
par = types.NewVar(token.NoPos, nil, name, typ)
return
}
// Parameters = "(" [ ParameterList ] ")" .
// ParameterList = { Parameter "," } Parameter .
//
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
p.expect('(')
for p.tok != ')' && p.tok != scanner.EOF {
if len(list) > 0 {
p.expect(',')
}
par, variadic := p.parseParameter()
list = append(list, par)
if variadic {
if isVariadic {
p.error("... not on final argument")
}
isVariadic = true
}
}
p.expect(')')
return
}
// Signature = Parameters [ Result ] .
// Result = Type | Parameters .
//
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
params, isVariadic := p.parseParameters()
// optional result type
var results []*types.Var
if p.tok == '(' {
var variadic bool
results, variadic = p.parseParameters()
if variadic {
p.error("... not permitted on result type")
}
}
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
}
// InterfaceType = "interface" "{" [ MethodList ] "}" .
// MethodList = Method { ";" Method } .
// Method = Name Signature .
//
// The methods of embedded interfaces are always "inlined"
// by the compiler and thus embedded interfaces are never
// visible in the export data.
//
func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
var methods []*types.Func
p.expectKeyword("interface")
p.expect('{')
for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
if i > 0 {
p.expect(';')
}
pkg, name := p.parseName(parent, true)
sig := p.parseSignature(nil)
methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
}
p.expect('}')
// Complete requires the type's embedded interfaces to be fully defined,
// but we do not define any
return types.NewInterface(methods, nil).Complete()
}
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
//
func (p *parser) parseChanType(parent *types.Package) types.Type {
dir := types.SendRecv
if p.tok == scanner.Ident {
p.expectKeyword("chan")
if p.tok == '<' {
p.expectSpecial("<-")
dir = types.SendOnly
}
} else {
p.expectSpecial("<-")
p.expectKeyword("chan")
dir = types.RecvOnly
}
elem := p.parseType(parent)
return types.NewChan(dir, elem)
}
// Type =
// BasicType | TypeName | ArrayType | SliceType | StructType |
// PointerType | FuncType | InterfaceType | MapType | ChanType |
// "(" Type ")" .
//
// BasicType = ident .
// TypeName = ExportedName .
// SliceType = "[" "]" Type .
// PointerType = "*" Type .
// FuncType = "func" Signature .
//
func (p *parser) parseType(parent *types.Package) types.Type {
switch p.tok {
case scanner.Ident:
switch p.lit {
default:
return p.parseBasicType()
case "struct":
return p.parseStructType(parent)
case "func":
// FuncType
p.next()
return p.parseSignature(nil)
case "interface":
return p.parseInterfaceType(parent)
case "map":
return p.parseMapType(parent)
case "chan":
return p.parseChanType(parent)
}
case '@':
// TypeName
pkg, name := p.parseExportedName()
return declTypeName(pkg, name).Type()
case '[':
p.next() // look ahead
if p.tok == ']' {
// SliceType
p.next()
return types.NewSlice(p.parseType(parent))
}
return p.parseArrayType(parent)
case '*':
// PointerType
p.next()
return types.NewPointer(p.parseType(parent))
case '<':
return p.parseChanType(parent)
case '(':
// "(" Type ")"
p.next()
typ := p.parseType(parent)
p.expect(')')
return typ
}
p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
return nil
}
// ----------------------------------------------------------------------------
// Declarations
// ImportDecl = "import" PackageName PackageId .
//
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
p.getPkg(p.parsePackageId(), name)
}
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
//
func (p *parser) parseInt() string {
s := ""
switch p.tok {
case '-':
s = "-"
p.next()
case '+':
p.next()
}
return s + p.expect(scanner.Int)
}
// number = int_lit [ "p" int_lit ] .
//
func (p *parser) parseNumber() (typ *types.Basic, val exact.Value) {
// mantissa
mant := exact.MakeFromLiteral(p.parseInt(), token.INT, 0)
if mant == nil {
panic("invalid mantissa")
}
if p.lit == "p" {
// exponent (base 2)
p.next()
exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
if err != nil {
p.error(err)
}
if exp < 0 {
denom := exact.MakeInt64(1)
denom = exact.Shift(denom, token.SHL, uint(-exp))
typ = types.Typ[types.UntypedFloat]
val = exact.BinaryOp(mant, token.QUO, denom)
return
}
if exp > 0 {
mant = exact.Shift(mant, token.SHL, uint(exp))
}
typ = types.Typ[types.UntypedFloat]
val = mant
return
}
typ = types.Typ[types.UntypedInt]
val = mant
return
}
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
// bool_lit = "true" | "false" .
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
// rune_lit = "(" int_lit "+" int_lit ")" .
// string_lit = `"` { unicode_char } `"` .
//
func (p *parser) parseConstDecl() {
p.expectKeyword("const")
pkg, name := p.parseExportedName()
var typ0 types.Type
if p.tok != '=' {
// constant types are never structured - no need for parent type
typ0 = p.parseType(nil)
}
p.expect('=')
var typ types.Type
var val exact.Value
switch p.tok {
case scanner.Ident:
// bool_lit
if p.lit != "true" && p.lit != "false" {
p.error("expected true or false")
}
typ = types.Typ[types.UntypedBool]
val = exact.MakeBool(p.lit == "true")
p.next()
case '-', scanner.Int:
// int_lit
typ, val = p.parseNumber()
case '(':
// complex_lit or rune_lit
p.next()
if p.tok == scanner.Char {
p.next()
p.expect('+')
typ = types.Typ[types.UntypedRune]
_, val = p.parseNumber()
p.expect(')')
break
}
_, re := p.parseNumber()
p.expect('+')
_, im := p.parseNumber()
p.expectKeyword("i")
p.expect(')')
typ = types.Typ[types.UntypedComplex]
val = exact.BinaryOp(re, token.ADD, exact.MakeImag(im))
case scanner.Char:
// rune_lit
typ = types.Typ[types.UntypedRune]
val = exact.MakeFromLiteral(p.lit, token.CHAR, 0)
p.next()
case scanner.String:
// string_lit
typ = types.Typ[types.UntypedString]
val = exact.MakeFromLiteral(p.lit, token.STRING, 0)
p.next()
default:
p.errorf("expected literal got %s", scanner.TokenString(p.tok))
}
if typ0 == nil {
typ0 = typ
}
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
}
// TypeDecl = "type" ExportedName Type .
//
func (p *parser) parseTypeDecl() {
p.expectKeyword("type")
pkg, name := p.parseExportedName()
obj := declTypeName(pkg, name)
// The type object may have been imported before and thus already
// have a type associated with it. We still need to parse the type
// structure, but throw it away if the object already has a type.
// This ensures that all imports refer to the same type object for
// a given type declaration.
typ := p.parseType(pkg)
if name := obj.Type().(*types.Named); name.Underlying() == nil {
name.SetUnderlying(typ)
}
}
// VarDecl = "var" ExportedName Type .
//
func (p *parser) parseVarDecl() {
p.expectKeyword("var")
pkg, name := p.parseExportedName()
typ := p.parseType(pkg)
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
}
// Func = Signature [ Body ] .
// Body = "{" ... "}" .
//
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
sig := p.parseSignature(recv)
if p.tok == '{' {
p.next()
for i := 1; i > 0; p.next() {
switch p.tok {
case '{':
i++
case '}':
i--
}
}
}
return sig
}
// MethodDecl = "func" Receiver Name Func .
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
//
func (p *parser) parseMethodDecl() {
// "func" already consumed
p.expect('(')
recv, _ := p.parseParameter() // receiver
p.expect(')')
// determine receiver base type object
base := deref(recv.Type()).(*types.Named)
// parse method name, signature, and possibly inlined body
_, name := p.parseName(nil, false)
sig := p.parseFunc(recv)
// methods always belong to the same package as the base type object
pkg := base.Obj().Pkg()
// add method to type unless type was imported before
// and method exists already
// TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
}
// FuncDecl = "func" ExportedName Func .
//
func (p *parser) parseFuncDecl() {
// "func" already consumed
pkg, name := p.parseExportedName()
typ := p.parseFunc(nil)
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
}
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
//
func (p *parser) parseDecl() {
if p.tok == scanner.Ident {
switch p.lit {
case "import":
p.parseImportDecl()
case "const":
p.parseConstDecl()
case "type":
p.parseTypeDecl()
case "var":
p.parseVarDecl()
case "func":
p.next() // look ahead
if p.tok == '(' {
p.parseMethodDecl()
} else {
p.parseFuncDecl()
}
}
}
p.expect('\n')
}
// ----------------------------------------------------------------------------
// Export
// Export = "PackageClause { Decl } "$$" .
// PackageClause = "package" PackageName [ "safe" ] "\n" .
//
func (p *parser) parseExport() *types.Package {
p.expectKeyword("package")
name := p.parsePackageName()
if p.tok == scanner.Ident && p.lit == "safe" {
// package was compiled with -u option - ignore
p.next()
}
p.expect('\n')
pkg := p.getPkg(p.id, name)
for p.tok != '$' && p.tok != scanner.EOF {
p.parseDecl()
}
if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
// don't call next()/expect() since reading past the
// export data may cause scanner errors (e.g. NUL chars)
p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
}
if n := p.scanner.ErrorCount; n != 0 {
p.errorf("expected no scanner errors, got %d", n)
}
// Record all locally referenced packages as imports.
var imports []*types.Package
for id, pkg2 := range p.localPkgs {
if pkg2.Name() == "" {
p.errorf("%s package has no name", id)
}
if id == p.id {
continue // avoid self-edge
}
imports = append(imports, pkg2)
}
sort.Sort(byPath(imports))
pkg.SetImports(imports)
// package was imported completely and without errors
pkg.MarkComplete()
return pkg
}
type byPath []*types.Package
func (a byPath) Len() int { return len(a) }
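A small usage sketch of the changed entry point (hypothetical caller and import path; the error text is the new message from the Import hunk above). Since go/internal/gcimporter is an internal package, such a caller only compiles inside the go/... tree:

	package gcimportersketch

	import (
		"fmt"
		"go/internal/gcimporter"
		"go/types"
	)

	func importOldLib() {
		packages := make(map[string]*types.Package)
		// A package whose export data is still in the old textual format is
		// no longer parsed; Import now returns an error instead.
		pkg, err := gcimporter.Import(packages, "oldlib", ".")
		if err != nil {
			// e.g. "cannot import oldlib: old export format no longer supported (recompile library)"
			fmt.Println(err)
			return
		}
		fmt.Println(pkg.Path())
	}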

View File

@@ -46,20 +46,6 @@ func compile(t *testing.T, dirname, filename string) string {
return filepath.Join(dirname, filename[:len(filename)-2]+"o")
}
// TODO(gri) Remove this function once we switched to new export format by default.
func compileNewExport(t *testing.T, dirname, filename string) string {
testenv.MustHaveGoBuild(t)
cmd := exec.Command("go", "tool", "compile", "-newexport", filename)
cmd.Dir = dirname
out, err := cmd.CombinedOutput()
if err != nil {
t.Logf("%s", out)
t.Fatalf("go tool compile %s failed: %s", filename, err)
}
// filename should end with ".go"
return filepath.Join(dirname, filename[:len(filename)-2]+"o")
}
func testPath(t *testing.T, path, srcDir string) *types.Package {
t0 := time.Now()
pkg, err := Import(make(map[string]*types.Package), path, srcDir)
@@ -121,6 +107,8 @@ func TestImportTestdata(t *testing.T) {
// additional packages that are not strictly required for
// import processing alone (they are exported to err "on
// the safe side").
// TODO(gri) update the want list to be precise, now that
// the textual export data is gone.
got := fmt.Sprint(pkg.Imports())
for _, want := range []string{"go/ast", "go/token"} {
if !strings.Contains(got, want) {
@@ -130,31 +118,6 @@ func TestImportTestdata(t *testing.T) {
}
}
// TODO(gri) Remove this function once we switched to new export format by default
// (and update the comment and want list in TestImportTestdata).
func TestImportTestdataNewExport(t *testing.T) {
// This package only handles gc export data.
if runtime.Compiler != "gc" {
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
return
}
if outFn := compileNewExport(t, "testdata", "exports.go"); outFn != "" {
defer os.Remove(outFn)
}
if pkg := testPath(t, "./testdata/exports", "."); pkg != nil {
// The package's Imports list must include all packages
// explicitly imported by exports.go, plus all packages
// referenced indirectly via exported objects in exports.go.
want := `[package ast ("go/ast") package token ("go/token")]`
got := fmt.Sprint(pkg.Imports())
if got != want {
t.Errorf(`Package("exports").Imports() = %s, want %s`, got, want)
}
}
}
func TestImportStdLib(t *testing.T) {
skipSpecialPlatforms(t)