
go/internal/gccgoimporter: update package to match std lib version

This CL updates the importer to match the original code in the std lib
but for the necessary changes to make the code work in x/tools and
with older versions of the std lib.

Notably, it brings over changes from:

   https://go-review.googlesource.com/c/152078
   https://go-review.googlesource.com/c/152077
   https://golang.org/cl/151997
   https://golang.org/cl/151557
   https://golang.org/cl/149957

including test fixes (we want tests to run when gccgo is available,
not just when all go tools are gccgo-based), bug fixes (primarily
related to aliases), performance enhancements, and new code to read
the V3 export data emitted by the most recent gccgo.

Change-Id: I2d34bace23769e62795599b93db8295169076594
Reviewed-on: https://go-review.googlesource.com/c/151717
Run-TryBot: Than McIntosh <thanm@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Robert Griesemer <gri@golang.org>
Than McIntosh 2018-11-29 11:37:58 -05:00
parent 34bb05f9d8
commit e51e3523bf
7 changed files with 382 additions and 99 deletions


@@ -9,10 +9,14 @@ package gccgoimporter
 
 import (
 	"go/types"
-	"runtime"
 	"testing"
 )
 
+// importablePackages is a list of packages that we verify that we can
+// import. This should be all standard library packages in all relevant
+// versions of gccgo. Note that since gccgo follows a different release
+// cycle, and since different systems have different versions installed,
+// we can't use the last-two-versions rule of the gc toolchain.
 var importablePackages = [...]string{
 	"archive/tar",
 	"archive/zip",
@@ -59,7 +63,7 @@ var importablePackages = [...]string{
 	"encoding/binary",
 	"encoding/csv",
 	"encoding/gob",
-	"encoding",
+	// "encoding", // Added in GCC 4.9.
 	"encoding/hex",
 	"encoding/json",
 	"encoding/pem",
@@ -71,7 +75,7 @@ var importablePackages = [...]string{
 	"go/ast",
 	"go/build",
 	"go/doc",
-	"go/format",
+	// "go/format", // Added in GCC 4.8.
 	"go/parser",
 	"go/printer",
 	"go/scanner",
@@ -84,7 +88,7 @@ var importablePackages = [...]string{
 	"html",
 	"html/template",
 	"image/color",
-	"image/color/palette",
+	// "image/color/palette", // Added in GCC 4.9.
 	"image/draw",
 	"image/gif",
 	"image",
@@ -103,7 +107,7 @@ var importablePackages = [...]string{
 	"mime/multipart",
 	"net",
 	"net/http/cgi",
-	"net/http/cookiejar",
+	// "net/http/cookiejar", // Added in GCC 4.8.
 	"net/http/fcgi",
 	"net/http",
 	"net/http/httptest",
@@ -147,14 +151,14 @@ var importablePackages = [...]string{
 }
 
 func TestInstallationImporter(t *testing.T) {
-	// This test relies on gccgo being around, which it most likely will be if we
-	// were compiled with gccgo.
-	if runtime.Compiler != "gccgo" {
+	// This test relies on gccgo being around.
+	gpath := gccgoPath()
+	if gpath == "" {
 		t.Skip("This test needs gccgo")
 	}
 
 	var inst GccgoInstallation
-	err := inst.InitFromDriver("gccgo")
+	err := inst.InitFromDriver(gpath)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -179,12 +183,12 @@ func TestInstallationImporter(t *testing.T) {
 
 	// Test for certain specific entities in the imported data.
 	for _, test := range [...]importerTest{
-		{pkgpath: "io", name: "Reader", want: "type Reader interface{Read(p []uint8) (n int, err error)}"},
+		{pkgpath: "io", name: "Reader", want: "type Reader interface{Read(p []byte) (n int, err error)}"},
 		{pkgpath: "io", name: "ReadWriter", want: "type ReadWriter interface{Reader; Writer}"},
 		{pkgpath: "math", name: "Pi", want: "const Pi untyped float"},
 		{pkgpath: "math", name: "Sin", want: "func Sin(x float64) float64"},
 		{pkgpath: "sort", name: "Ints", want: "func Ints(a []int)"},
-		{pkgpath: "unsafe", name: "Pointer", want: "type Pointer unsafe.Pointer"},
+		{pkgpath: "unsafe", name: "Pointer", want: "type Pointer"},
 	} {
 		runImporterTest(t, imp, nil, &test)
 	}


@@ -65,6 +65,7 @@ func findExportFile(searchpaths []string, pkgpath string) (string, error) {
 const (
 	gccgov1Magic    = "v1;\n"
 	gccgov2Magic    = "v2;\n"
+	gccgov3Magic    = "v3;\n"
 	goimporterMagic = "\n$$ "
 	archiveMagic    = "!<ar"
 )
@@ -93,7 +94,7 @@ func openExportFile(fpath string) (reader io.ReadSeeker, closer io.Closer, err e
 
 	var elfreader io.ReaderAt
 	switch string(magic[:]) {
-	case gccgov1Magic, gccgov2Magic, goimporterMagic:
+	case gccgov1Magic, gccgov2Magic, gccgov3Magic, goimporterMagic:
 		// Raw export data.
 		reader = f
 		return
@@ -198,7 +199,7 @@ func GetImporter(searchpaths []string, initmap map[*types.Package]InitData) Impo
 		}
 
 		switch magics {
-		case gccgov1Magic, gccgov2Magic:
+		case gccgov1Magic, gccgov2Magic, gccgov3Magic:
 			var p parser
 			p.init(fpath, reader, imports)
 			pkg = p.parsePackage()

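As a standalone illustration (not code from this CL): the export-data format is selected by the first four bytes of the file, which is what the gccgov3Magic additions above hook into. A minimal sketch of that dispatch, with a made-up file name and simplified error handling:

package main

import (
	"fmt"
	"io"
	"log"
	"os"
)

// The same magic strings the importer defines; each is exactly four bytes.
const (
	gccgov1Magic    = "v1;\n"
	gccgov2Magic    = "v2;\n"
	gccgov3Magic    = "v3;\n"
	goimporterMagic = "\n$$ "
	archiveMagic    = "!<ar"
)

func main() {
	f, err := os.Open("libfoo.gox") // hypothetical export-data file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Read the four magic bytes and classify the file.
	var magic [4]byte
	if _, err := io.ReadFull(f, magic[:]); err != nil {
		log.Fatal(err)
	}

	switch string(magic[:]) {
	case gccgov1Magic, gccgov2Magic, gccgov3Magic, goimporterMagic:
		fmt.Println("raw export data")
	case archiveMagic:
		fmt.Println("archive containing export data")
	default:
		fmt.Println("unknown format; the importer would try it as an ELF object")
	}
}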

@@ -14,7 +14,8 @@ import (
 	"os"
 	"os/exec"
 	"path/filepath"
-	"runtime"
+	"regexp"
+	"strconv"
 	"testing"
 )
 
@@ -56,9 +57,6 @@ func runImporterTest(t *testing.T, imp Importer, initmap map[*types.Package]Init
 		// Check that the package's own init function has the package's priority
 		for _, pkginit := range initdata.Inits {
 			if pkginit.InitFunc == test.wantinits[0] {
-				if initdata.Priority != pkginit.Priority {
-					t.Errorf("%s: got self priority %d; want %d", test.pkgpath, pkginit.Priority, initdata.Priority)
-				}
 				found = true
 				break
 			}
@@ -68,27 +66,11 @@ func runImporterTest(t *testing.T, imp Importer, initmap map[*types.Package]Init
 			t.Errorf("%s: could not find expected function %q", test.pkgpath, test.wantinits[0])
 		}
 
-		// Each init function in the list other than the first one is a
-		// dependency of the function immediately before it. Check that
-		// the init functions appear in descending priority order.
-		priority := initdata.Priority
-		for _, wantdepinit := range test.wantinits[1:] {
-			found = false
-			for _, pkginit := range initdata.Inits {
-				if pkginit.InitFunc == wantdepinit {
-					if priority <= pkginit.Priority {
-						t.Errorf("%s: got dep priority %d; want less than %d", test.pkgpath, pkginit.Priority, priority)
-					}
-					found = true
-					priority = pkginit.Priority
-					break
-				}
-			}
-			if !found {
-				t.Errorf("%s: could not find expected function %q", test.pkgpath, wantdepinit)
-			}
-		}
+		// FIXME: the original version of this test was written against
+		// the v1 export data scheme for capturing init functions, so it
+		// verified the priority values. We moved away from the priority
+		// scheme some time ago; it is not clear how much work it would be
+		// to validate the new init export data.
 	}
 }
@@ -103,17 +85,17 @@ var importerTests = [...]importerTest{
 	{pkgpath: "time", name: "Nanosecond", want: "const Nanosecond Duration", wantval: "1"},
 	{pkgpath: "unicode", name: "IsUpper", want: "func IsUpper(r rune) bool"},
 	{pkgpath: "unicode", name: "MaxRune", want: "const MaxRune untyped rune", wantval: "1114111"},
-	{pkgpath: "imports", wantinits: []string{"imports..import", "fmt..import", "math..import"}},
+	{pkgpath: "imports", wantinits: []string{"imports..import", "fmt..import"}},
 	{pkgpath: "importsar", name: "Hello", want: "var Hello string"},
 	{pkgpath: "aliases", name: "A14", want: "type A14 = func(int, T0) chan T2"},
 	{pkgpath: "aliases", name: "C0", want: "type C0 struct{f1 C1; f2 C1}"},
 	{pkgpath: "escapeinfo", name: "NewT", want: "func NewT(data []byte) *T"},
 	{pkgpath: "issue27856", name: "M", want: "type M struct{E F}"},
 	{pkgpath: "v1reflect", name: "Type", want: "type Type interface{Align() int; AssignableTo(u Type) bool; Bits() int; ChanDir() ChanDir; Elem() Type; Field(i int) StructField; FieldAlign() int; FieldByIndex(index []int) StructField; FieldByName(name string) (StructField, bool); FieldByNameFunc(match func(string) bool) (StructField, bool); Implements(u Type) bool; In(i int) Type; IsVariadic() bool; Key() Type; Kind() Kind; Len() int; Method(int) Method; MethodByName(string) (Method, bool); Name() string; NumField() int; NumIn() int; NumMethod() int; NumOut() int; Out(i int) Type; PkgPath() string; Size() uintptr; String() string; common() *commonType; rawString() string; runtimeType() *runtimeType; uncommon() *uncommonType}"},
 }
 
 func TestGoxImporter(t *testing.T) {
-	testenv.MustHaveGoBuild(t)
+	testenv.MustHaveExec(t) // this is to skip nacl, js
 
 	initmap := make(map[*types.Package]InitData)
 	imp := GetImporter([]string{"testdata"}, initmap)
@@ -122,15 +104,46 @@ func TestGoxImporter(t *testing.T) {
 	}
 }
 
-func TestObjImporter(t *testing.T) {
-	testenv.MustHaveGoBuild(t)
+// gccgoPath returns a path to gccgo if it is present (either in
+// path or specified via GCCGO environment variable), or an
+// empty string if no gccgo is available.
+func gccgoPath() string {
+	gccgoname := os.Getenv("GCCGO")
+	if gccgoname == "" {
+		gccgoname = "gccgo"
+	}
+	if gpath, gerr := exec.LookPath(gccgoname); gerr == nil {
+		return gpath
+	}
+	return ""
+}
 
-	// This test relies on gccgo being around, which it most likely will be if we
-	// were compiled with gccgo.
-	if runtime.Compiler != "gccgo" {
+func TestObjImporter(t *testing.T) {
+	// This test relies on gccgo being around.
+	gpath := gccgoPath()
+	if gpath == "" {
 		t.Skip("This test needs gccgo")
 	}
 
+	verout, err := exec.Command(gpath, "--version").CombinedOutput()
+	if err != nil {
+		t.Logf("%s", verout)
+		t.Fatal(err)
+	}
+	vers := regexp.MustCompile(`([0-9]+)\.([0-9]+)`).FindSubmatch(verout)
+	if len(vers) == 0 {
+		t.Fatalf("could not find version number in %s", verout)
+	}
+	major, err := strconv.Atoi(string(vers[1]))
+	if err != nil {
+		t.Fatal(err)
+	}
+	minor, err := strconv.Atoi(string(vers[2]))
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Logf("gccgo version %d.%d", major, minor)
+
 	tmpdir, err := ioutil.TempDir("", "")
 	if err != nil {
 		t.Fatal(err)
@@ -146,11 +159,22 @@ func TestObjImporter(t *testing.T) {
 	arimp := GetImporter([]string{artmpdir}, arinitmap)
 
 	for _, test := range importerTests {
+		// Support for type aliases was added in GCC 7.
+		if test.pkgpath == "aliases" || test.pkgpath == "issue27856" {
+			if major < 7 {
+				t.Logf("skipping %q: not supported before gccgo version 7", test.pkgpath)
+				continue
+			}
+		}
+
 		gofile := filepath.Join("testdata", test.pkgpath+".go")
+		if _, err := os.Stat(gofile); os.IsNotExist(err) {
+			continue
+		}
 		ofile := filepath.Join(tmpdir, test.pkgpath+".o")
 		afile := filepath.Join(artmpdir, "lib"+test.pkgpath+".a")
 
-		cmd := exec.Command("gccgo", "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile)
+		cmd := exec.Command(gpath, "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile)
 		out, err := cmd.CombinedOutput()
 		if err != nil {
 			t.Logf("%s", out)


@@ -19,10 +19,11 @@ import (
 	"strconv"
 	"strings"
 	"text/scanner"
+	"unicode/utf8"
 )
 
 type parser struct {
-	scanner  scanner.Scanner
+	scanner  *scanner.Scanner
 	version  string                    // format version
 	tok      rune                      // current token
 	lit      string                    // literal string; only valid for Ident, Int, String tokens
@@ -31,20 +32,47 @@ type parser struct {
 	pkg      *types.Package            // reference to imported package
 	imports  map[string]*types.Package // package path -> package object
 	typeList []types.Type              // type number -> type
+	typeData []string                  // unparsed type data (v3 and later)
+	fixups   []fixupRecord             // fixups to apply at end of parsing
 	initdata InitData                  // package init priority data
 }
 
+// When reading V1 export data it's possible to encounter a defined
+// type N1 with an underlying defined type N2 while we are still
+// reading in that defined type N2; see issue #29006 for an instance
+// of this. Example:
+//
+//	type N1 N2
+//	type N2 struct {
+//		...
+//		p *N1
+//	}
+//
+// To handle such cases, the parser generates a fixup record (below) and
+// delays setting of N1's underlying type until parsing is complete, at
+// which point fixups are applied.
+type fixupRecord struct {
+	toUpdate *types.Named // type to modify when fixup is processed
+	target   types.Type   // type that was incomplete when fixup was created
+}
+
 func (p *parser) init(filename string, src io.Reader, imports map[string]*types.Package) {
-	p.scanner.Init(src)
-	p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
-	p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
-	p.scanner.Whitespace = 1<<'\t' | 1<<'\n' | 1<<' '
-	p.scanner.Filename = filename // for good error messages
-	p.next()
+	p.scanner = new(scanner.Scanner)
+	p.initScanner(filename, src)
 	p.imports = imports
 	p.typeList = make([]types.Type, 1 /* type numbers start at 1 */, 16)
 }
 
+func (p *parser) initScanner(filename string, src io.Reader) {
+	p.scanner.Init(src)
+	p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+	p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings
+	p.scanner.Whitespace = 1<<'\t' | 1<<' '
+	p.scanner.Filename = filename // for good error messages
+	p.next()
+}
+
 type importError struct {
 	pos scanner.Position
 	err error
@@ -75,6 +103,13 @@ func (p *parser) expect(tok rune) string {
 	return lit
 }
 
+func (p *parser) expectEOL() {
+	if p.version == "v1" || p.version == "v2" {
+		p.expect(';')
+	}
+	p.expect('\n')
+}
+
 func (p *parser) expectKeyword(keyword string) {
 	lit := p.expect(scanner.Ident)
 	if lit != keyword {
@@ -100,7 +135,7 @@ func (p *parser) parseUnquotedString() string {
 	buf.WriteString(p.scanner.TokenText())
 	// This loop needs to examine each character before deciding whether to consume it. If we see a semicolon,
 	// we need to let it be consumed by p.next().
-	for ch := p.scanner.Peek(); ch != ';' && ch != scanner.EOF && p.scanner.Whitespace&(1<<uint(ch)) == 0; ch = p.scanner.Peek() {
+	for ch := p.scanner.Peek(); ch != '\n' && ch != ';' && ch != scanner.EOF && p.scanner.Whitespace&(1<<uint(ch)) == 0; ch = p.scanner.Peek() {
 		buf.WriteRune(ch)
 		p.scanner.Next()
 	}
@@ -272,6 +307,15 @@ func (p *parser) parseConversion(pkg *types.Package) (val constant.Value, typ ty
 // ConstValue     = string | "false" | "true" | ["-"] (int ["'"] | FloatOrComplex) | Conversion .
 // FloatOrComplex = float ["i" | ("+"|"-") float "i"] .
 func (p *parser) parseConstValue(pkg *types.Package) (val constant.Value, typ types.Type) {
+	// v3 changed to $false, $true, $convert, to avoid confusion
+	// with variable names in inline function bodies.
+	if p.tok == '$' {
+		p.next()
+		if p.tok != scanner.Ident {
+			p.errorf("expected identifer after '$', got %s (%q)", scanner.TokenString(p.tok), p.lit)
+		}
+	}
+
 	switch p.tok {
 	case scanner.String:
 		str := p.parseString()
@@ -391,17 +435,42 @@ var reserved = new(struct{ types.Type })
 
 // reserve reserves the type map entry n for future use.
 func (p *parser) reserve(n int) {
-	if n != len(p.typeList) {
-		p.errorf("invalid type number %d (out of sync)", n)
+	// Notes:
+	// - for pre-V3 export data, the type numbers we see are
+	//   guaranteed to be in increasing order, so we append a
+	//   reserved entry onto the list.
+	// - for V3+ export data, type numbers can appear in
+	//   any order, however the 'types' section tells us the
+	//   total number of types, hence typeList is pre-allocated.
+	if len(p.typeData) == 0 {
+		if n != len(p.typeList) {
+			p.errorf("invalid type number %d (out of sync)", n)
+		}
+		p.typeList = append(p.typeList, reserved)
+	} else {
+		if p.typeList[n] != nil {
+			p.errorf("previously visited type number %d", n)
+		}
+		p.typeList[n] = reserved
 	}
-	p.typeList = append(p.typeList, reserved)
 }
 
 // update sets the type map entries for the given type numbers nlist to t.
 func (p *parser) update(t types.Type, nlist []int) {
+	if len(nlist) != 0 {
+		if t == reserved {
+			p.errorf("internal error: update(%v) invoked on reserved", nlist)
+		}
+		if t == nil {
+			p.errorf("internal error: update(%v) invoked on nil", nlist)
+		}
+	}
 	for _, n := range nlist {
+		if p.typeList[n] == t {
+			continue
+		}
 		if p.typeList[n] != reserved {
-			p.errorf("typeMap[%d] not reserved", n)
+			p.errorf("internal error: update(%v): %d not reserved", nlist, n)
 		}
 		p.typeList[n] = t
 	}
@@ -409,7 +478,7 @@ func (p *parser) update(t types.Type, nlist []int) {
 
 // NamedType = TypeName [ "=" ] Type { Method } .
 // TypeName  = ExportedName .
-// Method    = "func" "(" Param ")" Name ParamList ResultList ";" .
+// Method    = "func" "(" Param ")" Name ParamList ResultList [InlineBody] ";" .
 func (p *parser) parseNamedType(nlist []int) types.Type {
 	pkg, name := p.parseExportedName()
 	scope := pkg.Scope()
@@ -460,22 +529,34 @@ func (p *parser) parseNamedType(nlist []int) types.Type {
 
 	underlying := p.parseType(pkg)
 	if nt.Underlying() == nil {
-		nt.SetUnderlying(underlying.Underlying())
+		if underlying.Underlying() == nil {
+			if p.version != "v1" {
+				p.errorf("internal error: unexpected fixup required for %v", nt)
+			}
+			fix := fixupRecord{toUpdate: nt, target: underlying}
+			p.fixups = append(p.fixups, fix)
+		} else {
+			nt.SetUnderlying(underlying.Underlying())
+		}
 	}
 
-	// collect associated methods
-	for p.tok == scanner.Ident {
-		p.expectKeyword("func")
-		p.expect('(')
-		receiver, _ := p.parseParam(pkg)
-		p.expect(')')
-		name := p.parseName()
-		params, isVariadic := p.parseParamList(pkg)
-		results := p.parseResultList(pkg)
-		p.expect(';')
-		sig := types.NewSignature(receiver, params, results, isVariadic)
-		nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+	if p.tok == '\n' {
+		p.next()
+		// collect associated methods
+		for p.tok == scanner.Ident {
+			p.expectKeyword("func")
+			p.expect('(')
+			receiver, _ := p.parseParam(pkg)
+			p.expect(')')
+			name := p.parseName()
+			params, isVariadic := p.parseParamList(pkg)
+			results := p.parseResultList(pkg)
+			p.skipInlineBody()
+			p.expectEOL()
+			sig := types.NewSignature(receiver, params, results, isVariadic)
+			nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+		}
 	}
 
 	return nt
@@ -616,7 +697,11 @@ func (p *parser) parseParamList(pkg *types.Package) (*types.Tuple, bool) {
 func (p *parser) parseResultList(pkg *types.Package) *types.Tuple {
 	switch p.tok {
 	case '<':
-		return types.NewTuple(types.NewParam(token.NoPos, pkg, "", p.parseType(pkg)))
+		p.next()
+		if p.tok == scanner.Ident && p.lit == "inl" {
+			return nil
+		}
+		return types.NewTuple(types.NewParam(token.NoPos, pkg, "", p.parseTypeAfterAngle(pkg)))
 
 	case '(':
 		params, _ := p.parseParamList(pkg)
@@ -639,7 +724,7 @@ func (p *parser) parseFunctionType(pkg *types.Package, nlist []int) *types.Signa
 	return t
 }
 
-// Func = Name FunctionType .
+// Func = Name FunctionType [InlineBody] .
 func (p *parser) parseFunc(pkg *types.Package) *types.Func {
 	name := p.parseName()
 	if strings.ContainsRune(name, '$') {
@@ -648,7 +733,9 @@ func (p *parser) parseFunc(pkg *types.Package) *types.Func {
 		p.discardDirectiveWhileParsingTypes(pkg)
 		return nil
 	}
-	return types.NewFunc(token.NoPos, pkg, name, p.parseFunctionType(pkg, nil))
+	f := types.NewFunc(token.NoPos, pkg, name, p.parseFunctionType(pkg, nil))
+	p.skipInlineBody()
+	return f
 }
 
 // InterfaceType = "interface" "{" { ("?" Type | Func) ";" } "}" .
@@ -786,17 +873,25 @@ func lookupBuiltinType(typ int) types.Type {
 //
 // parseType updates the type map to t for all type numbers n.
 //
-func (p *parser) parseType(pkg *types.Package, n ...int) (t types.Type) {
+func (p *parser) parseType(pkg *types.Package, n ...int) types.Type {
 	p.expect('<')
+	return p.parseTypeAfterAngle(pkg, n...)
+}
+
+// (*parser).Type after reading the "<".
+func (p *parser) parseTypeAfterAngle(pkg *types.Package, n ...int) (t types.Type) {
 	p.expectKeyword("type")
 
 	switch p.tok {
 	case scanner.Int:
 		n1 := p.parseInt()
 		if p.tok == '>' {
+			if len(p.typeData) > 0 && p.typeList[n1] == nil {
+				p.parseSavedType(pkg, n1, n)
+			}
 			t = p.typeList[n1]
-			if t == reserved {
-				p.errorf("invalid type cycle, type %d not yet defined", n1)
+			if len(p.typeData) == 0 && t == reserved {
+				p.errorf("invalid type cycle, type %d not yet defined (nlist=%v)", n1, n)
 			}
 			p.update(t, n)
 		} else {
@@ -812,12 +907,124 @@
 	default:
 		p.errorf("expected type number, got %s (%q)", scanner.TokenString(p.tok), p.lit)
 		return nil
 	}
 
+	if t == nil || t == reserved {
+		p.errorf("internal error: bad return from parseType(%v)", n)
+	}
+
 	p.expect('>')
 	return
 }
 
+// InlineBody = "<inl:NN>" .{NN}
+// Reports whether a body was skipped.
+func (p *parser) skipInlineBody() {
+	// We may or may not have seen the '<' already, depending on
+	// whether the function had a result type or not.
+	if p.tok == '<' {
+		p.next()
+		p.expectKeyword("inl")
+	} else if p.tok != scanner.Ident || p.lit != "inl" {
+		return
+	} else {
+		p.next()
+	}
+
+	p.expect(':')
+	want := p.parseInt()
+	p.expect('>')
+
+	defer func(w uint64) {
+		p.scanner.Whitespace = w
+	}(p.scanner.Whitespace)
+	p.scanner.Whitespace = 0
+
+	got := 0
+	for got < want {
+		r := p.scanner.Next()
+		if r == scanner.EOF {
+			p.error("unexpected EOF")
+		}
+		got += utf8.RuneLen(r)
+	}
+}
+
+// Types = "types" maxp1 exportedp1 (offset length)* .
+func (p *parser) parseTypes(pkg *types.Package) {
+	maxp1 := p.parseInt()
+	exportedp1 := p.parseInt()
+	p.typeList = make([]types.Type, maxp1, maxp1)
+
+	type typeOffset struct {
+		offset int
+		length int
+	}
+	var typeOffsets []typeOffset
+
+	total := 0
+	for i := 1; i < maxp1; i++ {
+		len := p.parseInt()
+		typeOffsets = append(typeOffsets, typeOffset{total, len})
+		total += len
+	}
+
+	defer func(w uint64) {
+		p.scanner.Whitespace = w
+	}(p.scanner.Whitespace)
+	p.scanner.Whitespace = 0
+
+	// We should now have p.tok pointing to the final newline.
+	// The next runes from the scanner should be the type data.
+	var sb strings.Builder
+	for sb.Len() < total {
+		r := p.scanner.Next()
+		if r == scanner.EOF {
+			p.error("unexpected EOF")
+		}
+		sb.WriteRune(r)
+	}
+	allTypeData := sb.String()
+
+	p.typeData = []string{""} // type 0, unused
+	for _, to := range typeOffsets {
+		p.typeData = append(p.typeData, allTypeData[to.offset:to.offset+to.length])
+	}
+
+	for i := 1; i < int(exportedp1); i++ {
+		p.parseSavedType(pkg, i, []int{})
+	}
+}
+
+// parseSavedType parses one saved type definition.
+func (p *parser) parseSavedType(pkg *types.Package, i int, nlist []int) {
+	defer func(s *scanner.Scanner, tok rune, lit string) {
+		p.scanner = s
+		p.tok = tok
+		p.lit = lit
+	}(p.scanner, p.tok, p.lit)
+
+	p.scanner = new(scanner.Scanner)
+	p.initScanner(p.scanner.Filename, strings.NewReader(p.typeData[i]))
+	p.expectKeyword("type")
+	id := p.parseInt()
+	if id != i {
+		p.errorf("type ID mismatch: got %d, want %d", id, i)
+	}
+	if p.typeList[i] == reserved {
+		p.errorf("internal error: %d already reserved in parseSavedType", i)
+	}
+	if p.typeList[i] == nil {
+		p.reserve(i)
+		p.parseTypeSpec(pkg, append(nlist, i))
+	}
+	if p.typeList[i] == nil || p.typeList[i] == reserved {
+		p.errorf("internal error: parseSavedType(%d,%v) reserved/nil", i, nlist)
+	}
+}
+
 // PackageInit = unquotedString unquotedString int .
 func (p *parser) parsePackageInit() PackageInit {
 	name := p.parseUnquotedString()
@@ -833,7 +1040,7 @@ func (p *parser) parsePackageInit() PackageInit {
 func (p *parser) discardDirectiveWhileParsingTypes(pkg *types.Package) {
 	for {
 		switch p.tok {
-		case ';':
+		case '\n', ';':
 			return
 		case '<':
 			p.parseType(pkg)
@@ -852,7 +1059,7 @@ func (p *parser) maybeCreatePackage() {
 	}
 }
 
-// InitDataDirective = ( "v1" | "v2" ) ";" |
+// InitDataDirective = ( "v1" | "v2" | "v3" ) ";" |
 //                     "priority" int ";" |
 //                     "init" { PackageInit } ";" |
 //                     "checksum" unquotedString ";" .
@@ -863,31 +1070,32 @@ func (p *parser) parseInitDataDirective() {
 	}
 
 	switch p.lit {
-	case "v1", "v2":
+	case "v1", "v2", "v3":
 		p.version = p.lit
 		p.next()
 		p.expect(';')
+		p.expect('\n')
 
 	case "priority":
 		p.next()
 		p.initdata.Priority = p.parseInt()
-		p.expect(';')
+		p.expectEOL()
 
 	case "init":
 		p.next()
-		for p.tok != ';' && p.tok != scanner.EOF {
+		for p.tok != '\n' && p.tok != ';' && p.tok != scanner.EOF {
 			p.initdata.Inits = append(p.initdata.Inits, p.parsePackageInit())
 		}
-		p.expect(';')
+		p.expectEOL()
 
 	case "init_graph":
 		p.next()
 		// The graph data is thrown away for now.
-		for p.tok != ';' && p.tok != scanner.EOF {
+		for p.tok != '\n' && p.tok != ';' && p.tok != scanner.EOF {
 			p.parseInt64()
 			p.parseInt64()
 		}
-		p.expect(';')
+		p.expectEOL()
 
 	case "checksum":
 		// Don't let the scanner try to parse the checksum as a number.
@@ -897,7 +1105,7 @@ func (p *parser) parseInitDataDirective() {
 		p.scanner.Mode &^= scanner.ScanInts | scanner.ScanFloats
 		p.next()
 		p.parseUnquotedString()
-		p.expect(';')
+		p.expectEOL()
 
 	default:
 		p.errorf("unexpected identifier: %q", p.lit)
@@ -909,6 +1117,7 @@ func (p *parser) parseInitDataDirective() {
 //             "pkgpath" unquotedString ";" |
 //             "prefix" unquotedString ";" |
 //             "import" unquotedString unquotedString string ";" |
+//             "indirectimport" unquotedString unquotedstring ";" |
 //             "func" Func ";" |
 //             "type" Type ";" |
 //             "var" Var ";" |
@@ -920,29 +1129,29 @@ func (p *parser) parseDirective() {
 	}
 
 	switch p.lit {
-	case "v1", "v2", "priority", "init", "init_graph", "checksum":
+	case "v1", "v2", "v3", "priority", "init", "init_graph", "checksum":
 		p.parseInitDataDirective()
 
 	case "package":
 		p.next()
 		p.pkgname = p.parseUnquotedString()
 		p.maybeCreatePackage()
-		if p.version == "v2" && p.tok != ';' {
+		if p.version != "v1" && p.tok != '\n' && p.tok != ';' {
 			p.parseUnquotedString()
 			p.parseUnquotedString()
 		}
-		p.expect(';')
+		p.expectEOL()
 
 	case "pkgpath":
 		p.next()
 		p.pkgpath = p.parseUnquotedString()
 		p.maybeCreatePackage()
-		p.expect(';')
+		p.expectEOL()
 
 	case "prefix":
 		p.next()
 		p.pkgpath = p.parseUnquotedString()
-		p.expect(';')
+		p.expectEOL()
 
 	case "import":
 		p.next()
@@ -950,7 +1159,19 @@ func (p *parser) parseDirective() {
 		pkgpath := p.parseUnquotedString()
 		p.getPkg(pkgpath, pkgname)
 		p.parseString()
-		p.expect(';')
+		p.expectEOL()
+
+	case "indirectimport":
+		p.next()
+		pkgname := p.parseUnquotedString()
+		pkgpath := p.parseUnquotedString()
+		p.getPkg(pkgpath, pkgname)
+		p.expectEOL()
+
+	case "types":
+		p.next()
+		p.parseTypes(p.pkg)
+		p.expectEOL()
 
 	case "func":
 		p.next()
@@ -958,24 +1179,24 @@ func (p *parser) parseDirective() {
 		if fun != nil {
 			p.pkg.Scope().Insert(fun)
 		}
-		p.expect(';')
+		p.expectEOL()
 
 	case "type":
 		p.next()
 		p.parseType(p.pkg)
-		p.expect(';')
+		p.expectEOL()
 
 	case "var":
 		p.next()
 		v := p.parseVar(p.pkg)
 		p.pkg.Scope().Insert(v)
-		p.expect(';')
+		p.expectEOL()
 
 	case "const":
 		p.next()
 		c := p.parseConst(p.pkg)
 		p.pkg.Scope().Insert(c)
-		p.expect(';')
+		p.expectEOL()
 
 	default:
 		p.errorf("unexpected identifier: %q", p.lit)
@@ -987,6 +1208,13 @@ func (p *parser) parsePackage() *types.Package {
 	for p.tok != scanner.EOF {
 		p.parseDirective()
 	}
+	for _, f := range p.fixups {
+		if f.target.Underlying() == nil {
+			p.errorf("internal error: fixup can't be applied, loop required")
+		}
+		f.toUpdate.SetUnderlying(f.target.Underlying())
+	}
+	p.fixups = nil
 	for _, typ := range p.typeList {
 		if it, ok := typ.(*types.Interface); ok {
 			it.Complete()

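As a rough standalone illustration of the fixup idea described in the new parser comment (not code from this CL; the package path and type names are invented): a named type can be created with a nil underlying type and completed later, once the type it depends on has been fully constructed.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.org/demo", "demo")

	// N1's underlying type is not known yet while it is being read, so it
	// starts with a nil underlying -- the situation a fixupRecord records.
	n1 := types.NewNamed(types.NewTypeName(token.NoPos, pkg, "N1", nil), nil, nil)

	// N2 is a struct that points back at N1, mirroring the example in the
	// parser comment (type N1 N2; type N2 struct { p *N1 }).
	field := types.NewField(token.NoPos, pkg, "p", types.NewPointer(n1), false)
	n2 := types.NewNamed(types.NewTypeName(token.NoPos, pkg, "N2", nil),
		types.NewStruct([]*types.Var{field}, nil), nil)

	// "Apply the fixup": only now that N2 is complete can N1 take its underlying.
	n1.SetUnderlying(n2.Underlying())

	fmt.Println(n1, "->", n1.Underlying()) // example.org/demo.N1 -> struct{p *example.org/demo.N1}
	fmt.Println(n2, "->", n2.Underlying())
}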

@@ -22,7 +22,7 @@ var typeParserTests = []struct {
 	{id: "foo", typ: "<type 1 *<type -19>>", want: "*error"},
 	{id: "foo", typ: "<type 1 *any>", want: "unsafe.Pointer"},
 	{id: "foo", typ: "<type 1 \"Bar\" <type 2 *<type 1>>>", want: "foo.Bar", underlying: "*foo.Bar"},
-	{id: "foo", typ: "<type 1 \"bar.Foo\" \"bar\" <type -1> func (? <type 1>) M (); >", want: "bar.Foo", underlying: "int8", methods: "func (bar.Foo).M()"},
+	{id: "foo", typ: "<type 1 \"bar.Foo\" \"bar\" <type -1>\nfunc (? <type 1>) M ();\n>", want: "bar.Foo", underlying: "int8", methods: "func (bar.Foo).M()"},
 	{id: "foo", typ: "<type 1 \".bar.foo\" \"bar\" <type -1>>", want: "bar.foo", underlying: "int8"},
 	{id: "foo", typ: "<type 1 []<type -1>>", want: "[]int8"},
 	{id: "foo", typ: "<type 1 [42]<type -1>>", want: "[42]int8"},
@@ -39,6 +39,7 @@ func TestTypeParser(t *testing.T) {
 	for _, test := range typeParserTests {
 		var p parser
 		p.init("test.gox", strings.NewReader(test.typ), make(map[string]*types.Package))
+		p.version = "v2"
 		p.pkgname = test.id
 		p.pkgpath = test.id
 		p.maybeCreatePackage()

Binary file not shown.


@@ -26,6 +26,20 @@ func HasGoBuild() bool {
 	return true
 }
 
+// HasExec reports whether the current system can start new processes
+// using os.StartProcess or (more commonly) exec.Command.
+func HasExec() bool {
+	switch runtime.GOOS {
+	case "nacl", "js":
+		return false
+	case "darwin":
+		if strings.HasPrefix(runtime.GOARCH, "arm") {
+			return false
+		}
+	}
+	return true
+}
+
 // MustHaveGoBuild checks that the current system can build programs with ``go build''
 // and then run them with os.StartProcess or exec.Command.
 // If not, MustHaveGoBuild calls t.Skip with an explanation.
@@ -35,10 +49,21 @@ func MustHaveGoBuild(t *testing.T) {
 	}
 }
 
+// MustHaveExec checks that the current system can start new processes
+// using os.StartProcess or (more commonly) exec.Command.
+// If not, MustHaveExec calls t.Skip with an explanation.
+func MustHaveExec(t *testing.T) {
+	if !HasExec() {
+		t.Skipf("skipping test: cannot exec subprocess on %s/%s", runtime.GOOS, runtime.GOARCH)
+	}
+}
+
 var testenv = struct {
 	HasGoBuild      func() bool
 	MustHaveGoBuild func(*testing.T)
+	MustHaveExec    func(*testing.T)
 }{
 	HasGoBuild:      HasGoBuild,
 	MustHaveGoBuild: MustHaveGoBuild,
+	MustHaveExec:    MustHaveExec,
 }
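As a small, hypothetical usage sketch (not part of the CL): because testenv here is a struct of function fields defined in the same package rather than an imported package, call sites read exactly like the standard library's testenv helpers. The test name and body below are invented.

package gccgoimporter

import "testing"

// A made-up example of how tests in this package use the shim added above:
// any test that will start a subprocess calls MustHaveExec first, so it is
// skipped on GOOS values such as nacl and js (and darwin/arm) that cannot exec.
func TestSomethingThatExecs(t *testing.T) {
	testenv.MustHaveExec(t)
	// ... run gccgo or another external command here ...
}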