
exp/types, exp/gotype: remove exp/types

The only code change is in exp/gotype/gotype.go.
The latest reviewed version of exp/types is now
exp/types/staging.

First step toward replacing exp/types with
exp/types/staging.

R=iant
CC=golang-dev
https://golang.org/cl/6819071
Robert Griesemer 2012-11-01 15:25:51 -07:00
parent 538a58bb75
commit 98133ac03a
13 changed files with 2 additions and 2943 deletions


@@ -6,7 +6,7 @@ package main
 import (
 	"errors"
-	"exp/types"
+	"exp/types/staging"
 	"flag"
 	"fmt"
 	"go/ast"
@@ -176,8 +176,7 @@ func processPackage(fset *token.FileSet, files map[string]*ast.File) {
 		report(err)
 		return
 	}
-	_, err = types.Check(fset, pkg)
-	if err != nil {
+	if err = types.Check(fset, pkg, nil, nil); err != nil {
 		report(err)
 	}
 }
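For context, a minimal sketch of how a caller drives the checker after this change, modeled on the updated processPackage above. The file name example.go is illustrative only, and the meaning of the two trailing nil arguments to types.Check (assumed here to be an optional error handler and a per-expression callback) is inferred from the call site in this diff, not from the staging API documentation.

package main

import (
	"exp/types/staging" // package name is "types", so call sites stay unchanged
	"go/ast"
	"go/parser"
	"go/token"
	"log"
)

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "example.go", `package p; var x = 1 + 2`, 0)
	if err != nil {
		log.Fatal(err)
	}
	// Build an *ast.Package from the parsed files, as processPackage does.
	pkg, err := ast.NewPackage(fset, map[string]*ast.File{"example.go": file}, nil, nil)
	if err != nil {
		log.Fatal(err)
	}
	// New-style call: the two extra arguments are left nil, mirroring gotype.go.
	if err := types.Check(fset, pkg, nil, nil); err != nil {
		log.Fatal(err)
	}
}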


@@ -1,252 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements the Check function, which typechecks a package.
package types
import (
"fmt"
"go/ast"
"go/scanner"
"go/token"
"strconv"
)
const debug = false
type checker struct {
fset *token.FileSet
errors scanner.ErrorList
types map[ast.Expr]Type
}
func (c *checker) errorf(pos token.Pos, format string, args ...interface{}) string {
msg := fmt.Sprintf(format, args...)
c.errors.Add(c.fset.Position(pos), msg)
return msg
}
// collectFields collects struct fields (tok = token.STRUCT), interface methods
// (tok = token.INTERFACE), and function arguments/results (tok = token.FUNC).
//
func (c *checker) collectFields(tok token.Token, list *ast.FieldList, cycleOk bool) (fields ObjList, tags []string, isVariadic bool) {
if list != nil {
for _, field := range list.List {
ftype := field.Type
if t, ok := ftype.(*ast.Ellipsis); ok {
ftype = t.Elt
isVariadic = true
}
typ := c.makeType(ftype, cycleOk)
tag := ""
if field.Tag != nil {
assert(field.Tag.Kind == token.STRING)
tag, _ = strconv.Unquote(field.Tag.Value)
}
if len(field.Names) > 0 {
// named fields
for _, name := range field.Names {
obj := name.Obj
obj.Type = typ
fields = append(fields, obj)
if tok == token.STRUCT {
tags = append(tags, tag)
}
}
} else {
// anonymous field
switch tok {
case token.STRUCT:
tags = append(tags, tag)
fallthrough
case token.FUNC:
obj := ast.NewObj(ast.Var, "")
obj.Type = typ
fields = append(fields, obj)
case token.INTERFACE:
utyp := Underlying(typ)
if typ, ok := utyp.(*Interface); ok {
// TODO(gri) This is not good enough. Check for double declarations!
fields = append(fields, typ.Methods...)
} else if _, ok := utyp.(*Bad); !ok {
// if utyp is Bad, don't complain (the root cause was reported before)
c.errorf(ftype.Pos(), "interface contains embedded non-interface type")
}
default:
panic("unreachable")
}
}
}
}
return
}
// makeType makes a new type for an AST type specification x or returns
// the type referred to by a type name x. If cycleOk is set, a type may
// refer to itself directly or indirectly; otherwise cycles are errors.
//
func (c *checker) makeType(x ast.Expr, cycleOk bool) (typ Type) {
if debug {
fmt.Printf("makeType (cycleOk = %v)\n", cycleOk)
ast.Print(c.fset, x)
defer func() {
fmt.Printf("-> %T %v\n\n", typ, typ)
}()
}
switch t := x.(type) {
case *ast.BadExpr:
return &Bad{}
case *ast.Ident:
// type name
obj := t.Obj
if obj == nil {
// unresolved identifier (error has been reported before)
return &Bad{Msg: fmt.Sprintf("%s is unresolved", t.Name)}
}
if obj.Kind != ast.Typ {
msg := c.errorf(t.Pos(), "%s is not a type", t.Name)
return &Bad{Msg: msg}
}
c.checkObj(obj, cycleOk)
if !cycleOk && obj.Type.(*Name).Underlying == nil {
msg := c.errorf(obj.Pos(), "illegal cycle in declaration of %s", obj.Name)
return &Bad{Msg: msg}
}
return obj.Type.(Type)
case *ast.ParenExpr:
return c.makeType(t.X, cycleOk)
case *ast.SelectorExpr:
// qualified identifier
// TODO (gri) eventually, this code belongs to expression
// type checking - here for the time being
if ident, ok := t.X.(*ast.Ident); ok {
if obj := ident.Obj; obj != nil {
if obj.Kind != ast.Pkg {
msg := c.errorf(ident.Pos(), "%s is not a package", obj.Name)
return &Bad{Msg: msg}
}
// TODO(gri) we have a package name but don't
// have the mapping from package name to package
// scope anymore (created in ast.NewPackage).
return &Bad{} // for now
}
}
// TODO(gri) can this really happen (the parser should have excluded this)?
msg := c.errorf(t.Pos(), "expected qualified identifier")
return &Bad{Msg: msg}
case *ast.StarExpr:
return &Pointer{Base: c.makeType(t.X, true)}
case *ast.ArrayType:
if t.Len != nil {
// TODO(gri) compute length
return &Array{Elt: c.makeType(t.Elt, cycleOk)}
}
return &Slice{Elt: c.makeType(t.Elt, true)}
case *ast.StructType:
fields, tags, _ := c.collectFields(token.STRUCT, t.Fields, cycleOk)
return &Struct{Fields: fields, Tags: tags}
case *ast.FuncType:
params, _, isVariadic := c.collectFields(token.FUNC, t.Params, true)
results, _, _ := c.collectFields(token.FUNC, t.Results, true)
return &Func{Recv: nil, Params: params, Results: results, IsVariadic: isVariadic}
case *ast.InterfaceType:
methods, _, _ := c.collectFields(token.INTERFACE, t.Methods, cycleOk)
methods.Sort()
return &Interface{Methods: methods}
case *ast.MapType:
return &Map{Key: c.makeType(t.Key, true), Elt: c.makeType(t.Value, true)}
case *ast.ChanType:
return &Chan{Dir: t.Dir, Elt: c.makeType(t.Value, true)}
}
panic(fmt.Sprintf("unreachable (%T)", x))
}
// checkObj type checks an object.
func (c *checker) checkObj(obj *ast.Object, ref bool) {
if obj.Type != nil {
// object has already been type checked
return
}
switch obj.Kind {
case ast.Bad:
// ignore
case ast.Con:
// TODO(gri) complete this
case ast.Typ:
typ := &Name{Obj: obj}
obj.Type = typ // "mark" object so recursion terminates
typ.Underlying = Underlying(c.makeType(obj.Decl.(*ast.TypeSpec).Type, ref))
case ast.Var:
// TODO(gri) complete this
case ast.Fun:
fdecl := obj.Decl.(*ast.FuncDecl)
ftyp := c.makeType(fdecl.Type, ref).(*Func)
obj.Type = ftyp
if fdecl.Recv != nil {
recvField := fdecl.Recv.List[0]
if len(recvField.Names) > 0 {
ftyp.Recv = recvField.Names[0].Obj
} else {
ftyp.Recv = ast.NewObj(ast.Var, "_")
ftyp.Recv.Decl = recvField
}
c.checkObj(ftyp.Recv, ref)
// TODO(axw) add method to a list in the receiver type.
}
// TODO(axw) check function body, if non-nil.
default:
panic("unreachable")
}
}
// Check typechecks a package.
// It augments the AST by assigning types to all ast.Objects and returns a map
// of types for all expression nodes in statements, and a scanner.ErrorList if
// there are errors.
//
func Check(fset *token.FileSet, pkg *ast.Package) (types map[ast.Expr]Type, err error) {
// Sort objects so that we get reproducible error
// positions (this is only needed for testing).
// TODO(gri): Consider ast.Scope implementation that
// provides both a list and a map for fast lookup.
// Would permit the use of scopes instead of ObjMaps
// elsewhere.
list := make(ObjList, len(pkg.Scope.Objects))
i := 0
for _, obj := range pkg.Scope.Objects {
list[i] = obj
i++
}
list.Sort()
var c checker
c.fset = fset
c.types = make(map[ast.Expr]Type)
for _, obj := range list {
c.checkObj(obj, false)
}
c.errors.RemoveMultiples()
return c.types, c.errors.Err()
}
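For comparison with the new call above, a minimal sketch of driving this (now removed) Check from inside the package, the way check_test.go below does; it assumes go/parser is imported alongside the imports listed at the top of the file, and exampleCheck is an illustrative name.

// Sketch (in package types): parse, resolve, and type-check a single source string.
func exampleCheck() (map[ast.Expr]Type, error) {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", `package p; var x = 1 + 2`, parser.DeclarationErrors)
	if err != nil {
		return nil, err
	}
	// GcImport and Universe are the importer and universe scope defined in this package.
	pkg, err := ast.NewPackage(fset, map[string]*ast.File{"p.go": file}, GcImport, Universe)
	if err != nil {
		return nil, err
	}
	// The old API returns the expression-type map directly, plus accumulated errors.
	return Check(fset, pkg)
}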


@@ -1,217 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements a typechecker test harness. The packages specified
// in tests are typechecked. Error messages reported by the typechecker are
// compared against the error messages expected in the test files.
//
// Expected errors are indicated in the test files by putting a comment
// of the form /* ERROR "rx" */ immediately following an offending token.
// The harness will verify that an error matching the regular expression
// rx is reported at that source position. Consecutive comments may be
// used to indicate multiple errors for the same token position.
//
// For instance, the following test file indicates that a "not declared"
// error should be reported for the undeclared variable x:
//
// package p
// func f() {
// _ = x /* ERROR "not declared" */ + 1
// }
package types
import (
"fmt"
"go/ast"
"go/parser"
"go/scanner"
"go/token"
"io/ioutil"
"os"
"regexp"
"testing"
)
// The test filenames do not end in .go so that they are invisible
// to gofmt since they contain comments that must not change their
// positions relative to surrounding tokens.
var tests = []struct {
name string
files []string
}{
{"test0", []string{"testdata/test0.src"}},
}
var fset = token.NewFileSet()
func getFile(filename string) (file *token.File) {
fset.Iterate(func(f *token.File) bool {
if f.Name() == filename {
file = f
return false // end iteration
}
return true
})
return file
}
func getPos(filename string, offset int) token.Pos {
if f := getFile(filename); f != nil {
return f.Pos(offset)
}
return token.NoPos
}
func parseFiles(t *testing.T, testname string, filenames []string) (map[string]*ast.File, error) {
files := make(map[string]*ast.File)
var errors scanner.ErrorList
for _, filename := range filenames {
if _, exists := files[filename]; exists {
t.Fatalf("%s: duplicate file %s", testname, filename)
}
file, err := parser.ParseFile(fset, filename, nil, parser.DeclarationErrors)
if file == nil {
t.Fatalf("%s: could not parse file %s", testname, filename)
}
files[filename] = file
if err != nil {
// if the parser returns a non-scanner.ErrorList error
// the file couldn't be read in the first place and
// file == nil; in that case we shouldn't reach here
errors = append(errors, err.(scanner.ErrorList)...)
}
}
return files, errors
}
// ERROR comments must be of the form /* ERROR "rx" */ and rx is
// a regular expression that matches the expected error message.
//
var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)
// expectedErrors collects the regular expressions of ERROR comments found
// in files and returns them as a map of error positions to error messages.
//
func expectedErrors(t *testing.T, testname string, files map[string]*ast.File) map[token.Pos]string {
errors := make(map[token.Pos]string)
for filename := range files {
src, err := ioutil.ReadFile(filename)
if err != nil {
t.Fatalf("%s: could not read %s", testname, filename)
}
var s scanner.Scanner
// file was parsed already - do not add it again to the file
// set otherwise the position information returned here will
// not match the position information collected by the parser
s.Init(getFile(filename), src, nil, scanner.ScanComments)
var prev token.Pos // position of last non-comment, non-semicolon token
scanFile:
for {
pos, tok, lit := s.Scan()
switch tok {
case token.EOF:
break scanFile
case token.COMMENT:
s := errRx.FindStringSubmatch(lit)
if len(s) == 2 {
errors[prev] = string(s[1])
}
case token.SEMICOLON:
// ignore automatically inserted semicolon
if lit == "\n" {
break
}
fallthrough
default:
prev = pos
}
}
}
return errors
}
func eliminate(t *testing.T, expected map[token.Pos]string, errors error) {
if errors == nil {
return
}
for _, error := range errors.(scanner.ErrorList) {
// error.Pos is a token.Position, but we want
// a token.Pos so we can do a map lookup
pos := getPos(error.Pos.Filename, error.Pos.Offset)
if msg, found := expected[pos]; found {
// we expect a message at pos; check if it matches
rx, err := regexp.Compile(msg)
if err != nil {
t.Errorf("%s: %v", error.Pos, err)
continue
}
if match := rx.MatchString(error.Msg); !match {
t.Errorf("%s: %q does not match %q", error.Pos, error.Msg, msg)
continue
}
// we have a match - eliminate this error
delete(expected, pos)
} else {
// To keep in mind when analyzing failed test output:
// If the same error position occurs multiple times in errors,
// this message will be triggered (because the first error at
// the position removes this position from the expected errors).
t.Errorf("%s: no (multiple?) error expected, but found: %s", error.Pos, error.Msg)
}
}
}
func check(t *testing.T, testname string, testfiles []string) {
// TODO(gri) Eventually all these different phases should be
// subsumed into a single function call that takes
// a set of files and creates a fully resolved and
// type-checked AST.
files, err := parseFiles(t, testname, testfiles)
// we are expecting the following errors
// (collect these after parsing the files so that
// they are found in the file set)
errors := expectedErrors(t, testname, files)
// verify errors returned by the parser
eliminate(t, errors, err)
// verify errors returned after resolving identifiers
pkg, err := ast.NewPackage(fset, files, GcImport, Universe)
eliminate(t, errors, err)
// verify errors returned by the typechecker
_, err = Check(fset, pkg)
eliminate(t, errors, err)
// there should be no expected errors left
if len(errors) > 0 {
t.Errorf("%s: %d errors not reported:", testname, len(errors))
for pos, msg := range errors {
t.Errorf("%s: %s\n", fset.Position(pos), msg)
}
}
}
func TestCheck(t *testing.T) {
// For easy debugging w/o changing the testing code,
// if there is a local test file, only test that file.
const testfile = "testdata/test.go"
if fi, err := os.Stat(testfile); err == nil && !fi.IsDir() {
fmt.Printf("WARNING: Testing only %s (remove it to run all tests)\n", testfile)
check(t, testfile, []string{testfile})
return
}
// Otherwise, run all the tests.
for _, test := range tests {
check(t, test.name, test.files)
}
}


@@ -1,332 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements operations on ideal constants.
package types
import (
"go/token"
"math/big"
"strconv"
)
// TODO(gri) Consider changing the API so Const is an interface
// and operations on consts don't have to type switch.
// A Const implements an ideal constant Value.
// The zero value z for a Const is not a valid constant value.
type Const struct {
// representation of constant values:
// ideal bool -> bool
// ideal int -> *big.Int
// ideal float -> *big.Rat
// ideal complex -> cmplx
// ideal string -> string
val interface{}
}
// Representation of complex values.
type cmplx struct {
re, im *big.Rat
}
func assert(cond bool) {
if !cond {
panic("go/types internal error: assertion failed")
}
}
// MakeConst makes an ideal constant from a literal
// token and the corresponding literal string.
func MakeConst(tok token.Token, lit string) Const {
switch tok {
case token.INT:
var x big.Int
_, ok := x.SetString(lit, 0)
assert(ok)
return Const{&x}
case token.FLOAT:
var y big.Rat
_, ok := y.SetString(lit)
assert(ok)
return Const{&y}
case token.IMAG:
assert(lit[len(lit)-1] == 'i')
var im big.Rat
_, ok := im.SetString(lit[0 : len(lit)-1])
assert(ok)
return Const{cmplx{big.NewRat(0, 1), &im}}
case token.CHAR:
assert(lit[0] == '\'' && lit[len(lit)-1] == '\'')
code, _, _, err := strconv.UnquoteChar(lit[1:len(lit)-1], '\'')
assert(err == nil)
return Const{big.NewInt(int64(code))}
case token.STRING:
s, err := strconv.Unquote(lit)
assert(err == nil)
return Const{s}
}
panic("unreachable")
}
// MakeZero returns the zero constant for the given type.
func MakeZero(typ *Type) Const {
// TODO(gri) fix this
return Const{0}
}
// Match attempts to match the internal constant representations of x and y.
// If the attempt is successful, the result is the values of x and y,
// if necessary converted to have the same internal representation; otherwise
// the results are invalid.
func (x Const) Match(y Const) (u, v Const) {
switch a := x.val.(type) {
case bool:
if _, ok := y.val.(bool); ok {
u, v = x, y
}
case *big.Int:
switch y.val.(type) {
case *big.Int:
u, v = x, y
case *big.Rat:
var z big.Rat
z.SetInt(a)
u, v = Const{&z}, y
case cmplx:
var z big.Rat
z.SetInt(a)
u, v = Const{cmplx{&z, big.NewRat(0, 1)}}, y
}
case *big.Rat:
switch y.val.(type) {
case *big.Int:
v, u = y.Match(x)
case *big.Rat:
u, v = x, y
case cmplx:
u, v = Const{cmplx{a, big.NewRat(0, 1)}}, y // zero imaginary part (a zero denominator in NewRat would panic)
}
case cmplx:
switch y.val.(type) {
case *big.Int, *big.Rat:
v, u = y.Match(x)
case cmplx:
u, v = x, y
}
case string:
if _, ok := y.val.(string); ok {
u, v = x, y
}
default:
panic("unreachable")
}
return
}
// Convert attempts to convert the constant x to a given type.
// If the attempt is successful, the result is the new constant;
// otherwise the result is invalid.
func (x Const) Convert(typ *Type) Const {
// TODO(gri) implement this
switch x.val.(type) {
case bool:
case *big.Int:
case *big.Rat:
case cmplx:
case string:
}
return x
}
func (x Const) String() string {
switch x := x.val.(type) {
case bool:
if x {
return "true"
}
return "false"
case *big.Int:
return x.String()
case *big.Rat:
return x.FloatString(10) // 10 digits of precision after decimal point seems fine
case cmplx:
// TODO(gri) don't print 0 components
return x.re.FloatString(10) + " + " + x.im.FloatString(10) + "i"
case string:
return x
}
panic("unreachable")
}
func (x Const) UnaryOp(op token.Token) Const {
panic("unimplemented")
}
func (x Const) BinaryOp(op token.Token, y Const) Const {
var z interface{}
switch x := x.val.(type) {
case bool:
z = binaryBoolOp(x, op, y.val.(bool))
case *big.Int:
z = binaryIntOp(x, op, y.val.(*big.Int))
case *big.Rat:
z = binaryFloatOp(x, op, y.val.(*big.Rat))
case cmplx:
z = binaryCmplxOp(x, op, y.val.(cmplx))
case string:
z = binaryStringOp(x, op, y.val.(string))
default:
panic("unreachable")
}
return Const{z}
}
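A small sketch of how the pieces above compose: MakeConst builds ideal constants from literal tokens, Match promotes mismatched representations, and BinaryOp folds them. The function name is illustrative and relies only on this file's own imports.

// Sketch (in package types): fold the ideal-constant expression 3 + 0.5.
func exampleConstFold() string {
	x := MakeConst(token.INT, "3")     // ideal int, backed by *big.Int
	y := MakeConst(token.FLOAT, "0.5") // ideal float, backed by *big.Rat
	x, y = x.Match(y)                  // promote the int to *big.Rat so both sides agree
	sum := x.BinaryOp(token.ADD, y)    // ideal 3.5
	return sum.String()                // "3.5000000000" (FloatString with 10 digits)
}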
func binaryBoolOp(x bool, op token.Token, y bool) interface{} {
switch op {
case token.EQL:
return x == y
case token.NEQ:
return x != y
}
panic("unreachable")
}
func binaryIntOp(x *big.Int, op token.Token, y *big.Int) interface{} {
var z big.Int
switch op {
case token.ADD:
return z.Add(x, y)
case token.SUB:
return z.Sub(x, y)
case token.MUL:
return z.Mul(x, y)
case token.QUO:
return z.Quo(x, y)
case token.REM:
return z.Rem(x, y)
case token.AND:
return z.And(x, y)
case token.OR:
return z.Or(x, y)
case token.XOR:
return z.Xor(x, y)
case token.AND_NOT:
return z.AndNot(x, y)
case token.SHL:
panic("unimplemented")
case token.SHR:
panic("unimplemented")
case token.EQL:
return x.Cmp(y) == 0
case token.NEQ:
return x.Cmp(y) != 0
case token.LSS:
return x.Cmp(y) < 0
case token.LEQ:
return x.Cmp(y) <= 0
case token.GTR:
return x.Cmp(y) > 0
case token.GEQ:
return x.Cmp(y) >= 0
}
panic("unreachable")
}
func binaryFloatOp(x *big.Rat, op token.Token, y *big.Rat) interface{} {
var z big.Rat
switch op {
case token.ADD:
return z.Add(x, y)
case token.SUB:
return z.Sub(x, y)
case token.MUL:
return z.Mul(x, y)
case token.QUO:
return z.Quo(x, y)
case token.EQL:
return x.Cmp(y) == 0
case token.NEQ:
return x.Cmp(y) != 0
case token.LSS:
return x.Cmp(y) < 0
case token.LEQ:
return x.Cmp(y) <= 0
case token.GTR:
return x.Cmp(y) > 0
case token.GEQ:
return x.Cmp(y) >= 0
}
panic("unreachable")
}
func binaryCmplxOp(x cmplx, op token.Token, y cmplx) interface{} {
a, b := x.re, x.im
c, d := y.re, y.im
switch op {
case token.ADD:
// (a+c) + i(b+d)
var re, im big.Rat
re.Add(a, c)
im.Add(b, d)
return cmplx{&re, &im}
case token.SUB:
// (a-c) + i(b-d)
var re, im big.Rat
re.Sub(a, c)
im.Sub(b, d)
return cmplx{&re, &im}
case token.MUL:
// (ac-bd) + i(bc+ad)
var ac, bd, bc, ad big.Rat
ac.Mul(a, c)
bd.Mul(b, d)
bc.Mul(b, c)
ad.Mul(a, d)
var re, im big.Rat
re.Sub(&ac, &bd)
im.Add(&bc, &ad)
return cmplx{&re, &im}
case token.QUO:
// (ac+bd)/s + i(bc-ad)/s, with s = cc + dd
var ac, bd, bc, ad, s big.Rat
ac.Mul(a, c)
bd.Mul(b, d)
bc.Mul(b, c)
ad.Mul(a, d)
s.Add(c.Mul(c, c), d.Mul(d, d))
var re, im big.Rat
re.Add(&ac, &bd)
re.Quo(&re, &s)
im.Sub(&bc, &ad)
im.Quo(&im, &s)
return cmplx{&re, &im}
case token.EQL:
return a.Cmp(c) == 0 && b.Cmp(d) == 0
case token.NEQ:
return a.Cmp(c) != 0 || b.Cmp(d) != 0
}
panic("unreachable")
}
func binaryStringOp(x string, op token.Token, y string) interface{} {
switch op {
case token.ADD:
return x + y
case token.EQL:
return x == y
case token.NEQ:
return x != y
case token.LSS:
return x < y
case token.LEQ:
return x <= y
case token.GTR:
return x > y
case token.GEQ:
return x >= y
}
panic("unreachable")
}


@@ -1,111 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements FindGcExportData.
package types
import (
"bufio"
"errors"
"fmt"
"io"
"strconv"
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
if err != nil {
return
}
// leave for debugging
if false {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = errors.New("invalid archive header")
return
}
name = strings.TrimSpace(string(hdr[:16]))
return
}
// FindGcExportData positions the reader r at the beginning of the
// export data section of an underlying GC-created object/archive
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function.
//
func FindGcExportData(r *bufio.Reader) (err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
return
}
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF, which should
// be second archive entry.
var name string
var size int
// First entry should be __.GOSYMDEF.
// Older archives used __.SYMDEF, so allow that too.
// Read and discard.
if name, size, err = readGopackHeader(r); err != nil {
return
}
if name != "__.SYMDEF" && name != "__.GOSYMDEF" {
err = errors.New("go archive does not begin with __.SYMDEF or __.GOSYMDEF")
return
}
const block = 4096
tmp := make([]byte, block)
for size > 0 {
n := size
if n > block {
n = block
}
if _, err = io.ReadFull(r, tmp[:n]); err != nil {
return
}
size -= n
}
// Second entry should be __.PKGDEF.
if name, size, err = readGopackHeader(r); err != nil {
return
}
if name != "__.PKGDEF" {
err = errors.New("go archive is missing __.PKGDEF")
return
}
// Read first line of __.PKGDEF data, so that line
// is once again the first line of the input.
if line, err = r.ReadSlice('\n'); err != nil {
return
}
}
// Now at __.PKGDEF in archive or still at beginning of file.
// Either way, line should begin with "go object ".
if !strings.HasPrefix(string(line), "go object ") {
err = errors.New("not a go object file")
return
}
// Skip over object header to export data.
// Begins after first line with $$.
for line[0] != '$' {
if line, err = r.ReadSlice('\n'); err != nil {
return
}
}
return
}


@@ -1,881 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements an ast.Importer for gc-generated object files.
// TODO(gri) Eventually move this into a separate package outside types.
package types
import (
"bufio"
"errors"
"fmt"
"go/ast"
"go/build"
"go/token"
"io"
"math/big"
"os"
"path/filepath"
"strconv"
"strings"
"text/scanner"
)
const trace = false // set to true for debugging
var pkgExts = [...]string{".a", ".5", ".6", ".8"}
// FindPkg returns the filename and unique package id for an import
// path based on package information provided by build.Import (using
// the build.Default build.Context).
// If no file was found, an empty filename is returned.
//
func FindPkg(path, srcDir string) (filename, id string) {
if len(path) == 0 {
return
}
id = path
var noext string
switch {
default:
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
// Don't require the source files to be present.
bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
if bp.PkgObj == "" {
return
}
noext = bp.PkgObj
if strings.HasSuffix(noext, ".a") {
noext = noext[:len(noext)-len(".a")]
}
case build.IsLocalImport(path):
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
noext = filepath.Join(srcDir, path)
id = noext
case filepath.IsAbs(path):
// for completeness only - go/build.Import
// does not support absolute imports
// "/x" -> "/x.ext", "/x"
noext = path
}
// try extensions
for _, ext := range pkgExts {
filename = noext + ext
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
return
}
}
filename = "" // not found
return
}
// GcImportData imports a package by reading the gc-generated export data,
// adds the corresponding package object to the imports map indexed by id,
// and returns the object.
//
// The imports map must contain all packages already imported, and there must
// be no existing map entry with id as the key. The data reader position must
// be the beginning of the export data section. The filename is only used
// in error messages.
//
func GcImportData(imports map[string]*ast.Object, filename, id string, data *bufio.Reader) (pkg *ast.Object, err error) {
if trace {
fmt.Printf("importing %s (%s)\n", id, filename)
}
// support for gcParser error handling
defer func() {
if r := recover(); r != nil {
err = r.(importError) // will re-panic if r is not an importError
}
}()
var p gcParser
p.init(filename, id, data, imports)
pkg = p.parseExport()
return
}
// GcImport imports a gc-generated package given its import path, adds the
// corresponding package object to the imports map, and returns the object.
// Local import paths are interpreted relative to the current working directory.
// The imports map must contain all packages already imported.
// GcImport satisfies the ast.Importer signature.
//
func GcImport(imports map[string]*ast.Object, path string) (pkg *ast.Object, err error) {
if path == "unsafe" {
return Unsafe, nil
}
srcDir, err := os.Getwd()
if err != nil {
return
}
filename, id := FindPkg(path, srcDir)
if filename == "" {
err = errors.New("can't find import: " + id)
return
}
// Note: imports[id] may already contain a partially imported package.
// We must continue doing the full import here since we don't
// know if something is missing.
// TODO: There's no need to re-import a package if we know that we
// have done a full import before. At the moment we cannot
// tell from the available information in this function alone.
// open file
f, err := os.Open(filename)
if err != nil {
return
}
defer func() {
f.Close()
if err != nil {
// Add file name to error.
err = fmt.Errorf("reading export data: %s: %v", filename, err)
}
}()
buf := bufio.NewReader(f)
if err = FindGcExportData(buf); err != nil {
return
}
pkg, err = GcImportData(imports, filename, id, buf)
return
}
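A minimal sketch of using GcImport the way the tests below do; it assumes a compiled package object for "fmt" is installed for the current GOOS/GOARCH, and exampleGcImport is an illustrative name.

// Sketch (in package types): import the compiled "fmt" package and look up Println.
func exampleGcImport() (*ast.Object, error) {
	imports := make(map[string]*ast.Object) // shared package id -> package object map
	pkg, err := GcImport(imports, "fmt")
	if err != nil {
		return nil, err
	}
	// Exported objects live in the package scope stored in pkg.Data.
	return pkg.Data.(*ast.Scope).Lookup("Println"), nil
}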
// ----------------------------------------------------------------------------
// gcParser
// gcParser parses the exports inside a gc compiler-produced
// object/archive file and populates its scope with the results.
type gcParser struct {
scanner scanner.Scanner
tok rune // current token
lit string // literal string; only valid for Ident, Int, String tokens
id string // package id of imported package
imports map[string]*ast.Object // package id -> package object
}
func (p *gcParser) init(filename, id string, src io.Reader, imports map[string]*ast.Object) {
p.scanner.Init(src)
p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
p.scanner.Whitespace = 1<<'\t' | 1<<' '
p.scanner.Filename = filename // for good error messages
p.next()
p.id = id
p.imports = imports
}
func (p *gcParser) next() {
p.tok = p.scanner.Scan()
switch p.tok {
case scanner.Ident, scanner.Int, scanner.String, '·':
p.lit = p.scanner.TokenText()
default:
p.lit = ""
}
if trace {
fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
}
}
// Declare inserts a named object of the given kind in scope.
func (p *gcParser) declare(scope *ast.Scope, kind ast.ObjKind, name string) *ast.Object {
// the object may have been imported before - if it exists
// already in the respective package scope, return that object
if obj := scope.Lookup(name); obj != nil {
assert(obj.Kind == kind)
return obj
}
// otherwise create a new object and insert it into the package scope
obj := ast.NewObj(kind, name)
if scope.Insert(obj) != nil {
p.errorf("already declared: %v %s", kind, obj.Name)
}
// a new type object is a named type and may be referred
// to before the underlying type is known - set it up
if kind == ast.Typ {
obj.Type = &Name{Obj: obj}
}
return obj
}
// ----------------------------------------------------------------------------
// Error handling
// Internal errors are boxed as importErrors.
type importError struct {
pos scanner.Position
err error
}
func (e importError) Error() string {
return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
}
func (p *gcParser) error(err interface{}) {
if s, ok := err.(string); ok {
err = errors.New(s)
}
// panic with a runtime.Error if err is not an error
panic(importError{p.scanner.Pos(), err.(error)})
}
func (p *gcParser) errorf(format string, args ...interface{}) {
p.error(fmt.Sprintf(format, args...))
}
func (p *gcParser) expect(tok rune) string {
lit := p.lit
if p.tok != tok {
p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
}
p.next()
return lit
}
func (p *gcParser) expectSpecial(tok string) {
sep := 'x' // not white space
i := 0
for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
p.next()
i++
}
if i < len(tok) {
p.errorf("expected %q, got %q", tok, tok[0:i])
}
}
func (p *gcParser) expectKeyword(keyword string) {
lit := p.expect(scanner.Ident)
if lit != keyword {
p.errorf("expected keyword %s, got %q", keyword, lit)
}
}
// ----------------------------------------------------------------------------
// Import declarations
// ImportPath = string_lit .
//
func (p *gcParser) parsePkgId() *ast.Object {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
p.error(err)
}
switch id {
case "":
// id == "" stands for the imported package id
// (only known at time of package installation)
id = p.id
case "unsafe":
// package unsafe is not in the imports map - handle explicitly
return Unsafe
}
pkg := p.imports[id]
if pkg == nil {
pkg = ast.NewObj(ast.Pkg, "")
pkg.Data = ast.NewScope(nil)
p.imports[id] = pkg
}
return pkg
}
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
func (p *gcParser) parseDotIdent() string {
ident := ""
if p.tok != scanner.Int {
sep := 'x' // not white space
for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
ident += p.lit
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
p.next()
}
}
if ident == "" {
p.expect(scanner.Ident) // use expect() for error handling
}
return ident
}
// ExportedName = "@" ImportPath "." dotIdentifier .
//
func (p *gcParser) parseExportedName() (*ast.Object, string) {
p.expect('@')
pkg := p.parsePkgId()
p.expect('.')
name := p.parseDotIdent()
return pkg, name
}
// ----------------------------------------------------------------------------
// Types
// BasicType = identifier .
//
func (p *gcParser) parseBasicType() Type {
id := p.expect(scanner.Ident)
obj := Universe.Lookup(id)
if obj == nil || obj.Kind != ast.Typ {
p.errorf("not a basic type: %s", id)
}
return obj.Type.(Type)
}
// ArrayType = "[" int_lit "]" Type .
//
func (p *gcParser) parseArrayType() Type {
// "[" already consumed and lookahead known not to be "]"
lit := p.expect(scanner.Int)
p.expect(']')
elt := p.parseType()
n, err := strconv.ParseUint(lit, 10, 64)
if err != nil {
p.error(err)
}
return &Array{Len: n, Elt: elt}
}
// MapType = "map" "[" Type "]" Type .
//
func (p *gcParser) parseMapType() Type {
p.expectKeyword("map")
p.expect('[')
key := p.parseType()
p.expect(']')
elt := p.parseType()
return &Map{Key: key, Elt: elt}
}
// Name = identifier | "?" | ExportedName .
//
func (p *gcParser) parseName() (name string) {
switch p.tok {
case scanner.Ident:
name = p.lit
p.next()
case '?':
// anonymous
p.next()
case '@':
// exported name prefixed with package path
_, name = p.parseExportedName()
default:
p.error("name expected")
}
return
}
// Field = Name Type [ string_lit ] .
//
func (p *gcParser) parseField() (fld *ast.Object, tag string) {
name := p.parseName()
ftyp := p.parseType()
if name == "" {
// anonymous field - ftyp must be T or *T and T must be a type name
if _, ok := Deref(ftyp).(*Name); !ok {
p.errorf("anonymous field expected")
}
}
if p.tok == scanner.String {
tag = p.expect(scanner.String)
}
fld = ast.NewObj(ast.Var, name)
fld.Type = ftyp
return
}
// StructType = "struct" "{" [ FieldList ] "}" .
// FieldList = Field { ";" Field } .
//
func (p *gcParser) parseStructType() Type {
var fields []*ast.Object
var tags []string
parseField := func() {
fld, tag := p.parseField()
fields = append(fields, fld)
tags = append(tags, tag)
}
p.expectKeyword("struct")
p.expect('{')
if p.tok != '}' {
parseField()
for p.tok == ';' {
p.next()
parseField()
}
}
p.expect('}')
return &Struct{Fields: fields, Tags: tags}
}
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
//
func (p *gcParser) parseParameter() (par *ast.Object, isVariadic bool) {
name := p.parseName()
if name == "" {
name = "_" // cannot access unnamed identifiers
}
if p.tok == '.' {
p.expectSpecial("...")
isVariadic = true
}
ptyp := p.parseType()
// ignore argument tag
if p.tok == scanner.String {
p.expect(scanner.String)
}
par = ast.NewObj(ast.Var, name)
par.Type = ptyp
return
}
// Parameters = "(" [ ParameterList ] ")" .
// ParameterList = { Parameter "," } Parameter .
//
func (p *gcParser) parseParameters() (list []*ast.Object, isVariadic bool) {
parseParameter := func() {
par, variadic := p.parseParameter()
list = append(list, par)
if variadic {
if isVariadic {
p.error("... not on final argument")
}
isVariadic = true
}
}
p.expect('(')
if p.tok != ')' {
parseParameter()
for p.tok == ',' {
p.next()
parseParameter()
}
}
p.expect(')')
return
}
// Signature = Parameters [ Result ] .
// Result = Type | Parameters .
//
func (p *gcParser) parseSignature() *Func {
params, isVariadic := p.parseParameters()
// optional result type
var results []*ast.Object
switch p.tok {
case scanner.Ident, '[', '*', '<', '@':
// single, unnamed result
result := ast.NewObj(ast.Var, "_")
result.Type = p.parseType()
results = []*ast.Object{result}
case '(':
// named or multiple result(s)
var variadic bool
results, variadic = p.parseParameters()
if variadic {
p.error("... not permitted on result type")
}
}
return &Func{Params: params, Results: results, IsVariadic: isVariadic}
}
// InterfaceType = "interface" "{" [ MethodList ] "}" .
// MethodList = Method { ";" Method } .
// Method = Name Signature .
//
// (The methods of embedded interfaces are always "inlined"
// by the compiler and thus embedded interfaces are never
// visible in the export data.)
//
func (p *gcParser) parseInterfaceType() Type {
var methods ObjList
parseMethod := func() {
obj := ast.NewObj(ast.Fun, p.parseName())
obj.Type = p.parseSignature()
methods = append(methods, obj)
}
p.expectKeyword("interface")
p.expect('{')
if p.tok != '}' {
parseMethod()
for p.tok == ';' {
p.next()
parseMethod()
}
}
p.expect('}')
methods.Sort()
return &Interface{Methods: methods}
}
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
//
func (p *gcParser) parseChanType() Type {
dir := ast.SEND | ast.RECV
if p.tok == scanner.Ident {
p.expectKeyword("chan")
if p.tok == '<' {
p.expectSpecial("<-")
dir = ast.SEND
}
} else {
p.expectSpecial("<-")
p.expectKeyword("chan")
dir = ast.RECV
}
elt := p.parseType()
return &Chan{Dir: dir, Elt: elt}
}
// Type =
// BasicType | TypeName | ArrayType | SliceType | StructType |
// PointerType | FuncType | InterfaceType | MapType | ChanType |
// "(" Type ")" .
// BasicType = ident .
// TypeName = ExportedName .
// SliceType = "[" "]" Type .
// PointerType = "*" Type .
// FuncType = "func" Signature .
//
func (p *gcParser) parseType() Type {
switch p.tok {
case scanner.Ident:
switch p.lit {
default:
return p.parseBasicType()
case "struct":
return p.parseStructType()
case "func":
// FuncType
p.next()
return p.parseSignature()
case "interface":
return p.parseInterfaceType()
case "map":
return p.parseMapType()
case "chan":
return p.parseChanType()
}
case '@':
// TypeName
pkg, name := p.parseExportedName()
return p.declare(pkg.Data.(*ast.Scope), ast.Typ, name).Type.(Type)
case '[':
p.next() // look ahead
if p.tok == ']' {
// SliceType
p.next()
return &Slice{Elt: p.parseType()}
}
return p.parseArrayType()
case '*':
// PointerType
p.next()
return &Pointer{Base: p.parseType()}
case '<':
return p.parseChanType()
case '(':
// "(" Type ")"
p.next()
typ := p.parseType()
p.expect(')')
return typ
}
p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
return nil
}
// ----------------------------------------------------------------------------
// Declarations
// ImportDecl = "import" identifier string_lit .
//
func (p *gcParser) parseImportDecl() {
p.expectKeyword("import")
// The identifier has no semantic meaning in the import data.
// It exists so that error messages can print the real package
// name: binary.ByteOrder instead of "encoding/binary".ByteOrder.
name := p.expect(scanner.Ident)
pkg := p.parsePkgId()
assert(pkg.Name == "" || pkg.Name == name)
pkg.Name = name
}
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
//
func (p *gcParser) parseInt() (sign, val string) {
switch p.tok {
case '-':
p.next()
sign = "-"
case '+':
p.next()
}
val = p.expect(scanner.Int)
return
}
// number = int_lit [ "p" int_lit ] .
//
func (p *gcParser) parseNumber() Const {
// mantissa
sign, val := p.parseInt()
mant, ok := new(big.Int).SetString(sign+val, 0)
assert(ok)
if p.lit == "p" {
// exponent (base 2)
p.next()
sign, val = p.parseInt()
exp64, err := strconv.ParseUint(val, 10, 0)
if err != nil {
p.error(err)
}
exp := uint(exp64)
if sign == "-" {
denom := big.NewInt(1)
denom.Lsh(denom, exp)
return Const{new(big.Rat).SetFrac(mant, denom)}
}
if exp > 0 {
mant.Lsh(mant, exp)
}
return Const{new(big.Rat).SetInt(mant)}
}
return Const{mant}
}
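To make the base-2 exponent format concrete, a hedged example: for a literal such as 25p-3 (mantissa 25, exponent -3), parseNumber produces 25/2^3 = 3.125 as a *big.Rat, as sketched below with the same big.Int/big.Rat steps used above.

// Sketch: the value parseNumber builds for the export-data literal "25p-3".
func exampleExponent() *big.Rat {
	mant := big.NewInt(25)
	denom := new(big.Int).Lsh(big.NewInt(1), 3) // 2^3
	return new(big.Rat).SetFrac(mant, denom)    // 25/8 = 3.125
}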
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
// Literal = bool_lit | int_lit | float_lit | complex_lit | string_lit .
// bool_lit = "true" | "false" .
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
// rune_lit = "(" int_lit "+" int_lit ")" .
// string_lit = `"` { unicode_char } `"` .
//
func (p *gcParser) parseConstDecl() {
p.expectKeyword("const")
pkg, name := p.parseExportedName()
obj := p.declare(pkg.Data.(*ast.Scope), ast.Con, name)
var x Const
var typ Type
if p.tok != '=' {
obj.Type = p.parseType()
}
p.expect('=')
switch p.tok {
case scanner.Ident:
// bool_lit
if p.lit != "true" && p.lit != "false" {
p.error("expected true or false")
}
x = Const{p.lit == "true"}
typ = Bool.Underlying
p.next()
case '-', scanner.Int:
// int_lit
x = p.parseNumber()
typ = Int.Underlying
if _, ok := x.val.(*big.Rat); ok {
typ = Float64.Underlying
}
case '(':
// complex_lit or rune_lit
p.next()
if p.tok == scanner.Char {
p.next()
p.expect('+')
p.parseNumber()
p.expect(')')
// TODO: x = ...
break
}
re := p.parseNumber()
p.expect('+')
im := p.parseNumber()
p.expectKeyword("i")
p.expect(')')
x = Const{cmplx{re.val.(*big.Rat), im.val.(*big.Rat)}}
typ = Complex128.Underlying
case scanner.Char:
// TODO: x = ...
p.next()
case scanner.String:
// string_lit
x = MakeConst(token.STRING, p.lit)
p.next()
typ = String.Underlying
default:
p.errorf("expected literal got %s", scanner.TokenString(p.tok))
}
if obj.Type == nil {
obj.Type = typ
}
obj.Data = x
}
// TypeDecl = "type" ExportedName Type .
//
func (p *gcParser) parseTypeDecl() {
p.expectKeyword("type")
pkg, name := p.parseExportedName()
obj := p.declare(pkg.Data.(*ast.Scope), ast.Typ, name)
// The type object may have been imported before and thus already
// have a type associated with it. We still need to parse the type
// structure, but throw it away if the object already has a type.
// This ensures that all imports refer to the same type object for
// a given type declaration.
typ := p.parseType()
if name := obj.Type.(*Name); name.Underlying == nil {
assert(Underlying(typ) == typ)
name.Underlying = typ
}
}
// VarDecl = "var" ExportedName Type .
//
func (p *gcParser) parseVarDecl() {
p.expectKeyword("var")
pkg, name := p.parseExportedName()
obj := p.declare(pkg.Data.(*ast.Scope), ast.Var, name)
obj.Type = p.parseType()
}
// FuncBody = "{" ... "}" .
//
func (p *gcParser) parseFuncBody() {
p.expect('{')
for i := 1; i > 0; p.next() {
switch p.tok {
case '{':
i++
case '}':
i--
}
}
}
// FuncDecl = "func" ExportedName Signature [ FuncBody ] .
//
func (p *gcParser) parseFuncDecl() {
// "func" already consumed
pkg, name := p.parseExportedName()
obj := p.declare(pkg.Data.(*ast.Scope), ast.Fun, name)
obj.Type = p.parseSignature()
if p.tok == '{' {
p.parseFuncBody()
}
}
// MethodDecl = "func" Receiver Name Signature .
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" [ FuncBody ].
//
func (p *gcParser) parseMethodDecl() {
// "func" already consumed
p.expect('(')
p.parseParameter() // receiver
p.expect(')')
p.parseName() // unexported method names in imports are qualified with their package.
p.parseSignature()
if p.tok == '{' {
p.parseFuncBody()
}
}
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
//
func (p *gcParser) parseDecl() {
switch p.lit {
case "import":
p.parseImportDecl()
case "const":
p.parseConstDecl()
case "type":
p.parseTypeDecl()
case "var":
p.parseVarDecl()
case "func":
p.next() // look ahead
if p.tok == '(' {
p.parseMethodDecl()
} else {
p.parseFuncDecl()
}
}
p.expect('\n')
}
// ----------------------------------------------------------------------------
// Export
// Export = "PackageClause { Decl } "$$" .
// PackageClause = "package" identifier [ "safe" ] "\n" .
//
func (p *gcParser) parseExport() *ast.Object {
p.expectKeyword("package")
name := p.expect(scanner.Ident)
if p.tok != '\n' {
// A package is safe if it was compiled with the -u flag,
// which disables the unsafe package.
// TODO(gri) remember "safe" package
p.expectKeyword("safe")
}
p.expect('\n')
pkg := p.imports[p.id]
if pkg == nil {
pkg = ast.NewObj(ast.Pkg, name)
pkg.Data = ast.NewScope(nil)
p.imports[p.id] = pkg
}
for p.tok != '$' && p.tok != scanner.EOF {
p.parseDecl()
}
if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
// don't call next()/expect() since reading past the
// export data may cause scanner errors (e.g. NUL chars)
p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
}
if n := p.scanner.ErrorCount; n != 0 {
p.errorf("expected no scanner errors, got %d", n)
}
return pkg
}


@@ -1,153 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package types
import (
"go/ast"
"go/build"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"time"
)
var gcPath string // Go compiler path
func init() {
// determine compiler
var gc string
switch runtime.GOARCH {
case "386":
gc = "8g"
case "amd64":
gc = "6g"
case "arm":
gc = "5g"
default:
gcPath = "unknown-GOARCH-compiler"
return
}
gcPath = filepath.Join(build.ToolDir, gc)
}
func compile(t *testing.T, dirname, filename string) string {
cmd := exec.Command(gcPath, filename)
cmd.Dir = dirname
out, err := cmd.CombinedOutput()
if err != nil {
t.Logf("%s", out)
t.Fatalf("%s %s failed: %s", gcPath, filename, err)
}
archCh, _ := build.ArchChar(runtime.GOARCH)
// filename should end with ".go"
return filepath.Join(dirname, filename[:len(filename)-2]+archCh)
}
// Use the same global imports map for all tests. The effect is
// as if all tested packages were imported into a single package.
var imports = make(map[string]*ast.Object)
func testPath(t *testing.T, path string) bool {
_, err := GcImport(imports, path)
if err != nil {
t.Errorf("testPath(%s): %s", path, err)
return false
}
return true
}
const maxTime = 3 * time.Second
func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir)
list, err := ioutil.ReadDir(dirname)
if err != nil {
t.Errorf("testDir(%s): %s", dirname, err)
}
for _, f := range list {
if time.Now().After(endTime) {
t.Log("testing time used up")
return
}
switch {
case !f.IsDir():
// try extensions
for _, ext := range pkgExts {
if strings.HasSuffix(f.Name(), ext) {
name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension
if testPath(t, filepath.Join(dir, name)) {
nimports++
}
}
}
case f.IsDir():
nimports += testDir(t, filepath.Join(dir, f.Name()), endTime)
}
}
return
}
func TestGcImport(t *testing.T) {
// On cross-compile builds, the path will not exist.
// Need to use GOHOSTOS, which is not available.
if _, err := os.Stat(gcPath); err != nil {
t.Logf("skipping test: %v", err)
return
}
if outFn := compile(t, "testdata", "exports.go"); outFn != "" {
defer os.Remove(outFn)
}
nimports := 0
if testPath(t, "./testdata/exports") {
nimports++
}
nimports += testDir(t, "", time.Now().Add(maxTime)) // installed packages
t.Logf("tested %d imports", nimports)
}
var importedObjectTests = []struct {
name string
kind ast.ObjKind
typ string
}{
{"unsafe.Pointer", ast.Typ, "Pointer"},
{"math.Pi", ast.Con, "basicType"}, // TODO(gri) need to complete BasicType
{"io.Reader", ast.Typ, "interface{Read(p []byte) (n int, err error)}"},
{"io.ReadWriter", ast.Typ, "interface{Read(p []byte) (n int, err error); Write(p []byte) (n int, err error)}"},
{"math.Sin", ast.Fun, "func(x float64) (_ float64)"},
// TODO(gri) add more tests
}
func TestGcImportedTypes(t *testing.T) {
for _, test := range importedObjectTests {
s := strings.Split(test.name, ".")
if len(s) != 2 {
t.Fatal("inconsistent test data")
}
importPath := s[0]
objName := s[1]
pkg, err := GcImport(imports, importPath)
if err != nil {
t.Error(err)
continue
}
obj := pkg.Data.(*ast.Scope).Lookup(objName)
if obj.Kind != test.kind {
t.Errorf("%s: got kind = %q; want %q", test.name, obj.Kind, test.kind)
}
typ := TypeString(Underlying(obj.Type.(Type)))
if typ != test.typ {
t.Errorf("%s: got type = %q; want %q", test.name, typ, test.typ)
}
}
}


@@ -1,130 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package types
import (
"fmt"
"go/ast"
"go/parser"
"go/scanner"
"go/token"
"testing"
)
var sources = []string{
`package p
import "fmt"
import "math"
const pi = math.Pi
func sin(x float64) float64 {
return math.Sin(x)
}
var Println = fmt.Println
`,
`package p
import "fmt"
func f() string {
return fmt.Sprintf("%d", g())
}
`,
`package p
import . "go/parser"
func g() Mode { return ImportsOnly }`,
}
var pkgnames = []string{
"fmt",
"go/parser",
"math",
}
// ResolveQualifiedIdents resolves the selectors of qualified
// identifiers by associating the correct ast.Object with them.
// TODO(gri): Eventually, this functionality should be subsumed
// by Check.
//
func ResolveQualifiedIdents(fset *token.FileSet, pkg *ast.Package) error {
var errors scanner.ErrorList
findObj := func(pkg *ast.Object, name *ast.Ident) *ast.Object {
scope := pkg.Data.(*ast.Scope)
obj := scope.Lookup(name.Name)
if obj == nil {
errors.Add(fset.Position(name.Pos()), fmt.Sprintf("no %s in package %s", name.Name, pkg.Name))
}
return obj
}
ast.Inspect(pkg, func(n ast.Node) bool {
if s, ok := n.(*ast.SelectorExpr); ok {
if x, ok := s.X.(*ast.Ident); ok && x.Obj != nil && x.Obj.Kind == ast.Pkg {
// find selector in respective package
s.Sel.Obj = findObj(x.Obj, s.Sel)
}
return false
}
return true
})
return errors.Err()
}
func TestResolveQualifiedIdents(t *testing.T) {
// parse package files
fset := token.NewFileSet()
files := make(map[string]*ast.File)
for i, src := range sources {
filename := fmt.Sprintf("file%d", i)
f, err := parser.ParseFile(fset, filename, src, parser.DeclarationErrors)
if err != nil {
t.Fatal(err)
}
files[filename] = f
}
// resolve package AST
pkg, err := ast.NewPackage(fset, files, GcImport, Universe)
if err != nil {
t.Fatal(err)
}
// check that all packages were imported
for _, name := range pkgnames {
if pkg.Imports[name] == nil {
t.Errorf("package %s not imported", name)
}
}
// check that there are no top-level unresolved identifiers
for _, f := range pkg.Files {
for _, x := range f.Unresolved {
t.Errorf("%s: unresolved global identifier %s", fset.Position(x.Pos()), x.Name)
}
}
// resolve qualified identifiers
if err := ResolveQualifiedIdents(fset, pkg); err != nil {
t.Error(err)
}
// check that qualified identifiers are resolved
ast.Inspect(pkg, func(n ast.Node) bool {
if s, ok := n.(*ast.SelectorExpr); ok {
if x, ok := s.X.(*ast.Ident); ok {
if x.Obj == nil {
t.Errorf("%s: unresolved qualified identifier %s", fset.Position(x.Pos()), x.Name)
return false
}
if x.Obj.Kind == ast.Pkg && s.Sel != nil && s.Sel.Obj == nil {
t.Errorf("%s: unresolved selector %s", fset.Position(s.Sel.Pos()), s.Sel.Name)
return false
}
return false
}
return false
}
return true
})
}


@@ -1,89 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is used to generate an object file which
// serves as a test file for gcimporter_test.go.
package exports
import (
"go/ast"
)
// Issue 3682: Correctly read dotted identifiers from export data.
const init1 = 0
func init() {}
const (
C0 int = 0
C1 = 3.14159265
C2 = 2.718281828i
C3 = -123.456e-789
C4 = +123.456E+789
C5 = 1234i
C6 = "foo\n"
C7 = `bar\n`
)
type (
T1 int
T2 [10]int
T3 []int
T4 *int
T5 chan int
T6a chan<- int
T6b chan (<-chan int)
T6c chan<- (chan int)
T7 <-chan *ast.File
T8 struct{}
T9 struct {
a int
b, c float32
d []string `go:"tag"`
}
T10 struct {
T8
T9
_ *T10
}
T11 map[int]string
T12 interface{}
T13 interface {
m1()
m2(int) float32
}
T14 interface {
T12
T13
m3(x ...struct{}) []T9
}
T15 func()
T16 func(int)
T17 func(x int)
T18 func() float32
T19 func() (x float32)
T20 func(...interface{})
T21 struct{ next *T21 }
T22 struct{ link *T23 }
T23 struct{ link *T22 }
T24 *T24
T25 *T26
T26 *T27
T27 *T25
T28 func(T28) T28
)
var (
V0 int
V1 = -991.0
)
func F1() {}
func F2(x int) {}
func F3() int { return 0 }
func F4() float32 { return 0 }
func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10)
func (p *T1) M1()


@@ -1,163 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// type declarations
package test0
import (
"unsafe"
// we can have multiple blank imports (was bug)
_ "math"
_ "net/rpc"
)
const pi = 3.1415
type (
N undeclared /* ERROR "undeclared" */
B bool
I int32
A [10]P
T struct {
x, y P
}
P *T
R (*R)
F func(A) I
Y interface {
f(A) I
}
S [](((P)))
M map[I]F
C chan<- I
)
type (
p1 pi /* ERROR "not a package" */ .foo
p2 unsafe.Pointer
)
type (
Pi pi /* ERROR "not a type" */
a /* ERROR "illegal cycle" */ a
a /* ERROR "redeclared" */ int
// where the cycle error appears depends on the
// order in which declarations are processed
// (which depends on the order in which a map
// is iterated through)
b /* ERROR "illegal cycle" */ c
c d
d e
e b
t *t
U V
V *W
W U
P1 *S2
P2 P1
S0 struct {
}
S1 struct {
a, b, c int
u, v, a /* ERROR "redeclared" */ float32
}
S2 struct {
U // anonymous field
// TODO(gri) recognize double-declaration below
// U /* ERROR "redeclared" */ int
}
S3 struct {
x S2
}
S4/* ERROR "illegal cycle" */ struct {
S4
}
S5 /* ERROR "illegal cycle" */ struct {
S6
}
S6 struct {
field S7
}
S7 struct {
S5
}
L1 []L1
L2 []int
A1 [10]int
A2 /* ERROR "illegal cycle" */ [10]A2
A3 /* ERROR "illegal cycle" */ [10]struct {
x A4
}
A4 [10]A3
F1 func()
F2 func(x, y, z float32)
F3 func(x, y, x /* ERROR "redeclared" */ float32)
F4 func() (x, y, x /* ERROR "redeclared" */ float32)
F5 func(x int) (x /* ERROR "redeclared" */ float32)
F6 func(x ...int)
I1 interface{}
I2 interface {
m1()
}
I3 interface {
m1()
m1 /* ERROR "redeclared" */ ()
}
I4 interface {
m1(x, y, x /* ERROR "redeclared" */ float32)
m2() (x, y, x /* ERROR "redeclared" */ float32)
m3(x int) (x /* ERROR "redeclared" */ float32)
}
I5 interface {
m1(I5)
}
I6 interface {
S0 /* ERROR "non-interface" */
}
I7 interface {
I1
I1
}
I8 /* ERROR "illegal cycle" */ interface {
I8
}
// Use I09 (rather than I9) because it appears lexically before
// I10 so that we get the illegal cycle here rather than in the
// declaration of I10. If the implementation sorts by position
// rather than name, the error message will still be here.
I09 /* ERROR "illegal cycle" */ interface {
I10
}
I10 interface {
I11
}
I11 interface {
I09
}
C1 chan int
C2 <-chan int
C3 chan<- C3
C4 chan C5
C5 chan C6
C6 chan C4
M1 map[Last]string
M2 map[string]M2
Last int
)


@@ -1,377 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package types declares the types used to represent Go types
// (UNDER CONSTRUCTION). ANY AND ALL PARTS MAY CHANGE.
//
package types
import (
"bytes"
"fmt"
"go/ast"
"sort"
)
// All types implement the Type interface.
type Type interface {
isType()
}
// All concrete types embed implementsType which
// ensures that all types implement the Type interface.
type implementsType struct{}
func (t *implementsType) isType() {}
// A Bad type is a non-nil placeholder type when we don't know a type.
type Bad struct {
implementsType
Msg string // for better error reporting/debugging
}
// A Basic represents a (unnamed) basic type.
type Basic struct {
implementsType
// TODO(gri) need a field specifying the exact basic type
}
// An Array represents an array type [Len]Elt.
type Array struct {
implementsType
Len uint64
Elt Type
}
// A Slice represents a slice type []Elt.
type Slice struct {
implementsType
Elt Type
}
// A Struct represents a struct type struct{...}.
// Anonymous fields are represented by objects with empty names.
type Struct struct {
implementsType
Fields ObjList // struct fields; or nil
Tags []string // corresponding tags; or nil
// TODO(gri) This type needs some rethinking:
// - at the moment anonymous fields are marked with "" object names,
// and their names have to be reconstructed
// - there is no scope for fast lookup (but the parser creates one)
}
// A Pointer represents a pointer type *Base.
type Pointer struct {
implementsType
Base Type
}
// A Func represents a function type func(...) (...).
// Unnamed parameters are represented by objects with empty names.
type Func struct {
implementsType
Recv *ast.Object // nil if not a method
Params ObjList // (incoming) parameters from left to right; or nil
Results ObjList // (outgoing) results from left to right; or nil
IsVariadic bool // true if the last parameter's type is of the form ...T
}
// An Interface represents an interface type interface{...}.
type Interface struct {
implementsType
Methods ObjList // interface methods sorted by name; or nil
}
// A Map represents a map type map[Key]Elt.
type Map struct {
implementsType
Key, Elt Type
}
// A Chan represents a channel type chan Elt, <-chan Elt, or chan<- Elt.
type Chan struct {
implementsType
Dir ast.ChanDir
Elt Type
}
// A Name represents a named type as declared in a type declaration.
type Name struct {
implementsType
Underlying Type // nil if not fully declared
Obj *ast.Object // corresponding declared object
// TODO(gri) need to remember fields and methods.
}
func writeParams(buf *bytes.Buffer, params ObjList, isVariadic bool) {
buf.WriteByte('(')
for i, par := range params {
if i > 0 {
buf.WriteString(", ")
}
if par.Name != "" {
buf.WriteString(par.Name)
buf.WriteByte(' ')
}
if isVariadic && i == len(params)-1 {
buf.WriteString("...")
}
writeType(buf, par.Type.(Type))
}
buf.WriteByte(')')
}
func writeSignature(buf *bytes.Buffer, t *Func) {
writeParams(buf, t.Params, t.IsVariadic)
if len(t.Results) == 0 {
// no result
return
}
buf.WriteByte(' ')
if len(t.Results) == 1 && t.Results[0].Name == "" {
// single unnamed result
writeType(buf, t.Results[0].Type.(Type))
return
}
// multiple or named result(s)
writeParams(buf, t.Results, false)
}
func writeType(buf *bytes.Buffer, typ Type) {
switch t := typ.(type) {
case *Bad:
fmt.Fprintf(buf, "badType(%s)", t.Msg)
case *Basic:
buf.WriteString("basicType") // TODO(gri) print actual type information
case *Array:
fmt.Fprintf(buf, "[%d]", t.Len)
writeType(buf, t.Elt)
case *Slice:
buf.WriteString("[]")
writeType(buf, t.Elt)
case *Struct:
buf.WriteString("struct{")
for i, fld := range t.Fields {
if i > 0 {
buf.WriteString("; ")
}
if fld.Name != "" {
buf.WriteString(fld.Name)
buf.WriteByte(' ')
}
writeType(buf, fld.Type.(Type))
if i < len(t.Tags) && t.Tags[i] != "" {
fmt.Fprintf(buf, " %q", t.Tags[i])
}
}
buf.WriteByte('}')
case *Pointer:
buf.WriteByte('*')
writeType(buf, t.Base)
case *Func:
buf.WriteString("func")
writeSignature(buf, t)
case *Interface:
buf.WriteString("interface{")
for i, m := range t.Methods {
if i > 0 {
buf.WriteString("; ")
}
buf.WriteString(m.Name)
writeSignature(buf, m.Type.(*Func))
}
buf.WriteByte('}')
case *Map:
buf.WriteString("map[")
writeType(buf, t.Key)
buf.WriteByte(']')
writeType(buf, t.Elt)
case *Chan:
var s string
switch t.Dir {
case ast.SEND:
s = "chan<- "
case ast.RECV:
s = "<-chan "
default:
s = "chan "
}
buf.WriteString(s)
writeType(buf, t.Elt)
case *Name:
buf.WriteString(t.Obj.Name)
}
}
// TypeString returns a string representation for typ.
func TypeString(typ Type) string {
var buf bytes.Buffer
writeType(&buf, typ)
return buf.String()
}
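// Illustrative sketch (hypothetical helper): TypeString applied to a manually
// assembled type. With the predeclared objects from the universe scope this
// should yield "map[string][]int".
func exampleTypeString() string {
	key := Universe.Lookup("string").Type.(Type)
	elt := &Slice{Elt: Universe.Lookup("int").Type.(Type)}
	return TypeString(&Map{Key: key, Elt: elt})
}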
// If typ is a pointer type, Deref returns the pointer's base type;
// otherwise it returns typ.
func Deref(typ Type) Type {
if typ, ok := typ.(*Pointer); ok {
return typ.Base
}
return typ
}
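// Illustrative sketch (hypothetical helper): Deref unwraps exactly one level
// of pointer indirection and leaves every other type unchanged; Int is the
// predeclared *Name from the universe scope.
func exampleDeref() {
	p := &Pointer{Base: Int} // models *int
	_ = Deref(p)             // yields Int
	_ = Deref(Int)           // not a pointer: returned unchanged
}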
// Underlying returns the underlying type of a type.
func Underlying(typ Type) Type {
if typ, ok := typ.(*Name); ok {
utyp := typ.Underlying
if _, ok := utyp.(*Basic); !ok {
return utyp
}
// the underlying type of a type name referring
// to an (untyped) basic type is the basic type
// name
}
return typ
}
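// Illustrative sketch (hypothetical helper): Underlying strips the name from a
// declared type such as "type T []int" but, per the comment above, returns the
// *Name itself for the predeclared basic types.
func exampleUnderlying(obj *ast.Object) {
	named := &Name{Underlying: &Slice{Elt: Int}, Obj: obj} // obj assumed to declare T
	_ = Underlying(named)                                  // yields the *Slice
	_ = Underlying(Int)                                    // Int's underlying is *Basic: Int itself is returned
}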
// An ObjList represents an ordered (in some fashion) list of objects.
type ObjList []*ast.Object
// ObjList implements sort.Interface.
func (list ObjList) Len() int { return len(list) }
func (list ObjList) Less(i, j int) bool { return list[i].Name < list[j].Name }
func (list ObjList) Swap(i, j int) { list[i], list[j] = list[j], list[i] }
// Sort sorts an object list by object name.
func (list ObjList) Sort() { sort.Sort(list) }
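// Illustrative sketch (hypothetical helper): interface methods are kept sorted
// by name so that Identical can compare two method lists positionally via
// identicalTypes below.
func exampleSort() ObjList {
	list := ObjList{ast.NewObj(ast.Fun, "m"), ast.NewObj(ast.Fun, "String")}
	list.Sort() // byte-wise name order: "String" before "m"
	return list
}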
// identicalTypes returns true if both lists a and b have the
// same length and corresponding objects have identical types.
func identicalTypes(a, b ObjList) bool {
if len(a) == len(b) {
for i, x := range a {
y := b[i]
if !Identical(x.Type.(Type), y.Type.(Type)) {
return false
}
}
return true
}
return false
}
// Identical returns true if two types are identical.
func Identical(x, y Type) bool {
if x == y {
return true
}
switch x := x.(type) {
case *Bad:
// A Bad type is always identical to any other type
// (to avoid spurious follow-up errors).
return true
case *Basic:
if y, ok := y.(*Basic); ok {
panic("unimplemented")
_ = y
}
case *Array:
// Two array types are identical if they have identical element types
// and the same array length.
if y, ok := y.(*Array); ok {
return x.Len == y.Len && Identical(x.Elt, y.Elt)
}
case *Slice:
// Two slice types are identical if they have identical element types.
if y, ok := y.(*Slice); ok {
return Identical(x.Elt, y.Elt)
}
case *Struct:
// Two struct types are identical if they have the same sequence of fields,
// and if corresponding fields have the same names, and identical types,
// and identical tags. Two anonymous fields are considered to have the same
// name. Lower-case field names from different packages are always different.
if y, ok := y.(*Struct); ok {
// TODO(gri) handle structs from different packages
if identicalTypes(x.Fields, y.Fields) {
for i, f := range x.Fields {
g := y.Fields[i]
if f.Name != g.Name || x.Tags[i] != y.Tags[i] {
return false
}
}
return true
}
}
case *Pointer:
// Two pointer types are identical if they have identical base types.
if y, ok := y.(*Pointer); ok {
return Identical(x.Base, y.Base)
}
case *Func:
// Two function types are identical if they have the same number of parameters
// and result values, corresponding parameter and result types are identical,
// and either both functions are variadic or neither is. Parameter and result
// names are not required to match.
if y, ok := y.(*Func); ok {
return identicalTypes(x.Params, y.Params) &&
identicalTypes(x.Results, y.Results) &&
x.IsVariadic == y.IsVariadic
}
case *Interface:
// Two interface types are identical if they have the same set of methods with
// the same names and identical function types. Lower-case method names from
// different packages are always different. The order of the methods is irrelevant.
if y, ok := y.(*Interface); ok {
return identicalTypes(x.Methods, y.Methods) // methods are sorted
}
case *Map:
// Two map types are identical if they have identical key and value types.
if y, ok := y.(*Map); ok {
return Identical(x.Key, y.Key) && Identical(x.Elt, y.Elt)
}
case *Chan:
// Two channel types are identical if they have identical value types
// and the same direction.
if y, ok := y.(*Chan); ok {
return x.Dir == y.Dir && Identical(x.Elt, y.Elt)
}
case *Name:
// Two named types are identical if their type names originate
// in the same type declaration.
if y, ok := y.(*Name); ok {
return x.Obj == y.Obj ||
// permit bad objects to be equal to avoid
// follow up errors
x.Obj != nil && x.Obj.Kind == ast.Bad ||
y.Obj != nil && y.Obj.Kind == ast.Bad
}
}
return false
}
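// Illustrative sketch (hypothetical helper): Identical applied to two
// structurally equal but separately constructed map types; String and Int are
// the predeclared *Name values from the universe scope.
func exampleIdentical() bool {
	a := &Map{Key: String, Elt: &Slice{Elt: Int}}
	b := &Map{Key: String, Elt: &Slice{Elt: Int}}
	return Identical(a, b) // true: identical key and element types
}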

View File

@ -1,128 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains tests verifying the types associated with an AST after
// type checking.
package types
import (
"go/ast"
"go/parser"
"testing"
)
func makePkg(t *testing.T, src string) (*ast.Package, error) {
const filename = "<src>"
file, err := parser.ParseFile(fset, filename, src, parser.DeclarationErrors)
if err != nil {
return nil, err
}
files := map[string]*ast.File{filename: file}
pkg, err := ast.NewPackage(fset, files, GcImport, Universe)
if err != nil {
return nil, err
}
if _, err := Check(fset, pkg); err != nil {
return nil, err
}
return pkg, nil
}
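// Illustrative sketch (hypothetical helper): the lookup pattern that TestTypes
// below relies on: type-check a small source string with makePkg and read the
// declared type back out of the package scope.
func exampleLookupT(t *testing.T) Type {
	pkg, err := makePkg(t, "package p; type T []string")
	if err != nil {
		t.Fatal(err)
	}
	return pkg.Scope.Lookup("T").Type.(Type)
}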
type testEntry struct {
src, str string
}
// dup returns a testEntry where both src and str are the same.
func dup(s string) testEntry {
return testEntry{s, s}
}
var testTypes = []testEntry{
// basic types
dup("int"),
dup("float32"),
dup("string"),
// arrays
{"[10]int", "[0]int"}, // TODO(gri) fix array length, add more array tests
// slices
dup("[]int"),
dup("[][]int"),
// structs
dup("struct{}"),
dup("struct{x int}"),
{`struct {
x, y int
z float32 "foo"
}`, `struct{x int; y int; z float32 "foo"}`},
{`struct {
string
elems []T
}`, `struct{string; elems []T}`},
// pointers
dup("*int"),
dup("***struct{}"),
dup("*struct{a int; b float32}"),
// functions
dup("func()"),
dup("func(x int)"),
{"func(x, y int)", "func(x int, y int)"},
{"func(x, y int, z string)", "func(x int, y int, z string)"},
dup("func(int)"),
dup("func(int, string, byte)"),
dup("func() int"),
{"func() (string)", "func() string"},
dup("func() (u int)"),
{"func() (u, v int, w string)", "func() (u int, v int, w string)"},
dup("func(int) string"),
dup("func(x int) string"),
dup("func(x int) (u string)"),
{"func(x, y int) (u string)", "func(x int, y int) (u string)"},
dup("func(...int) string"),
dup("func(x ...int) string"),
dup("func(x ...int) (u string)"),
{"func(x, y ...int) (u string)", "func(x int, y ...int) (u string)"},
// interfaces
dup("interface{}"),
dup("interface{m()}"),
{`interface{
m(int) float32
String() string
}`, `interface{String() string; m(int) float32}`}, // methods are sorted
// TODO(gri) add test for interface w/ anonymous field
// maps
dup("map[string]int"),
{"map[struct{x, y int}][]byte", "map[struct{x int; y int}][]byte"},
// channels
dup("chan int"),
dup("chan<- func()"),
dup("<-chan []func() int"),
}
func TestTypes(t *testing.T) {
for _, test := range testTypes {
src := "package p; type T " + test.src
pkg, err := makePkg(t, src)
if err != nil {
t.Errorf("%s: %s", src, err)
continue
}
typ := Underlying(pkg.Scope.Lookup("T").Type.(Type))
str := TypeString(typ)
if str != test.str {
t.Errorf("%s: got %s, want %s", test.src, str, test.str)
}
}
}

View File

@ -1,107 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// FILE UNDER CONSTRUCTION. ANY AND ALL PARTS MAY CHANGE.
// This file implements the universe and unsafe package scopes.
package types
import "go/ast"
var (
scope *ast.Scope // current scope to use for initialization
Universe *ast.Scope
Unsafe *ast.Object // package unsafe
)
func define(kind ast.ObjKind, name string) *ast.Object {
obj := ast.NewObj(kind, name)
if scope.Insert(obj) != nil {
panic("types internal error: double declaration")
}
obj.Decl = scope
return obj
}
func defType(name string) *Name {
obj := define(ast.Typ, name)
typ := &Name{Underlying: &Basic{}, Obj: obj}
obj.Type = typ
return typ
}
func defConst(name string) {
obj := define(ast.Con, name)
_ = obj // TODO(gri) fill in other properties
}
func defFun(name string) {
obj := define(ast.Fun, name)
_ = obj // TODO(gri) fill in other properties
}
var (
Bool,
Int,
Float64,
Complex128,
String *Name
)
func init() {
scope = ast.NewScope(nil)
Universe = scope
Bool = defType("bool")
defType("byte") // TODO(gri) should be an alias for uint8
defType("rune") // TODO(gri) should be an alias for int
defType("complex64")
Complex128 = defType("complex128")
defType("error")
defType("float32")
Float64 = defType("float64")
defType("int8")
defType("int16")
defType("int32")
defType("int64")
String = defType("string")
defType("uint8")
defType("uint16")
defType("uint32")
defType("uint64")
Int = defType("int")
defType("uint")
defType("uintptr")
defConst("true")
defConst("false")
defConst("iota")
defConst("nil")
defFun("append")
defFun("cap")
defFun("close")
defFun("complex")
defFun("copy")
defFun("delete")
defFun("imag")
defFun("len")
defFun("make")
defFun("new")
defFun("panic")
defFun("print")
defFun("println")
defFun("real")
defFun("recover")
scope = ast.NewScope(nil)
Unsafe = ast.NewObj(ast.Pkg, "unsafe")
Unsafe.Data = scope
defType("Pointer")
defFun("Alignof")
defFun("Offsetof")
defFun("Sizeof")
}
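// Illustrative sketch (hypothetical helper): resolving a predeclared
// identifier through the Universe scope populated above.
func exampleLookupFloat64() Type {
	obj := Universe.Lookup("float64") // *ast.Object with Kind == ast.Typ
	return obj.Type.(Type)            // the same *Name stored in Float64
}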