
cmd/compile: use bufio.Reader directly in lexer

Removes an intermediate layer of functions that was clogging up a
corner of the compiler's profile graph.

I can't measure a performance improvement running a large build
like jujud, but the profile reports less total time spent in
gc.(*lexer).getr.

Change-Id: I3000585cfcb0f9729d3a3859e9023690a6528591
Reviewed-on: https://go-review.googlesource.com/20565
Reviewed-by: Robert Griesemer <gri@golang.org>
Run-TryBot: David Crawshaw <crawshaw@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Author: David Crawshaw
Date:   2016-03-11 13:39:20 -05:00
Parent: cc158403d6
Commit: 5aa5db7593

6 changed files with 79 additions and 109 deletions
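For readers skimming the diff below: the change drops the obj.Biobuf wrapper calls (obj.Bgetc, obj.Bgetrune, obj.Brdstr) and the lexer's own peekr1/peekr2 fields, and reads straight from a bufio.Reader, whose ReadRune, UnreadRune, and Peek already provide the needed one-rune lookahead. The following standalone sketch illustrates that pattern only; the scanner type, its error handling, and the sample input are hypothetical and are not the compiler's actual code.

// Minimal sketch of the bufio.Reader pattern adopted by this CL.
// The scanner type here is hypothetical; only the bufio calls mirror the diff.
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

type scanner struct {
	in *bufio.Reader // replaces a wrapper type plus separate peek fields
}

// getr returns the next rune, or -1 at end of input.
func (s *scanner) getr() rune {
	r, _, err := s.in.ReadRune()
	if err != nil {
		if err != io.EOF {
			panic(err) // placeholder for the compiler's own error reporting
		}
		return -1
	}
	return r
}

// ungetr pushes the last rune back; bufio.Reader gives one rune of pushback.
func (s *scanner) ungetr() { s.in.UnreadRune() }

func main() {
	s := &scanner{in: bufio.NewReader(strings.NewReader("a..b"))}
	r := s.getr()
	s.ungetr()
	fmt.Println(string(r) == string(s.getr())) // true: reread the same rune

	// Peek inspects upcoming bytes without consuming them, the way the new
	// '...' (LDDD) handling does instead of read-then-unread.
	if p, err := s.in.Peek(1); err == nil {
		fmt.Printf("next byte: %q\n", p[0])
	}
}

Compared with the removed peekr1/peekr2 bookkeeping, the lookahead now lives entirely inside bufio.Reader, which is the intermediate layer removal the commit message refers to.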

src/cmd/compile/internal/gc/bimport.go

@@ -8,8 +8,8 @@
 package gc
 
 import (
+	"bufio"
 	"cmd/compile/internal/big"
-	"cmd/internal/obj"
 	"encoding/binary"
 	"fmt"
 )
@@ -20,7 +20,7 @@ import (
 // changes to bimport.go and bexport.go.
 
 // Import populates importpkg from the serialized package data.
-func Import(in *obj.Biobuf) {
+func Import(in *bufio.Reader) {
 	p := importer{in: in}
 	p.buf = p.bufarray[:]
@@ -137,7 +137,7 @@ func idealType(typ *Type) *Type {
 }
 
 type importer struct {
-	in       *obj.Biobuf
+	in       *bufio.Reader
 	buf      []byte   // for reading strings
 	bufarray [64]byte // initial underlying array for buf, large enough to avoid allocation when compiling std lib
 	pkgList  []*Pkg
@@ -855,16 +855,16 @@ func (p *importer) ReadByte() (byte, error) {
 // byte is the bottleneck interface for reading from p.in.
 // It unescapes '|' 'S' to '$' and '|' '|' to '|'.
 func (p *importer) byte() byte {
-	c := obj.Bgetc(p.in)
+	c, err := p.in.ReadByte()
 	p.read++
-	if c < 0 {
-		Fatalf("importer: read error")
+	if err != nil {
+		Fatalf("importer: read error: %v", err)
 	}
 	if c == '|' {
-		c = obj.Bgetc(p.in)
+		c, err = p.in.ReadByte()
 		p.read++
-		if c < 0 {
-			Fatalf("importer: read error")
+		if err != nil {
+			Fatalf("importer: read error: %v", err)
 		}
 		switch c {
 		case 'S':
@@ -875,5 +875,5 @@ func (p *importer) byte() byte {
 		Fatalf("importer: unexpected escape sequence in export data")
 	}
 
-	return byte(c)
+	return c
 }

src/cmd/compile/internal/gc/bexport.go

@@ -5,6 +5,7 @@
 package gc
 
 import (
+	"bufio"
 	"bytes"
 	"cmd/internal/obj"
 	"fmt"
@@ -387,7 +388,7 @@ func dumpexport() {
 		pkgMap = make(map[string]*Pkg)
 		pkgs = nil
 		importpkg = mkpkg("")
-		Import(obj.Binitr(&copy)) // must not die
+		Import(bufio.NewReader(&copy)) // must not die
 		importpkg = nil
 		pkgs = savedPkgs
 		pkgMap = savedPkgMap

src/cmd/compile/internal/gc/lex.go

@@ -7,6 +7,7 @@
 package gc
 
 import (
+	"bufio"
 	"cmd/compile/internal/ssa"
 	"cmd/internal/obj"
 	"flag"
@@ -335,15 +336,16 @@ func Main() {
 		linehistpush(infile)
 
-		bin, err := obj.Bopenr(infile)
+		f, err := os.Open(infile)
 		if err != nil {
 			fmt.Printf("open %s: %v\n", infile, err)
 			errorexit()
 		}
+		bin := bufio.NewReader(f)
 
 		// Skip initial BOM if present.
-		if obj.Bgetrune(bin) != BOM {
-			obj.Bungetrune(bin)
+		if r, _, _ := bin.ReadRune(); r != BOM {
+			bin.UnreadRune()
 		}
 
 		block = 1
@@ -362,7 +364,7 @@ func Main() {
 		lexlineno++
 
 		linehistpop()
-		obj.Bterm(bin)
+		f.Close()
 	}
 
 	testdclstack()
@@ -541,7 +543,7 @@ func saveerrors() {
 	nerrors = 0
 }
 
-func arsize(b *obj.Biobuf, name string) int {
+func arsize(b *bufio.Reader, name string) int {
 	var buf [ArhdrSize]byte
 	if _, err := io.ReadFull(b, buf[:]); err != nil {
 		return -1
@@ -555,14 +557,11 @@ func arsize(b *obj.Biobuf, name string) int {
 	return i
 }
 
-func skiptopkgdef(b *obj.Biobuf) bool {
+func skiptopkgdef(b *bufio.Reader) bool {
 	// archive header
-	p := obj.Brdline(b, '\n')
-	if p == "" {
-		return false
-	}
-	if obj.Blinelen(b) != 8 {
-		return false
+	p, err := b.ReadString('\n')
+	if err != nil {
+		log.Fatalf("reading input: %v", err)
 	}
 	if p != "!<arch>\n" {
 		return false
@@ -672,10 +671,10 @@ func loadsys() {
 	incannedimport = 1
 
 	importpkg = Runtimepkg
-	parse_import(obj.Binitr(strings.NewReader(runtimeimport)), nil)
+	parse_import(bufio.NewReader(strings.NewReader(runtimeimport)), nil)
 
 	importpkg = unsafepkg
-	parse_import(obj.Binitr(strings.NewReader(unsafeimport)), nil)
+	parse_import(bufio.NewReader(strings.NewReader(unsafeimport)), nil)
 
 	importpkg = nil
 	incannedimport = 0
@@ -761,12 +760,13 @@ func importfile(f *Val, indent []byte) {
 	importpkg.Imported = true
 
-	imp, err := obj.Bopenr(file)
+	impf, err := os.Open(file)
 	if err != nil {
 		Yyerror("can't open import: %q: %v", path_, err)
 		errorexit()
 	}
-	defer obj.Bterm(imp)
+	defer impf.Close()
+	imp := bufio.NewReader(impf)
 
 	if strings.HasSuffix(file, ".a") {
 		if !skiptopkgdef(imp) {
@@ -776,7 +776,13 @@ func importfile(f *Val, indent []byte) {
 	}
 
 	// check object header
-	p := obj.Brdstr(imp, '\n', 1)
+	p, err := imp.ReadString('\n')
+	if err != nil {
+		log.Fatalf("reading input: %v", err)
+	}
+	if len(p) > 0 {
+		p = p[:len(p)-1]
+	}
 
 	if p != "empty archive" {
 		if !strings.HasPrefix(p, "go object ") {
@@ -800,23 +806,23 @@ func importfile(f *Val, indent []byte) {
 	// $$B\n (new format): import directly, then feed the lexer a dummy statement
 
 	// look for $$
-	var c int
+	var c byte
 	for {
-		c = obj.Bgetc(imp)
-		if c < 0 {
+		c, err = imp.ReadByte()
+		if err != nil {
 			break
 		}
 		if c == '$' {
-			c = obj.Bgetc(imp)
-			if c == '$' || c < 0 {
+			c, err = imp.ReadByte()
+			if c == '$' || err != nil {
 				break
 			}
 		}
 	}
 
 	// get character after $$
-	if c >= 0 {
-		c = obj.Bgetc(imp)
+	if err == nil {
+		c, _ = imp.ReadByte()
 	}
 
 	switch c {
@@ -826,7 +832,7 @@ func importfile(f *Val, indent []byte) {
 	case 'B':
 		// new export format
-		obj.Bgetc(imp) // skip \n after $$B
+		imp.ReadByte() // skip \n after $$B
 		Import(imp)
 
 	default:
@@ -879,9 +885,7 @@ const (
 type lexer struct {
 	// source
-	bin    *obj.Biobuf
-	peekr1 rune
-	peekr2 rune // second peekc for ...
+	bin *bufio.Reader
 
 	nlsemi bool // if set, '\n' and EOF translate to ';'
@@ -1025,8 +1029,9 @@ l0:
 		}
 
 		if c1 == '.' {
-			c1 = l.getr()
-			if c1 == '.' {
+			p, err := l.bin.Peek(1)
+			if err == nil && p[0] == '.' {
+				l.getr()
 				c = LDDD
 				goto lx
 			}
@@ -1886,49 +1891,26 @@ func pragcgo(text string) {
 }
 
 func (l *lexer) getr() rune {
-	// unread rune != 0 available
-	if r := l.peekr1; r != 0 {
-		l.peekr1 = l.peekr2
-		l.peekr2 = 0
-		if r == '\n' && importpkg == nil {
-			lexlineno++
-		}
-		return r
-	}
-
 redo:
-	// common case: 7bit ASCII
-	c := obj.Bgetc(l.bin)
-	if c < utf8.RuneSelf {
-		if c == 0 {
-			yyerrorl(lexlineno, "illegal NUL byte")
-			return 0
-		}
-		if c == '\n' && importpkg == nil {
-			lexlineno++
+	r, w, err := l.bin.ReadRune()
+	if err != nil {
+		if err != io.EOF {
+			Fatalf("io error: %v", err)
 		}
-		return rune(c)
-	}
-	// c >= utf8.RuneSelf
-
-	// uncommon case: non-ASCII
-	var buf [utf8.UTFMax]byte
-	buf[0] = byte(c)
-	buf[1] = byte(obj.Bgetc(l.bin))
-	i := 2
-	for ; i < len(buf) && !utf8.FullRune(buf[:i]); i++ {
-		buf[i] = byte(obj.Bgetc(l.bin))
-	}
-
-	r, w := utf8.DecodeRune(buf[:i])
-	if r == utf8.RuneError && w == 1 {
-		// The string conversion here makes a copy for passing
-		// to fmt.Printf, so that buf itself does not escape and
-		// can be allocated on the stack.
-		yyerrorl(lexlineno, "illegal UTF-8 sequence % x", string(buf[:i]))
-	}
-
-	if r == BOM {
+		return -1
+	}
+
+	switch r {
+	case 0:
+		yyerrorl(lexlineno, "illegal NUL byte")
+	case '\n':
+		if importpkg == nil {
+			lexlineno++
+		}
+	case utf8.RuneError:
+		if w == 1 {
+			yyerrorl(lexlineno, "illegal UTF-8 sequence")
+		}
+	case BOM:
 		yyerrorl(lexlineno, "Unicode (UTF-8) BOM in middle of file")
 		goto redo
 	}
@@ -1937,8 +1919,7 @@ redo:
 }
 
 func (l *lexer) ungetr(r rune) {
-	l.peekr2 = l.peekr1
-	l.peekr1 = r
+	l.bin.UnreadRune()
 	if r == '\n' && importpkg == nil {
 		lexlineno--
 	}

src/cmd/compile/internal/gc/parser.go

@@ -13,7 +13,7 @@ package gc
 // to handle optional commas and semicolons before a closing ) or } .
 
 import (
-	"cmd/internal/obj"
+	"bufio"
 	"fmt"
 	"strconv"
 	"strings"
@@ -22,12 +22,12 @@ import (
 const trace = false // if set, parse tracing can be enabled with -x
 
 // parse_import parses the export data of a package that is imported.
-func parse_import(bin *obj.Biobuf, indent []byte) {
+func parse_import(bin *bufio.Reader, indent []byte) {
 	newparser(bin, indent).import_package()
 }
 
 // parse_file parses a single Go source file.
-func parse_file(bin *obj.Biobuf) {
+func parse_file(bin *bufio.Reader) {
 	newparser(bin, nil).file()
 }
@@ -40,7 +40,7 @@ type parser struct {
 
 // newparser returns a new parser ready to parse from src.
 // indent is the initial indentation for tracing output.
-func newparser(src *obj.Biobuf, indent []byte) *parser {
+func newparser(src *bufio.Reader, indent []byte) *parser {
 	var p parser
 	p.bin = src
 	p.indent = indent

src/cmd/internal/obj/util.go

@@ -129,18 +129,6 @@ func Bgetc(b *Biobuf) int {
 	return int(c)
 }
 
-func Bgetrune(b *Biobuf) int {
-	r, _, err := b.r.ReadRune()
-	if err != nil {
-		return -1
-	}
-	return int(r)
-}
-
-func Bungetrune(b *Biobuf) {
-	b.r.UnreadRune()
-}
-
 func (b *Biobuf) Read(p []byte) (int, error) {
 	return b.r.Read(p)
 }
@@ -158,17 +146,6 @@ func Brdline(b *Biobuf, delim int) string {
 	return string(s)
 }
 
-func Brdstr(b *Biobuf, delim int, cut int) string {
-	s, err := b.r.ReadString(byte(delim))
-	if err != nil {
-		log.Fatalf("reading input: %v", err)
-	}
-	if len(s) > 0 && cut > 0 {
-		s = s[:len(s)-1]
-	}
-	return s
-}
-
 func Blinelen(b *Biobuf) int {
 	return b.linelen
 }

test/syntax/ddd.go (new file, 11 lines)

@@ -0,0 +1,11 @@
+// errorcheck
+
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func f() {
+	g(f..3) // ERROR "unexpected literal \.3, expecting name or \("
+}