cmd/internal/obj/wasm,cmd/link/internal/wasm: add fast path for writeUleb128
While building a simple hello world binary, there are a total of 858277 calls
to writeUleb128 during the assembler phase, of which 836625 (97%) encode
values that fit in 7 bits and so need only a single byte.

Using a simple micro-benchmark like this:

func BenchmarkUleb(b *testing.B) {
	var buf bytes.Buffer
	for i := 0; i < b.N; i++ {
		writeUleb128(&buf, 42)
		buf.Reset()
	}
}

we get the following results with the fast path enabled:

name    old time/op  new time/op  delta
Uleb-4  8.45ns ± 2%  7.51ns ± 2%  -11.16%  (p=0.000 n=10+10)

Applying the per-call saving to the number of calls gives roughly a 6%
reduction in the total time spent in writeUleb128. We also apply the change
to the corresponding function in the linker to keep the two copies consistent.

Change-Id: I9fe8c41df1209f5f3aa7d8bd0181f1b0e536ceb5
Reviewed-on: https://go-review.googlesource.com/c/go/+/201177
Run-TryBot: Agniva De Sarker <agniva.quicksilver@gmail.com>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
commit 03bb3e9ad1 (parent 86f40a2e03)
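To reproduce the measurement outside the Go tree, a self-contained version of the micro-benchmark might look like the sketch below. The package name and the local copy of writeUleb128 are illustrative, since the in-tree functions are unexported; the copy mirrors the patched encoder, fast path included.

package uleb

import (
	"bytes"
	"io"
	"testing"
)

// writeUleb128 is a standalone copy of the encoder under test: it emits v
// 7 bits at a time, setting the high bit of each byte when more bytes follow.
// The v < 128 branch is the single-byte fast path added by this change.
func writeUleb128(w io.ByteWriter, v uint64) {
	if v < 128 {
		w.WriteByte(uint8(v))
		return
	}
	more := true
	for more {
		c := uint8(v & 0x7f)
		v >>= 7
		more = v != 0
		if more {
			c |= 0x80
		}
		w.WriteByte(c)
	}
}

// BenchmarkUleb exercises the single-byte case from the commit message.
func BenchmarkUleb(b *testing.B) {
	var buf bytes.Buffer
	for i := 0; i < b.N; i++ {
		writeUleb128(&buf, 42)
		buf.Reset()
	}
}

Running this with go test -bench=Uleb -count=10 before and after the change and comparing the runs with benchstat is the usual way to produce an old/new table like the one quoted above.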
cmd/internal/obj/wasm:

@@ -1154,6 +1154,10 @@ func align(as obj.As) uint64 {
 	}
 }
 
 func writeUleb128(w io.ByteWriter, v uint64) {
+	if v < 128 {
+		w.WriteByte(uint8(v))
+		return
+	}
 	more := true
 	for more {
 		c := uint8(v & 0x7f)
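For a sense of why 97% of the calls hit the fast path, the standard library's unsigned varints use the same base-128, little-endian, continuation-bit format as ULEB128, so they can be used to inspect the byte sequences involved. This snippet is purely illustrative and is not part of the CL.

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	buf := make([]byte, binary.MaxVarintLen64)
	// Values below 128 encode to a single byte, which is exactly the case
	// the new fast path handles without entering the loop.
	for _, v := range []uint64{42, 127, 128, 858277} {
		n := binary.PutUvarint(buf, v)
		fmt.Printf("%6d -> % x (%d byte(s))\n", v, buf[:n], n)
	}
}

Both 42 and 127 encode to one byte, while 128 spills into a second byte (80 01), matching the v < 128 cutoff used by the fast path.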
cmd/link/internal/wasm:

@@ -542,6 +542,10 @@ func writeName(w nameWriter, name string) {
 }
 
 func writeUleb128(w io.ByteWriter, v uint64) {
+	if v < 128 {
+		w.WriteByte(uint8(v))
+		return
+	}
 	more := true
 	for more {
 		c := uint8(v & 0x7f)
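Since the assembler and linker copies are identical, one equivalence check covers both. The test sketch below reuses the writeUleb128 copy defined in the benchmark sketch above (same illustrative package) and compares its output against encoding/binary's unsigned varints, which share the LEB128 format, on both sides of the 128 cutoff. It is a sanity check, not part of the CL.

package uleb

import (
	"bytes"
	"encoding/binary"
	"testing"
)

// TestFastPathMatchesUvarint checks that writeUleb128 (fast path included)
// produces the same bytes as the standard library's unsigned varint encoding
// for values around the 7-bit boundary and beyond.
func TestFastPathMatchesUvarint(t *testing.T) {
	ref := make([]byte, binary.MaxVarintLen64)
	for _, v := range []uint64{0, 1, 42, 127, 128, 129, 16383, 16384, 858277} {
		var buf bytes.Buffer
		writeUleb128(&buf, v)
		n := binary.PutUvarint(ref, v)
		if !bytes.Equal(buf.Bytes(), ref[:n]) {
			t.Errorf("value %d: got % x, want % x", v, buf.Bytes(), ref[:n])
		}
	}
}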