// go/test/codegen/slices.go
// (mirror of https://github.com/golang/go)
// asmcheck
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package codegen
import "unsafe"
// This file contains code generation tests related to the handling of
// slice types.
// ------------------ //
// Clear //
// ------------------ //
// Issue #5373 optimize memset idiom
// SliceClear checks that zeroing an int slice with a range loop is
// recognized as the memset idiom and compiled to a single
// memclrNoHeapPointers call rather than an element-by-element loop.
func SliceClear(s []int) []int {
	// amd64:`.*memclrNoHeapPointers`
	// ppc64le:`.*memclrNoHeapPointers`
	// ppc64:`.*memclrNoHeapPointers`
	for i := range s {
		s[i] = 0
	}
	return s
}
// SliceClearPointers checks that nil-ing out a pointer slice uses the
// pointer-aware clear routine (memclrHasPointers), which cooperates
// with the GC write barrier, instead of memclrNoHeapPointers.
func SliceClearPointers(s []*int) []*int {
	// amd64:`.*memclrHasPointers`
	// ppc64le:`.*memclrHasPointers`
	// ppc64:`.*memclrHasPointers`
	for i := range s {
		s[i] = nil
	}
	return s
}
// ------------------ //
// Extension //
// ------------------ //
// Issue #21266 - avoid makeslice in append(x, make([]T, y)...)
// SliceExtensionConst checks that append(s, make([]int, C)...) with a
// constant length C is compiled without a runtime.makeslice call (and
// without a panicmakeslicelen check): the extension is done by growing
// s and clearing the new tail with memclrNoHeapPointers.
func SliceExtensionConst(s []int) []int {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:-`.*runtime\.panicmakeslicelen`
	// ppc64le:`.*runtime\.memclrNoHeapPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64le:-`.*runtime\.panicmakeslicelen`
	// ppc64:`.*runtime\.memclrNoHeapPointers`
	// ppc64:-`.*runtime\.makeslice`
	// ppc64:-`.*runtime\.panicmakeslicelen`
	return append(s, make([]int, 1<<2)...)
}
// SliceExtensionConstInt64 is the same check as SliceExtensionConst,
// but with the constant length converted through int64: the conversion
// must not defeat the makeslice elision.
func SliceExtensionConstInt64(s []int) []int {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:-`.*runtime\.panicmakeslicelen`
	// ppc64le:`.*runtime\.memclrNoHeapPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64le:-`.*runtime\.panicmakeslicelen`
	// ppc64:`.*runtime\.memclrNoHeapPointers`
	// ppc64:-`.*runtime\.makeslice`
	// ppc64:-`.*runtime\.panicmakeslicelen`
	return append(s, make([]int, int64(1<<2))...)
}
// SliceExtensionConstUint64 is the same check as SliceExtensionConst,
// but with the constant length converted through uint64.
func SliceExtensionConstUint64(s []int) []int {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:-`.*runtime\.panicmakeslicelen`
	// ppc64le:`.*runtime\.memclrNoHeapPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64le:-`.*runtime\.panicmakeslicelen`
	// ppc64:`.*runtime\.memclrNoHeapPointers`
	// ppc64:-`.*runtime\.makeslice`
	// ppc64:-`.*runtime\.panicmakeslicelen`
	return append(s, make([]int, uint64(1<<2))...)
}
// SliceExtensionConstUint is the same check as SliceExtensionConst,
// but with the constant length converted through uint.
func SliceExtensionConstUint(s []int) []int {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:-`.*runtime\.panicmakeslicelen`
	// ppc64le:`.*runtime\.memclrNoHeapPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64le:-`.*runtime\.panicmakeslicelen`
	// ppc64:`.*runtime\.memclrNoHeapPointers`
	// ppc64:-`.*runtime\.makeslice`
	// ppc64:-`.*runtime\.panicmakeslicelen`
	return append(s, make([]int, uint(1<<2))...)
}
// SliceExtensionPointer checks the append+make extension for a
// pointer element type: no makeslice call, and the new tail is cleared
// with the pointer-aware memclrHasPointers.
func SliceExtensionPointer(s []*int, l int) []*int {
	// amd64:`.*runtime\.memclrHasPointers`
	// amd64:-`.*runtime\.makeslice`
	// ppc64le:`.*runtime\.memclrHasPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64:`.*runtime\.memclrHasPointers`
	// ppc64:-`.*runtime\.makeslice`
	return append(s, make([]*int, l)...)
}
// SliceExtensionVar checks the append+make extension with a
// variable (int) length: still no makeslice call.
func SliceExtensionVar(s []byte, l int) []byte {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// ppc64le:`.*runtime\.memclrNoHeapPointers`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64:`.*runtime\.memclrNoHeapPointers`
	// ppc64:-`.*runtime\.makeslice`
	return append(s, make([]byte, l)...)
}
// SliceExtensionVarInt64 checks the extension with a variable int64
// length: makeslice is still elided, but a panicmakeslicelen check is
// required since the int64 value may not fit the slice length.
func SliceExtensionVarInt64(s []byte, l int64) []byte {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:`.*runtime\.panicmakeslicelen`
	return append(s, make([]byte, l)...)
}
// SliceExtensionVarUint64 is the same check as SliceExtensionVarInt64
// with a uint64 length: panicmakeslicelen check kept, makeslice elided.
func SliceExtensionVarUint64(s []byte, l uint64) []byte {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:`.*runtime\.panicmakeslicelen`
	return append(s, make([]byte, l)...)
}
// SliceExtensionVarUint is the same check as SliceExtensionVarInt64
// with a uint length: panicmakeslicelen check kept, makeslice elided.
func SliceExtensionVarUint(s []byte, l uint) []byte {
	// amd64:`.*runtime\.memclrNoHeapPointers`
	// amd64:-`.*runtime\.makeslice`
	// amd64:`.*runtime\.panicmakeslicelen`
	return append(s, make([]byte, l)...)
}
// SliceExtensionInt64 checks the negative case on 386: with an int64
// length on a 32-bit target, the optimization does not apply and a
// plain makeslice call (with no memclr) is emitted.
func SliceExtensionInt64(s []int, l64 int64) []int {
	// 386:`.*runtime\.makeslice`
	// 386:-`.*runtime\.memclr`
	return append(s, make([]int, l64)...)
}
// ------------------ //
// Make+Copy //
// ------------------ //
// Issue #26252 - avoid memclr for make+copy
// SliceMakeCopyLen checks that the make+copy pattern with a
// pointer-free element type and matching length — make([]int, len(s))
// followed by copy(a, s) — is compiled to mallocgc+memmove with no
// separate makeslice call, avoiding the redundant memclr of memory
// that is immediately overwritten (issue #26252).
//
// Fix: removed web-scrape/commit-message text that had been pasted
// into the middle of this function body, breaking the Go syntax.
func SliceMakeCopyLen(s []int) []int {
	// amd64:`.*runtime\.mallocgc`
	// amd64:`.*runtime\.memmove`
	// amd64:-`.*runtime\.makeslice`
	// ppc64le:`.*runtime\.mallocgc`
	// ppc64le:`.*runtime\.memmove`
	// ppc64le:-`.*runtime\.makeslice`
	// ppc64:`.*runtime\.mallocgc`
	// ppc64:`.*runtime\.memmove`
	// ppc64:-`.*runtime\.makeslice`
	a := make([]int, len(s))
	copy(a, s)
	return a
}
// SliceMakeCopyLenPtr checks the make+copy pattern for a pointer
// element type with matching length: it is rewritten to a single
// runtime.makeslicecopy call instead of separate makeslice and
// typedslicecopy calls (issue #26252).
//
// Fix: removed web-scrape/commit-message text that had been pasted
// into the middle of this function body, breaking the Go syntax.
func SliceMakeCopyLenPtr(s []*int) []*int {
	// amd64:`.*runtime\.makeslicecopy`
	// amd64:-`.*runtime\.makeslice\(`
	// amd64:-`.*runtime\.typedslicecopy
	// ppc64le:`.*runtime\.makeslicecopy`
	// ppc64le:-`.*runtime\.makeslice\(`
	// ppc64le:-`.*runtime\.typedslicecopy
	// ppc64:`.*runtime\.makeslicecopy`
	// ppc64:-`.*runtime\.makeslice\(`
	// ppc64:-`.*runtime\.typedslicecopy
	a := make([]*int, len(s))
	copy(a, s)
	return a
}
// SliceMakeCopyConst checks make+copy with a constant length that may
// differ from len(s): the pattern is rewritten to makeslicecopy (no
// separate makeslice or memmove).
func SliceMakeCopyConst(s []int) []int {
	// amd64:`.*runtime\.makeslicecopy`
	// amd64:-`.*runtime\.makeslice\(`
	// amd64:-`.*runtime\.memmove`
	a := make([]int, 4)
	copy(a, s)
	return a
}
// SliceMakeCopyConstPtr checks make+copy with a constant length and a
// pointer element type: rewritten to makeslicecopy (no separate
// makeslice or typedslicecopy).
func SliceMakeCopyConstPtr(s []*int) []*int {
	// amd64:`.*runtime\.makeslicecopy`
	// amd64:-`.*runtime\.makeslice\(`
	// amd64:-`.*runtime\.typedslicecopy
	a := make([]*int, 4)
	copy(a, s)
	return a
}
// SliceMakeCopyNoOptNoDeref checks that the make+copy rewrite does NOT
// fire when the make target is a pointer dereference (*a) rather than
// a plain variable: separate makeslice and typedslicecopy calls remain.
func SliceMakeCopyNoOptNoDeref(s []*int) []*int {
	a := new([]*int)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	*a = make([]*int, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(*a, s)
	return *a
}
// SliceMakeCopyNoOptNoVar checks that the make+copy rewrite does NOT
// fire when the make target is an indexed element (a[0]) rather than a
// plain variable.
func SliceMakeCopyNoOptNoVar(s []*int) []*int {
	a := make([][]*int, 1)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a[0] = make([]*int, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(a[0], s)
	return a[0]
}
// SliceMakeCopyNoOptBlank checks that the make+copy rewrite does NOT
// fire when the make result is assigned to the blank identifier, so
// the copy target is unrelated to the make.
func SliceMakeCopyNoOptBlank(s []*int) []*int {
	var a []*int
	// amd64:-`.*runtime\.makeslicecopy`
	_ = make([]*int, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(a, s)
	return a
}
// SliceMakeCopyNoOptNoMake checks that the rewrite does NOT fire when
// the slice comes from *new([]*int) rather than a make call: the copy
// stays a typedslicecopy.
func SliceMakeCopyNoOptNoMake(s []*int) []*int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:-`.*runtime\.objectnew`
	a := *new([]*int)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(a, s)
	return a
}
// SliceMakeCopyNoOptNoHeapAlloc checks that the makeslicecopy rewrite
// does NOT fire here; only cap(a) is returned, so a does not escape
// (presumably the slice is stack-allocated, hence the name — confirm
// against the compiler's escape analysis).
func SliceMakeCopyNoOptNoHeapAlloc(s []*int) int {
	// amd64:-`.*runtime\.makeslicecopy`
	a := make([]*int, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(a, s)
	return cap(a)
}
// SliceMakeCopyNoOptNoCap checks that the rewrite does NOT fire when
// make has an explicit capacity argument (len 0, cap 4): separate
// makeslice and typedslicecopy calls remain.
func SliceMakeCopyNoOptNoCap(s []*int) []*int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 0, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.typedslicecopy`
	copy(a, s)
	return a
}
// SliceMakeCopyNoOptNoCopy checks that the rewrite does NOT fire when
// "copy" is a local function value shadowing the builtin: a plain
// makeslice is emitted and no makeslicecopy.
func SliceMakeCopyNoOptNoCopy(s []*int) []*int {
	copy := func(x, y []*int) {}
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 4)
	// amd64:-`.*runtime\.makeslicecopy`
	copy(a, s)
	return a
}
// SliceMakeCopyNoOptWrongOrder checks that the rewrite does NOT fire
// when the copy direction is reversed — copy(s, a) copies FROM the
// freshly made slice, not into it.
func SliceMakeCopyNoOptWrongOrder(s []*int) []*int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 4)
	// amd64:`.*runtime\.typedslicecopy`
	// amd64:-`.*runtime\.makeslicecopy`
	copy(s, a)
	return a
}
// SliceMakeCopyNoOptWrongAssign checks that the rewrite does NOT fire
// when the make result is assigned to a different variable (s) than
// the copy destination (a).
func SliceMakeCopyNoOptWrongAssign(s []*int) []*int {
	var a []*int
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	s = make([]*int, 4)
	// amd64:`.*runtime\.typedslicecopy`
	// amd64:-`.*runtime\.makeslicecopy`
	copy(a, s)
	return s
}
// SliceMakeCopyNoOptCopyLength checks that the rewrite does NOT fire
// when the result of copy is used (n := copy(a, s)): the rewritten
// form could not produce the copied-element count.
func SliceMakeCopyNoOptCopyLength(s []*int) (int, []*int) {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 4)
	// amd64:`.*runtime\.typedslicecopy`
	// amd64:-`.*runtime\.makeslicecopy`
	n := copy(a, s)
	return n, a
}
// SliceMakeCopyNoOptSelfCopy checks that the rewrite does NOT fire for
// a self-copy, copy(a, a), where source and destination are the same
// freshly made slice.
func SliceMakeCopyNoOptSelfCopy(s []*int) []*int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 4)
	// amd64:`.*runtime\.typedslicecopy`
	// amd64:-`.*runtime\.makeslicecopy`
	copy(a, a)
	return a
}
// SliceMakeCopyNoOptTargetReference checks that the rewrite does NOT
// fire when the copy source expression references the destination —
// s[:len(a)] mentions a, so the source is not a simple variable.
func SliceMakeCopyNoOptTargetReference(s []*int) []*int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]*int, 4)
	// amd64:`.*runtime\.typedslicecopy`
	// amd64:-`.*runtime\.makeslicecopy`
	copy(a, s[:len(a)])
	return a
}
// SliceMakeCopyNoOptCap checks that the rewrite does NOT fire when the
// make has an explicit capacity (len(s), 9) even though the length
// matches: a plain makeslice plus memmove is emitted instead.
func SliceMakeCopyNoOptCap(s []int) []int {
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.makeslice\(`
	a := make([]int, len(s), 9)
	// amd64:-`.*runtime\.makeslicecopy`
	// amd64:`.*runtime\.memmove`
	copy(a, s)
	return a
}
// SliceMakeCopyNoMemmoveDifferentLen checks that when the make length
// differs from len(s) (here len(s)-1), the pattern is rewritten to
// makeslicecopy rather than the direct mallocgc+memmove form.
func SliceMakeCopyNoMemmoveDifferentLen(s []int) []int {
	// amd64:`.*runtime\.makeslicecopy`
	// amd64:-`.*runtime\.memmove`
	a := make([]int, len(s)-1)
	// amd64:-`.*runtime\.memmove`
	copy(a, s)
	return a
}
// ---------------------- //
// Nil check of &s[0] //
// ---------------------- //
// See issue 30366
// SliceNilCheck checks that dereferencing p := &s[0] does not emit a
// redundant nil check (TESTB): taking the element address already
// implied a bounds check, so p is known non-nil.
func SliceNilCheck(s []int) {
	p := &s[0]
	// amd64:-`TESTB`
	_ = *p
}
// ---------------------- //
// Init slice literal //
// ---------------------- //
// See issue 21561
// InitSmallSliceLiteral checks that a small slice literal is
// initialized with an immediate store (MOVQ $42) rather than by
// copying from a static template.
func InitSmallSliceLiteral() []int {
	// amd64:`MOVQ\t[$]42`
	return []int{42}
}
// InitNotSmallSliceLiteral checks that a large slice literal (36
// elements) is initialized by referencing a static temporary
// (LEAQ ...stmp_) instead of element-by-element immediate stores.
func InitNotSmallSliceLiteral() []int {
	// amd64:`LEAQ\t.*stmp_`
	return []int{
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
		42,
	}
}
// --------------------------------------- //
// Test PPC64 SUBFCconst folding rules //
// triggered by slice operations. //
// --------------------------------------- //
// SliceWithConstCompare checks that the bounds computation for c[b:]
// after the b+len(a) < len(c) comparison folds via the PPC64
// SUBFCconst rules and emits no NEG instruction.
func SliceWithConstCompare(a []int, b int) []int {
	var c []int = []int{1, 2, 3, 4, 5}
	if b+len(a) < len(c) {
		// ppc64le:-"NEG"
		// ppc64:-"NEG"
		return c[b:]
	}
	return a
}
// SliceWithSubtractBound checks that the constant-minus-variable slice
// bound (3 - b) compiles to a SUBC on PPC64 with no NEG.
func SliceWithSubtractBound(a []int, b int) []int {
	// ppc64le:"SUBC",-"NEG"
	// ppc64:"SUBC",-"NEG"
	return a[(3 - b):]
}
// --------------------------------------- //
// Code generation for unsafe.Slice //
// --------------------------------------- //
// Slice1 checks that unsafe.Slice with a 1-byte element type needs no
// multiply (MULQ) to compute the slice size.
func Slice1(p *byte, i int) []byte {
	// amd64:-"MULQ"
	return unsafe.Slice(p, i)
}
// Slice0 checks that unsafe.Slice with a zero-size element type needs
// no multiply (MULQ) to compute the slice size.
func Slice0(p *struct{}, i int) []struct{} {
	// amd64:-"MULQ"
	return unsafe.Slice(p, i)
}