reflect: do not escape Value.Type
Types are either static (for compiler-created types) or heap-allocated and always reachable (for reflection-created types, held in the central map), so there is no need to let types escape. With CL 408826, reflect.Value does not always escape; without this CL, functions that escape Value.typ would still make the Value escape. A special case is added to the inliner to keep (*Value).Type inlineable.

Change-Id: I7c14d35fd26328347b509a06eb5bd1534d40775f
Reviewed-on: https://go-review.googlesource.com/c/go/+/413474
Reviewed-by: Austin Clements <austin@google.com>
Reviewed-by: David Chase <drchase@google.com>
Run-TryBot: Cherry Mui <cherryyz@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
parent 0ac72f8b96
commit be4fe08b57
7 changed files with 170 additions and 122 deletions
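The commit message describes the trick in prose; the sketch below is a rough, hypothetical illustration of that pattern, not code from this CL. The noescape helper mirrors the one the diff below adds to the reflect package, while typeInfo and value are made-up stand-ins for *abi.Type and reflect.Value.

package main

import "unsafe"

// noescape hides a pointer from escape analysis. The xor with 0 is an
// identity operation, but it severs the data-flow edge the compiler would
// otherwise track, so the input is not reported as escaping.
//
//go:nosplit
func noescape(p unsafe.Pointer) unsafe.Pointer {
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0)
}

// typeInfo stands in for *abi.Type: data that is either statically
// allocated or kept permanently reachable elsewhere, which is what makes
// hiding the pointer safe. typeInfo and value are hypothetical names.
type typeInfo struct{ name string }

type value struct {
	typ_ *typeInfo // read through typ() so inspecting the type does not force v to escape
	ptr  unsafe.Pointer
}

func (v value) typ() *typeInfo {
	return (*typeInfo)(noescape(unsafe.Pointer(v.typ_)))
}

func main() {
	ti := &typeInfo{name: "int"}
	v := value{typ_: ti}
	println(v.typ().name) // reading the type no longer pins v itself to the heap
}

In ordinary code this pattern trades away the protection escape analysis provides; it is only sound when the pointee is guaranteed to stay reachable some other way, which is exactly the argument the commit message makes for types.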
@@ -483,6 +483,7 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
// because getcaller{pc,sp} expect a pointer to the caller's first argument.
//
// runtime.throw is a "cheap call" like panic in normal code.
+var cheap bool
if n.X.Op() == ir.ONAME {
name := n.X.(*ir.Name)
if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {

@@ -496,6 +497,14 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
break
}
}
+// Special case for reflect.noescpae. It does just type
+// conversions to appease the escape analysis, and doesn't
+// generate code.
+if name.Class == ir.PFUNC && types.IsReflectPkg(name.Sym().Pkg) {
+if name.Sym().Name == "noescape" {
+cheap = true
+}
+}
// Special case for coverage counter updates; although
// these correspond to real operations, we treat them as
// zero cost for the moment. This is due to the existence

@@ -514,7 +523,6 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
if meth := ir.MethodExprName(n.X); meth != nil {
if fn := meth.Func; fn != nil {
s := fn.Sym()
-var cheap bool
if types.IsRuntimePkg(s.Pkg) && s.Name == "heapBits.nextArena" {
// Special case: explicitly allow mid-stack inlining of
// runtime.heapBits.next even though it calls slow-path

@@ -536,12 +544,12 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
cheap = true
}
}
-if cheap {
-break // treat like any other node, that is, cost of 1
-}
}
}
}
+if cheap {
+break // treat like any other node, that is, cost of 1
+}

// Determine if the callee edge is for an inlinable hot callee or not.
if v.profile != nil && v.curFunc != nil {

@@ -642,7 +650,7 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
// This doesn't produce code, but the children might.
v.budget++ // undo default cost

-case ir.ODCLCONST, ir.OFALL:
+case ir.ODCLCONST, ir.OFALL, ir.OTYPE:
// These nodes don't produce code; omit from inlining budget.
return false

@@ -39,7 +39,7 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
hard := func(v1, v2 Value) bool {
switch v1.Kind() {
case Pointer:
-if v1.typ.PtrBytes == 0 {
+if v1.typ().PtrBytes == 0 {
// not-in-heap pointers can't be cyclic.
// At least, all of our current uses of runtime/internal/sys.NotInHeap
// have that property. The runtime ones aren't cyclic (and we don't use

@@ -100,8 +100,8 @@ func makeMethodValue(op string, v Value) Value {

// Ignoring the flagMethod bit, v describes the receiver, not the method type.
fl := v.flag & (flagRO | flagAddr | flagIndir)
-fl |= flag(v.typ.Kind())
-rcvr := Value{v.typ, v.ptr, fl}
+fl |= flag(v.typ().Kind())
+rcvr := Value{v.typ(), v.ptr, fl}

// v.Type returns the actual type of the method value.
ftyp := (*funcType)(unsafe.Pointer(v.Type().(*rtype)))
@@ -478,21 +478,29 @@ var kindNames = []string{
// resolveNameOff resolves a name offset from a base pointer.
// The (*rtype).nameOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
+//
+//go:noescape
func resolveNameOff(ptrInModule unsafe.Pointer, off int32) unsafe.Pointer

// resolveTypeOff resolves an *rtype offset from a base type.
// The (*rtype).typeOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
+//
+//go:noescape
func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// resolveTextOff resolves a function pointer offset from a base type.
// The (*rtype).textOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
+//
+//go:noescape
func resolveTextOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// addReflectOff adds a pointer to the reflection lookup map in the runtime.
// It returns a new ID that can be used as a typeOff or textOff, and will
// be resolved correctly. Implemented in the runtime package.
+//
+//go:noescape
func addReflectOff(ptr unsafe.Pointer) int32

// resolveReflectName adds a name to the reflection lookup map in the runtime.

@@ -1144,7 +1152,9 @@ func (t *structType) FieldByName(name string) (f StructField, present bool) {
// If i is a nil interface value, TypeOf returns nil.
func TypeOf(i any) Type {
eface := *(*emptyInterface)(unsafe.Pointer(&i))
-return toType(eface.typ)
+// Noescape so this doesn't make i to escape. See the comment
+// at Value.typ for why this is safe.
+return toType((*abi.Type)(noescape(unsafe.Pointer(eface.typ))))
}

// rtypeOf directly extracts the *rtype of the provided value.
@@ -37,8 +37,9 @@ import (
// Using == on two Values does not compare the underlying values
// they represent.
type Value struct {
-// typ holds the type of the value represented by a Value.
-typ *abi.Type
+// typ_ holds the type of the value represented by a Value.
+// Access using the typ method to avoid escape of v.
+typ_ *abi.Type

// Pointer-valued data or, if flagIndir is set, pointer to data.
// Valid when either flagIndir is set or typ.pointers() is true.

@@ -92,11 +93,20 @@ func (f flag) ro() flag {
return 0
}

+func (v Value) typ() *abi.Type {
+// Types are either static (for compiler-created types) or
+// heap-allocated but always reachable (for reflection-created
+// types, held in the central map). So there is no need to
+// escape types. noescape here help avoid unnecessary escape
+// of v.
+return (*abi.Type)(noescape(unsafe.Pointer(v.typ_)))
+}
+
// pointer returns the underlying pointer represented by v.
// v.Kind() must be Pointer, Map, Chan, Func, or UnsafePointer
// if v.Kind() == Pointer, the base type must not be not-in-heap.
func (v Value) pointer() unsafe.Pointer {
-if v.typ.Size() != goarch.PtrSize || !v.typ.Pointers() {
+if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
panic("can't call pointer on a non-pointer Value")
}
if v.flag&flagIndir != 0 {

@@ -107,7 +117,7 @@ func (v Value) pointer() unsafe.Pointer {

// packEface converts v to the empty interface.
func packEface(v Value) any {
-t := v.typ
+t := v.typ()
var i any
e := (*emptyInterface)(unsafe.Pointer(&i))
// First, fill in the data portion of the interface.

@@ -275,7 +285,7 @@ func (v Value) Addr() Value {
// Preserve flagRO instead of using v.flag.ro() so that
// v.Addr().Elem() is equivalent to v (#32772)
fl := v.flag & flagRO
-return Value{ptrTo(v.typ), v.ptr, fl | flag(Pointer)}
+return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
}

// Bool returns v's underlying value.

@@ -299,7 +309,7 @@ var bytesType = rtypeOf(([]byte)(nil))
// an addressable array of bytes.
func (v Value) Bytes() []byte {
// bytesSlow is split out to keep Bytes inlineable for unnamed []byte.
-if v.typ == bytesType {
+if v.typ_ == bytesType { // ok to use v.typ_ directly as comparison doesn't cause escape
return *(*[]byte)(v.ptr)
}
return v.bytesSlow()

@@ -308,20 +318,20 @@ func (v Value) Bytes() []byte {
func (v Value) bytesSlow() []byte {
switch v.kind() {
case Slice:
-if v.typ.Elem().Kind() != abi.Uint8 {
+if v.typ().Elem().Kind() != abi.Uint8 {
panic("reflect.Value.Bytes of non-byte slice")
}
// Slice is always bigger than a word; assume flagIndir.
return *(*[]byte)(v.ptr)
case Array:
-if v.typ.Elem().Kind() != abi.Uint8 {
+if v.typ().Elem().Kind() != abi.Uint8 {
panic("reflect.Value.Bytes of non-byte array")
}
if !v.CanAddr() {
panic("reflect.Value.Bytes of unaddressable byte array")
}
p := (*byte)(v.ptr)
-n := int((*arrayType)(unsafe.Pointer(v.typ)).Len)
+n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
return unsafe.Slice(p, n)
}
panic(&ValueError{"reflect.Value.Bytes", v.kind()})

@@ -331,7 +341,7 @@ func (v Value) bytesSlow() []byte {
// It panics if v's underlying value is not a slice of runes (int32s).
func (v Value) runes() []rune {
v.mustBe(Slice)
-if v.typ.Elem().Kind() != abi.Int32 {
+if v.typ().Elem().Kind() != abi.Int32 {
panic("reflect.Value.Bytes of non-rune slice")
}
// Slice is always bigger than a word; assume flagIndir.

@@ -389,7 +399,7 @@ const debugReflectCall = false

func (v Value) call(op string, in []Value) []Value {
// Get function pointer, type.
-t := (*funcType)(unsafe.Pointer(v.typ))
+t := (*funcType)(unsafe.Pointer(v.typ()))
var (
fn unsafe.Pointer
rcvr Value

@@ -779,7 +789,7 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs
if numOut > 0 {
for i, typ := range ftyp.OutSlice() {
v := out[i]
-if v.typ == nil {
+if v.typ() == nil {
panic("reflect: function created by MakeFunc using " + funcName(f) +
" returned zero Value")
}

@@ -876,8 +886,8 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
i := methodIndex
-if v.typ.Kind() == abi.Interface {
-tt := (*interfaceType)(unsafe.Pointer(v.typ))
+if v.typ().Kind() == abi.Interface {
+tt := (*interfaceType)(unsafe.Pointer(v.typ()))
if uint(i) >= uint(len(tt.Methods)) {
panic("reflect: internal error: invalid method index")
}

@@ -893,18 +903,18 @@ func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t
fn = unsafe.Pointer(&iface.itab.fun[i])
t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
} else {
-rcvrtype = v.typ
-ms := v.typ.ExportedMethods()
+rcvrtype = v.typ()
+ms := v.typ().ExportedMethods()
if uint(i) >= uint(len(ms)) {
panic("reflect: internal error: invalid method index")
}
m := ms[i]
-if !nameOffFor(v.typ, m.Name).IsExported() {
+if !nameOffFor(v.typ(), m.Name).IsExported() {
panic("reflect: " + op + " of unexported method")
}
-ifn := textOffFor(v.typ, m.Ifn)
+ifn := textOffFor(v.typ(), m.Ifn)
fn = unsafe.Pointer(&ifn)
-t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ, m.Mtyp)))
+t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
}
return
}
@@ -914,7 +924,7 @@ func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t
// Reflect uses the "interface" calling convention for
// methods, which always uses one word to record the receiver.
func storeRcvr(v Value, p unsafe.Pointer) {
-t := v.typ
+t := v.typ()
if t.Kind() == abi.Interface {
// the interface data word becomes the receiver word
iface := (*nonEmptyInterface)(v.ptr)

@@ -1164,12 +1174,12 @@ func (v Value) capNonSlice() int {
k := v.kind()
switch k {
case Array:
-return v.typ.Len()
+return v.typ().Len()
case Chan:
return chancap(v.pointer())
case Ptr:
-if v.typ.Elem().Kind() == abi.Array {
-return v.typ.Elem().Len()
+if v.typ().Elem().Kind() == abi.Array {
+return v.typ().Elem().Len()
}
panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
}

@@ -1216,7 +1226,7 @@ func (v Value) Elem() Value {
switch k {
case Interface:
var eface any
-if v.typ.NumMethod() == 0 {
+if v.typ().NumMethod() == 0 {
eface = *(*any)(v.ptr)
} else {
eface = (any)(*(*interface {

@@ -1231,7 +1241,7 @@ func (v Value) Elem() Value {
case Pointer:
ptr := v.ptr
if v.flag&flagIndir != 0 {
-if ifaceIndir(v.typ) {
+if ifaceIndir(v.typ()) {
// This is a pointer to a not-in-heap object. ptr points to a uintptr
// in the heap. That uintptr is the address of a not-in-heap object.
// In general, pointers to not-in-heap objects can be total junk.

@@ -1252,7 +1262,7 @@ func (v Value) Elem() Value {
if ptr == nil {
return Value{}
}
-tt := (*ptrType)(unsafe.Pointer(v.typ))
+tt := (*ptrType)(unsafe.Pointer(v.typ()))
typ := tt.Elem
fl := v.flag&flagRO | flagIndir | flagAddr
fl |= flag(typ.Kind())

@@ -1267,7 +1277,7 @@ func (v Value) Field(i int) Value {
if v.kind() != Struct {
panic(&ValueError{"reflect.Value.Field", v.kind()})
}
-tt := (*structType)(unsafe.Pointer(v.typ))
+tt := (*structType)(unsafe.Pointer(v.typ()))
if uint(i) >= uint(len(tt.Fields)) {
panic("reflect: Field index out of range")
}

@@ -1303,7 +1313,7 @@ func (v Value) FieldByIndex(index []int) Value {
v.mustBe(Struct)
for i, x := range index {
if i > 0 {
-if v.Kind() == Pointer && v.typ.Elem().Kind() == abi.Struct {
+if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
if v.IsNil() {
panic("reflect: indirection through nil pointer to embedded struct")
}

@@ -1326,9 +1336,9 @@ func (v Value) FieldByIndexErr(index []int) (Value, error) {
v.mustBe(Struct)
for i, x := range index {
if i > 0 {
-if v.Kind() == Ptr && v.typ.Elem().Kind() == abi.Struct {
+if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
if v.IsNil() {
-return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ.Elem()))
+return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
}
v = v.Elem()
}

@@ -1343,7 +1353,7 @@ func (v Value) FieldByIndexErr(index []int) (Value, error) {
// It panics if v's Kind is not struct.
func (v Value) FieldByName(name string) Value {
v.mustBe(Struct)
-if f, ok := toRType(v.typ).FieldByName(name); ok {
+if f, ok := toRType(v.typ()).FieldByName(name); ok {
return v.FieldByIndex(f.Index)
}
return Value{}

@@ -1354,7 +1364,7 @@ func (v Value) FieldByName(name string) Value {
// It panics if v's Kind is not struct.
// It returns the zero Value if no field was found.
func (v Value) FieldByNameFunc(match func(string) bool) Value {
-if f, ok := toRType(v.typ).FieldByNameFunc(match); ok {
+if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
return v.FieldByIndex(f.Index)
}
return Value{}

@@ -1390,7 +1400,7 @@ var uint8Type = rtypeOf(uint8(0))
func (v Value) Index(i int) Value {
switch v.kind() {
case Array:
-tt := (*arrayType)(unsafe.Pointer(v.typ))
+tt := (*arrayType)(unsafe.Pointer(v.typ()))
if uint(i) >= uint(tt.Len) {
panic("reflect: array index out of range")
}

@@ -1413,7 +1423,7 @@ func (v Value) Index(i int) Value {
if uint(i) >= uint(s.Len) {
panic("reflect: slice index out of range")
}
-tt := (*sliceType)(unsafe.Pointer(v.typ))
+tt := (*sliceType)(unsafe.Pointer(v.typ()))
typ := tt.Elem
val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())

@@ -1584,11 +1594,11 @@ func (v Value) IsZero() bool {
return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
case Array:
// If the type is comparable, then compare directly with zero.
-if v.typ.Equal != nil && v.typ.Size() <= maxZero {
+if v.typ().Equal != nil && v.typ().Size() <= maxZero {
if v.flag&flagIndir == 0 {
return v.ptr == nil
}
-return v.typ.Equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
+return v.typ().Equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
}

n := v.Len()

@@ -1604,11 +1614,11 @@ func (v Value) IsZero() bool {
return v.Len() == 0
case Struct:
// If the type is comparable, then compare directly with zero.
-if v.typ.Equal != nil && v.typ.Size() <= maxZero {
+if v.typ().Equal != nil && v.typ().Size() <= maxZero {
if v.flag&flagIndir == 0 {
return v.ptr == nil
}
-return v.typ.Equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
+return v.typ().Equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
}

n := v.NumField()

@@ -1671,7 +1681,7 @@ func (v Value) SetZero() {
case Chan, Func, Map, Pointer, UnsafePointer:
*(*unsafe.Pointer)(v.ptr) = nil
case Array, Struct:
-typedmemclr(v.typ, v.ptr)
+typedmemclr(v.typ(), v.ptr)
default:
// This should never happen, but will act as a safeguard for later,
// as a default value doesn't makes sense here.

@@ -1698,7 +1708,7 @@ func (v Value) Len() int {
func (v Value) lenNonSlice() int {
switch k := v.kind(); k {
case Array:
-tt := (*arrayType)(unsafe.Pointer(v.typ))
+tt := (*arrayType)(unsafe.Pointer(v.typ()))
return int(tt.Len)
case Chan:
return chanlen(v.pointer())

@@ -1708,8 +1718,8 @@ func (v Value) lenNonSlice() int {
// String is bigger than a word; assume flagIndir.
return (*unsafeheader.String)(v.ptr).Len
case Ptr:
-if v.typ.Elem().Kind() == abi.Array {
-return v.typ.Elem().Len()
+if v.typ().Elem().Kind() == abi.Array {
+return v.typ().Elem().Len()
}
panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
}

@@ -1724,7 +1734,7 @@ var stringType = rtypeOf("")
// As in Go, the key's value must be assignable to the map's key type.
func (v Value) MapIndex(key Value) Value {
v.mustBe(Map)
-tt := (*mapType)(unsafe.Pointer(v.typ))
+tt := (*mapType)(unsafe.Pointer(v.typ()))

// Do not require key to be exported, so that DeepEqual
// and other programs can use all the keys returned by

@@ -1735,9 +1745,9 @@ func (v Value) MapIndex(key Value) Value {
// of unexported fields.

var e unsafe.Pointer
-if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ && tt.Elem.Size() <= maxValSize {
+if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
k := *(*string)(key.ptr)
-e = mapaccess_faststr(v.typ, v.pointer(), k)
+e = mapaccess_faststr(v.typ(), v.pointer(), k)
} else {
key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
var k unsafe.Pointer

@@ -1746,7 +1756,7 @@ func (v Value) MapIndex(key Value) Value {
} else {
k = unsafe.Pointer(&key.ptr)
}
-e = mapaccess(v.typ, v.pointer(), k)
+e = mapaccess(v.typ(), v.pointer(), k)
}
if e == nil {
return Value{}

@@ -1763,7 +1773,7 @@ func (v Value) MapIndex(key Value) Value {
// It returns an empty slice if v represents a nil map.
func (v Value) MapKeys() []Value {
v.mustBe(Map)
-tt := (*mapType)(unsafe.Pointer(v.typ))
+tt := (*mapType)(unsafe.Pointer(v.typ()))
keyType := tt.Key

fl := v.flag.ro() | flag(keyType.Kind())

@@ -1774,7 +1784,7 @@ func (v Value) MapKeys() []Value {
mlen = maplen(m)
}
var it hiter
-mapiterinit(v.typ, m, &it)
+mapiterinit(v.typ(), m, &it)
a := make([]Value, mlen)
var i int
for i = 0; i < len(a); i++ {

@@ -1834,7 +1844,7 @@ func (iter *MapIter) Key() Value {
panic("MapIter.Key called on exhausted iterator")
}

-t := (*mapType)(unsafe.Pointer(iter.m.typ))
+t := (*mapType)(unsafe.Pointer(iter.m.typ()))
ktype := t.Key
return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
}

@@ -1858,13 +1868,13 @@ func (v Value) SetIterKey(iter *MapIter) {
target = v.ptr
}

-t := (*mapType)(unsafe.Pointer(iter.m.typ))
+t := (*mapType)(unsafe.Pointer(iter.m.typ()))
ktype := t.Key

iter.m.mustBeExported() // do not let unexported m leak
key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
-key = key.assignTo("reflect.MapIter.SetKey", v.typ, target)
-typedmemmove(v.typ, v.ptr, key.ptr)
+key = key.assignTo("reflect.MapIter.SetKey", v.typ(), target)
+typedmemmove(v.typ(), v.ptr, key.ptr)
}

// Value returns the value of iter's current map entry.

@@ -1877,7 +1887,7 @@ func (iter *MapIter) Value() Value {
panic("MapIter.Value called on exhausted iterator")
}

-t := (*mapType)(unsafe.Pointer(iter.m.typ))
+t := (*mapType)(unsafe.Pointer(iter.m.typ()))
vtype := t.Elem
return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
}

@@ -1901,13 +1911,13 @@ func (v Value) SetIterValue(iter *MapIter) {
target = v.ptr
}

-t := (*mapType)(unsafe.Pointer(iter.m.typ))
+t := (*mapType)(unsafe.Pointer(iter.m.typ()))
vtype := t.Elem

iter.m.mustBeExported() // do not let unexported m leak
elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
-elem = elem.assignTo("reflect.MapIter.SetValue", v.typ, target)
-typedmemmove(v.typ, v.ptr, elem.ptr)
+elem = elem.assignTo("reflect.MapIter.SetValue", v.typ(), target)
+typedmemmove(v.typ(), v.ptr, elem.ptr)
}

// Next advances the map iterator and reports whether there is another

@@ -1918,7 +1928,7 @@ func (iter *MapIter) Next() bool {
panic("MapIter.Next called on an iterator that does not have an associated map Value")
}
if !iter.hiter.initialized() {
-mapiterinit(iter.m.typ, iter.m.pointer(), &iter.hiter)
+mapiterinit(iter.m.typ(), iter.m.pointer(), &iter.hiter)
} else {
if mapiterkey(&iter.hiter) == nil {
panic("MapIter.Next called on exhausted iterator")
@@ -1966,6 +1976,11 @@ func (v Value) MapRange() *MapIter {
return &MapIter{m: v}
}

+// Force slow panicking path not inlined, so it won't add to the
+// inlining budget of the caller.
+// TODO: undo when the inliner is no longer bottom-up only.
+//
+//go:noinline
func (f flag) panicNotMap() {
f.mustBe(Map)
}

@@ -1988,19 +2003,19 @@ func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
-if v.typ == nil {
+if v.typ() == nil {
panic(&ValueError{"reflect.Value.Method", Invalid})
}
-if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ).NumMethod()) {
+if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
panic("reflect: Method index out of range")
}
-if v.typ.Kind() == abi.Interface && v.IsNil() {
+if v.typ().Kind() == abi.Interface && v.IsNil() {
panic("reflect: Method on nil interface value")
}
fl := v.flag.ro() | (v.flag & flagIndir)
fl |= flag(Func)
fl |= flag(i)<<flagMethodShift | flagMethod
-return Value{v.typ, v.ptr, fl}
+return Value{v.typ(), v.ptr, fl}
}

// NumMethod returns the number of methods in the value's method set.

@@ -2009,13 +2024,13 @@ func (v Value) Method(i int) Value {
//
// For an interface type, it returns the number of exported and unexported methods.
func (v Value) NumMethod() int {
-if v.typ == nil {
+if v.typ() == nil {
panic(&ValueError{"reflect.Value.NumMethod", Invalid})
}
if v.flag&flagMethod != 0 {
return 0
}
-return toRType(v.typ).NumMethod()
+return toRType(v.typ()).NumMethod()
}

// MethodByName returns a function value corresponding to the method

@@ -2024,13 +2039,13 @@ func (v Value) NumMethod() int {
// a receiver; the returned function will always use v as the receiver.
// It returns the zero Value if no method was found.
func (v Value) MethodByName(name string) Value {
-if v.typ == nil {
+if v.typ() == nil {
panic(&ValueError{"reflect.Value.MethodByName", Invalid})
}
if v.flag&flagMethod != 0 {
return Value{}
}
-m, ok := toRType(v.typ).MethodByName(name)
+m, ok := toRType(v.typ()).MethodByName(name)
if !ok {
return Value{}
}

@@ -2041,7 +2056,7 @@ func (v Value) MethodByName(name string) Value {
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
v.mustBe(Struct)
-tt := (*structType)(unsafe.Pointer(v.typ))
+tt := (*structType)(unsafe.Pointer(v.typ()))
return len(tt.Fields)
}

@@ -2084,7 +2099,7 @@ func (v Value) OverflowInt(x int64) bool {
k := v.kind()
switch k {
case Int, Int8, Int16, Int32, Int64:
-bitSize := v.typ.Size() * 8
+bitSize := v.typ().Size() * 8
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
return x != trunc
}

@@ -2097,7 +2112,7 @@ func (v Value) OverflowUint(x uint64) bool {
k := v.kind()
switch k {
case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
-bitSize := v.typ.Size() * 8
+bitSize := v.typ_.Size() * 8 // ok to use v.typ_ directly as Size doesn't escape
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
return x != trunc
}

@@ -2129,7 +2144,7 @@ func (v Value) Pointer() uintptr {
k := v.kind()
switch k {
case Pointer:
-if v.typ.PtrBytes == 0 {
+if v.typ().PtrBytes == 0 {
val := *(*uintptr)(v.ptr)
// Since it is a not-in-heap pointer, all pointers to the heap are
// forbidden! See comment in Value.Elem and issue #48399.

@@ -2179,7 +2194,7 @@ func (v Value) Recv() (x Value, ok bool) {
// internal recv, possibly non-blocking (nb).
// v is known to be a channel.
func (v Value) recv(nb bool) (val Value, ok bool) {
-tt := (*chanType)(unsafe.Pointer(v.typ))
+tt := (*chanType)(unsafe.Pointer(v.typ()))
if ChanDir(tt.Dir)&RecvDir == 0 {
panic("reflect: recv on send-only channel")
}

@@ -2212,7 +2227,7 @@ func (v Value) Send(x Value) {
// internal send, possibly non-blocking.
// v is known to be a channel.
func (v Value) send(x Value, nb bool) (selected bool) {
-tt := (*chanType)(unsafe.Pointer(v.typ))
+tt := (*chanType)(unsafe.Pointer(v.typ()))
if ChanDir(tt.Dir)&SendDir == 0 {
panic("reflect: send on recv-only channel")
}

@@ -2238,12 +2253,12 @@ func (v Value) Set(x Value) {
if v.kind() == Interface {
target = v.ptr
}
-x = x.assignTo("reflect.Set", v.typ, target)
+x = x.assignTo("reflect.Set", v.typ(), target)
if x.flag&flagIndir != 0 {
if x.ptr == unsafe.Pointer(&zeroVal[0]) {
-typedmemclr(v.typ, v.ptr)
+typedmemclr(v.typ(), v.ptr)
} else {
-typedmemmove(v.typ, v.ptr, x.ptr)
+typedmemmove(v.typ(), v.ptr, x.ptr)
}
} else {
*(*unsafe.Pointer)(v.ptr) = x.ptr

@@ -2263,7 +2278,7 @@ func (v Value) SetBool(x bool) {
func (v Value) SetBytes(x []byte) {
v.mustBeAssignable()
v.mustBe(Slice)
-if toRType(v.typ).Elem().Kind() != Uint8 { // TODO add Elem method, fix mustBe(Slice) to return slice.
+if toRType(v.typ()).Elem().Kind() != Uint8 { // TODO add Elem method, fix mustBe(Slice) to return slice.
panic("reflect.Value.SetBytes of non-byte slice")
}
*(*[]byte)(v.ptr) = x

@@ -2274,7 +2289,7 @@ func (v Value) SetBytes(x []byte) {
func (v Value) setRunes(x []rune) {
v.mustBeAssignable()
v.mustBe(Slice)
-if v.typ.Elem().Kind() != abi.Int32 {
+if v.typ().Elem().Kind() != abi.Int32 {
panic("reflect.Value.setRunes of non-rune slice")
}
*(*[]rune)(v.ptr) = x

@@ -2364,12 +2379,12 @@ func (v Value) SetMapIndex(key, elem Value) {
v.mustBe(Map)
v.mustBeExported()
key.mustBeExported()
-tt := (*mapType)(unsafe.Pointer(v.typ))
+tt := (*mapType)(unsafe.Pointer(v.typ()))

-if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ && tt.Elem.Size() <= maxValSize {
+if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
k := *(*string)(key.ptr)
-if elem.typ == nil {
-mapdelete_faststr(v.typ, v.pointer(), k)
+if elem.typ() == nil {
+mapdelete_faststr(v.typ(), v.pointer(), k)
return
}
elem.mustBeExported()

@@ -2380,7 +2395,7 @@ func (v Value) SetMapIndex(key, elem Value) {
} else {
e = unsafe.Pointer(&elem.ptr)
}
-mapassign_faststr(v.typ, v.pointer(), k, e)
+mapassign_faststr(v.typ(), v.pointer(), k, e)
return
}

@@ -2391,8 +2406,8 @@ func (v Value) SetMapIndex(key, elem Value) {
} else {
k = unsafe.Pointer(&key.ptr)
}
-if elem.typ == nil {
-mapdelete(v.typ, v.pointer(), k)
+if elem.typ() == nil {
+mapdelete(v.typ(), v.pointer(), k)
return
}
elem.mustBeExported()

@@ -2403,7 +2418,7 @@ func (v Value) SetMapIndex(key, elem Value) {
} else {
e = unsafe.Pointer(&elem.ptr)
}
-mapassign(v.typ, v.pointer(), k, e)
+mapassign(v.typ(), v.pointer(), k, e)
}

// SetUint sets v's underlying value to x.

@@ -2461,13 +2476,13 @@ func (v Value) Slice(i, j int) Value {
if v.flag&flagAddr == 0 {
panic("reflect.Value.Slice: slice of unaddressable array")
}
-tt := (*arrayType)(unsafe.Pointer(v.typ))
+tt := (*arrayType)(unsafe.Pointer(v.typ()))
cap = int(tt.Len)
typ = (*sliceType)(unsafe.Pointer(tt.Slice))
base = v.ptr

case Slice:
-typ = (*sliceType)(unsafe.Pointer(v.typ))
+typ = (*sliceType)(unsafe.Pointer(v.typ()))
s := (*unsafeheader.Slice)(v.ptr)
base = s.Data
cap = s.Cap

@@ -2481,7 +2496,7 @@ func (v Value) Slice(i, j int) Value {
if i < s.Len {
t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
}
-return Value{v.typ, unsafe.Pointer(&t), v.flag}
+return Value{v.typ(), unsafe.Pointer(&t), v.flag}
}

if i < 0 || j < i || j > cap {

@@ -2523,13 +2538,13 @@ func (v Value) Slice3(i, j, k int) Value {
if v.flag&flagAddr == 0 {
panic("reflect.Value.Slice3: slice of unaddressable array")
}
-tt := (*arrayType)(unsafe.Pointer(v.typ))
+tt := (*arrayType)(unsafe.Pointer(v.typ()))
cap = int(tt.Len)
typ = (*sliceType)(unsafe.Pointer(tt.Slice))
base = v.ptr

case Slice:
-typ = (*sliceType)(unsafe.Pointer(v.typ))
+typ = (*sliceType)(unsafe.Pointer(v.typ()))
s := (*unsafeheader.Slice)(v.ptr)
base = s.Data
cap = s.Cap

@@ -2605,7 +2620,7 @@ func (v Value) TrySend(x Value) bool {
// Type returns v's type.
func (v Value) Type() Type {
if v.flag != 0 && v.flag&flagMethod == 0 {
-return (*rtype)(unsafe.Pointer(v.typ)) // inline of toRType(v.typ), for own inlining in inline test
+return (*rtype)(noescape(unsafe.Pointer(v.typ_))) // inline of toRType(v.typ()), for own inlining in inline test
}
return v.typeSlow()
}
@@ -2614,29 +2629,31 @@ func (v Value) typeSlow() Type {
if v.flag == 0 {
panic(&ValueError{"reflect.Value.Type", Invalid})
}
+
+typ := v.typ()
if v.flag&flagMethod == 0 {
-return toRType(v.typ)
+return toRType(v.typ())
}

// Method value.
// v.typ describes the receiver, not the method type.
i := int(v.flag) >> flagMethodShift
-if v.typ.Kind() == abi.Interface {
+if v.typ().Kind() == abi.Interface {
// Method on interface.
-tt := (*interfaceType)(unsafe.Pointer(v.typ))
+tt := (*interfaceType)(unsafe.Pointer(typ))
if uint(i) >= uint(len(tt.Methods)) {
panic("reflect: internal error: invalid method index")
}
m := &tt.Methods[i]
-return toRType(typeOffFor(v.typ, m.Typ))
+return toRType(typeOffFor(typ, m.Typ))
}
// Method on concrete type.
-ms := v.typ.ExportedMethods()
+ms := typ.ExportedMethods()
if uint(i) >= uint(len(ms)) {
panic("reflect: internal error: invalid method index")
}
m := ms[i]
-return toRType(typeOffFor(v.typ, m.Mtyp))
+return toRType(typeOffFor(typ, m.Mtyp))
}

// CanUint reports whether Uint can be used without panicking.

@@ -2681,7 +2698,7 @@ func (v Value) Uint() uint64 {
//
// It's preferred to use uintptr(Value.Addr().UnsafePointer()) to get the equivalent result.
func (v Value) UnsafeAddr() uintptr {
-if v.typ == nil {
+if v.typ() == nil {
panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
}
if v.flag&flagAddr == 0 {

@@ -2707,7 +2724,7 @@ func (v Value) UnsafePointer() unsafe.Pointer {
k := v.kind()
switch k {
case Pointer:
-if v.typ.PtrBytes == 0 {
+if v.typ().PtrBytes == 0 {
// Since it is a not-in-heap pointer, all pointers to the heap are
// forbidden! See comment in Value.Elem and issue #48399.
if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {

@@ -2808,7 +2825,7 @@ func (v Value) grow(n int) {
case p.Len+n < 0:
panic("reflect.Value.Grow: slice overflow")
case p.Len+n > p.Cap:
-t := v.typ.Elem()
+t := v.typ().Elem()
*p = growslice(t, *p, n)
}
}

@@ -2841,10 +2858,10 @@ func (v Value) Clear() {
switch v.Kind() {
case Slice:
sh := *(*unsafeheader.Slice)(v.ptr)
-st := (*sliceType)(unsafe.Pointer(v.typ))
+st := (*sliceType)(unsafe.Pointer(v.typ()))
typedarrayclear(st.Elem, sh.Data, sh.Len)
case Map:
-mapclear(v.typ, v.pointer())
+mapclear(v.typ(), v.pointer())
default:
panic(&ValueError{"reflect.Value.Clear", v.Kind()})
}

@@ -2895,16 +2912,16 @@ func Copy(dst, src Value) int {
sk := src.kind()
var stringCopy bool
if sk != Array && sk != Slice {
-stringCopy = sk == String && dst.typ.Elem().Kind() == abi.Uint8
+stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
if !stringCopy {
panic(&ValueError{"reflect.Copy", sk})
}
}
src.mustBeExported()

-de := dst.typ.Elem()
+de := dst.typ().Elem()
if !stringCopy {
-se := src.typ.Elem()
+se := src.typ().Elem()
typesMustMatch("reflect.Copy", toType(de), toType(se))
}

@@ -3039,7 +3056,7 @@ func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
}
ch.mustBe(Chan)
ch.mustBeExported()
-tt := (*chanType)(unsafe.Pointer(ch.typ))
+tt := (*chanType)(unsafe.Pointer(ch.typ()))
if ChanDir(tt.Dir)&SendDir == 0 {
panic("reflect.Select: SendDir case using recv-only channel")
}

@@ -3070,7 +3087,7 @@ func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
}
ch.mustBe(Chan)
ch.mustBeExported()
-tt := (*chanType)(unsafe.Pointer(ch.typ))
+tt := (*chanType)(unsafe.Pointer(ch.typ()))
if ChanDir(tt.Dir)&RecvDir == 0 {
panic("reflect.Select: RecvDir case using send-only channel")
}

@@ -3100,7 +3117,11 @@ func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
*/

// implemented in package runtime
+
+//go:noescape
func unsafe_New(*abi.Type) unsafe.Pointer
+
+//go:noescape
func unsafe_NewArray(*abi.Type, int) unsafe.Pointer

// MakeSlice creates a new zero-initialized slice value

@@ -3252,14 +3273,14 @@ func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Va
}

switch {
-case directlyAssignable(dst, v.typ):
+case directlyAssignable(dst, v.typ()):
// Overwrite type so that they match.
// Same memory layout, so no harm done.
fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
fl |= flag(dst.Kind())
return Value{dst, v.ptr, fl}

-case implements(dst, v.typ):
+case implements(dst, v.typ()):
if v.Kind() == Interface && v.IsNil() {
// A nil ReadWriter passed to nil Reader is OK,
// but using ifaceE2I below will panic.

@@ -3279,7 +3300,7 @@ func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Va
}

// Failed.
-panic(context + ": value of type " + stringFor(v.typ) + " is not assignable to type " + stringFor(dst))
+panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
}

// Convert returns the value v converted to type t.

@@ -3289,9 +3310,9 @@ func (v Value) Convert(t Type) Value {
if v.flag&flagMethod != 0 {
v = makeMethodValue("Convert", v)
}
-op := convertOp(t.common(), v.typ)
+op := convertOp(t.common(), v.typ())
if op == nil {
-panic("reflect.Value.Convert: value of type " + stringFor(v.typ) + " cannot be converted to type " + t.String())
+panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String())
}
return op(v, t)
}

@@ -3929,3 +3950,9 @@ func contentEscapes(x unsafe.Pointer) {
escapes(*(*any)(x)) // the dereference may not always be safe, but never executed
}
}
+
+//go:nosplit
+func noescape(p unsafe.Pointer) unsafe.Pointer {
+x := uintptr(p)
+return unsafe.Pointer(x ^ 0)
+}
@@ -229,6 +229,9 @@ func SetEnvs(e []string) { envs = e }
// For benchmarking.

func BenchSetType(n int, x any) {
+// Escape x to ensure it is allocated on the heap, as we are
+// working on the heap bits here.
+Escape(x)
e := *efaceOf(&x)
t := e._type
var size uintptr
@@ -42,12 +42,12 @@ func small7() { // ERROR "can inline small7"

var rwmutex *sync.RWMutex

-func small8() {
+func small8() { // ERROR "can inline small8"
// the RUnlock fast path should be inlined
rwmutex.RUnlock() // ERROR "inlining call to sync\.\(\*RWMutex\)\.RUnlock" "inlining call to atomic\.\(\*Int32\)\.Add"
}

-func small9() {
+func small9() { // ERROR "can inline small9"
// the RLock fast path should be inlined
rwmutex.RLock() // ERROR "inlining call to sync\.\(\*RWMutex\)\.RLock" "inlining call to atomic\.\(\*Int32\)\.Add"
}