reflect: add register ABI support for makeFuncStub and methodValueCall
This change finishes off the register ABI functionality for the reflect
package. Specifically, it implements a call on a MakeFunc'd value by
performing the reverse process that reflect.Value.Call does, using the
same ABI steps. It implements a call on a method value created by
reflect by translating between the method value's ABI and the method's
ABI.

Tests are added for both cases.

For #40724.

Change-Id: I302820b61fc0a8f94c5525a002bc02776aef41af
Reviewed-on: https://go-review.googlesource.com/c/go/+/298670
Trust: Michael Knyszek <mknyszek@google.com>
Run-TryBot: Michael Knyszek <mknyszek@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Cherry Zhang <cherryyz@google.com>
parent 6996bae5d1
commit 28c5fed557
18 changed files with 991 additions and 207 deletions
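The two code paths this CL completes are the ones reached from ordinary reflect usage. As a minimal sketch (not part of the commit; swapType and the handler are made up for illustration), the MakeFunc path below enters makeFuncStub and then the callReflect function changed in the first hunk:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	// Build a func(int, int) (int, int) from a generic []reflect.Value handler.
	swapType := reflect.TypeOf(func(int, int) (int, int) { return 0, 0 })
	swap := reflect.MakeFunc(swapType, func(args []reflect.Value) []reflect.Value {
		return []reflect.Value{args[1], args[0]}
	})

	// Calling the returned func goes through makeFuncStub, which hands the
	// stack frame and, with the register ABI, the argument registers to
	// callReflect. callReflect unpacks them into []reflect.Value, runs the
	// handler, and copies the results back to the stack and registers.
	fn := swap.Interface().(func(int, int) (int, int))
	a, b := fn(1, 2)
	fmt.Println(a, b) // 2 1
}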
@@ -647,32 +647,81 @@ func (v Value) call(op string, in []Value) []Value {
 // frame is a pointer to the arguments to that closure on the stack.
 // retValid points to a boolean which should be set when the results
 // section of frame is set.
-func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool) {
+//
+// regs contains the argument values passed in registers and will contain
+// the values returned from ctxt.fn in registers.
+func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
+	if callGC {
+		// Call GC upon entry during testing.
+		// Getting our stack scanned here is the biggest hazard, because
+		// our caller (makeFuncStub) could have failed to place the last
+		// pointer to a value in regs' pointer space, in which case it
+		// won't be visible to the GC.
+		runtime.GC()
+	}
 	ftyp := ctxt.ftyp
 	f := ctxt.fn
 
-	// Copy argument frame into Values.
+	_, _, abi := funcLayout(ftyp, nil)
+
+	// Copy arguments into Values.
 	ptr := frame
-	off := uintptr(0)
 	in := make([]Value, 0, int(ftyp.inCount))
-	for _, typ := range ftyp.in() {
-		off += -off & uintptr(typ.align-1)
+	for i, typ := range ftyp.in() {
+		if typ.Size() == 0 {
+			in = append(in, Zero(typ))
+			continue
+		}
 		v := Value{typ, nil, flag(typ.Kind())}
-		if ifaceIndir(typ) {
-			// value cannot be inlined in interface data.
-			// Must make a copy, because f might keep a reference to it,
-			// and we cannot let f keep a reference to the stack frame
-			// after this function returns, not even a read-only reference.
-			v.ptr = unsafe_New(typ)
-			if typ.size > 0 {
-				typedmemmove(typ, v.ptr, add(ptr, off, "typ.size > 0"))
-			}
-			v.flag |= flagIndir
-		} else {
-			v.ptr = *(*unsafe.Pointer)(add(ptr, off, "1-ptr"))
+		steps := abi.call.stepsForValue(i)
+		if st := steps[0]; st.kind == abiStepStack {
+			if ifaceIndir(typ) {
+				// value cannot be inlined in interface data.
+				// Must make a copy, because f might keep a reference to it,
+				// and we cannot let f keep a reference to the stack frame
+				// after this function returns, not even a read-only reference.
+				v.ptr = unsafe_New(typ)
+				if typ.size > 0 {
+					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
+				}
+				v.flag |= flagIndir
+			} else {
+				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
+			}
+		} else {
+			if ifaceIndir(typ) {
+				// All that's left is values passed in registers that we need to
+				// create space for the values.
+				v.flag |= flagIndir
+				v.ptr = unsafe_New(typ)
+				for _, st := range steps {
+					switch st.kind {
+					case abiStepIntReg:
+						offset := add(v.ptr, st.offset, "precomputed value offset")
+						memmove(offset, unsafe.Pointer(&regs.Ints[st.ireg]), st.size)
+					case abiStepPointer:
+						s := add(v.ptr, st.offset, "precomputed value offset")
+						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
+					case abiStepFloatReg:
+						offset := add(v.ptr, st.offset, "precomputed value offset")
+						memmove(offset, unsafe.Pointer(&regs.Floats[st.freg]), st.size)
+					case abiStepStack:
+						panic("register-based return value has stack component")
+					default:
+						panic("unknown ABI part kind")
+					}
+				}
+			} else {
+				// Pointer-valued data gets put directly
+				// into v.ptr.
+				if steps[0].kind != abiStepPointer {
+					print("kind=", steps[0].kind, ", type=", typ.String(), "\n")
+					panic("mismatch between ABI description and types")
+				}
+				v.ptr = regs.Ptrs[steps[0].ireg]
+			}
 		}
 		in = append(in, v)
-		off += typ.size
 	}
 
 	// Call underlying function.
@@ -682,9 +731,8 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool) {
 		panic("reflect: wrong return count from function created by MakeFunc")
 	}
 
-	// Copy results back into argument frame.
+	// Copy results back into argument frame and register space.
 	if numOut > 0 {
-		off += -off & (ptrSize - 1)
 		for i, typ := range ftyp.out() {
 			v := out[i]
 			if v.typ == nil {
@@ -695,31 +743,67 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool) {
 				panic("reflect: function created by MakeFunc using " + funcName(f) +
 					" returned value obtained from unexported field")
 			}
-			off += -off & uintptr(typ.align-1)
 			if typ.size == 0 {
 				continue
 			}
-			addr := add(ptr, off, "typ.size > 0")
 
 			// Convert v to type typ if v is assignable to a variable
 			// of type t in the language spec.
 			// See issue 28761.
-			if typ.Kind() == Interface {
-				// We must clear the destination before calling assignTo,
-				// in case assignTo writes (with memory barriers) to the
-				// target location used as scratch space. See issue 39541.
-				*(*uintptr)(addr) = 0
-				*(*uintptr)(add(addr, ptrSize, "typ.size == 2*ptrSize")) = 0
-			}
-			v = v.assignTo("reflect.MakeFunc", typ, addr)
-
-			// We are writing to stack. No write barrier.
-			if v.flag&flagIndir != 0 {
-				memmove(addr, v.ptr, typ.size)
-			} else {
-				*(*uintptr)(addr) = uintptr(v.ptr)
-			}
-			off += typ.size
+			//
+			//
+			// TODO(mknyszek): In the switch to the register ABI we lost
+			// the scratch space here for the register cases (and
+			// temporarily for all the cases).
+			//
+			// If/when this happens, take note of the following:
+			//
+			// We must clear the destination before calling assignTo,
+			// in case assignTo writes (with memory barriers) to the
+			// target location used as scratch space. See issue 39541.
+			v = v.assignTo("reflect.MakeFunc", typ, nil)
+		stepsLoop:
+			for _, st := range abi.ret.stepsForValue(i) {
+				switch st.kind {
+				case abiStepStack:
+					// Copy values to the "stack."
+					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
+					// Do not use write barriers. The stack space used
+					// for this call is not adequately zeroed, and we
+					// are careful to keep the arguments alive until we
+					// return to makeFuncStub's caller.
+					if v.flag&flagIndir != 0 {
+						memmove(addr, v.ptr, st.size)
+					} else {
+						// This case must be a pointer type.
+						*(*uintptr)(addr) = uintptr(v.ptr)
+					}
+					// There's only one step for a stack-allocated value.
+					break stepsLoop
+				case abiStepIntReg, abiStepPointer:
+					// Copy values to "integer registers."
+					if v.flag&flagIndir != 0 {
+						offset := add(v.ptr, st.offset, "precomputed value offset")
+						memmove(unsafe.Pointer(&regs.Ints[st.ireg]), offset, st.size)
+					} else {
+						// Only populate the Ints space on the return path.
+						// This is safe because out is kept alive until the
+						// end of this function, and the return path through
+						// makeFuncStub has no preemption, so these pointers
+						// are always visible to the GC.
+						regs.Ints[st.ireg] = uintptr(v.ptr)
+					}
+				case abiStepFloatReg:
+					// Copy values to "float registers."
+					if v.flag&flagIndir == 0 {
+						panic("attempted to copy pointer to FP register")
+					}
+					offset := add(v.ptr, st.offset, "precomputed value offset")
+					memmove(unsafe.Pointer(&regs.Floats[st.freg]), offset, st.size)
+				default:
+					panic("unknown ABI part kind")
+				}
+			}
 		}
 	}
 
@@ -820,51 +904,147 @@ func align(x, n uintptr) uintptr {
 // frame is a pointer to the arguments to that closure on the stack.
 // retValid points to a boolean which should be set when the results
 // section of frame is set.
-func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool) {
+//
+// regs contains the argument values passed in registers and will contain
+// the values returned from ctxt.fn in registers.
+func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
 	rcvr := ctxt.rcvr
-	rcvrtype, t, fn := methodReceiver("call", rcvr, ctxt.method)
-	frametype, framePool, abid := funcLayout(t, rcvrtype)
-	argSize, retOffset := abid.stackCallArgsSize, abid.retOffset
+	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)
+
+	// There are two ABIs at play here.
+	//
+	// methodValueCall was invoked with the ABI assuming there was no
+	// receiver ("value ABI") and that's what frame and regs are holding.
+	//
+	// Meanwhile, we need to actually call the method with a receiver, which
+	// has its own ABI ("method ABI"). Everything that follows is a translation
+	// between the two.
+	_, _, valueABI := funcLayout(valueFuncType, nil)
+	valueFrame, valueRegs := frame, regs
+	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)
 
 	// Make a new frame that is one word bigger so we can store the receiver.
 	// This space is used for both arguments and return values.
-	scratch := framePool.Get().(unsafe.Pointer)
+	methodFrame := methodFramePool.Get().(unsafe.Pointer)
+	var methodRegs abi.RegArgs
 
-	// Copy in receiver and rest of args.
-	storeRcvr(rcvr, scratch)
-	// Align the first arg. The alignment can't be larger than ptrSize.
-	argOffset := uintptr(ptrSize)
-	if len(t.in()) > 0 {
-		argOffset = align(argOffset, uintptr(t.in()[0].align))
-	}
-	// Avoid constructing out-of-bounds pointers if there are no args.
-	if argSize-argOffset > 0 {
-		typedmemmovepartial(frametype, add(scratch, argOffset, "argSize > argOffset"), frame, argOffset, argSize-argOffset)
+	// Deal with the receiver. It's guaranteed to only be one word in size.
+	if st := methodABI.call.steps[0]; st.kind == abiStepStack {
+		// Only copy the reciever to the stack if the ABI says so.
+		// Otherwise, it'll be in a register already.
+		storeRcvr(rcvr, methodFrame)
+	} else {
+		// Put the receiver in a register.
+		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints))
 	}
 
-	frameSize := frametype.size
+	// Translate the rest of the arguments.
+	for i, t := range valueFuncType.in() {
+		valueSteps := valueABI.call.stepsForValue(i)
+		methodSteps := methodABI.call.stepsForValue(i + 1)
+
+		// Zero-sized types are trivial: nothing to do.
+		if len(valueSteps) == 0 {
+			if len(methodSteps) != 0 {
+				panic("method ABI and value ABI do not align")
+			}
+			continue
+		}
+
+		// There are three cases to handle in translating each
+		// argument:
+		// 1. Stack -> stack translation.
+		// 2. Registers -> stack translation.
+		// 3. Registers -> registers translation.
+		// The fourth cases can't happen, because a method value
+		// call uses strictly fewer registers than a method call.
+
+		// If the value ABI passes the value on the stack,
+		// then the method ABI does too, because it has strictly
+		// fewer arguments. Simply copy between the two.
+		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
+			mStep := methodSteps[0]
+			if mStep.kind != abiStepStack || vStep.size != mStep.size {
+				panic("method ABI and value ABI do not align")
+			}
+			typedmemmove(t,
+				add(methodFrame, mStep.stkOff, "precomputed stack offset"),
+				add(valueFrame, vStep.stkOff, "precomputed stack offset"))
+			continue
+		}
+		// Handle register -> stack translation.
+		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
+			for _, vStep := range valueSteps {
+				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
+				switch vStep.kind {
+				case abiStepPointer:
+					// Do the pointer copy directly so we get a write barrier.
+					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
+				case abiStepIntReg:
+					memmove(to, unsafe.Pointer(&valueRegs.Ints[vStep.ireg]), vStep.size)
+				case abiStepFloatReg:
+					memmove(to, unsafe.Pointer(&valueRegs.Floats[vStep.freg]), vStep.size)
+				default:
+					panic("unexpected value step")
+				}
+			}
+			continue
+		}
+		// Handle register -> register translation.
+		if len(valueSteps) != len(methodSteps) {
+			// Because it's the same type for the value, and it's assigned
+			// to registers both times, it should always take up the same
+			// number of registers for each ABI.
+			panic("method ABI and value ABI don't align")
+		}
+		for i, vStep := range valueSteps {
+			mStep := methodSteps[i]
+			if mStep.kind != vStep.kind {
+				panic("method ABI and value ABI don't align")
+			}
+			switch vStep.kind {
+			case abiStepPointer:
+				// Copy this too, so we get a write barrier.
+				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
+				fallthrough
+			case abiStepIntReg:
+				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
+			case abiStepFloatReg:
+				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
+			default:
+				panic("unexpected value step")
+			}
+		}
+	}
+
+	methodFrameSize := methodFrameType.size
 	// TODO(mknyszek): Remove this when we no longer have
 	// caller reserved spill space.
-	frameSize = align(frameSize, ptrSize)
-	frameSize += abid.spill
+	methodFrameSize = align(methodFrameSize, ptrSize)
+	methodFrameSize += methodABI.spill
 
 	// Call.
-	// Call copies the arguments from scratch to the stack, calls fn,
-	// and then copies the results back into scratch.
-	//
-	// TODO(mknyszek): Have this actually support the register-based ABI.
-	var regs abi.RegArgs
-	call(frametype, fn, scratch, uint32(frametype.size), uint32(retOffset), uint32(frameSize), &regs)
+	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.size), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)
 
 	// Copy return values.
-	// Ignore any changes to args and just copy return values.
-	// Avoid constructing out-of-bounds pointers if there are no return values.
-	if frametype.size-retOffset > 0 {
-		callerRetOffset := retOffset - argOffset
+	// Ignore any changes to args.
+	//
+	// This is somewhat simpler because both ABIs have an identical
+	// return value ABI (the types are identical). As a result, register
+	// results can simply be copied over. Stack-allocated values are laid
+	// out the same, but are at different offsets from the start of the frame
+	// because the arguments may be laid out differently.
+	if valueRegs != nil {
+		*valueRegs = methodRegs
+	}
+	if retSize := methodFrameType.size - methodABI.retOffset; retSize > 0 {
+		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
+		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
 		// This copies to the stack. Write barriers are not needed.
-		memmove(add(frame, callerRetOffset, "frametype.size > retOffset"),
-			add(scratch, retOffset, "frametype.size > retOffset"),
-			frametype.size-retOffset)
+		memmove(valueRet, methodRet, retSize)
 	}
 
 	// Tell the runtime it can now depend on the return values
@@ -874,8 +1054,8 @@ func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool) {
 	// Clear the scratch space and put it back in the pool.
 	// This must happen after the statement above, so that the return
 	// values will always be scanned by someone.
-	typedmemclr(frametype, scratch)
-	framePool.Put(scratch)
+	typedmemclr(methodFrameType, methodFrame)
+	methodFramePool.Put(methodFrame)
 
 	// See the comment in callReflect.
 	runtime.KeepAlive(ctxt)
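For the other half of the change, a similar sketch (again not part of the commit; Point and Add are illustrative) of the method value path, which enters methodValueCall and then the callMethod function rewritten above:

package main

import (
	"fmt"
	"reflect"
)

type Point struct{ X, Y int }

func (p Point) Add(dx, dy int) Point { return Point{p.X + dx, p.Y + dy} }

func main() {
	v := reflect.ValueOf(Point{1, 2})

	// v.Method(0) is a method value: a func with the receiver already bound.
	// Calling it lands in methodValueCall, and callMethod translates the
	// receiver-less "value ABI" frame and registers into the method's own
	// ABI (inserting the receiver) before making the real call.
	add := v.Method(0).Interface().(func(int, int) Point)
	fmt.Println(add(3, 4)) // {4 6}
}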