diff --git a/src/runtime/_mkmalloc/mkmalloc.go b/src/runtime/_mkmalloc/mkmalloc.go
index 986b0aa9f85..1f040c88610 100644
--- a/src/runtime/_mkmalloc/mkmalloc.go
+++ b/src/runtime/_mkmalloc/mkmalloc.go
@@ -254,7 +254,8 @@ func inline(config generatorConfig) []byte {
 	}
 
 	// Write out the package and import declarations.
-	out.WriteString("// Code generated by mkmalloc.go; DO NOT EDIT.\n\n")
+	out.WriteString("// Code generated by mkmalloc.go; DO NOT EDIT.\n")
+	out.WriteString("// See overview in malloc_stubs.go.\n\n")
 	out.WriteString("package " + f.Name.Name + "\n\n")
 	for _, importDecl := range importDecls {
 		out.Write(mustFormatNode(fset, importDecl))
diff --git a/src/runtime/malloc.go b/src/runtime/malloc.go
index 13f5fc30814..d49dacaf686 100644
--- a/src/runtime/malloc.go
+++ b/src/runtime/malloc.go
@@ -1094,6 +1094,8 @@ const sizeSpecializedMallocEnabled = goexperiment.SizeSpecializedMalloc && GOOS
 // implementation and the corresponding allocation-related changes: the experiment must be
 // enabled, and none of the memory sanitizers should be enabled. We allow the race detector,
 // in contrast to sizeSpecializedMallocEnabled.
+// TODO(thepudds): it would be nice to check Valgrind integration, though there are some hints
+// there might not be any canned tests in tree for Go's integration with Valgrind.
 const runtimeFreegcEnabled = goexperiment.RuntimeFreegc && !asanenabled && !msanenabled && !valgrindenabled
 
 // Allocate an object of size bytes.
@@ -1966,10 +1968,15 @@ const (
 // or roughly when the liveness analysis of the compiler
 // would otherwise have determined ptr's object is reclaimable by the GC.
 func freegc(ptr unsafe.Pointer, size uintptr, noscan bool) bool {
-	if !runtimeFreegcEnabled || sizeSpecializedMallocEnabled || !reusableSize(size) {
-		// TODO(thepudds): temporarily disable freegc with SizeSpecializedMalloc until we finish integrating.
+	if !runtimeFreegcEnabled || !reusableSize(size) {
 		return false
 	}
+	if sizeSpecializedMallocEnabled && !noscan {
+		// TODO(thepudds): temporarily disable freegc with SizeSpecializedMalloc for pointer types
+		// until we finish integrating.
+		return false
+	}
+
 	if ptr == nil {
 		throw("freegc nil")
 	}
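The behavioral core of the malloc.go change is the reworked gate at the top of freegc: noscan (pointer-free) objects may now take the freegc path even under GOEXPERIMENT=sizespecializedmalloc, while pointer-ful objects still bail out until integration finishes. A minimal standalone sketch of that gating, with stand-in constants for the real build-time configuration (the 512-byte cap and the reusableSize body here are hypothetical, not the runtime's actual values):

```go
package main

import "fmt"

// Stand-ins for the build-time configuration; the real values come from
// goexperiment flags and sanitizer state, and reusableSize is a runtime
// internal whose exact threshold is not shown in this diff.
const (
	runtimeFreegcEnabled         = true
	sizeSpecializedMallocEnabled = true
	maxReusableSize              = 512 // hypothetical free-list size cap
)

func reusableSize(size uintptr) bool { return size > 0 && size <= maxReusableSize }

// canFreegc models the gate at the top of freegc after this change.
func canFreegc(size uintptr, noscan bool) bool {
	if !runtimeFreegcEnabled || !reusableSize(size) {
		return false
	}
	if sizeSpecializedMallocEnabled && !noscan {
		// Pointer-ful (scan) objects are temporarily excluded, matching
		// the TODO in the patch; noscan objects fall through.
		return false
	}
	return true
}

func main() {
	fmt.Println(canFreegc(64, true))   // true: small noscan object is eligible
	fmt.Println(canFreegc(64, false))  // false: pointer type waits for a later CL
	fmt.Println(canFreegc(4096, true)) // false: too large for the reuse lists
}
```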
diff --git a/src/runtime/malloc_generated.go b/src/runtime/malloc_generated.go
index 2215dbaddb2..5abb61257a4 100644
--- a/src/runtime/malloc_generated.go
+++ b/src/runtime/malloc_generated.go
@@ -1,4 +1,5 @@
 // Code generated by mkmalloc.go; DO NOT EDIT.
+// See overview in malloc_stubs.go.
 
 package runtime
 
@@ -6400,6 +6401,32 @@ func mallocgcSmallNoScanSC2(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6497,6 +6524,32 @@ func mallocgcSmallNoScanSC3(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6594,6 +6647,32 @@ func mallocgcSmallNoScanSC4(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6691,6 +6770,32 @@ func mallocgcSmallNoScanSC5(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6788,6 +6893,32 @@ func mallocgcSmallNoScanSC6(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6885,6 +7016,32 @@ func mallocgcSmallNoScanSC7(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -6982,6 +7139,32 @@ func mallocgcSmallNoScanSC8(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7079,6 +7262,32 @@ func mallocgcSmallNoScanSC9(size uintptr, typ *_type, needzero bool) unsafe.Poin
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7176,6 +7385,32 @@ func mallocgcSmallNoScanSC10(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7273,6 +7508,32 @@ func mallocgcSmallNoScanSC11(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7370,6 +7631,32 @@ func mallocgcSmallNoScanSC12(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7467,6 +7754,32 @@ func mallocgcSmallNoScanSC13(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7564,6 +7877,32 @@ func mallocgcSmallNoScanSC14(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7661,6 +8000,32 @@ func mallocgcSmallNoScanSC15(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7758,6 +8123,32 @@ func mallocgcSmallNoScanSC16(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7855,6 +8246,32 @@ func mallocgcSmallNoScanSC17(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -7952,6 +8369,32 @@ func mallocgcSmallNoScanSC18(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8049,6 +8492,32 @@ func mallocgcSmallNoScanSC19(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8146,6 +8615,32 @@ func mallocgcSmallNoScanSC20(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8243,6 +8738,32 @@ func mallocgcSmallNoScanSC21(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8340,6 +8861,32 @@ func mallocgcSmallNoScanSC22(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8437,6 +8984,32 @@ func mallocgcSmallNoScanSC23(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8534,6 +9107,32 @@ func mallocgcSmallNoScanSC24(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8631,6 +9230,32 @@ func mallocgcSmallNoScanSC25(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
@@ -8728,6 +9353,32 @@ func mallocgcSmallNoScanSC26(size uintptr, typ *_type, needzero bool) unsafe.Poi
 	const spc = spanClass(sizeclass<<1) | spanClass(1)
 	span := c.alloc[spc]
 
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+		x := v
+		{
+
+			if valgrindenabled {
+				valgrindMalloc(x, size)
+			}
+
+			if gcBlackenEnabled != 0 && elemsize != 0 {
+				if assistG := getg().m.curg; assistG != nil {
+					assistG.gcAssistBytes -= int64(elemsize - size)
+				}
+			}
+
+			if debug.malloc {
+				postMallocgcDebug(x, elemsize, typ)
+			}
+			return x
+		}
+
+	}
+
 	var nextFreeFastResult gclinkptr
 	if span.allocCache != 0 {
 		theBit := sys.TrailingZeros64(span.allocCache)
diff --git a/src/runtime/malloc_stubs.go b/src/runtime/malloc_stubs.go
index 224746f3d41..e9752956b82 100644
--- a/src/runtime/malloc_stubs.go
+++ b/src/runtime/malloc_stubs.go
@@ -7,6 +7,8 @@
 // to produce a full mallocgc function that's specialized for a span class
 // or specific size in the case of the tiny allocator.
 //
+// To generate the specialized mallocgc functions, do 'go run .' inside runtime/_mkmalloc.
+//
 // To assemble a mallocgc function, the mallocStub function is cloned, and the call to
 // inlinedMalloc is replaced with the inlined body of smallScanNoHeaderStub,
 // smallNoScanStub or tinyStub, depending on the parameters being specialized.
@@ -71,7 +73,8 @@ func mallocStub(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
 		}
 	}
 
-	// Assist the GC if needed.
+	// Assist the GC if needed. (On the reuse path, we currently compensate for this;
+	// changes here might require changes there.)
 	if gcBlackenEnabled != 0 {
 		deductAssistCredit(size)
 	}
@@ -242,6 +245,23 @@ func smallNoScanStub(size uintptr, typ *_type, needzero bool) (unsafe.Pointer, u
 	c := getMCache(mp)
 	const spc = spanClass(sizeclass<<1) | spanClass(noscanint_)
 	span := c.alloc[spc]
+
+	// First, check for a reusable object.
+	if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+		// We have a reusable object, use it.
+		v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+		mp.mallocing = 0
+		releasem(mp)
+
+		// TODO(thepudds): note that the generated return path is essentially duplicated
+		// by the generator. For example, see the two postMallocgcDebug calls and
+		// related duplicated code on the return path currently in the generated
+		// mallocgcSmallNoScanSC2 function. One set of those correspond to this
+		// return here. We might be able to de-duplicate the generated return path
+		// by updating the generator, perhaps by jumping to a shared return or similar.
+		return v, elemsize
+	}
+
 	v := nextFreeFastStub(span)
 	if v == 0 {
 		v, span, checkGCTrigger = c.nextFree(spc)
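The ordering that smallNoScanStub establishes — consult the per-span-class reusable list before the usual nextFree fast path — is exactly what the generator then clones into each mallocgcSmallNoScanSC* function above. A toy sketch of that shape under stated assumptions (hypothetical types; the real code operates on mcache/mspan and returns unsafe.Pointer plus elemsize):

```go
package main

import "fmt"

// Toy stand-ins: a bump-allocating span and a cache with a reuse list.
type span struct{ next int }

type cache struct {
	reusable []int // slots returned by an explicit free, reused LIFO
	span     span
}

// alloc mirrors the patched stub's ordering: the reuse check comes first
// (like c.hasReusableNoscan(spc) followed by mallocgcSmallNoscanReuse),
// with an early return; otherwise fall through to the nextFree fast path.
func (c *cache) alloc() int {
	if n := len(c.reusable); n > 0 {
		v := c.reusable[n-1]
		c.reusable = c.reusable[:n-1]
		return v // corresponds to "return v, elemsize" in the stub
	}
	v := c.span.next // like nextFreeFastStub(span)
	c.span.next++
	return v
}

func (c *cache) free(v int) { c.reusable = append(c.reusable, v) }

func main() {
	var c cache
	a := c.alloc()
	c.free(a)
	b := c.alloc()
	fmt.Println(a == b) // true: the freed slot was handed back out
}
```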
diff --git a/src/runtime/malloc_test.go b/src/runtime/malloc_test.go
index 10c20e6c232..97cf0eed54c 100644
--- a/src/runtime/malloc_test.go
+++ b/src/runtime/malloc_test.go
@@ -349,8 +349,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
 	t.Run("allocs-with-free", func(t *testing.T) {
 		// Same allocations, but now using explicit free so that
 		// no allocs get reported. (Again, not the desired long-term behavior).
-		if SizeSpecializedMallocEnabled {
-			t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc")
+		if SizeSpecializedMallocEnabled && !noscan {
+			// TODO(thepudds): skip at this point in the stack for size-specialized malloc
+			// with !noscan. Additional integration with sizespecializedmalloc is in a later CL.
+			t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
 		}
 		if !RuntimeFreegcEnabled {
 			t.Skip("skipping alloc tests with runtime.freegc disabled")
@@ -370,8 +372,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
 		// Multiple allocations outstanding before explicitly freeing,
 		// but still within the limit of our smallest free list size
 		// so that no allocs are reported. (Again, not long-term behavior).
-		if SizeSpecializedMallocEnabled {
-			t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc")
+		if SizeSpecializedMallocEnabled && !noscan {
+			// TODO(thepudds): skip at this point in the stack for size-specialized malloc
+			// with !noscan. Additional integration with sizespecializedmalloc is in a later CL.
+			t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
 		}
 		if !RuntimeFreegcEnabled {
 			t.Skip("skipping alloc tests with runtime.freegc disabled")
@@ -514,10 +518,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
 		// See https://go.dev/cl/717520 for some additional discussion,
 		// including how we can deliberately cause the test to fail currently
 		// if we purposefully introduce some assist credit bugs.
-		if SizeSpecializedMallocEnabled {
+		if SizeSpecializedMallocEnabled && !noscan {
 			// TODO(thepudds): skip this test at this point in the stack; later CL has
 			// integration with sizespecializedmalloc.
-			t.Skip("temporarily skip assist credit test for GOEXPERIMENT=sizespecializedmalloc")
+			t.Skip("temporarily skip assist credit tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
 		}
 		if !RuntimeFreegcEnabled {
 			t.Skip("skipping assist credit test with runtime.freegc disabled")
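All three test hunks converge on the same pair of guards before a subtest body. A sketch of that pattern as a standalone helper in a hypothetical freegc_test.go (the variable names mirror the exported runtime test hooks, but this file and helper are illustrative, not part of the CL):

```go
package freegc_test

import "testing"

// Stand-ins for runtime.SizeSpecializedMallocEnabled and
// runtime.RuntimeFreegcEnabled, which the real tests read via export hooks.
var (
	sizeSpecializedMallocEnabled = true
	runtimeFreegcEnabled         = true
)

// skipIfUnsupported centralizes the two guards repeated in the hunks above:
// pointer-ful (scan) element types are skipped under sizespecializedmalloc
// until a later CL, and everything is skipped if runtime.freegc is disabled.
func skipIfUnsupported(t *testing.T, noscan bool) {
	t.Helper()
	if sizeSpecializedMallocEnabled && !noscan {
		t.Skip("temporarily skipping for GOEXPERIMENT=sizespecializedmalloc with pointer types")
	}
	if !runtimeFreegcEnabled {
		t.Skip("skipping with runtime.freegc disabled")
	}
}

func TestAllocsWithFree(t *testing.T) {
	skipIfUnsupported(t, false) // element type containing pointers
	// ... alloc-count assertions would go here ...
}
```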