From 70462f90ec6cf4c0a98f9c556f71b9c99aea94c0 Mon Sep 17 00:00:00 2001
From: Austin Clements
Date: Tue, 21 Jul 2015 11:31:44 -0400
Subject: [PATCH] runtime: simplify mSpan_Sweep

This is a cleanup following cc8f544, which was a minimal change to fix
issue #11617.

This consolidates the two places in mSpan_Sweep that update sweepgen.
Previously this was necessary because sweepgen must be updated before
freeing the span, but we freed large spans early. Now we free large
spans later, so there's no need to duplicate the sweepgen update.

This also means large spans can take advantage of the sweepgen sanity
checking performed for other spans.

Change-Id: I23b79dbd9ec81d08575cd307cdc0fa6b20831768
Reviewed-on: https://go-review.googlesource.com/12451
Reviewed-by: Rick Hudson
---
 src/runtime/mgcsweep.go | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/src/runtime/mgcsweep.go b/src/runtime/mgcsweep.go
index eaa446323b..d9eb58fdf7 100644
--- a/src/runtime/mgcsweep.go
+++ b/src/runtime/mgcsweep.go
@@ -235,9 +235,6 @@ func mSpan_Sweep(s *mspan, preserve bool) bool {
 			heapBitsForSpan(p).initSpan(s.layout())
 			s.needzero = 1

-			// important to set sweepgen before returning it to heap
-			atomicstore(&s.sweepgen, sweepgen)
-
 			// Free the span after heapBitsSweepSpan
 			// returns, since it's not done with the span.
 			freeToHeap = true
@@ -264,10 +261,7 @@ func mSpan_Sweep(s *mspan, preserve bool) bool {
 	// But we need to set it before we make the span available for allocation
 	// (return it to heap or mcentral), because allocation code assumes that a
 	// span is already swept if available for allocation.
-	//
-	// TODO(austin): Clean this up by consolidating atomicstore in
-	// large span path above with this.
-	if !freeToHeap && nfree == 0 {
+	if freeToHeap || nfree == 0 {
 		// The span must be in our exclusive ownership until we update sweepgen,
 		// check for potential races.
 		if s.state != mSpanInUse || s.sweepgen != sweepgen-1 {
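
Editor's note: the invariant the patch relies on is that sweepgen must be
published before the span becomes available for allocation, and that by
deferring the large-span free until after that point, one atomic store
(and one sanity check) covers both the free-to-heap path and the
nothing-freed path. The standalone Go sketch below illustrates that
post-patch structure only; it is not runtime code, and the names span,
sweepOne, and freeSpanToHeap are hypothetical stand-ins for the runtime's
mspan, mSpan_Sweep, and heap-free logic.

package main

import (
	"fmt"
	"sync/atomic"
)

const (
	spanFree  = 0
	spanInUse = 1
)

// span is a toy stand-in for the runtime's mspan.
type span struct {
	state    uint32
	sweepgen uint32 // parity with the global sweep generation
	large    bool
}

// sweepOne sweeps s for generation sweepgen, where nfree is the number of
// objects freed by this sweep. It returns whether the span was handed back
// to the (simulated) heap.
func sweepOne(s *span, sweepgen uint32, nfree int) bool {
	freeToHeap := false
	if s.large && nfree > 0 {
		// Large spans are freed whole, but the actual free is deferred
		// until after sweepgen is published, mirroring the patch.
		freeToHeap = true
	}

	// Publish sweepgen before making the span available for allocation.
	// The consolidated condition means the large-span path now goes
	// through the same ownership/sweepgen sanity check as small spans.
	if freeToHeap || nfree == 0 {
		if s.state != spanInUse || s.sweepgen != sweepgen-1 {
			panic("sweepOne: bad span state or sweepgen (racing sweep?)")
		}
		atomic.StoreUint32(&s.sweepgen, sweepgen)
	}

	if freeToHeap {
		freeSpanToHeap(s)
	}
	return freeToHeap
}

// freeSpanToHeap is a placeholder for returning the span's memory to the heap.
func freeSpanToHeap(s *span) {
	s.state = spanFree
}

func main() {
	s := &span{state: spanInUse, sweepgen: 1, large: true}
	returned := sweepOne(s, 2, 1)
	fmt.Printf("returned=%v sweepgen=%d state=%d\n", returned, s.sweepgen, s.state)
}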