Mirror of https://github.com/golang/go, synced 2024-11-23 05:30:07 -07:00
runtime: fix and speedup malloc stats
Currently per-sizeclass stats are lost for destroyed MCaches; this patch fixes that.
Also, only update mstats.heap_alloc on heap operations, because that's the only stat
that needs to be promptly updated. Everything else needs to be up-to-date only in
ReadMemStats().

R=golang-dev, remyoudompheng, dave, iant
CC=golang-dev
https://golang.org/cl/9207047
This commit is contained in:
parent 4c2df76b7d
commit c075d82cca
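Before the hunks, a stand-alone C sketch of the split this change settles on: runtime·purgecachedstats becomes the one place where a cache's per-sizeclass counters are folded into the global mstats (so they survive the cache being destroyed), while heap operations keep only mstats.heap_alloc promptly up to date. This is an illustration, not the runtime's code: the names heap_op_stats, purge_cached_stats, Cache, and Stats are invented here, and NumSizeClasses = 61 is an assumption about this era of the runtime.

/*
 * Stand-alone model of the stats-flushing pattern in this CL.
 * Each cache keeps cheap local counters; heap operations flush only
 * heap_alloc, and a full purge folds everything else into the global
 * stats before the cache is read or destroyed.
 */
#include <stdio.h>
#include <string.h>

enum { NumSizeClasses = 61 };   /* assumed value for this era of the runtime */

struct Cache {
	long local_cachealloc;          /* flushed on every heap op */
	long nmalloc[NumSizeClasses];   /* flushed only by purge    */
	long nfree[NumSizeClasses];
};

struct Stats {
	long heap_alloc;                /* must always be prompt */
	long by_size_nmalloc[NumSizeClasses];
	long by_size_nfree[NumSizeClasses];
};

static struct Stats stats;

/* Cheap per-heap-operation update (what the heap alloc/free paths now do inline). */
static void heap_op_stats(struct Cache *c)
{
	stats.heap_alloc += c->local_cachealloc;
	c->local_cachealloc = 0;
}

/* Full flush (the purgecachedstats role): run before reading the stats and,
 * crucially, before a cache is destroyed, so per-sizeclass counts are never lost. */
static void purge_cached_stats(struct Cache *c)
{
	int i;

	heap_op_stats(c);
	for (i = 0; i < NumSizeClasses; i++) {
		stats.by_size_nmalloc[i] += c->nmalloc[i];
		c->nmalloc[i] = 0;
		stats.by_size_nfree[i] += c->nfree[i];
		c->nfree[i] = 0;
	}
}

int main(void)
{
	struct Cache c;

	memset(&c, 0, sizeof c);
	c.local_cachealloc = 4096;   /* pretend 4 KB were allocated from the cache... */
	c.nmalloc[3] = 7;            /* ...as 7 objects of size class 3 */

	heap_op_stats(&c);           /* a heap op keeps heap_alloc prompt              */
	purge_cached_stats(&c);      /* a purge saves the rest before the cache dies   */

	printf("heap_alloc=%ld by_size[3].nmalloc=%ld\n",
	       stats.heap_alloc, stats.by_size_nmalloc[3]);
	return 0;
}

With this split, cachestats (below) no longer needs its own by_size loop, and MHeap_Alloc/MHeap_Free (below) avoid a full purge on every heap operation.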
@@ -278,6 +278,8 @@ runtime·freemcache(MCache *c)
 void
 runtime·purgecachedstats(MCache *c)
 {
+	int32 i;
+
 	// Protected by either heap or GC lock.
 	mstats.heap_alloc += c->local_cachealloc;
 	c->local_cachealloc = 0;
@@ -293,6 +295,12 @@ runtime·purgecachedstats(MCache *c)
 	c->local_alloc= 0;
 	mstats.total_alloc += c->local_total_alloc;
 	c->local_total_alloc= 0;
+	for(i=0; i<nelem(c->local_by_size); i++) {
+		mstats.by_size[i].nmalloc += c->local_by_size[i].nmalloc;
+		c->local_by_size[i].nmalloc = 0;
+		mstats.by_size[i].nfree += c->local_by_size[i].nfree;
+		c->local_by_size[i].nfree = 0;
+	}
 }
 
 uintptr runtime·sizeof_C_MStats = sizeof(MStats);
@@ -285,15 +285,18 @@ struct MCacheList
 
 struct MCache
 {
-	MCacheList list[NumSizeClasses];
+	// The following members are accessed on every malloc,
+	// so they are grouped here for better caching.
+	int32 next_sample;	// trigger heap sample after allocating this many bytes
 	intptr local_cachealloc;	// bytes allocated (or freed) from cache since last lock of heap
+	// The rest is not accessed on every malloc.
+	MCacheList list[NumSizeClasses];
 	intptr local_objects;	// objects allocated (or freed) from cache since last lock of heap
 	intptr local_alloc;	// bytes allocated (or freed) since last lock of heap
 	uintptr local_total_alloc;	// bytes allocated (even if freed) since last lock of heap
 	uintptr local_nmalloc;	// number of mallocs since last lock of heap
 	uintptr local_nfree;	// number of frees since last lock of heap
 	uintptr local_nlookup;	// number of pointer lookups since last lock of heap
-	int32 next_sample;	// trigger heap sample after allocating this many bytes
 	// Statistics about allocation size classes since last lock of heap
 	struct {
 		uintptr nmalloc;
@@ -1863,12 +1863,6 @@ cachestats(GCStats *stats)
 		if(c==nil)
 			continue;
 		runtime·purgecachedstats(c);
-		for(i=0; i<nelem(c->local_by_size); i++) {
-			mstats.by_size[i].nmalloc += c->local_by_size[i].nmalloc;
-			c->local_by_size[i].nmalloc = 0;
-			mstats.by_size[i].nfree += c->local_by_size[i].nfree;
-			c->local_by_size[i].nfree = 0;
-		}
 	}
 	mstats.stacks_inuse = stacks_inuse;
 }
@@ -73,7 +73,8 @@ runtime·MHeap_Alloc(MHeap *h, uintptr npage, int32 sizeclass, int32 acct, int32
 	MSpan *s;
 
 	runtime·lock(h);
-	runtime·purgecachedstats(m->mcache);
+	mstats.heap_alloc += m->mcache->local_cachealloc;
+	m->mcache->local_cachealloc = 0;
 	s = MHeap_AllocLocked(h, npage, sizeclass);
 	if(s != nil) {
 		mstats.heap_inuse += npage<<PageShift;
@@ -296,7 +297,8 @@ void
 runtime·MHeap_Free(MHeap *h, MSpan *s, int32 acct)
 {
 	runtime·lock(h);
-	runtime·purgecachedstats(m->mcache);
+	mstats.heap_alloc += m->mcache->local_cachealloc;
+	m->mcache->local_cachealloc = 0;
 	mstats.heap_inuse -= s->npages<<PageShift;
 	if(acct) {
 		mstats.heap_alloc -= s->npages<<PageShift;