
runtime: fix and speedup malloc stats

Currently, per-sizeclass stats are lost when an MCache is destroyed. This patch fixes that.
Also, update mstats.heap_alloc only on heap operations, because that is the only
stat that needs to be promptly updated; everything else only needs to be up to date in ReadMemStats().
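A minimal sketch of the fix, in plain C rather than the runtime's C dialect (Cache, freecache, and the locking are simplified stand-ins, not the runtime's API): per-sizeclass counters accumulate in a per-thread cache and are merged into the global stats under a lock, so destroying a cache without a final flush would silently drop its counts.

#include <pthread.h>
#include <stdio.h>

enum { NumSizeClasses = 4 };

static pthread_mutex_t statslock = PTHREAD_MUTEX_INITIALIZER;
static struct { unsigned long nmalloc, nfree; } by_size[NumSizeClasses];

typedef struct Cache {
	struct { unsigned long nmalloc, nfree; } local_by_size[NumSizeClasses];
} Cache;

/* Merge a cache's local counters into the global stats and reset them. */
static void purgecachedstats(Cache *c) {
	int i;

	pthread_mutex_lock(&statslock);
	for(i = 0; i < NumSizeClasses; i++) {
		by_size[i].nmalloc += c->local_by_size[i].nmalloc;
		c->local_by_size[i].nmalloc = 0;
		by_size[i].nfree += c->local_by_size[i].nfree;
		c->local_by_size[i].nfree = 0;
	}
	pthread_mutex_unlock(&statslock);
}

/* A cache must be flushed before it goes away, or its counts are lost. */
static void freecache(Cache *c) {
	purgecachedstats(c);
}

int main(void) {
	Cache c = {{{0}}};

	c.local_by_size[1].nmalloc = 3;	/* pretend three size-class-1 allocations */
	freecache(&c);
	printf("class 1 nmalloc = %lu\n", by_size[1].nmalloc);	/* 3, not lost */
	return 0;
}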

R=golang-dev, remyoudompheng, dave, iant
CC=golang-dev
https://golang.org/cl/9207047
Dmitriy Vyukov, 2013-05-22 22:22:57 +04:00
parent 4c2df76b7d
commit c075d82cca
4 changed files with 17 additions and 10 deletions

src/pkg/runtime/mcache.c

@@ -278,6 +278,8 @@ runtime·freemcache(MCache *c)
 void
 runtime·purgecachedstats(MCache *c)
 {
+	int32 i;
+
 	// Protected by either heap or GC lock.
 	mstats.heap_alloc += c->local_cachealloc;
 	c->local_cachealloc = 0;
@@ -293,6 +295,12 @@ runtime·purgecachedstats(MCache *c)
 	c->local_alloc = 0;
 	mstats.total_alloc += c->local_total_alloc;
 	c->local_total_alloc = 0;
+	for(i=0; i<nelem(c->local_by_size); i++) {
+		mstats.by_size[i].nmalloc += c->local_by_size[i].nmalloc;
+		c->local_by_size[i].nmalloc = 0;
+		mstats.by_size[i].nfree += c->local_by_size[i].nfree;
+		c->local_by_size[i].nfree = 0;
+	}
 }
 
 uintptr runtime·sizeof_C_MStats = sizeof(MStats);

src/pkg/runtime/malloc.h

@@ -285,15 +285,18 @@ struct MCacheList
 
 struct MCache
 {
-	MCacheList list[NumSizeClasses];
+	// The following members are accessed on every malloc,
+	// so they are grouped here for better caching.
+	int32 next_sample;	// trigger heap sample after allocating this many bytes
 	intptr local_cachealloc;	// bytes allocated (or freed) from cache since last lock of heap
+	// The rest is not accessed on every malloc.
+	MCacheList list[NumSizeClasses];
 	intptr local_objects;	// objects allocated (or freed) from cache since last lock of heap
 	intptr local_alloc;	// bytes allocated (or freed) since last lock of heap
 	uintptr local_total_alloc;	// bytes allocated (even if freed) since last lock of heap
 	uintptr local_nmalloc;	// number of mallocs since last lock of heap
 	uintptr local_nfree;	// number of frees since last lock of heap
 	uintptr local_nlookup;	// number of pointer lookups since last lock of heap
-	int32 next_sample;	// trigger heap sample after allocating this many bytes
 	// Statistics about allocation size classes since last lock of heap
 	struct {
 		uintptr nmalloc;

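The reordering above is the cache-locality half of the speedup: the two fields touched on every allocation (next_sample and local_cachealloc) move to the front of MCache so they share a cache line, while bookkeeping that is only touched on a flush sits behind them. A standalone sketch of the same idea in plain C, with hypothetical field names rather than the runtime's:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

enum { NumClasses = 61 };

/* Hypothetical cache struct showing the hot/cold split. */
struct Cache {
	/* Hot: accessed on every allocation, kept together at offset 0. */
	int32_t   next_sample;		/* countdown to the next heap sample */
	intptr_t  local_cachealloc;	/* net bytes since the last flush */
	/* Cold: only touched on a free-list refill or a stats flush. */
	void     *freelist[NumClasses];
	uintptr_t local_nmalloc;
	uintptr_t local_nfree;
};

int main(void) {
	/* Both hot fields land well inside one cache line (typically 64 bytes). */
	printf("next_sample @ %zu, local_cachealloc @ %zu\n",
	       offsetof(struct Cache, next_sample),
	       offsetof(struct Cache, local_cachealloc));
	return 0;
}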
src/pkg/runtime/mgc0.c

@@ -1863,12 +1863,6 @@ cachestats(GCStats *stats)
 		if(c==nil)
 			continue;
 		runtime·purgecachedstats(c);
-		for(i=0; i<nelem(c->local_by_size); i++) {
-			mstats.by_size[i].nmalloc += c->local_by_size[i].nmalloc;
-			c->local_by_size[i].nmalloc = 0;
-			mstats.by_size[i].nfree += c->local_by_size[i].nfree;
-			c->local_by_size[i].nfree = 0;
-		}
 	}
 	mstats.stacks_inuse = stacks_inuse;
 }

src/pkg/runtime/mheap.c

@@ -73,7 +73,8 @@ runtime·MHeap_Alloc(MHeap *h, uintptr npage, int32 sizeclass, int32 acct, int32 zeroed)
 	MSpan *s;
 
 	runtime·lock(h);
-	runtime·purgecachedstats(m->mcache);
+	mstats.heap_alloc += m->mcache->local_cachealloc;
+	m->mcache->local_cachealloc = 0;
 	s = MHeap_AllocLocked(h, npage, sizeclass);
 	if(s != nil) {
 		mstats.heap_inuse += npage<<PageShift;
@@ -296,7 +297,8 @@ void
 runtime·MHeap_Free(MHeap *h, MSpan *s, int32 acct)
 {
 	runtime·lock(h);
-	runtime·purgecachedstats(m->mcache);
+	mstats.heap_alloc += m->mcache->local_cachealloc;
+	m->mcache->local_cachealloc = 0;
 	mstats.heap_inuse -= s->npages<<PageShift;
 	if(acct) {
 		mstats.heap_alloc -= s->npages<<PageShift;
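Both heap entry points now fold only local_cachealloc into mstats.heap_alloc while the heap lock is held, instead of paying for a full purgecachedstats. A sketch of that split, again in plain C with assumed names: one stat is kept fresh on every heap operation because heap-growth decisions depend on it, while the rest only have to be correct by the time the stats are read.

#include <stdio.h>

static struct {
	long heap_alloc;	/* must stay fresh on every heap operation */
	long total_alloc;	/* only has to be fresh when stats are read */
} mstats;

typedef struct Cache {
	long local_cachealloc;	/* net bytes since the last heap lock */
	long local_total_alloc;	/* gross bytes since the last full flush */
} Cache;

/* Heap operation: fold in just the one stat that must stay current. */
static void heap_op(Cache *c) {
	mstats.heap_alloc += c->local_cachealloc;
	c->local_cachealloc = 0;
}

/* Stats read: now everything has to be flushed. */
static void read_mem_stats(Cache *c) {
	heap_op(c);
	mstats.total_alloc += c->local_total_alloc;
	c->local_total_alloc = 0;
}

int main(void) {
	Cache c = {128, 256};

	heap_op(&c);		/* heap_alloc is already current here... */
	read_mem_stats(&c);	/* ...total_alloc catches up only on a read */
	printf("heap_alloc=%ld total_alloc=%ld\n", mstats.heap_alloc, mstats.total_alloc);
	return 0;
}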