
runtime: rename FlagNoPointers to FlagNoScan.

Better represents the use of the flag, especially for objects which
actually do have pointers but we don't want the GC to scan them.

R=golang-dev, cshapiro
CC=golang-dev
https://golang.org/cl/13181045
Author: Keith Randall, 2013-08-23 17:28:47 -07:00
Parent: b15a64e35d
Commit: d0dd420a24
5 changed files with 25 additions and 23 deletions
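For orientation, here is a minimal sketch of the allocation pattern this rename is about. The helper below is hypothetical and not part of this CL; the flag combination mirrors the runfinq change further down. A block may well contain pointers yet be allocated FlagNoScan, as long as everything it points to stays reachable through some other root.

// Hypothetical caller, for illustration only (not from this CL).
// The buffer holds pointers, but the pointees are also reachable from
// another root, so the collector may safely skip scanning this block.
// FlagNoScan means "don't scan", not "contains no pointers".
static void*
scratchalloc(uintptr nbytes)
{
	return runtime·mallocgc(nbytes, 0, FlagNoScan|FlagNoInvokeGC);
}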

src/pkg/runtime/malloc.goc

@@ -98,12 +98,14 @@ runtime·mallocgc(uintptr size, uintptr typ, uint32 flag)
 	}
 	if(!(flag & FlagNoGC))
-		runtime·markallocated(v, size, (flag&FlagNoPointers) != 0);
+		runtime·markallocated(v, size, (flag&FlagNoScan) != 0);
 	if(DebugTypeAtBlockEnd)
 		*(uintptr*)((uintptr)v+size-sizeof(uintptr)) = typ;
-	if(UseSpanType && !(flag & FlagNoPointers) && typ != 0) {
+	// TODO: save type even if FlagNoScan? Potentially expensive but might help
+	// heap profiling/tracing.
+	if(UseSpanType && !(flag & FlagNoScan) && typ != 0) {
 		uintptr *buf, i;
 		buf = m->settype_buf;
@@ -114,7 +116,7 @@ runtime·mallocgc(uintptr size, uintptr typ, uint32 flag)
 	}
 	m->mallocing = 0;
-	if(UseSpanType && !(flag & FlagNoPointers) && typ != 0 && m->settype_bufsize == nelem(m->settype_buf))
+	if(UseSpanType && !(flag & FlagNoScan) && typ != 0 && m->settype_bufsize == nelem(m->settype_buf))
 		runtime·settype_flush(m);
 	m->locks--;
 	if(m->locks == 0 && g->preempt)  // restore the preemption request in case we've cleared it in newstack
@@ -601,7 +603,7 @@ runtime·settype_flush(M *mp)
 		case MTypes_Empty:
 			ntypes = (s->npages << PageShift) / size;
 			nbytes3 = 8*sizeof(uintptr) + 1*ntypes;
-			data3 = runtime·mallocgc(nbytes3, 0, FlagNoProfiling|FlagNoPointers|FlagNoInvokeGC);
+			data3 = runtime·mallocgc(nbytes3, 0, FlagNoProfiling|FlagNoScan|FlagNoInvokeGC);
 			s->types.compression = MTypes_Bytes;
 			s->types.data = (uintptr)data3;
 			((uintptr*)data3)[1] = typ;
@@ -628,7 +630,7 @@ runtime·settype_flush(M *mp)
 			} else {
 				ntypes = (s->npages << PageShift) / size;
 				nbytes2 = ntypes * sizeof(uintptr);
-				data2 = runtime·mallocgc(nbytes2, 0, FlagNoProfiling|FlagNoPointers|FlagNoInvokeGC);
+				data2 = runtime·mallocgc(nbytes2, 0, FlagNoProfiling|FlagNoScan|FlagNoInvokeGC);
 				s->types.compression = MTypes_Words;
 				s->types.data = (uintptr)data2;
@@ -699,7 +701,7 @@ runtime·mal(uintptr n)
 void
 runtime·new(Type *typ, uint8 *ret)
 {
-	ret = runtime·mallocgc(typ->size, (uintptr)typ | TypeInfo_SingleObject, typ->kind&KindNoPointers ? FlagNoPointers : 0);
+	ret = runtime·mallocgc(typ->size, (uintptr)typ | TypeInfo_SingleObject, typ->kind&KindNoPointers ? FlagNoScan : 0);
 	FLUSH(&ret);
 }
@@ -710,7 +712,7 @@ cnew(Type *typ, intgo n, int32 objtyp)
 		runtime·throw("runtime: invalid objtyp");
 	if(n < 0 || (typ->size > 0 && n > MaxMem/typ->size))
 		runtime·panicstring("runtime: allocation size out of range");
-	return runtime·mallocgc(typ->size*n, (uintptr)typ | objtyp, typ->kind&KindNoPointers ? FlagNoPointers : 0);
+	return runtime·mallocgc(typ->size*n, (uintptr)typ | objtyp, typ->kind&KindNoPointers ? FlagNoScan : 0);
 }
 // same as runtime·new, but callable from C

src/pkg/runtime/malloc.h

@@ -467,7 +467,7 @@ uintptr runtime·gettype(void*);
 enum
 {
 	// flags to malloc
-	FlagNoPointers	= 1<<0,	// no pointers here
+	FlagNoScan	= 1<<0,	// GC doesn't have to scan object
 	FlagNoProfiling	= 1<<1,	// must not profile
 	FlagNoGC	= 1<<2,	// must not free or scan for pointers
 	FlagNoZero	= 1<<3,	// don't zero memory

src/pkg/runtime/mfinal.c

@@ -123,7 +123,7 @@ resizefintab(Fintab *tab)
 		newtab.max *= 3;
 	}
-	newtab.key = runtime·mallocgc(newtab.max*sizeof newtab.key[0], 0, FlagNoInvokeGC|FlagNoPointers);
+	newtab.key = runtime·mallocgc(newtab.max*sizeof newtab.key[0], 0, FlagNoInvokeGC|FlagNoScan);
 	newtab.val = runtime·mallocgc(newtab.max*sizeof newtab.val[0], 0, FlagNoInvokeGC);
 	for(i=0; i<tab->max; i++) {

src/pkg/runtime/mgc0.c

@@ -51,7 +51,7 @@ enum {
 // The bits in the word are packed together by type first, then by
 // heap location, so each 64-bit bitmap word consists of, from top to bottom,
 // the 16 bitSpecial bits for the corresponding heap words, then the 16 bitMarked bits,
-// then the 16 bitNoPointers/bitBlockBoundary bits, then the 16 bitAllocated bits.
+// then the 16 bitNoScan/bitBlockBoundary bits, then the 16 bitAllocated bits.
 // This layout makes it easier to iterate over the bits of a given type.
 //
 // The bitmap starts at mheap.arena_start and extends *backward* from
@@ -68,7 +68,7 @@ enum {
 //	/* then test bits & bitAllocated, bits & bitMarked, etc. */
 //
 #define bitAllocated		((uintptr)1<<(bitShift*0))
-#define bitNoPointers		((uintptr)1<<(bitShift*1))	/* when bitAllocated is set */
+#define bitNoScan		((uintptr)1<<(bitShift*1))	/* when bitAllocated is set */
 #define bitMarked		((uintptr)1<<(bitShift*2))	/* when bitAllocated is set */
 #define bitSpecial		((uintptr)1<<(bitShift*3))	/* when bitAllocated is set - has finalizer or being profiled */
 #define bitBlockBoundary	((uintptr)1<<(bitShift*1))	/* when bitAllocated is NOT set */
@@ -454,7 +454,7 @@ flushptrbuf(PtrTarget *ptrbuf, PtrTarget **ptrbufpos, Obj **_wp, Workbuf **_wbuf
 		}
 		// If object has no pointers, don't need to scan further.
-		if((bits & bitNoPointers) != 0)
+		if((bits & bitNoScan) != 0)
 			continue;
 		// Ask span about size class.
@@ -1198,7 +1198,7 @@ debug_scanblock(byte *b, uintptr n)
 			runtime·printf("found unmarked block %p in %p\n", obj, vp+i);
 		// If object has no pointers, don't need to scan further.
-		if((bits & bitNoPointers) != 0)
+		if((bits & bitNoScan) != 0)
 			continue;
 		debug_scanblock(obj, size);
@@ -2345,9 +2345,9 @@ runfinq(void)
 					runtime·free(frame);
 					// The frame does not contain pointers interesting for GC,
 					// all not yet finalized objects are stored in finc.
-					// If we do not mark it as FlagNoPointers,
+					// If we do not mark it as FlagNoScan,
 					// the last finalized object is not collected.
-					frame = runtime·mallocgc(framesz, 0, FlagNoPointers|FlagNoInvokeGC);
+					frame = runtime·mallocgc(framesz, 0, FlagNoScan|FlagNoInvokeGC);
 					framecap = framesz;
 				}
 				if(f->fint == nil)
@@ -2381,9 +2381,9 @@ runfinq(void)
 }
 // mark the block at v of size n as allocated.
-// If noptr is true, mark it as having no pointers.
+// If noscan is true, mark it as not needing scanning.
 void
-runtime·markallocated(void *v, uintptr n, bool noptr)
+runtime·markallocated(void *v, uintptr n, bool noscan)
 {
 	uintptr *b, obits, bits, off, shift;
@@ -2400,8 +2400,8 @@ runtime·markallocated(void *v, uintptr n, bool noptr)
 	for(;;) {
 		obits = *b;
 		bits = (obits & ~(bitMask<<shift)) | (bitAllocated<<shift);
-		if(noptr)
-			bits |= bitNoPointers<<shift;
+		if(noscan)
+			bits |= bitNoScan<<shift;
 		if(runtime·gomaxprocs == 1) {
 			*b = bits;
 			break;
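
As a reading aid for the bitmap changes above, the sketch below (a hypothetical helper, not part of this CL or of mgc0.c) spells out the addressing scheme from the file's layout comment: a pointer's word offset selects a bitmap word and a shift, and the collector skips the block when bitNoScan is set at that position, which is exactly the test flushptrbuf and debug_scanblock perform.

// Hypothetical helper, for illustration only. It assumes the names used by
// the layout comment above (arena_start, wordsPerBitmapWord, the bit* masks)
// and simply packages that comment's recipe as a function.
static uintptr
bitsforword(byte *arena_start, void *p)
{
	uintptr off, shift, *b;

	off = (uintptr*)p - (uintptr*)arena_start;               // word offset into the arena
	b = (uintptr*)arena_start - off/wordsPerBitmapWord - 1;  // bitmap word; the bitmap grows backward
	shift = off % wordsPerBitmapWord;
	return *b >> shift;  // caller tests bitAllocated, bitNoScan, bitMarked, ...
}

Usage, matching the checks in the hunks above: if((bitsforword(arena_start, obj) & bitNoScan) != 0), the object need not be scanned.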

src/pkg/runtime/string.goc

@@ -47,7 +47,7 @@ gostringsize(intgo l)
 	if(l == 0)
 		return runtime·emptystring;
 	// leave room for NUL for C runtime (e.g., callers of getenv)
-	s.str = runtime·mallocgc(l+1, 0, FlagNoPointers|FlagNoZero);
+	s.str = runtime·mallocgc(l+1, 0, FlagNoScan|FlagNoZero);
 	s.len = l;
 	s.str[l] = 0;
 	for(;;) {
@@ -85,7 +85,7 @@ runtime·gobytes(byte *p, intgo n)
 {
 	Slice sl;
-	sl.array = runtime·mallocgc(n, 0, FlagNoPointers|FlagNoZero);
+	sl.array = runtime·mallocgc(n, 0, FlagNoScan|FlagNoZero);
 	sl.len = n;
 	sl.cap = n;
 	runtime·memmove(sl.array, p, n);
@@ -252,7 +252,7 @@ func slicebytetostring(b Slice) (s String) {
 }
 func stringtoslicebyte(s String) (b Slice) {
-	b.array = runtime·mallocgc(s.len, 0, FlagNoPointers|FlagNoZero);
+	b.array = runtime·mallocgc(s.len, 0, FlagNoScan|FlagNoZero);
 	b.len = s.len;
 	b.cap = s.len;
 	runtime·memmove(b.array, s.str, s.len);
@@ -301,7 +301,7 @@ func stringtoslicerune(s String) (b Slice) {
 		n++;
 	}
-	b.array = runtime·mallocgc(n*sizeof(r[0]), 0, FlagNoPointers|FlagNoZero);
+	b.array = runtime·mallocgc(n*sizeof(r[0]), 0, FlagNoScan|FlagNoZero);
 	b.len = n;
 	b.cap = n;
 	p = s.str;