Example #1
void
runtime·free(void *v)
{
	int32 sizeclass;
	MSpan *s;
	MCache *c;
	uint32 prof;
	uintptr size;

	if(v == nil)
		return;

	if(m->mallocing)
		runtime·throw("malloc/free - deadlock");
	m->mallocing = 1;

	if(!runtime·mlookup(v, nil, nil, &s)) {
		runtime·printf("free %p: not an allocated block\n", v);
		runtime·throw("free runtime·mlookup");
	}
	prof = runtime·blockspecial(v);

	// Find the size class for v.
	sizeclass = s->sizeclass;
	c = m->mcache;
	if(sizeclass == 0) {
		// Large object: return the whole span to the heap.
		size = s->npages<<PageShift;
		*(uintptr*)(s->start<<PageShift) = 1;	// mark as "needs to be zeroed"
		runtime·markfreed(v, size);
		runtime·unmarkspan(v, 1<<PageShift);
		runtime·MHeap_Free(&runtime·mheap, s, 1);
	} else {
		// Small object: return the block to the per-M cache.
		size = runtime·class_to_size[sizeclass];
		if(size > sizeof(uintptr))
			((uintptr*)v)[1] = 1;	// mark as "needs to be zeroed"
		runtime·markfreed(v, size);
		c->local_by_size[sizeclass].nfree++;
		runtime·MCache_Free(c, v, sizeclass, size);
	}
	c->local_alloc -= size;
	if(prof)
		runtime·MProf_Free(v, size);
	m->mallocing = 0;
}
Example #2
// free RefNone, free & queue finalizers for RefNone|RefHasFinalizer, reset RefSome
static void
sweepspan(MSpan *s)
{
	int32 n, npages, size;
	byte *p;
	uint32 ref, *gcrefp, *gcrefep;
	MCache *c;
	Finalizer *f;

	p = (byte*)(s->start << PageShift);
	if(s->sizeclass == 0) {
		// Large block.
		ref = s->gcref0;
		switch(ref & ~(RefFlags^RefHasFinalizer)) {
		case RefNone:
			// Free large object.
			mstats.alloc -= s->npages<<PageShift;
			runtime_memclr(p, s->npages<<PageShift);
			if(ref & RefProfiled)
				MProf_Free(p, s->npages<<PageShift);
			s->gcref0 = RefFree;
			MHeap_Free(&mheap, s, 1);
			break;
		case RefNone|RefHasFinalizer:
			f = getfinalizer(p, 1);
			if(f == nil)
				throw("finalizer inconsistency");
			f->arg = p;
			f->next = finq;
			finq = f;
			ref &= ~RefHasFinalizer;
			// fall through
		case RefSome:
		case RefSome|RefHasFinalizer:
			s->gcref0 = RefNone | (ref&RefFlags);
			break;
		}
		return;
	}

	// Chunk full of small blocks.
	MGetSizeClassInfo(s->sizeclass, &size, &npages, &n);
	gcrefp = s->gcref;
	gcrefep = s->gcref + n;
	for(; gcrefp < gcrefep; gcrefp++, p += size) {
		ref = *gcrefp;
		if(ref < RefNone)	// RefFree or RefStack
			continue;
		switch(ref & ~(RefFlags^RefHasFinalizer)) {
		case RefNone:
			// Free small object.
			if(ref & RefProfiled)
				MProf_Free(p, size);
			*gcrefp = RefFree;
			c = m->mcache;
			if(size > sizeof(uintptr))
				((uintptr*)p)[1] = 1;	// mark as "needs to be zeroed"
			mstats.alloc -= size;
			mstats.by_size[s->sizeclass].nfree++;
			MCache_Free(c, p, s->sizeclass, size);
			break;
		case RefNone|RefHasFinalizer:
			f = getfinalizer(p, 1);
			if(f == nil)
				throw("finalizer inconsistency");
			f->arg = p;
			f->next = finq;
			finq = f;
			ref &= ~RefHasFinalizer;
			// fall through
		case RefSome:
		case RefSome|RefHasFinalizer:
			*gcrefp = RefNone | (ref&RefFlags);
			break;
		}
	}
}