Example 1
// Register finalizer f for p, or remove any existing finalizer when
// f is nil.  Returns false if p already has a finalizer.
bool
runtime_addfinalizer(void *p, FuncVal *f, const struct __go_func_type *ft)
{
	Fintab *tab;
	byte *base;
	
	if(debug) {
		if(!runtime_mlookup(p, &base, nil, nil) || p != base)
			runtime_throw("addfinalizer on invalid pointer");
	}
	
	tab = TAB(p);
	runtime_lock(tab);
	if(f == nil) {
		lookfintab(tab, p, true, nil);
		runtime_unlock(tab);
		return true;
	}

	if(lookfintab(tab, p, false, nil)) {
		runtime_unlock(tab);
		return false;
	}

	if(tab->nkey >= tab->max/2+tab->max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.
		resizefintab(tab);
	}

	addfintab(tab, p, f, ft);
	runtime_setblockspecial(p, true);
	runtime_unlock(tab);
	return true;
}
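Example 1 leans on a TAB(p) macro that is not shown: it picks a lock-protected shard of the finalizer table from the pointer's address. A minimal sketch of the assumed supporting definitions follows; the shard count, hash, and field names are illustrative, not verbatim source.

typedef struct Fin Fin;
struct Fin
{
	FuncVal *fn;				/* registered finalizer */
	const struct __go_func_type *ft;	/* its type descriptor */
};

enum { TABSZ = 17 };	/* small prime number of shards (assumed) */

typedef struct Fintab Fintab;
struct Fintab
{
	Lock;		/* embedded lock: runtime_lock(tab) locks the shard */
	void **key;	/* open-addressed keys; (void*)-1 marks a dead slot */
	Fin *val;
	int32 nkey;	/* live entries */
	int32 ndead;	/* tombstoned entries */
	int32 max;	/* capacity of key and val */
};

static Fintab fintab[TABSZ];

/* Heap objects are at least 8-byte aligned, so shift the dead low
   bits out of the address before hashing it into a shard. */
#define TAB(p)	(&fintab[((uintptr)(p)>>3)%TABSZ])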
Example 2
// As in Example 1, but bracketed by the holds_finlock handshake: the
// flag lets a garbage collection triggered during the critical section
// be deferred and run after the unlock instead of deadlocking.
bool
runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft)
{
	Fintab *tab;
	byte *base;
	bool ret = false;
	
	if(debug) {
		if(!runtime_mlookup(p, &base, nil, nil) || p != base)
			runtime_throw("addfinalizer on invalid pointer");
	}
	
	// Note that this M is entering a finalizer-table critical section;
	// if the flag is already set we are re-entering and would deadlock.
	if(!__sync_bool_compare_and_swap(&m->holds_finlock, 0, 1))
		runtime_throw("finalizer deadlock");

	tab = TAB(p);
	runtime_lock(tab);
	if(f == nil) {
		if(lookfintab(tab, p, true, nil))
			runtime_setblockspecial(p, false);
		ret = true;
		goto unlock;
	}

	if(lookfintab(tab, p, false, nil)) {
		ret = false;
		goto unlock;
	}

	if(tab->nkey >= tab->max/2+tab->max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.
		resizefintab(tab);
	}

	addfintab(tab, p, f, ft);
	runtime_setblockspecial(p, true);
	ret = true;

 unlock:
	runtime_unlock(tab);

	// Leave the critical section, then run any collection that was
	// deferred while we were inside it.
	__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);

	if(__sync_bool_compare_and_swap(&m->gcing_for_finlock, 1, 0)) {
		__go_run_goroutine_gc(200);
	}

	return ret;
}
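The holds_finlock / gcing_for_finlock pair is one half of a handshake with the collector; the collector's half is not shown. A hypothetical sketch of its shape follows: only the two flags and __go_run_goroutine_gc come from the code above, the function name and placement are assumptions.

/* Hypothetical collector-side check, not verbatim source: a GC
   triggered while this M is inside runtime_addfinalizer must not
   take the finalizer lock again, so it records a pending collection
   instead.  runtime_addfinalizer sees the flag after unlocking and
   runs __go_run_goroutine_gc(200) on the collector's behalf. */
static bool
gc_can_lock_finlock(void)
{
	if(m->holds_finlock) {
		m->gcing_for_finlock = 1;	/* defer: lock owner runs the GC */
		return false;
	}
	return true;
}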
Example 3
static void
markfin(void *v)
{
	uintptr size;
	uint32 *refp;

	size = 0;
	refp = nil;
	if(!runtime_mlookup(v, (byte**)&v, &size, nil, &refp) || !(*refp & RefHasFinalizer))
		runtime_throw("mark - finalizer inconsistency");
	
	// do not mark the finalizer block itself.  just mark the things it points at.
	scanblock(v, size);
}
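markfin only matters if something applies it to every registered pointer during the mark phase. A sketch of that walk follows, assuming the single open-addressed fintab and finlock used in Example 5; the shape is assumed, not verbatim source.

/* Sketch (assumed shape): call fn on every live key in the
   finalizer table.  With fn = markfin, objects reachable only
   through a registered finalizer still get scanned. */
static void
walkfintab(void (*fn)(void*))
{
	void **key;
	void **ekey;

	runtime_lock(&finlock);
	key = fintab.key;
	ekey = key + fintab.max;
	for(; key < ekey; key++)
		if(*key != nil && *key != (void*)-1)	/* skip empty and dead slots */
			fn(*key);
	runtime_unlock(&finlock);
}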
Example 4
// Mark the n bytes at b and everything reachable from them, driving
// an explicit work stack (bl..ebl) instead of recursing.
static void
scanblock(byte *b, int64 n)
{
	int32 off;
	void *obj;
	uintptr size;
	uint32 *refp, ref;
	void **vp;
	int64 i;
	BlockList *w;

	// Push the initial block onto the work stack.
	w = bl;
	w->obj = b;
	w->size = n;
	w++;

	while(w > bl) {
		w--;
		b = w->obj;
		n = w->size;

		if(Debug > 1)
			runtime_printf("scanblock %p %lld\n", b, (long long) n);
		off = (uint32)(uintptr)b & (PtrSize-1);
		if(off) {
			b += PtrSize - off;
			n -= PtrSize - off;
		}
	
		vp = (void**)b;
		n /= PtrSize;
		for(i=0; i<n; i++) {
			obj = vp[i];
			if(obj == nil)
				continue;
			if(runtime_mheap.closure_min != nil && runtime_mheap.closure_min <= (byte*)obj && (byte*)obj < runtime_mheap.closure_max) {
				if((((uintptr)obj) & 63) != 0)
					continue;
	
				// Looks like a Native Client closure.
				// Actual pointer is pointed at by address in first instruction.
				// Embedded pointer starts at byte 2.
				// If it is f4f4f4f4 then that space hasn't been
				// used for a closure yet (f4 is the HLT instruction).
				// See nacl/386/closure.c for more.
				void **pp;
				pp = *(void***)((byte*)obj+2);
				if(pp == (void**)0xf4f4f4f4)	// HLT... - not a closure after all
					continue;
				obj = *pp;
			}
			if(runtime_mheap.min <= (byte*)obj && (byte*)obj < runtime_mheap.max) {
				if(runtime_mlookup(obj, (byte**)&obj, &size, nil, &refp)) {
					ref = *refp;
					switch(ref & ~RefFlags) {
					case RefNone:
						if(Debug > 1)
							runtime_printf("found at %p: ", &vp[i]);
						*refp = RefSome | (ref & RefFlags);
						if(!(ref & RefNoPointers)) {
							if(w >= ebl)
								runtime_throw("scanblock: garbage collection stack overflow");
							w->obj = obj;
							w->size = size;
							w++;
						}
						break;
					}
				}
			}
		}
	}
}
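scanblock drains the work stack between bl and ebl rather than calling itself, so deeply linked data cannot overflow the C stack; it throws if the work stack itself fills up. A sketch of how a caller might size and install that stack follows; the buffer size and function name are assumptions.

/* Hypothetical mark-phase setup, not verbatim source: point bl at
   the base of a bounded BlockList array and ebl at its end, then
   hand scanblock a root span.  scanblock pushes every unmarked
   object it discovers instead of recursing into it. */
enum { WorkMax = 4096 };
static BlockList workbuf[WorkMax];

static void
markspan(byte *start, byte *end)
{
	bl = workbuf;			/* bottom of the work stack */
	ebl = workbuf + WorkMax;	/* overflow limit checked by scanblock */
	scanblock(start, end - start);
}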
Example 5
// add finalizer; caller is responsible for making sure not already in table
void
runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft)
{
	Fintab newtab;
	int32 i;
	uint32 *ref;
	byte *base;
	Finalizer *e;
	
	e = nil;
	if(f != nil) {
		e = runtime_mal(sizeof *e);
		e->fn = f;
		e->ft = ft;
	}

	runtime_lock(&finlock);
	if(!runtime_mlookup(p, &base, nil, nil, &ref) || p != base) {
		runtime_unlock(&finlock);
		runtime_throw("addfinalizer on invalid pointer");
	}
	if(f == nil) {
		if(*ref & RefHasFinalizer) {
			lookfintab(&fintab, p, 1);
			*ref &= ~RefHasFinalizer;
		}
		runtime_unlock(&finlock);
		return;
	}

	if(*ref & RefHasFinalizer) {
		runtime_unlock(&finlock);
		runtime_throw("double finalizer");
	}
	*ref |= RefHasFinalizer;

	if(fintab.nkey >= fintab.max/2+fintab.max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.

		runtime_memclr((byte*)&newtab, sizeof newtab);
		newtab.max = fintab.max;
		if(newtab.max == 0)
			newtab.max = 3*3*3;
		else if(fintab.ndead < fintab.nkey/2) {
			// grow table if not many dead values.
			// otherwise just rehash into table of same size.
			newtab.max *= 3;
		}

		newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], RefNoPointers, 0, 1);
		newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1);

		for(i=0; i<fintab.max; i++) {
			void *k;

			k = fintab.key[i];
			if(k != nil && k != (void*)-1)
				addfintab(&newtab, k, fintab.val[i]);
		}
		runtime_free(fintab.key);
		runtime_free(fintab.val);
		fintab = newtab;
	}

	addfintab(&fintab, p, e);
	runtime_unlock(&finlock);
}
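Both this variant and its rehash loop depend on addfintab and on (void*)-1 tombstones, neither of which is shown. A sketch of the assumed insert routine follows: linear probing over an open-addressed table, shape assumed rather than verbatim.

/* Sketch (assumed shape): insert k -> e starting at k's home slot.
   Filling a nil slot adds a key; recycling a (void*)-1 tombstone
   revives a dead one.  The callers' 3/4 load-factor check
   guarantees the probe finds a slot. */
static void
addfintab(Fintab *t, void *k, Finalizer *e)
{
	uintptr i, j;

	i = (uintptr)k % (uintptr)t->max;
	for(j = 0; j < t->max; j++) {
		if(t->key[i] == nil) {
			t->nkey++;
			goto ret;
		}
		if(t->key[i] == (void*)-1) {
			t->ndead--;
			goto ret;
		}
		if(++i == t->max)	/* wrap around */
			i = 0;
	}
	runtime_throw("finalizer table inconsistent");	/* can never be full */

ret:
	t->key[i] = k;
	t->val[i] = e;
}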
Example 6
// add finalizer; caller is responsible for making sure not already in table
void
runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft)
{
	Fintab newtab;
	int32 i;
	byte *base;
	Finalizer *e;
	
	e = nil;
	if(f != nil) {
		e = runtime_mal(sizeof *e);
		e->fn = f;
		e->ft = ft;
	}

	if(!__sync_bool_compare_and_swap(&m->holds_finlock, 0, 1))
		runtime_throw("finalizer deadlock");

	runtime_lock(&finlock);
	if(!runtime_mlookup(p, &base, nil, nil) || p != base) {
		runtime_unlock(&finlock);
		__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);
		runtime_throw("addfinalizer on invalid pointer");
	}
	if(f == nil) {
		lookfintab(&fintab, p, 1);
		goto unlock;
	}

	if(lookfintab(&fintab, p, 0)) {
		runtime_unlock(&finlock);
		__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);
		runtime_throw("double finalizer");
	}
	runtime_setblockspecial(p);

	if(fintab.nkey >= fintab.max/2+fintab.max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.

		runtime_memclr((byte*)&newtab, sizeof newtab);
		newtab.max = fintab.max;
		if(newtab.max == 0)
			newtab.max = 3*3*3;
		else if(fintab.ndead < fintab.nkey/2) {
			// grow table if not many dead values.
			// otherwise just rehash into table of same size.
			newtab.max *= 3;
		}

		newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], FlagNoPointers, 0, 1);
		newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1);

		for(i=0; i<fintab.max; i++) {
			void *k;

			k = fintab.key[i];
			if(k != nil && k != (void*)-1)
				addfintab(&newtab, k, fintab.val[i]);
		}
		runtime_free(fintab.key);
		runtime_free(fintab.val);
		fintab = newtab;
	}

	addfintab(&fintab, p, e);
 unlock:
	runtime_unlock(&finlock);

	__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);

	if(__sync_bool_compare_and_swap(&m->gcing_for_finlock, 1, 0)) {
		__go_run_goroutine_gc(200);
	}
}
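lookfintab is called throughout but never defined here. A sketch of the assumed three-argument form used by Examples 5 and 6 follows, where del converts the lookup into a removal (Examples 1 and 2 call a variant with an extra out-parameter for the value); the shape is assumed, not verbatim source.

/* Sketch (assumed shape): probe for k from its home slot.  A nil
   slot terminates the probe (key absent); tombstones are stepped
   over so probe chains survive deletions.  With del set, the entry
   is tombstoned rather than nil'ed, preserving the chain. */
static Finalizer*
lookfintab(Fintab *t, void *k, bool del)
{
	uintptr i, j;
	Finalizer *e;

	if(t->max == 0)
		return nil;
	i = (uintptr)k % (uintptr)t->max;
	for(j = 0; j < t->max; j++) {
		if(t->key[i] == nil)
			return nil;	/* empty slot: key absent */
		if(t->key[i] == k) {
			e = t->val[i];
			if(del) {
				t->key[i] = (void*)-1;	/* tombstone, not nil */
				t->val[i] = nil;
				t->ndead++;
			}
			return e;
		}
		if(++i == t->max)	/* wrap around */
			i = 0;
	}
	return nil;	/* probed the whole table */
}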