/* Allocate and initialize a channel of element type t->__element_type
   with buffer capacity hint.  Throws on an invalid element type and
   panics ("size out of range") on a bad capacity. */
Hchan*
runtime_makechan_c(ChanType *t, int64 hint)
{
	Hchan *ch;
	uintptr hdrsize;
	const Type *elem;

	elem = t->__element_type;

	// compiler checks this but be safe.
	if(elem->__size >= (1<<16))
		runtime_throw("makechan: invalid channel element type");

	/* Reject negative capacities, capacities that do not fit in
	   intgo, and buffers whose byte count would exceed MaxMem. */
	if(hint < 0 || (intgo)hint != hint
	   || (elem->__size > 0 && (uintptr)hint > MaxMem / elem->__size))
		runtime_panicstring("makechan: size out of range");

	// allocate header and buffer memory in one call
	hdrsize = sizeof(*ch);
	ch = (Hchan*)runtime_mal(hdrsize + hint*elem->__size);

	ch->elemsize = elem->__size;
	ch->elemalign = elem->__align;
	ch->dataqsiz = hint;

	if(debug)
		runtime_printf("makechan: chan=%p; elemsize=%D; elemalign=%d; dataqsiz=%D\n",
			ch, (int64)elem->__size, elem->__align, (int64)ch->dataqsiz);

	return ch;
}
/* reflect.MakeChan entry point: create a channel, then box the Hchan
   pointer in a freshly allocated pointer-sized cell and return that
   cell's address as a uintptr. */
uintptr
reflect_makechan(ChanType *t, uint64 size)
{
	Hchan *ch;
	void *box;

	ch = runtime_makechan_c(t, size);
	box = runtime_mal(sizeof(void*));
	__builtin_memcpy(box, &ch, sizeof(void*));
	return (uintptr)box;
}
// add finalizer; caller is responsible for making sure not already in table void runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft) { Fintab newtab; int32 i; uint32 *ref; byte *base; Finalizer *e; e = nil; if(f != nil) { e = runtime_mal(sizeof *e); e->fn = f; e->ft = ft; } runtime_lock(&finlock); if(!runtime_mlookup(p, &base, nil, nil, &ref) || p != base) { runtime_unlock(&finlock); runtime_throw("addfinalizer on invalid pointer"); } if(f == nil) { if(*ref & RefHasFinalizer) { lookfintab(&fintab, p, 1); *ref &= ~RefHasFinalizer; } runtime_unlock(&finlock); return; } if(*ref & RefHasFinalizer) { runtime_unlock(&finlock); runtime_throw("double finalizer"); } *ref |= RefHasFinalizer; if(fintab.nkey >= fintab.max/2+fintab.max/4) { // keep table at most 3/4 full: // allocate new table and rehash. runtime_memclr((byte*)&newtab, sizeof newtab); newtab.max = fintab.max; if(newtab.max == 0) newtab.max = 3*3*3; else if(fintab.ndead < fintab.nkey/2) { // grow table if not many dead values. // otherwise just rehash into table of same size. newtab.max *= 3; } newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], RefNoPointers, 0, 1); newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1); for(i=0; i<fintab.max; i++) { void *k; k = fintab.key[i]; if(k != nil && k != (void*)-1) addfintab(&newtab, k, fintab.val[i]); } runtime_free(fintab.key); runtime_free(fintab.val); fintab = newtab; } addfintab(&fintab, p, e); runtime_unlock(&finlock); }
// add finalizer; caller is responsible for making sure not already in table void runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft) { Fintab newtab; int32 i; byte *base; Finalizer *e; e = nil; if(f != nil) { e = runtime_mal(sizeof *e); e->fn = f; e->ft = ft; } if(!__sync_bool_compare_and_swap(&m->holds_finlock, 0, 1)) runtime_throw("finalizer deadlock"); runtime_lock(&finlock); if(!runtime_mlookup(p, &base, nil, nil) || p != base) { runtime_unlock(&finlock); __sync_bool_compare_and_swap(&m->holds_finlock, 1, 0); runtime_throw("addfinalizer on invalid pointer"); } if(f == nil) { lookfintab(&fintab, p, 1); goto unlock; } if(lookfintab(&fintab, p, 0)) { runtime_unlock(&finlock); __sync_bool_compare_and_swap(&m->holds_finlock, 1, 0); runtime_throw("double finalizer"); } runtime_setblockspecial(p); if(fintab.nkey >= fintab.max/2+fintab.max/4) { // keep table at most 3/4 full: // allocate new table and rehash. runtime_memclr((byte*)&newtab, sizeof newtab); newtab.max = fintab.max; if(newtab.max == 0) newtab.max = 3*3*3; else if(fintab.ndead < fintab.nkey/2) { // grow table if not many dead values. // otherwise just rehash into table of same size. newtab.max *= 3; } newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], FlagNoPointers, 0, 1); newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1); for(i=0; i<fintab.max; i++) { void *k; k = fintab.key[i]; if(k != nil && k != (void*)-1) addfintab(&newtab, k, fintab.val[i]); } runtime_free(fintab.key); runtime_free(fintab.val); fintab = newtab; } addfintab(&fintab, p, e); unlock: runtime_unlock(&finlock); __sync_bool_compare_and_swap(&m->holds_finlock, 1, 0); if(__sync_bool_compare_and_swap(&m->gcing_for_finlock, 1, 0)) { __go_run_goroutine_gc(200); } }