/* LookupAtom(): look up the byte-string `atom` in the global atom hash
 * table, inserting a fresh AtomEntry at the head of its chain when it
 * is not present.  Returns the (possibly new) Atom handle, or NIL when
 * atom space is exhausted.  Takes a read lock for the common hit path
 * and re-acquires as a write lock only when an insert is needed. */
static Atom LookupAtom(const unsigned char *atom) { /* lookup atom in atom table */
  uint64_t hash;
  const unsigned char *p;
  Atom a, na;
  AtomEntry *ae;
  size_t sz = AtomHashTableSize;

  /* compute hash */
  p = atom;
  hash = HashFunction(p);
  hash = hash % sz;
  /* we'll start by holding a read lock in order to avoid contention */
  READ_LOCK(HashChain[hash].AERWLock);
  a = HashChain[hash].Entry;
  /* search atom in chain */
  na = SearchAtom(atom, a);
  if (na != NIL) {
    READ_UNLOCK(HashChain[hash].AERWLock);
    return (na);
  }
  READ_UNLOCK(HashChain[hash].AERWLock);
  /* we need a write lock */
  WRITE_LOCK(HashChain[hash].AERWLock);
/* concurrent version of Yap, need to take care */
#if defined(YAPOR) || defined(THREADS)
  /* Another thread may have inserted this atom between our read unlock
     and the write lock; if the chain head changed, re-scan before
     inserting a duplicate. */
  if (a != HashChain[hash].Entry) {
    a = HashChain[hash].Entry;
    na = SearchAtom(atom, a);
    if (na != NIL) {
      WRITE_UNLOCK(HashChain[hash].AERWLock);
      return (na);
    }
  }
#endif
  /* add new atom to start of chain */
  ae = (AtomEntry *)Yap_AllocAtomSpace((sizeof *ae) + strlen((const char *)atom) + 1);
  if (ae == NULL) {
    WRITE_UNLOCK(HashChain[hash].AERWLock);
    return NIL;
  }
  NOfAtoms++;
  na = AbsAtom(ae);
  ae->PropsOfAE = NIL;
  /* Skip the copy when `atom` already points at this entry's own
     embedded string (self-copy would be undefined for strcpy). */
  if (ae->UStrOfAE != atom)
    strcpy((char *)ae->StrOfAE, (const char *)atom);
  ae->NextOfAE = a;
  HashChain[hash].Entry = na;
  INIT_RWLOCK(ae->ARWLock);
  WRITE_UNLOCK(HashChain[hash].AERWLock);
  /* Ask for table growth once the load factor exceeds 2.
     NOTE(review): NOfAtoms/AtomHashTableSize are read here outside any
     lock — presumably a benign race; confirm. */
  if (NOfAtoms > 2 * AtomHashTableSize) {
    Yap_signal(YAP_CDOVF_SIGNAL);
  }
  return na;
}
/* lookupBlob(): return an AtomEntry wrapping `len` bytes of `blob`
 * data with SWI-compatible blob type `type`.  For PL_BLOB_UNIQUE types
 * an existing identical blob is reused; otherwise a new entry is
 * always created and pushed onto the global SWI_Blobs list.  Returns
 * NULL when code space is exhausted.  Serialised by SWI_Blobs_Lock. */
static AtomEntry *
lookupBlob(void *blob, size_t len, PL_blob_t *type)
{
  BlobPropEntry *b;
  AtomEntry *ae;

  LOCK(SWI_Blobs_Lock);
  if (type->flags & PL_BLOB_UNIQUE) {
    /* just keep a linked chain for now: linear scan for an identical blob */
    ae = SWI_Blobs;
    while (ae) {
      if (ae->PropsOfAE &&
          RepBlobProp(ae->PropsOfAE)->blob_t == type &&
          ae->rep.blob->length == len &&
          !memcmp(ae->rep.blob->data, blob, len)) {
        UNLOCK(SWI_Blobs_Lock);
        return ae;
      }
      ae = RepAtom(ae->NextOfAE);
    }
  }
  b = (BlobPropEntry *)Yap_AllocCodeSpace(sizeof(BlobPropEntry));
  if (!b) {
    UNLOCK(SWI_Blobs_Lock);
    return NULL;
  }
  b->NextOfPE = NIL;
  b->KindOfPE = BlobProperty;
  b->blob_t = type;
  ae = (AtomEntry *)Yap_AllocCodeSpace(sizeof(AtomEntry) + len + sizeof(size_t));
  if (!ae) {
    /* BUGFIX: the original leaked the BlobPropEntry allocated above
       whenever this second allocation failed. */
    Yap_FreeCodeSpace((char *)b);
    UNLOCK(SWI_Blobs_Lock);
    return NULL;
  }
  NOfBlobs++;
  INIT_RWLOCK(ae->ARWLock);
  ae->PropsOfAE = AbsBlobProp(b);
  ae->NextOfAE = AbsAtom(SWI_Blobs);
  ae->rep.blob->length = len;
  memcpy(ae->rep.blob->data, blob, len);
  SWI_Blobs = ae;
  UNLOCK(SWI_Blobs_Lock);
  /* request blob-space overflow handling once over the limit */
  if (NOfBlobs > NOfBlobsMax) {
    Yap_signal(YAP_CDOVF_SIGNAL);
  }
  return ae;
}
static Atom LookupWideAtom(const wchar_t *atom) { /* lookup atom in atom table */ CELL hash; wchar_t *p; Atom a, na; AtomEntry *ae; UInt sz; WideAtomEntry *wae; /* compute hash */ p = (wchar_t *)atom; hash = WideHashFunction(p) % WideAtomHashTableSize; /* we'll start by holding a read lock in order to avoid contention */ READ_LOCK(WideHashChain[hash].AERWLock); a = WideHashChain[hash].Entry; /* search atom in chain */ na = SearchWideAtom(atom, a); if (na != NIL) { READ_UNLOCK(WideHashChain[hash].AERWLock); return (na); } READ_UNLOCK(WideHashChain[hash].AERWLock); /* we need a write lock */ WRITE_LOCK(WideHashChain[hash].AERWLock); /* concurrent version of Yap, need to take care */ #if defined(YAPOR) || defined(THREADS) if (a != WideHashChain[hash].Entry) { a = WideHashChain[hash].Entry; na = SearchWideAtom(atom, a); if (na != NIL) { WRITE_UNLOCK(WideHashChain[hash].AERWLock); return na; } } #endif /* add new atom to start of chain */ sz = wcslen(atom); ae = (AtomEntry *)Yap_AllocAtomSpace((size_t)(((AtomEntry *)NULL) + 1) + sizeof(wchar_t) * (sz + 1)); if (ae == NULL) { WRITE_UNLOCK(WideHashChain[hash].AERWLock); return NIL; } wae = (WideAtomEntry *)Yap_AllocAtomSpace(sizeof(WideAtomEntry)); if (wae == NULL) { WRITE_UNLOCK(WideHashChain[hash].AERWLock); return NIL; } na = AbsAtom(ae); ae->PropsOfAE = AbsWideAtomProp(wae); wae->NextOfPE = NIL; wae->KindOfPE = WideAtomProperty; wae->SizeOfAtom = sz; if (ae->WStrOfAE != atom) wcscpy(ae->WStrOfAE, atom); NOfAtoms++; ae->NextOfAE = a; WideHashChain[hash].Entry = na; INIT_RWLOCK(ae->ARWLock); WRITE_UNLOCK(WideHashChain[hash].AERWLock); if (NOfWideAtoms > 2 * WideAtomHashTableSize) { Yap_signal(YAP_CDOVF_SIGNAL); } return na; }
static Int p_continue_signals( USES_REGS1 ) { /* hack to force the signal anew */ if (LOCAL_ActiveSignals & YAP_ITI_SIGNAL) { Yap_signal(YAP_ITI_SIGNAL); } if (LOCAL_ActiveSignals & YAP_INT_SIGNAL) { Yap_signal(YAP_INT_SIGNAL); } if (LOCAL_ActiveSignals & YAP_USR2_SIGNAL) { Yap_signal(YAP_USR2_SIGNAL); } if (LOCAL_ActiveSignals & YAP_USR1_SIGNAL) { Yap_signal(YAP_USR1_SIGNAL); } if (LOCAL_ActiveSignals & YAP_HUP_SIGNAL) { Yap_signal(YAP_HUP_SIGNAL); } if (LOCAL_ActiveSignals & YAP_ALARM_SIGNAL) { Yap_signal(YAP_ALARM_SIGNAL); } if (LOCAL_ActiveSignals & YAP_VTALARM_SIGNAL) { Yap_signal(YAP_VTALARM_SIGNAL); } if (LOCAL_ActiveSignals & YAP_CREEP_SIGNAL) { Yap_signal(YAP_CREEP_SIGNAL); } if (LOCAL_ActiveSignals & YAP_DEBUG_SIGNAL) { Yap_signal(YAP_DEBUG_SIGNAL); } if (LOCAL_ActiveSignals & YAP_BREAK_SIGNAL) { Yap_signal(YAP_BREAK_SIGNAL); } if (LOCAL_ActiveSignals & YAP_STACK_DUMP_SIGNAL) { Yap_signal(YAP_STACK_DUMP_SIGNAL); } if (LOCAL_ActiveSignals & YAP_STATISTICS_SIGNAL) { Yap_signal(YAP_STATISTICS_SIGNAL); } if (LOCAL_ActiveSignals & YAP_FAIL_SIGNAL) { Yap_signal(YAP_FAIL_SIGNAL); } MUTEX_UNLOCK(&(LOCAL_ThreadHandle.tlock)); return TRUE; }
/* AllocHeap(): allocate `size` bytes from YAP's heap/code area.
 * First tries the free-block list; otherwise bumps HeapTop.  Returns a
 * pointer just past the YAP_SEG_SIZE block header, or NULL when the
 * heap would collide with the stacks (the caller must recover).
 * Internally `size` is rounded up, converted to CELL units, and each
 * block carries a size word at both ends (header and footer). */
static char *
AllocHeap(unsigned long int size)
{
  BlockHeader *b, *n;
  YAP_SEG_SIZE *sp;
  UInt align, extra;

  /* { static long long int vsc_alloc_ops; vsc_alloc_ops++; BlockHeader *q = FreeBlocks; while (q) q = q->b_next_size; }*/
  /* larger requests get proportionally larger alignment */
  extra = size/16;
#if SIZEOF_INT_P==4
  align = 2*sizeof(CELL); /* size in dwords + 2 */
#endif
#if SIZEOF_INT_P==8
  align = sizeof(CELL);
#endif
  while (align < extra)
    align *= 2;
  size = ALIGN_SIZE(size,align);
  if (size < sizeof(BlockHeader))
    size = sizeof(BlockHeader);
  size += sizeof(YAP_SEG_SIZE);
  /* change units to cells */
  size = size/sizeof(CELL);
  LOCK(FreeBlocksLock);
  if ((b = GetBlock(size))) {
    /* Got a recycled block.  If it is comfortably larger than needed,
       split off the tail and return it to the free list. */
    if (b->b_size >= size+24+1) {
      n = (BlockHeader *) (((YAP_SEG_SIZE *) b) + size + 1);
      n->b_size = b->b_size - size - 1;
      b->b_size = size;
      AddToFreeList(n);
    }
    /* footer mirrors the header (with InUseFlag) so neighbouring
       blocks can find each other when coalescing */
    sp = &(b->b_size) + b->b_size;
    *sp = b->b_size | InUseFlag;
    b->b_size |= InUseFlag;
    UNLOCK(FreeBlocksLock);
    return (Addr(b) + sizeof(YAP_SEG_SIZE));
  }
  /* no suitable free block: carve fresh space from the heap top */
  LOCK(HeapTopLock);
  UNLOCK(FreeBlocksLock);
  b = (BlockHeader *) HeapTop;
  HeapTop += size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
  LOCK(HeapUsedLock);
  HeapUsed += size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
#ifdef YAPOR
  if (HeapTop > Addr(Yap_GlobalBase) - MinHeapGap)
    Yap_Error(INTERNAL_ERROR, TermNil, "no heap left (AllocHeap)");
#else
  if (HeapTop > HeapLim - MinHeapGap) {
    /* over the soft limit: undo the bump while deciding how to recover */
    HeapTop -= size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
    HeapUsed -= size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
    if (HeapTop > HeapLim) {
      UNLOCK(HeapUsedLock);
      UNLOCK(HeapTopLock);
      /* we destroyed the stack */
      Yap_Error(OUT_OF_HEAP_ERROR, TermNil, "Stack Crashed against Heap...");
      return(NULL);
    } else {
      if (HeapTop + size * sizeof(CELL) + sizeof(YAP_SEG_SIZE) < HeapLim) {
        /* small allocations, we can wait: take the space now and ask
           for code-overflow handling via a signal.
           NOTE(review): control falls through to the common tail below,
           which writes HeapTop/HeapUsed and unlocks HeapUsedLock and
           HeapTopLock AGAIN after they were released here — looks like
           a double-unlock / unsynchronised update; confirm the lock
           macros tolerate this. */
        HeapTop += size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
        HeapUsed += size * sizeof(CELL) + sizeof(YAP_SEG_SIZE);
        UNLOCK(HeapUsedLock);
        UNLOCK(HeapTopLock);
        Yap_signal(YAP_CDOVF_SIGNAL);
      } else {
        /* record the largest failed request for the overflow handler */
        if (size > SizeOfOverflow)
          SizeOfOverflow = size*sizeof(CELL) + sizeof(YAP_SEG_SIZE);
        /* big allocations, the caller must handle the problem */
        UNLOCK(HeapUsedLock);
        UNLOCK(HeapTopLock);
        return(NULL);
      }
    }
  }
#endif /* YAPOR */
  /* mark the word at the new heap top so the last block has a
     terminator, update the high-water mark, and stamp the block */
  *((YAP_SEG_SIZE *) HeapTop) = InUseFlag;
  if (HeapUsed > HeapMax)
    HeapMax = HeapUsed;
  UNLOCK(HeapUsedLock);
  b->b_size = size | InUseFlag;
  sp = &(b->b_size) + size;
  *sp = b->b_size;
  UNLOCK(HeapTopLock);
  return (Addr(b) + sizeof(YAP_SEG_SIZE));
}