/* =============================================================================
 * TMregion_refine
 * -- Returns net number of elements added to mesh
 * =============================================================================
 *
 * Transactional refinement of the cavity ("region") around a bad element.
 * Repeatedly grows the region; if growing runs into an element encroached
 * upon by another cavity, that element is refined first (recursively), and
 * the region for elementPtr is rebuilt from scratch.
 *
 * Parameters:
 *   regionPtr  - scratch region object, reused across grow attempts
 *   elementPtr - the bad element whose cavity is being refined
 *   meshPtr    - the mesh being modified
 *
 * Returns: net change in the mesh's element count (may be negative?
 *          NOTE(review): sign convention inherited from TMretriangulate —
 *          confirm against its definition).
 */
long
TMregion_refine (TM_ARGDECL
                 region_t* regionPtr, element_t* elementPtr, mesh_t* meshPtr)
{
    long numDelta = 0L;
    MAP_T* edgeMapPtr = NULL;
    element_t* encroachElementPtr = NULL;

    /* Result deliberately discarded: the transactional read puts elementPtr's
     * garbage flag into this transaction's read set, so a concurrent
     * transaction that garbage-collects it will conflict with us. */
    TMELEMENT_ISGARBAGE(elementPtr); /* so we can detect conflicts */

    while (1) {
        /* Fresh edge map for each grow attempt; a failed attempt's map is
         * released at the bottom of the loop before retrying. */
        edgeMapPtr = MAP_ALLOC(NULL, &element_mapCompareEdge);
        assert(edgeMapPtr);
        encroachElementPtr = TMgrowRegion(TM_ARG
                                          elementPtr,
                                          regionPtr,
                                          meshPtr,
                                          edgeMapPtr);

        if (encroachElementPtr) {
            /* Region growth hit an encroached element: mark it referenced and
             * refine it first, then retry growing this element's region. */
            TMELEMENT_SETISREFERENCED(encroachElementPtr, TRUE);
            numDelta += TMregion_refine(TM_ARG
                                        regionPtr,
                                        encroachElementPtr,
                                        meshPtr);
            /* The recursive refinement may have deleted elementPtr itself;
             * if so, stop — nothing left to refine here. */
            if (TMELEMENT_ISGARBAGE(elementPtr)) {
                break;
            }
        } else {
            /* Region grew to completion; keep edgeMapPtr for retriangulation. */
            break;
        }
        PMAP_FREE(edgeMapPtr);
    }

    /*
     * Perform retriangulation.
     */
    if (!TMELEMENT_ISGARBAGE(elementPtr)) {
        numDelta += TMretriangulate(TM_ARG
                                    elementPtr,
                                    regionPtr,
                                    meshPtr,
                                    edgeMapPtr);
    }
    /* Frees the map left over from the final loop iteration (both break
     * paths leave exactly one live map). */
    PMAP_FREE(edgeMapPtr); /* no need to free elements */

    return numDelta;
}
/* ============================================================================= * decoder_alloc * ============================================================================= */ decoder_t* decoder_alloc () { decoder_t* decoderPtr; decoderPtr = (decoder_t*)SEQ_MALLOC(sizeof(decoder_t)); if (decoderPtr) { decoderPtr->fragmentedMapPtr = MAP_ALLOC(NULL, NULL); assert(decoderPtr->fragmentedMapPtr); decoderPtr->decodedQueuePtr = queue_alloc(1024); assert(decoderPtr->decodedQueuePtr); } return decoderPtr; }
/* ============================================================================= * stream_alloc * ============================================================================= */ stream_t* stream_alloc (long percentAttack) { stream_t* streamPtr; streamPtr = (stream_t*)malloc(sizeof(stream_t)); if (streamPtr) { streamPtr->percentAttack = percentAttack; streamPtr->randomPtr = random_alloc(); streamPtr->allocVectorPtr = vector_alloc(1); streamPtr->packetQueuePtr = queue_alloc(-1); streamPtr->attackMapPtr = MAP_ALLOC(NULL, NULL); } return streamPtr; }
/*** Initialize the counter */ void bench_init() { //SET = new HashTable(); //SET = (MAP_T*)hcmalloc(sizeof(MAP_T)); SET = MAP_ALLOC(NULL, NULL); //SET->init((CFG.elements/4)); std::cout << "startup " << std::endl; // warm up the datastructure TM_BEGIN_FAST_INITIALIZATION(); for (uint32_t w = 0; w < CFG.elements; w++) { uint32_t seed = 7; int val = rand_r(&seed) % CFG.elements; //SET->insert(val TM_PARAM); MAP_INSERT(SET, val, val); } TM_END_FAST_INITIALIZATION(); }
/* ============================================================================= * decoder_alloc * ============================================================================= */ decoder_t* decoder_alloc (long numFlow) { decoder_t* decoderPtr; decoderPtr = (decoder_t*)SEQ_MALLOC(sizeof(decoder_t)); if (decoderPtr) { printf("hastable alloc size %lx\n", numFlow); #ifdef MAP_USE_RBTREE decoderPtr->fragmentedMapPtr = MAP_ALLOC(NULL, NULL); #else decoderPtr->fragmentedMapPtr = hashtable_alloc(numFlow, NULL, NULL, 2, 2); #endif assert(decoderPtr->fragmentedMapPtr); decoderPtr->decodedQueuePtr = queue_alloc(1024); assert(decoderPtr->decodedQueuePtr); } return decoderPtr; }
/* =============================================================================
 * tableAlloc
 * =============================================================================
 *
 * Convenience wrapper: creates a map with default sizing and default
 * hash/compare behavior.
 *
 * Returns: the newly allocated map (NULL on allocation failure).
 */
static MAP_T*
tableAlloc ()
{
    MAP_T* mapPtr = MAP_ALLOC(NULL, NULL);
    return mapPtr;
}