/* doc: <routine name="rt_scoop_try_call" return_type="EIF_BOOLEAN" export="shared"> doc: <summary> Try to apply the feature in 'call', and catch any exceptions that may occur. </summary> doc: <param name="call" type="struct call_data*"> The feature to apply. Must not be NULL. </param> doc: <thread_safety> Not safe. </thread_safety> doc: <synchronization> None </synchronization> doc: </routine> */ rt_shared EIF_BOOLEAN rt_scoop_try_call (call_data *call) { /* Switch this on to catch exceptions */ /* This section slows down some benchmarks by 2x. I believe */ /* this is due to either some locking in the allocation routines (again) */ /* or reloading the thread local variables often. */ EIF_GET_CONTEXT EIF_BOOLEAN success; jmp_buf exenv; RTXDR; #ifdef WORKBENCH RTLXD; RTLXL; #endif /* TODO: We used to keep track of last_exception in this function, * which caused a call into Eiffel code. Therefore it was necessary * register the call_data struct somewhere for GC traversal. * This is no longer the case now, which means some client code * can be simplified. */ /* Record pseudo execution vector */ excatch(&exenv); if (!setjmp(exenv)) { /* Execute the Eiffel function. */ #ifdef WORKBENCH rt_apply_wcall (call); #else call->pattern (call); #endif success = EIF_TRUE; expop(&eif_stack); } else { #ifdef WORKBENCH RTLXE; #endif RTXSC; success = EIF_FALSE; } return success; }
/* doc: <routine name="rt_try_execute_scoop_call" return_type="EIF_BOOLEAN" export="shared"> doc: <summary> Try to apply the feature in 'call', and catch any exceptions that may occur. </summary> doc: <param name="call" type="struct eif_scoop_call_data*"> The feature to apply. Must not be NULL. </param> doc: <thread_safety> Not safe. </thread_safety> doc: <synchronization> None </synchronization> doc: </routine> */ rt_shared EIF_BOOLEAN rt_try_execute_scoop_call (struct eif_scoop_call_data *call) { /* NOTE: It is vitally important that this function does not trigger * garbage collection until the stack frame of the Eiffel function described * by 'call' is properly set up and registered with the GC. * The reason is that the eif_scoop_call_data structure is NOT marked by * the GC any more at this point. */ EIF_GET_CONTEXT EIF_BOOLEAN success; jmp_buf exenv; RTXDR; RTS_SDX; /* Record the request group stack count, the lock stack count, and the current region ID. */ #ifdef WORKBENCH RTLXD; RTLXL; #endif /* Record pseudo execution vector */ excatch(&exenv); if (!setjmp(exenv)) { /* Execute the Eiffel function. */ execute_scoop_call (call); success = EIF_TRUE; expop(&eif_stack); } else { #ifdef WORKBENCH RTLXE; #endif RTXSC; RTS_SRR; /* Restore the two stacks and the region ID. */ success = EIF_FALSE; } CHECK ("same_request_group_stack", eif_scoop_request_group_stack_count (l_scoop_processor_id) == l_scoop_request_group_stack_count); CHECK ("same_lock_stack", eif_scoop_lock_stack_count (l_scoop_processor_id) == l_scoop_lock_stack_count); return success; }
rt_public EIF_REFERENCE edclone(EIF_CONTEXT EIF_REFERENCE source)
{
	/* Recursive Eiffel clone. This function recursively clones the source
	 * object and returns a pointer to the top of the new tree. */

	RT_GET_CONTEXT
	EIF_GET_CONTEXT
	EIF_REFERENCE root = (EIF_REFERENCE) 0;	/* Root of the deep cloned object */
	jmp_buf exenv;							/* Environment saving */
	struct {
		union overhead discard;				/* Pseudo object header */
		EIF_REFERENCE boot;					/* Anchor point for cloning process */
	} anchor;
	struct rt_traversal_context traversal_context;
	int volatile is_locked;
#ifdef DEBUG
	int xobjs;
#endif

	if (source == NULL) {
		return NULL;			/* Void source */
	}

	/* The deep clone of the source will be attached in the 'boot' entry from
	 * the anchor structure. It all happens as if we were in fact deep cloning
	 * the anchor pseudo-object. */

	memset (&anchor, 0, sizeof(anchor));		/* Reset header */
	anchor.boot = (EIF_REFERENCE) &root;		/* To bootstrap cloning process */

	RT_GC_PROTECT(source);		/* Protect source: allocation will occur */

#ifdef DEBUG
	xobjs = nomark(source);
	printf("Source has %p %d objects\n", (void *) source, xobjs);
#endif

	/* Set up an exception trap. If any exception occurs, control will be
	 * transferred back here by the run-time to give us a chance to clean up
	 * our structures. */

	{
		RTXDRH;					/* Save stack contexts */

		EIF_EO_STORE_LOCK;		/* Because we perform a traversal that marks objects,
								 * we need to be sure we are the only one doing it. */
		is_locked = 1;
		excatch(&exenv);		/* Record pseudo-execution vector */
		if (setjmp(exenv)) {
			RTXSCH;				/* Restore stack contexts */
			map_reset(1);		/* Reset in emergency situation */
				/* If we locked the EO_STORE_MUTEX, then we need to unmark objects
				 * and unlock it. */
			if (is_locked) {
					/* We are only concerned about unmarking objects, so we do not
					 * perform any accounting. */
				CHECK ("Not accounting", traversal_context.accounting == 0);
				CHECK ("Not unmarking", traversal_context.is_unmarking == 0);
					/* First we mark again all objects. */
				traversal_context.is_unmarking = 0;
				traversal(&traversal_context, source);
					/* Then we unmark them. */
				traversal_context.is_unmarking = 1;
				traversal(&traversal_context, source);
					/* Now we can unlock our mutex. */
				EIF_EO_STORE_UNLOCK;
			}
			ereturn(MTC_NOARG);	/* And propagate the exception */
		}

		/* Now start the traversal of the source, allocating all the objects as
		 * needed and stuffing them into a FIFO stack for later perusal by the
		 * cloning process. */

		memset (&traversal_context, 0, sizeof(struct rt_traversal_context));
		traversal_context.accounting = TR_MAP;
		traversal(&traversal_context, source);			/* Object traversal, mark with EO_STORE */
		hash_malloc(&hclone, traversal_context.obj_nb);	/* Hash table allocation */
		map_start();									/* Restart at bottom of FIFO stack */

#ifdef DEBUG
		printf("Computed %p %d objects\n\n", (void *) source, traversal_context.obj_nb);
#endif

		/* Throughout the deep cloning process, we need to maintain the notion of
		 * enclosing object for GC aging tests. The enclosing object is defined as
		 * being the object to which the currently cloned tree will be attached.
		 *
		 * We need to initialize the cloning process by computing a valid reference
		 * into the root variable. That will be the enclosing object, and of course
		 * it cannot be void, ever, or something really really weird is happening.
		 *
		 * To get rid of code duplication, I am initially calling rdeepclone with
		 * an enclosing object address set to anchor.boot. The anchor structure
		 * represents a pseudo anchor object for the object hierarchy being cloned.
		 */

		rdeepclone(source, (EIF_REFERENCE) &anchor.boot, 0);	/* Recursive clone */
		hash_free(&hclone);			/* Free hash table */
		map_reset(0);				/* And eif_free mapping table */

		/* Release all the hector pointers asked for during the map table
		 * construction (obj_nb exactly). */
		CHECK("Has objects", traversal_context.obj_nb > 0);
		eif_ostack_npop(&hec_stack, traversal_context.obj_nb);

#ifdef DEBUG
		xobjs = nomark(source);
		printf("Source now has %d objects\n", xobjs);
		xobjs = nomark(anchor.boot);
		printf("Result has %d objects\n", xobjs);
#endif

		RT_GC_WEAN(source);			/* Release GC protection */
		expop(&eif_stack);			/* Remove pseudo execution vector */
	}
	EIF_EO_STORE_UNLOCK;			/* Free marking mutex */
	is_locked = 0;

	return anchor.boot;				/* The cloned object tree */
}
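/* A hedged usage sketch from C code that already holds an Eiffel reference. The
 * eif_protect / eif_access / eif_wean calls are the usual CECIL way of keeping
 * a reference alive across a call that may allocate and trigger a collection;
 * 'some_object' is a hypothetical reference obtained elsewhere:
 *
 *	EIF_OBJECT handle = eif_protect (some_object);      // Keep the source reachable for the GC.
 *	EIF_REFERENCE copy = edclone (eif_access (handle)); // Deep copy of the graph reachable from it.
 *	eif_wean (handle);                                   // Release the protection once done.
 *
 * A NULL source simply yields NULL. Any exception raised during the clone is
 * propagated to the caller only after the traversal marks have been undone and
 * the marking mutex released (see the setjmp branch above).
 */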