Example #1
static HB_GARBAGE_FUNC( hb_codeblockGarbageMark )
{
   PHB_CODEBLOCK pCBlock = ( PHB_CODEBLOCK ) Cargo;

   HB_TRACE( HB_TR_DEBUG, ( "hb_codeblockGarbageMark(%p)", Cargo ) );

   if( pCBlock->uiLocals )
   {
      PHB_ITEM pLocals = pCBlock->pLocals;
      HB_USHORT uiLocals = pCBlock->uiLocals;

      do
      {
         hb_gcItemRef( &pLocals[ uiLocals ] );
      }
      while( --uiLocals );
   }
}
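
The same marking pattern generalizes to any user-defined structure that holds HB_ITEMs and is managed by the collector: the mark callback simply forwards each contained item to hb_gcItemRef() so that everything it references survives the sweep. A minimal sketch, assuming a hypothetical MY_CONTAINER structure with a small fixed item array (the structure and its field names are illustrative, not part of the Harbour sources):

/* Hypothetical GC-managed container of items (illustrative only). */
typedef struct
{
   HB_ITEM items[ 4 ];   /* fixed-size item storage, for the sketch */
} MY_CONTAINER;

/* Mark callback: flag every item held by the container as reachable,
 * mirroring hb_codeblockGarbageMark() above.
 */
static HB_GARBAGE_FUNC( my_containerGarbageMark )
{
   MY_CONTAINER * pContainer = ( MY_CONTAINER * ) Cargo;
   int i;

   for( i = 0; i < 4; ++i )
      hb_gcItemRef( &pContainer->items[ i ] );
}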
Example #2
/* Check all memory blocks if they can be released
*/
void hb_gcCollectAll( BOOL bForce )
{
   HB_GARBAGE_PTR pAlloc, pDelete;

   HB_TRACE( HB_TR_INFO, ( "hb_gcCollectAll(%i), %p, %i", bForce, s_pCurrBlock, s_bCollecting ) );

   /* is another garbage collection already in action? */
   #ifdef HB_THREAD_SUPPORT
      HB_CRITICAL_LOCK( hb_garbageAllocMutex );
      if ( s_pCurrBlock == NULL || ( bForce == FALSE && s_uAllocated < HB_GC_COLLECTION_JUSTIFIED ) )
      {
         HB_CRITICAL_UNLOCK( hb_garbageAllocMutex );
         return;
      }
      HB_CRITICAL_UNLOCK( hb_garbageAllocMutex );

      /* Force this thread to be an idle inspector: only this thread can run
         past this point; depending on settings, this thread may prevent others
         from regaining control, or may just wait for a moment when no thread is active. */
      hb_threadWaitForIdle();

   #else
      if ( s_bCollecting )  /* note: 1) it is volatile and 2) not very important if this check fails once */
      {
         return;
      }
      /* Even without a lock, reading a non-critical variable here
      should not be a problem */
      if( s_pCurrBlock == NULL || ( bForce == FALSE && s_uAllocated < HB_GC_COLLECTION_JUSTIFIED ) )
      {
         s_bCollecting = FALSE;
         return;
      }
   #endif

   /* By hypothesis, only one thread will be granted the right to be here,
   so further consistency checks on s_pCurrBlock are useless. */

   /* Now that we are the rightful owner of the GC process, we must
   * forbid all other threads from acting on the objects that
   * are going to be (at different times):
   * - scanned,
   * - freed (in their members),
   * - modified/released (in their structure).
   */

   s_bCollecting = TRUE;
   s_uAllocated = 0;

   /* Step 1 - mark */
   /* All blocks are already marked because we are flipping
   * the used/unused flag
   */

   HB_TRACE( HB_TR_INFO, ( "Sweep Scan" ) );

   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "Sweep Scan\n" );
   #endif

   /* Step 2 - MARK */
   /* check all known places for the blocks they refer to */
   #ifdef HB_THREAD_SUPPORT
      hb_threadIsLocalRef();
   #else
      hb_vmIsLocalRef();
   #endif

   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After LocalRef\n" );
   #endif

   hb_vmIsStaticRef();
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After StaticRef\n" );
   #endif

   hb_vmIsGlobalRef();
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After Globals\n" );
   #endif

   #ifndef HB_THREAD_SUPPORT
   /* JC1: under MT, each threadIsLocalRef does its own memvar referencing */
   hb_memvarsIsMemvarRef();
   #endif
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After MemvarRef\n" );
   #endif

   hb_clsIsClassRef();
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After ClassRef\n" );
   #endif

   if( HB_IS_GCITEM( &hb_vm_BreakBlock ) )
   {
      hb_gcItemRef( &hb_vm_BreakBlock );
   }
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After BreakBlock\n" );
   #endif

   HB_TRACE( HB_TR_INFO, ( "Locked Scan" ) );

   /* check the list of locked blocks for blocks referenced from
   * a locked block
   */

   if( s_pLockedBlock )
   {
      pAlloc = s_pLockedBlock;

      do
      {
         /* it is not a very elegant method but it works well */
         if( pAlloc->pFunc == hb_gcGripRelease )
         {
            hb_gcItemRef( ( HB_ITEM_PTR ) ( pAlloc + 1 ) );
         }
         else if( pAlloc->pFunc == hb_arrayReleaseGarbage )
         {
            HB_ITEM FakedItem;

            FakedItem.type = HB_IT_ARRAY;
            FakedItem.item.asArray.value = ( PHB_BASEARRAY )( pAlloc + 1 );

            hb_gcItemRef( &FakedItem );
         }
         else if( pAlloc->pFunc == hb_hashReleaseGarbage )
         {
            HB_ITEM FakedItem;

            FakedItem.type = HB_IT_HASH;
            FakedItem.item.asHash.value = ( PHB_BASEHASH )( pAlloc + 1 );

            hb_gcItemRef( &FakedItem );
         }
         else if( pAlloc->pFunc == hb_codeblockDeleteGarbage )
         {
            HB_ITEM FakedItem;

            FakedItem.type = HB_IT_BLOCK;
            FakedItem.item.asBlock.value = ( PHB_CODEBLOCK )( pAlloc + 1 );

            hb_gcItemRef( &FakedItem );
         }

         pAlloc = pAlloc->pNext;
      }
      while ( s_pLockedBlock != pAlloc );
   }
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After Lock scan\n" );
   #endif

   HB_TRACE( HB_TR_INFO, ( "Cleanup Scan" ) );

   /* Step 3 - Call Cleanup Functions  */

   pAlloc = s_pCurrBlock;
   do
   {
      if( s_pCurrBlock->used == s_uUsedFlag )
      {
         s_pCurrBlock->used |= HB_GC_DELETE;

         /* call the cleanup function - now for NON Blocks. */
         if( s_pCurrBlock->pFunc )
         {
            HB_TRACE( HB_TR_INFO, ( "Cleanup, %p", s_pCurrBlock ) );
            ( s_pCurrBlock->pFunc )( ( void *)( s_pCurrBlock + 1 ) );
            HB_TRACE( HB_TR_INFO, ( "DONE Cleanup, %p", s_pCurrBlock ) );
         }
      }

      s_pCurrBlock = s_pCurrBlock->pNext;
   }
   while ( s_pCurrBlock && ( s_pCurrBlock != pAlloc ) );
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After Cleanup scan\n" );
   #endif

   HB_TRACE( HB_TR_INFO, ( "Release Scan" ) );

   /* Step 4 - Release all blocks that are still marked as unused */
   pAlloc = s_pCurrBlock;
   do
   {
      NewTopBlock:

      if( s_pCurrBlock->used & HB_GC_DELETE )
      {
         HB_TRACE( HB_TR_INFO, ( "Delete, %p", s_pCurrBlock ) );

         pDelete = s_pCurrBlock;
         hb_gcUnlink( &s_pCurrBlock, s_pCurrBlock );

         /*
            Releasing the top block in the list, so we must mark the new top into pAlloc,
            but we still need to process this new top. Without this goto, the while
            condition would immediately fail. Using extra flags and new conditions
            would adversely affect performance.
         */
         if( pDelete == pAlloc )
         {
            HB_TRACE( HB_TR_INFO, ( "New Top, %p", pDelete ) );

            pAlloc = s_pCurrBlock;
            HB_GARBAGE_FREE( pDelete );

            if( s_pCurrBlock )
            {
               goto NewTopBlock;
            }
         }
         else
         {
            HB_TRACE( HB_TR_INFO, ( "Free, %p", pDelete ) );
            HB_GARBAGE_FREE( pDelete );
            HB_TRACE( HB_TR_INFO, ( "DONE Free, %p", pDelete ) );
         }
      }
      else
      {
         s_pCurrBlock = s_pCurrBlock->pNext;
      }
   }
   while ( s_pCurrBlock && ( pAlloc != s_pCurrBlock ) );
   #ifdef TRACE_COLLECT
      TraceLog( NULL,  "After Release scan\n" );
   #endif

   s_pCurrBlock = pAlloc;

   /* Step 5 - flip flag */
   /* Reverse the used/unused flag so we don't have to mark all blocks
   * during the next collection
   */
   s_uUsedFlag ^= HB_GC_USED_FLAG;

   /* Step 6: garbage collection requests are now allowed again. */
   s_bCollecting = FALSE;

   /* Step 7: release all the locks on the scanned objects */
   /* Put this thread back into the machine execution count */

   #if defined( HB_THREAD_SUPPORT )
      hb_threadIdleEnd();
   #endif

}
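
A collection pass like the one above is requested rather than scheduled: C-level code can simply call hb_gcCollectAll() when it wants a pass, and PRG-level code can request the same through the HB_GCALL() service function where available. A minimal sketch of forcing a full pass from a C extension (the wrapper function is illustrative, not part of Harbour):

/* Illustrative helper: request an unconditional collection pass.
 * Passing FALSE instead would let the collector skip the pass when
 * fewer than HB_GC_COLLECTION_JUSTIFIED blocks were allocated since
 * the previous run (see the checks at the top of hb_gcCollectAll()).
 */
static void my_forceCollect( void )
{
   hb_gcCollectAll( TRUE );
}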