Example 1
Val*   heapio__read_externs_table   (Inbuf* bp)   {
    // =========================
    //

    // Read the header:
    //
    Externs_Header	    header;
    heapio__read_block( bp, &header, sizeof( header ) );

    Val* externs =  MALLOC_VEC( Val, header.externs_count );

    // Read in the names of the exported symbols:
    //
    Unt8*                  buf =  MALLOC_VEC( Unt8, header.externs_bytesize );
    heapio__read_block( bp, buf, header.externs_bytesize );

    // Map the names of the external symbols
    // to addresses in the run-time system:
    //
    Unt8* cp = buf;
    for (int i = 0;  i < header.externs_count;  i++) {
        //
        Val  heapval  =  find_cfun ((char*) cp);          if (heapval == HEAP_VOID)    die ("Run-time system does not provide \"%s\"", cp);

        externs[i] = heapval;

	cp +=  strlen((char*)cp) + 1;
    }

    FREE( buf );

    return externs;
}
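
A minimal standalone sketch (illustrative only, not part of the runtime) of the packed-string walk in the loop above: the extern names arrive as NUL-terminated strings stored back to back in one buffer, and the cursor advances past each name and its terminating NUL via strlen()+1:

#include <stdio.h>
#include <string.h>

int main (void) {
    // Two names packed consecutively; the array literal
    // supplies the second name's terminating NUL:
    const char  buf[]  =  "open_file\0close_file";
    int         count  =  2;

    const char* cp = buf;
    for (int i = 0;  i < count;  i++) {
        printf ("extern %d: %s\n", i, cp);
        cp += strlen (cp) + 1;              // Step past the name plus its NUL.
    }
    return 0;
}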
Example 2
static void   load_compiled_file__may_heapclean   (
    //        =================================
    //
    Task*  task,
    char*  filename,
    Roots* extra_roots
){
    ///////////////////////////////////////////////////////
    // Loading a compiledfile is a five-step process:
    //
    // 1. Read the header, which holds various
    //    numbers we need such as the number of
    //    code segments in the compiledfile.
    //
    // 2. Locate all the values imported by this
    //    compiledfile from the export lists of
    //    previously loaded compiled_files.
    //      For subsequent ease of access, we
    //    construct an 'import record' (a vector)
    //    holding all these values packed
    //    consecutively.
    //
    // 3. Read the export picklehash (if any) by which
    //    later compiled_files will refer to this one.
    //
    // 4. Read and execute the code segments: the first
    //    constructs our literals, the last runs our
    //    top-level code and builds the tree of values
    //    we export.
    //
    // 5. Register that tree of exported values for the
    //    benefit of compiled_files loaded later.
    //
    ///////////////////////////////////////////////////////

    FILE* file;
    int   i;
    int   bytes_of_code_remaining;
    int   bytes_of_exports = 0;

    Compiledfile_Header   header;

    Picklehash	export_picklehash;

    Int1         segment_bytesize;
    Int1         entrypoint_offset_in_bytes;

    size_t          archive_offset;
    char*           compiledfile_filename = filename;
    

    // If 'filename' is a "library@offset:compiledfile" triple,
    // parse it into its three parts:
    //
    {   char* at_ptr
            =
            strchr (filename, '@');

	if  (!at_ptr) {

	    archive_offset = 0; 	// We're loading a bare .compiled, not one packed within a library archive.

	} else {

            char* colon_ptr = strchr (at_ptr + 1, ':');
	    if   (colon_ptr) {
		 *colon_ptr = '\0';

		 compiledfile_filename = colon_ptr + 1;
	    }

	    archive_offset = strtoul (at_ptr + 1, NULL, 0);        // XXX SUCKO FIXME Needs more sanity checking.
	    *at_ptr = '\0';
	}
    }
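
    // For example, given
    //     filename == "foo.lib@20132:bar.compiled"
    // the above leaves
    //     filename              == "foo.lib"
    //     archive_offset        == 20132
    //     compiledfile_filename == "bar.compiled"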

    // Log all files loaded, for diagnostic/information purposes:
    //
    if (!archive_offset) {
        //
	fprintf (
	    log_fd ? log_fd : stderr,
	    "                    load-compiledfiles.c:   Loading   object file   %s\n",
		   filename
	);

    } else {

	fprintf (
	    log_fd ? log_fd : stderr,
	    "                    load-compiledfiles.c:   Loading   offset        %8lu in lib  %s  \tnamely object file %s\n",
	    (unsigned long) archive_offset,
	    filename,
	    compiledfile_filename
	);
    }

    // Open the file:
    //
    file = open_file( filename, TRUE );            if (!file)   print_stats_and_exit( 1 );

    // If an offset is given (which is to say, if we are loading
    // a compiledfile packed within a library archive) then
    // seek to the beginning of the section that contains
    // the image of our compiledfile:
    //
    if (archive_offset) {
        //
        if (fseek (file, archive_offset, SEEK_SET) == -1) {
	    //
	    die ("Cannot seek on archive file \"%s@%ul\": %s", filename, (unsigned long) archive_offset, strerror(errno) );
        }
    }

    // Get the header:
    //
    read_n_bytes_from_file( file, &header, sizeof(Compiledfile_Header), filename );

    // The integers in the header are kept in big-endian byte 
    // order, so convert them if we're on a little-endian box:
    //
    header.number_of_imported_picklehashes	= BIGENDIAN_TO_HOST( header.number_of_imported_picklehashes	);
    header.number_of_exported_picklehashes	= BIGENDIAN_TO_HOST( header.number_of_exported_picklehashes	);
    header.bytes_of_import_tree			= BIGENDIAN_TO_HOST( header.bytes_of_import_tree            	);
    header.bytes_of_dependency_info		= BIGENDIAN_TO_HOST( header.bytes_of_dependency_info	 	);
    header.bytes_of_inlinable_code		= BIGENDIAN_TO_HOST( header.bytes_of_inlinable_code	 	);
    header.reserved				= BIGENDIAN_TO_HOST( header.reserved			 	);
    header.pad             			= BIGENDIAN_TO_HOST( header.pad       			 	);
    header.bytes_of_compiled_code		= BIGENDIAN_TO_HOST( header.bytes_of_compiled_code		);
    header.bytes_of_symbolmapstack		= BIGENDIAN_TO_HOST( header.bytes_of_symbolmapstack		);

    // XXX SUCKO FIXME These days 99% of the market is little-endian,
    // so should either change to always little-endian, or else
    // (better) always use host system's native byte ordering.
    // Ideally we should be able to just mmap the .compiledfile into
    // memory and be ready to go, with no bit-fiddling needed at all.



    // Read the 'import tree' and locate all the
    // values it specifies in the export trees of
    // previously-loaded compiled_files:
    //
    int imports_record_slot_count
        =
        header.number_of_imported_picklehashes + 1;

    // Make sure we have enough free heap space to allocate 
    // our 'import record' vector of imported values:
    //
    if (need_to_call_heapcleaner (task, REC_BYTESIZE(imports_record_slot_count))) {
        //
	call_heapcleaner_with_extra_roots (task, 0, extra_roots );
    }

    // Write the header for our 'import record', which will be 
    // a Mythryl record with 'imports_record_slot_count' slots:
    //
    set_slot_in_nascent_heapchunk (task, 0, MAKE_TAGWORD(imports_record_slot_count, PAIRS_AND_RECORDS_BTAG));

    // Locate all the required import values and
    // save them in our nascent on-heap 'import record':
    //
    {   int    next_imports_record_slot_to_fill = 1;

        // Over all previously loaded .compiled files
        // from which we import values:
        //
	while (next_imports_record_slot_to_fill < imports_record_slot_count) {
	    //
	    Picklehash	picklehash_naming_previously_loaded_compiled_file;

	    read_n_bytes_from_file( file, &picklehash_naming_previously_loaded_compiled_file, sizeof(Picklehash), filename );

            // Locate all needed imports exported by that
            // particular pre-loaded compiledfile:
            //
	    next_imports_record_slot_to_fill
		=
		fetch_imports (
		    task,
		    file,
		    filename,
		    next_imports_record_slot_to_fill,
		    picklehash_to_exports_tree( &picklehash_naming_previously_loaded_compiled_file )
		);
	}
    }

    // Put a dummy valid value (NIL) in the last slot,
    // just so the cleaner won't go bananas if it
    // looks at that slot:
    //
    set_slot_in_nascent_heapchunk( task, imports_record_slot_count, HEAP_NIL );

    // Complete the above by actually allocating
    // the 'import record' on the Mythryl heap:
    //
    Val import_record =  commit_nascent_heapchunk( task, imports_record_slot_count );			// Contains all the values we import from other compiled_files.

    Roots roots1 = { &import_record, extra_roots };

    // Get the export picklehash for this compiledfile.
    // This is the name by which other compiled_files will
    // refer to us in their turn as they are loaded.
    //
    // Some compiled_files may not have such a name, in
    // which case they have no directly visible exported
    // values.  (This typically means that they are a
    // plug-in which installs pointers to itself in some
    // other module's datastructures, as a side-effect
    // during loading.)
    //
    if (header.number_of_exported_picklehashes == 1) {

	bytes_of_exports = sizeof( Picklehash );

	read_n_bytes_from_file( file, &export_picklehash, bytes_of_exports, filename );

    } else if (header.number_of_exported_picklehashes != 0) {

	die ("Number of exported picklehashes is %d (should be 0 or 1)",
            (int)header.number_of_exported_picklehashes
        );
    }

    // Seek to the first "code segment" within our compiledfile image.
    // This contains bytecoded instructions interpretable by
    // make-package-literals-via-bytecode-interpreter.c which construct all the needed constant
    // lists etc for this compiledfile.  (If we stored them as actual
    // lists, we'd have to do relocations on all the pointers in
    // those structures at this point.  The bytecode solution seems
    // simpler.)
    {
        // XXX BUGGO FIXME A 'long' is 32 bits on 32-bit Linux,
        // but files longer than 2GB (signed long!) are often
        // supported.  We probably should use fseeko in those
        // cases and then
        //    #define _FILE_OFFSET_BITS 64
        // so as to support large (well, *huge* :) library files.
        // See the manpage for details.
        // This probably won't be a frequent problem in practice
        // for a few years yet, and by then we'll probably be
        // running 64-bit Linux anyhow, so not a high priority.
        //
	long file_offset = archive_offset
	                 + sizeof(Compiledfile_Header)
			 + header.bytes_of_import_tree
	                 + bytes_of_exports
	                 + header.bytes_of_dependency_info
			 + header.bytes_of_inlinable_code
			 + header.reserved
	                 + header.pad;

	if (fseek(file, file_offset, SEEK_SET) == -1) {
	    //
	    die ("cannot seek on .compiled file \"%s\": %s", filename, strerror(errno) );
        }
    }

    ////////////////////////////////////////////////////////////////
    // In principle, a .compiled file can contain any number of
    // code segments, so we track the number of bytes of code
    // left to process:  When it hits zero, we've done all
    // the code segments.
    //
    // In practice, we currently always have exactly two
    // code segments, the first of which contains the byte-
    // coded logic constructing our literals (constants
    // -- see src/c/heapcleaner/make-package-literals-via-bytecode-interpreter.c)
    // and the second of which contains all our compiled
    // native code for the compiledfile, including that
    // which constructs our tree of exported (directly externally
    // visible) values.
    ////////////////////////////////////////////////////////////////
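
    // As read below, each code segment is laid out on disk as
    //
    //     Int1  segment_bytesize               (big-endian)
    //     Int1  entrypoint_offset_in_bytes     (big-endian)
    //     Unt8  code[ segment_bytesize ]
    //
    // which is why each segment consumes
    // segment_bytesize + 2*sizeof(Int1) bytes of
    // header.bytes_of_compiled_code.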

    bytes_of_code_remaining
	=
	header.bytes_of_compiled_code;

    // Read the size and the dummy entry point for the
    // first code segment (literal-constructing bytecodes).
    // The entrypoint offset of this first segment is always
    // zero, which is why we ignore it here:
    //
    read_n_bytes_from_file( file, &segment_bytesize, sizeof(Int1), filename );
    //
    segment_bytesize = BIGENDIAN_TO_HOST( segment_bytesize );
    //
    read_n_bytes_from_file( file, &entrypoint_offset_in_bytes, sizeof(Int1), filename );
    //	
    // entrypoint_offset_in_bytes = BIGENDIAN_TO_HOST( entrypoint_offset_in_bytes );

    bytes_of_code_remaining -=  segment_bytesize + 2 * sizeof(Int1);
    //
    if (bytes_of_code_remaining < 0) {
	//
	die ("format error (data size mismatch) in .compiled file \"%s\"", filename);
    }


    Val	    mythryl_result = HEAP_VOID;


    if (segment_bytesize > 0) {
	//
	Unt8* data_chunk =  MALLOC_VEC( Unt8, segment_bytesize );

	read_n_bytes_from_file( file, data_chunk, segment_bytesize, filename );

	mythryl_result = make_package_literals_via_bytecode_interpreter__may_heapclean (task, data_chunk, segment_bytesize, &roots1);

	FREE(data_chunk);
    }

    // Do a functional update of the last element of the import_record:
    //
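    // (At i == 0 this copies the record's tagword, which lives
    // one slot before the address a record value points to --
    // hence the [i-1] below.)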
    for (i = 0;  i < imports_record_slot_count;  i++) {
	//
	set_slot_in_nascent_heapchunk(task, i, PTR_CAST(Val*, import_record)[i-1]);	// <============ last use of import_record
    }
    set_slot_in_nascent_heapchunk( task, imports_record_slot_count, mythryl_result );
    mythryl_result = commit_nascent_heapchunk( task, imports_record_slot_count );

    Roots roots2 = { &mythryl_result, extra_roots };					// 'extra_roots' not '&roots1' because import_record is dead here.

    // Do a garbage collection, if necessary:
    //
    if (need_to_call_heapcleaner( task, PICKLEHASH_BYTES + REC_BYTESIZE(5)) ) {
        //
	call_heapcleaner_with_extra_roots (task, 0, &roots2 );
    }

    while (bytes_of_code_remaining > 0) {						// In practice, we always execute this loop exactly once.
	//
        // Read the size and entry point for this code chunk:

	read_n_bytes_from_file( file, &segment_bytesize, sizeof(Int1), filename );
      
	segment_bytesize =  BIGENDIAN_TO_HOST( segment_bytesize );

	read_n_bytes_from_file( file, &entrypoint_offset_in_bytes, sizeof(Int1), filename );

	entrypoint_offset_in_bytes =  BIGENDIAN_TO_HOST( entrypoint_offset_in_bytes );

        // How much more?
        //
	bytes_of_code_remaining -=  segment_bytesize + 2 * sizeof(Int1);
	//
	if (bytes_of_code_remaining < 0)   die ("format error (code size mismatch) in .compiled file \"%s\"", filename);

        // Allocate heap space and read code chunk:
	//
	Val code_chunk = allocate_nonempty_code_chunk (task, segment_bytesize);
	//
	read_n_bytes_from_file( file, PTR_CAST(char*, code_chunk), segment_bytesize, filename );

        // Flush the instruction cache, so CPU will see
        // our newly loaded code.  (To gain speed, and
        // simplify the hardware design, most modern CPUs
        //  assume that code is never modified on the fly,
        // or at least not without manually  flushing the
        // instruction cache this way.)
	//
	flush_instruction_cache (PTR_CAST(char*, code_chunk), segment_bytesize);
      
        // Create closure, taking entry point into account:
	//
	{   Val closure = make_one_slot_record(  task,  PTR_CAST( Val, PTR_CAST (char*, code_chunk) + entrypoint_offset_in_bytes)  );

	    // Apply the closure to the import picklehash vector.
	    //
	    // This actually executes all the top-level code for
	    // the compile unit, which is to say that if the
	    // source for our compiledfile looked something like
	    //
	    // package my_pkg {
	    //     my _ = file::print "Hello, world!\n";
	    // };
	    //
	    // then when we do the following 'apply' call, you'd see
	    //
	    // Hello, world!
	    //
	    // printed on the standard output.
	    //
	    // In addition, invisible compiler-generated code
	    // constructs and returns the tree of exports from
	    // our compiledfile.
	    //
	    save_c_state                                          (task, extra_roots);				// We do NOT want mythryl_result on the extra_roots list here.
	    mythryl_result =  run_mythryl_function__may_heapclean (task, closure, mythryl_result, TRUE, NULL); 	// run_mythryl_function__may_heapclean		def in   src/c/main/run-mythryl-code-and-runtime-eventloop.c
	    restore_c_state					  (task, extra_roots);
	}

	if (need_to_call_heapcleaner (task, PICKLEHASH_BYTES+REC_BYTESIZE(5))) {
	    //
	    call_heapcleaner_with_extra_roots (task, 0, &roots2 );
        }
    }

    // Publish this compiled_file's exported-values tree
    // for the benefit of compiled_files loaded later:
    //
    if (bytes_of_exports) {
	//
	register_compiled_file_exports__may_heapclean (
            task,
            &export_picklehash,     // key -- the 16-byte picklehash naming this compiledfile.
            mythryl_result,         // val -- the tree of exported Mythryl values.
	    extra_roots
        );
    }

    fclose( file );
}                                   // load_compiled_file__may_heapclean
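
The BIGENDIAN_TO_HOST macro used above is not shown here; as a hedged sketch (an assumption about what such a conversion must do, not the runtime's actual definition), a portable way to convert a 32-bit big-endian field is to reassemble it byte by byte, which works unchanged on hosts of either endianness:

#include <stdint.h>
#include <stdio.h>

// Reassemble a 32-bit big-endian value from its bytes.
// Portable: makes no assumption about host byte order.
static uint32_t bigendian_to_host_32 (const unsigned char b[4]) {
    return ((uint32_t) b[0] << 24)
         | ((uint32_t) b[1] << 16)
         | ((uint32_t) b[2] <<  8)
         |  (uint32_t) b[3];
}

int main (void) {
    unsigned char raw[4] = { 0x00, 0x00, 0x01, 0x02 };       // 258, big-endian.
    printf ("%u\n", (unsigned) bigendian_to_host_32 (raw));  // Prints 258.
    return 0;
}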
Example 3
static void   read_heap   (
    //        =========
    //
    Inbuf*       bp,
    Heap_Header* header,
    Task*        task,
    Val*         externs
){
    Heap*		heap =  task->heap;

    Sib_Header*	sib_headers;
    Sib_Header*	p;
    Sib_Header*	q;

    int			sib_headers_bytesize;
    int			i, j, k;

    long		prevSzB[MAX_PLAIN_SIBS], size;
    Sibid*		oldBOOK2SIBID;
    Punt		addrOffset[MAX_AGEGROUPS][MAX_PLAIN_SIBS];

    Hugechunk_Quire_Relocation_Info*	boRelocInfo;

    Addresstable*	boRegionTable;

    // Allocate a book_to_sibid__global for the imported
    // heap image's address space:
    //
    #ifdef TWO_LEVEL_MAP
        #error two level map not supported
    #else
	oldBOOK2SIBID = MALLOC_VEC (Sibid, BOOK2SIBID_TABLE_SIZE_IN_SLOTS);
    #endif

    // Read in the hugechunk region descriptors
    // for the old address space:
    //
    {
	int		  size;
	Hugechunk_Quire_Header* boRgnHdr;

	boRegionTable = make_address_hashtable(LOG2_BOOK_BYTESIZE+1, header->hugechunk_quire_count);

	size = header->hugechunk_quire_count * sizeof(Hugechunk_Quire_Header);

	boRgnHdr = (Hugechunk_Quire_Header*) MALLOC (size);

	heapio__read_block( bp, boRgnHdr, size );

	boRelocInfo = MALLOC_VEC(Hugechunk_Quire_Relocation_Info, header->hugechunk_quire_count);

	for (i = 0;  i < header->hugechunk_quire_count;  i++) {

	    set_book2sibid_entries_for_range(oldBOOK2SIBID,
		(Val*)(boRgnHdr[i].base_address),
		BOOKROUNDED_BYTESIZE(boRgnHdr[i].bytesize),
		HUGECHUNK_DATA_SIBID(1)
            );

	    oldBOOK2SIBID[GET_BOOK_CONTAINING_POINTEE(boRgnHdr[i].base_address)] = HUGECHUNK_RECORD_SIBID(MAX_AGEGROUPS);

	    boRelocInfo[i].first_ram_quantum = boRgnHdr[i].first_ram_quantum;

	    boRelocInfo[i].page_count
                =
                (boRgnHdr[i].bytesize - (boRgnHdr[i].first_ram_quantum - boRgnHdr[i].base_address))
                >>
                LOG2_HUGECHUNK_RAM_QUANTUM_IN_BYTES;

	    boRelocInfo[i].hugechunk_page_to_hugechunk = MALLOC_VEC(Hugechunk_Relocation_Info*, boRelocInfo[i].page_count);

	    for (j = 0;  j < boRelocInfo[i].page_count;  j++) {
	        //
		boRelocInfo[i].hugechunk_page_to_hugechunk[j] = NULL;
            } 
	    addresstable_insert (boRegionTable, boRgnHdr[i].base_address, &(boRelocInfo[i]));
	}
	FREE (boRgnHdr);
    }

    // Read the sib headers:
    //
    sib_headers_bytesize = header->active_agegroups * TOTAL_SIBS * sizeof( Sib_Header );
    //
    sib_headers = (Sib_Header*) MALLOC( sib_headers_bytesize );
    //
    heapio__read_block( bp, sib_headers, sib_headers_bytesize );

    for (i = 0;  i < MAX_PLAIN_SIBS;  i++) {
        //
	prevSzB[i] = task->heap_allocation_buffer_bytesize;
    }

    // Allocate the sib buffers and read in the heap image:
    //
    for (p = sib_headers, i = 0;  i < header->active_agegroups;  i++) {
        //
	Agegroup*  age =  heap->agegroup[ i ];

	// Compute the space required for this agegroup,
	// and mark the oldBOOK2SIBID to reflect the old address space:
	//
	for (q = p, j = 0;  j < MAX_PLAIN_SIBS;  j++) {

	    set_book2sibid_entries_for_range (
		//
		oldBOOK2SIBID,

		(Val*) q->info.o.base_address,

		BOOKROUNDED_BYTESIZE( q->info.o.bytesize ),

		age->sib[ j ]->id
	    );

	    size = q->info.o.bytesize + prevSzB[j];

	    if (j == RO_CONSCELL_SIB
            &&  size > 0
            ){
		size += 2*WORD_BYTESIZE;
	    }

	    age->sib[ j ]->tospace.bytesize
		=
		BOOKROUNDED_BYTESIZE( size );

	    prevSzB[ j ] =  q->info.o.bytesize;

	    q++;
	}

	if (set_up_tospace_sib_buffers_for_agegroup(age) == FALSE) {
	    die ("unable to allocated space for agegroup %d\n", i+1);
        } 
	if (sib_is_active( age->sib[ RW_POINTERS_SIB ] )) {							// sib_is_active	def in    src/c/h/heap.h
	    //
	    make_new_coarse_inter_agegroup_pointers_map_for_agegroup (age);
        }

	// Read in the sib buffers for this agegroup
	// and initialize the address offset table:
	//
	for (int j = 0;  j < MAX_PLAIN_SIBS;  j++) {
	    //
	    Sib* ap = age->sib[ j ];

	    if (p->info.o.bytesize > 0) {

		addrOffset[i][j] = (Punt)(ap->tospace.start) - (Punt)(p->info.o.base_address);

		heapio__seek( bp, (long) p->offset );

		heapio__read_block( bp, (ap->tospace.start), p->info.o.bytesize );

		ap->tospace.used_end  = (Val *)((Punt)(ap->tospace.start) + p->info.o.bytesize);

		ap->fromspace.seniorchunks_end =  ap->tospace.start;

	    } else if (sib_is_active(ap)) {

		ap->fromspace.seniorchunks_end =  ap->tospace.start;
	    }

	    if (verbosity__global > 0)   say(".");

	    p++;
	}

        // Read in the hugechunk sib buffers (currently just codechunks):
        //
	for (int ilk = 0;  ilk < MAX_HUGE_SIBS;  ilk++) {			// MAX_HUGE_SIBS		def in    src/c/h/sibid.h
	    //	
	    Punt	 totSizeB;

	    Hugechunk* free_chunk;
	    Hugechunk* bdp = NULL;		// Without this initialization, gcc -Wall gives a 'possible uninitialized use' warning.

	    Hugechunk_Quire*	 free_quire;
	    Hugechunk_Header*	 boHdrs;

	    int			 boHdrSizeB;
	    int			 index;

	    Hugechunk_Quire_Relocation_Info*  region;

	    if (p->info.bo.hugechunk_quanta_count > 0) {
		//
		totSizeB = p->info.bo.hugechunk_quanta_count << LOG2_HUGECHUNK_RAM_QUANTUM_IN_BYTES;

		free_chunk = allocate_hugechunk_quire( heap, totSizeB );

		free_quire = free_chunk->hugechunk_quire;

		free_quire->age_of_youngest_live_chunk_in_quire
		    =
                    i;

		set_book2sibid_entries_for_range (
		    //
		    book_to_sibid__global,
                    (Val*) free_quire,
		    BYTESIZE_OF_QUIRE( free_quire->quire ),
		    HUGECHUNK_DATA_SIBID( i )
		);

		book_to_sibid__global[ GET_BOOK_CONTAINING_POINTEE( free_quire ) ]
		    =
		    HUGECHUNK_RECORD_SIBID( i );

	        // Read in the hugechunk headers:
                //
		boHdrSizeB = p->info.bo.hugechunk_count * sizeof(Hugechunk_Header);
		//
		boHdrs = (Hugechunk_Header*) MALLOC (boHdrSizeB);
		//
		heapio__read_block (bp, boHdrs, boHdrSizeB);

	        // Read in the hugechunks:
                //
		heapio__read_block( bp, (void *)(free_chunk->chunk), totSizeB );
		//
		if (ilk == CODE__HUGE_SIB) {					// ilk = 0 == CODE__HUGE_SIB	def in    src/c/h/sibid.h
		    //
		    flush_instruction_cache ((void *)(free_chunk->chunk), totSizeB);
		}

	        // Set up the hugechunk descriptors 
                // and per-chunk relocation info:
                //
		for (k = 0;  k < p->info.bo.hugechunk_count;  k++) {
		    //
		    // Find the region relocation info for the
		    // chunk's region in the exported heap:
		    //
		    for (index = GET_BOOK_CONTAINING_POINTEE(boHdrs[k].base_address);
			!SIBID_ID_IS_BIGCHUNK_RECORD(oldBOOK2SIBID[index]);
			index--)
			continue;

		    region = LOOK_UP_HUGECHUNK_REGION (boRegionTable, index);

		    // Allocate the hugechunk record for
		    // the chunk and link it into the list
                    // of hugechunks for its agegroup.
		    //
		    bdp = allocate_a_hugechunk( free_chunk, &(boHdrs[k]), region );

		    bdp->next = age->hugechunks[ ilk ];

		    age->hugechunks[ ilk ] = bdp;

		    ASSERT( bdp->gen == i+1 );

		    if (codechunk_comment_display_is_enabled__global
                    &&  ilk == CODE__HUGE_SIB
                    ){
		        // Dump the comment string of the code chunk.

			Unt8* namestring;
			//
			if ((namestring = get_codechunk_comment_string_else_null( bdp ))) {
			    debug_say ("[%6d bytes] %s\n", bdp->bytesize, (char*)namestring);
                        }
		    }
		}

		if (free_chunk != bdp) {					// if p->info.bo.hugechunk_count can be zero, 'bdp' value here may be bogus. XXX BUGGO FIXME.
		    //
		    // There was some extra space left in the region:
		    //
		    insert_hugechunk_in_doubly_linked_list( heap->hugechunk_freelist, free_chunk);						// insert_hugechunk_in_doubly_linked_list	def in   src/c/h/heap.h
		}

		FREE (boHdrs);
	    }

	    if (verbosity__global > 0)   say(".");

	    p++;
	}
    }

    repair_heap (heap, oldBOOK2SIBID, addrOffset, boRegionTable, externs);

    // Adjust the run-time globals
    // that point into the heap:
    //
    *PTR_CAST( Val*, PERVASIVE_PACKAGE_PICKLE_LIST_REFCELL__GLOBAL )
        =
        repair_word(
            *PTR_CAST( Val*, PERVASIVE_PACKAGE_PICKLE_LIST_REFCELL__GLOBAL ),
	    oldBOOK2SIBID,
            addrOffset,
            boRegionTable,
            externs
        );

    runtime_package__global = repair_word( runtime_package__global, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

#ifdef ASM_MATH
    mathvec__global = repair_word (mathvec__global, oldBOOK2SIBID, addrOffset, boRegionTable, externs);
#endif

    // Adjust the Mythryl registers
    // to the new address space:
    //
    ASSIGN(
        POSIX_INTERPROCESS_SIGNAL_HANDLER_REFCELL__GLOBAL,
	//
        repair_word (
	    //
	    DEREF( POSIX_INTERPROCESS_SIGNAL_HANDLER_REFCELL__GLOBAL ),
	    oldBOOK2SIBID,
	    addrOffset,
	    boRegionTable,
            externs
	)
    );

    task->argument
	=
	repair_word( task->argument, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->fate
	=
	repair_word( task->fate, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->current_closure
	=
	repair_word( task->current_closure, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->program_counter
	=
	repair_word(  task->program_counter, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->link_register
	=
	repair_word (task->link_register, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->exception_fate
	=
	repair_word( task->exception_fate, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->current_thread
	=
	repair_word( task->current_thread, oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->callee_saved_registers[0]
	=
	repair_word( task->callee_saved_registers[0], oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->callee_saved_registers[1]
	=
	repair_word( task->callee_saved_registers[1], oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    task->callee_saved_registers[2]
	=
	repair_word( task->callee_saved_registers[2], oldBOOK2SIBID, addrOffset, boRegionTable, externs );

    // Release storage:
    //
    for (i = 0; i < header->hugechunk_quire_count;  i++) {
      //
	Hugechunk_Relocation_Info*	p;
	for (p = NULL, j = 0;  j < boRelocInfo[i].page_count;  j++) {
	    if ((boRelocInfo[i].hugechunk_page_to_hugechunk[j] != NULL)
	    && (boRelocInfo[i].hugechunk_page_to_hugechunk[j] != p)) {
		FREE (boRelocInfo[i].hugechunk_page_to_hugechunk[j]);
		p = boRelocInfo[i].hugechunk_page_to_hugechunk[j];
	    }
	}
    }

    free_address_table( boRegionTable, FALSE );

    FREE( boRelocInfo   );
    FREE( sib_headers   );
    FREE( oldBOOK2SIBID );

    // Reset the tospace.swept_end pointers:
    //
    for (int i = 0;  i < heap->active_agegroups;  i++) {
        //
	Agegroup*	age =  heap->agegroup[i];
        //
	for (int j = 0;  j < MAX_PLAIN_SIBS;  j++) {
	    //
	    Sib* ap =  age->sib[ j ];
	    //
	    if (sib_is_active(ap)) {							// sib_is_active	def in    src/c/h/heap.h
		//
		ap->tospace.swept_end
		    =
		    ap->tospace.used_end;
	    }
	}
    }
}                                                       // fun read_heap
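
repair_word itself is not shown above, but the addrOffset[][] table it consumes is computed as tospace.start minus the image's base_address; a minimal sketch (hypothetical addresses; plain-sib case only, an assumption about repair_word's effect) of the resulting pointer relocation:

#include <stdint.h>
#include <stdio.h>

int main (void) {
    uintptr_t old_base = 0x10000000;            // base_address recorded in the heap image.
    uintptr_t new_base = 0x20000000;            // tospace.start after the image is re-read.
    uintptr_t delta    = new_base - old_base;   // The addrOffset[i][j] entry above.

    uintptr_t old_ptr  = 0x10000040;            // A pointer found inside the image...
    uintptr_t new_ptr  = old_ptr + delta;       // ...shifted into the new address space.

    printf ("old %#lx -> new %#lx\n",
            (unsigned long) old_ptr,
            (unsigned long) new_ptr);
    return 0;
}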