Example 1
Val   pickle_datastructure   (Task* task,  Val root_chunk)  {
    //====================
    //
    // Linearize a Mythryl heap chunk
    // into a vector of bytes.
    // Return HEAP_VOID on errors.
    //
    // This fn gets exported to the Mythryl level as 'pickle_datastructure' via
    //
    //     src/c/lib/heap/datastructure-pickler.c
    //
    // and then
    //
    //     src/lib/std/src/unsafe/unsafe.pkg

    {   Roots extra_roots = { &root_chunk, NULL };
	//
	call_heapcleaner_with_extra_roots (task, 0, &extra_roots );
    }

    int age =  get_chunk_age( root_chunk );						// get_chunk_age			def in   src/c/heapcleaner/get-chunk-age.c
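
    // An age of -1 means root_chunk is an unboxed (immediate) value
    // such as a tagged int, which can be pickled without any heap
    // traversal: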

    if (age == -1)   return pickle_unboxed_value( task, root_chunk );

    // A regular Mythryl heap chunk.
    // Do the pickler cleaning:

    // DEBUG  check_heap (task->heap, task->heap->active_agegroups);

    Pickler_Result  pickler_result								// Pickler_Result				def in    src/c/heapcleaner/datastructure-pickler.h
	=
	pickler__clean_heap( task, &root_chunk, age );					// pickler__clean_heap			def in    src/c/heapcleaner/datastructure-pickler-cleaner.c

    Val pickle
	=
	pickle_heap_datastructure( task, root_chunk, &pickler_result );			// Defined below.

    // Repair the heap or finish the cleaning:
    //
    pickler__wrap_up( task, &pickler_result );					// pickler__wrap_up		def in    src/c/heapcleaner/datastructure-pickler-cleaner.c

    // DEBUG check_heap (task->heap, result.maxGen);

    return pickle;
}
Example 2
static void   load_compiled_file__may_heapclean   (
    //        =================================
    //
    Task*  task,
    char*  filename,
    Roots* extra_roots
){
    ///////////////////////////////////////////////////////
    // Loading a compiledfile is a five-step process:
    //
    // 1. Read the header, which holds various
    //    numbers we need such as the number of
    //    code segments in the compiledfile.
    //
    // 2. Locate all the values imported by this
    //    compiledfile from the export lists of
    //    previously loaded compiled_files.
    //      For subsequent ease of access, we
    //    construct an 'import record' (a vector)
    //    holding all these values packed
    //    consecutively.
    //
    ///////////////////////////////////////////////////////

    FILE* file;
    int   i;
    int   bytes_of_code_remaining;
    int   bytes_of_exports = 0;

    Compiledfile_Header   header;

    Picklehash	export_picklehash;

    Int1         segment_bytesize;
    Int1         entrypoint_offset_in_bytes;

    size_t          archive_offset;
    char*           compiledfile_filename = filename;
    

    // If 'filename' is a "library@offset:compiledfile" triple,
    // parse it into its three parts:
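    // For example, a hypothetical "foo.lib@4096:bar.compiled" would be
    // split into filename "foo.lib", archive_offset 4096, and
    // compiledfile_filename "bar.compiled"; a bare "foo.compiled" is
    // left untouched, with archive_offset == 0.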
    //
    {   char* at_ptr
            =
            strchr (filename, '@');

	if  (!at_ptr) {

	    archive_offset = 0; 	// We're loading a bare .compiled, not one packed within a library archive.

	} else {

            char* colon_ptr = strchr (at_ptr + 1, ':');
	    if   (colon_ptr) {
		 *colon_ptr = '\0';

		 compiledfile_filename = colon_ptr + 1;
	    }

	    archive_offset = strtoul (at_ptr + 1, NULL, 0);        // XXX SUCKO FIXME Needs more sanity checking.
	    *at_ptr = '\0';
	}
    }

    // Log all files loaded, for diagnostic/information purposes:
    //
    if (!archive_offset) {
        //
	fprintf (
	    log_fd ? log_fd : stderr,
	    "                    load-compiledfiles.c:   Loading   object file   %s\n",
		   filename
	);

    } else {

	fprintf (
	    log_fd ? log_fd : stderr,
	    "                    load-compiledfiles.c:   Loading   offset        %8lu in lib  %s  \tnamely object file %s\n",
	    (unsigned long) archive_offset,
	    filename,
	    compiledfile_filename
	);
    }

    // Open the file:
    //
    file = open_file( filename, TRUE );            if (!file)   print_stats_and_exit( 1 );

    // If an offset is given (which is to say, if we are loading
    // a compiledfile packed within a library archive) then
    // seek to the beginning of the section that contains
    // the image of our compiledfile:
    //
    if (archive_offset) {
        //
        if (fseek (file, archive_offset, SEEK_SET) == -1) {
	    //
	    die ("Cannot seek on archive file \"%s@%lu\": %s", filename, (unsigned long) archive_offset, strerror(errno) );
        }
    }

    // Get the header:
    //
    read_n_bytes_from_file( file, &header, sizeof(Compiledfile_Header), filename );

    // The integers in the header are kept in big-endian byte 
    // order, so convert them if we're on a little-endian box:
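    // For example, the header bytes 0x00 0x00 0x01 0x2A denote 298 in
    // the big-endian file format; read naively on a little-endian host
    // they would come out as 0x2A010000, so BIGENDIAN_TO_HOST byte-swaps
    // them to recover 298.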
    //
    header.number_of_imported_picklehashes	= BIGENDIAN_TO_HOST( header.number_of_imported_picklehashes	);
    header.number_of_exported_picklehashes	= BIGENDIAN_TO_HOST( header.number_of_exported_picklehashes	);
    header.bytes_of_import_tree			= BIGENDIAN_TO_HOST( header.bytes_of_import_tree            	);
    header.bytes_of_dependency_info		= BIGENDIAN_TO_HOST( header.bytes_of_dependency_info	 	);
    header.bytes_of_inlinable_code		= BIGENDIAN_TO_HOST( header.bytes_of_inlinable_code	 	);
    header.reserved				= BIGENDIAN_TO_HOST( header.reserved			 	);
    header.pad             			= BIGENDIAN_TO_HOST( header.pad       			 	);
    header.bytes_of_compiled_code		= BIGENDIAN_TO_HOST( header.bytes_of_compiled_code		);
    header.bytes_of_symbolmapstack		= BIGENDIAN_TO_HOST( header.bytes_of_symbolmapstack		);

    // XXX SUCKO FIXME These days 99% of the market is little-endian,
    // so should either change to always little-endian, or else
    // (better) always use host system's native byte ordering.
    // Ideally we should be able to just mmap the .compiledfile into
    // memory and be ready to go, with no bit-fiddling needed at all.



    // Read the 'import tree' and locate all the values it
    // specifies in the export trees of previously-loaded
    // compiled_files:
    //
    int imports_record_slot_count
        =
        header.number_of_imported_picklehashes + 1;

    // Make sure we have enough free heap space to allocate 
    // our 'import record' vector of imported values:
    //
    if (need_to_call_heapcleaner (task, REC_BYTESIZE(imports_record_slot_count))) {
        //
	call_heapcleaner_with_extra_roots (task, 0, extra_roots );
    }

    // Write the header for our 'import record', which will be 
    // a Mythryl record with 'imports_record_slot_count' slots:
    //
    set_slot_in_nascent_heapchunk (task, 0, MAKE_TAGWORD(imports_record_slot_count, PAIRS_AND_RECORDS_BTAG));

    // Locate all the required import values and
    // save them in our nascent on-heap 'import record':
    //
    {   int    next_imports_record_slot_to_fill = 1;

        // Over all previously loaded .compiled files
        // from which we import values:
        //
	while (next_imports_record_slot_to_fill < imports_record_slot_count) {
	    //
	    Picklehash	picklehash_naming_previously_loaded_compiled_file;

	    read_n_bytes_from_file( file, &picklehash_naming_previously_loaded_compiled_file, sizeof(Picklehash), filename );

            // Locate all needed imports exported by that
            // particular pre-loaded compiledfile:
            //
	    next_imports_record_slot_to_fill
		=
		fetch_imports (
		    task,
		    file,
		    filename,
		    next_imports_record_slot_to_fill,
		    picklehash_to_exports_tree( &picklehash_naming_previously_loaded_compiled_file )
		);
	}
    }

    // Put a dummy valid value (NIL) in the last slot,
    // just so the cleaner won't go bananas if it
    // looks at that slot:
    //
    set_slot_in_nascent_heapchunk( task, imports_record_slot_count, HEAP_NIL );

    // Complete the above by actually allocating
    // the 'import record' on the Mythryl heap:
    //
    Val import_record =  commit_nascent_heapchunk( task, imports_record_slot_count );			// Contains all the values we import from other compiled_files.
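
    // The committed record now holds the imported values followed by the
    // final HEAP_NIL placeholder; that placeholder gets replaced further
    // below with the literals value for this compiledfile.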

    Roots roots1 = { &import_record, extra_roots };

    // Get the export picklehash for this compiledfile.
    // This is the name by which other compiled_files will
    // refer to us in their turn as they are loaded.
    //
    // Some compiled_files may not have such a name, in
    // which case they have no directly visible exported
    // values.  (This typically means that they are a
    // plug-in which installs pointers to itself in some
    // other module's datastructures, as a side-effect
    // during loading.)
    //
    if (header.number_of_exported_picklehashes == 1) {

	bytes_of_exports = sizeof( Picklehash );

	read_n_bytes_from_file( file, &export_picklehash, bytes_of_exports, filename );

    } else if (header.number_of_exported_picklehashes != 0) {

	die ("Number of exported picklehashes is %d (should be 0 or 1)",
            (int)header.number_of_exported_picklehashes
        );
    }

    // Seek to the first "code segment" within our compiledfile image.
    // This contains bytecoded instructions interpretable by
    // make-package-literals-via-bytecode-interpreter.c which construct all the needed constant
    // lists etc for this compiledfile.  (If we stored them as actual
    // lists, we'd have to do relocations on all the pointers in
    // those structures at this point.  The bytecode solution seems
    // simpler.)
    {
        // XXX BUGGO FIXME A 'long' is 32 bits on 32-bit Linux,
        // but files longer than 2GB (signed long!) are often
        // supported.  We probably should use fseeko in those
        // cases and then
        //    #define _FILE_OFFSET_BITS 64
        // so as to support large (well, *huge* :) library files.
        // See the manpage for details.
        // This probably won't be a frequent problem in practice
        // for a few years yet, and by then we'll probably be
        // running 64-bit Linux anyhow, so not a high priority.
        //
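        // The offset computed below reflects the on-disk layout of a
        // .compiled image: the Compiledfile_Header, then the import
        // tree, the export picklehash (if any), the dependency info,
        // the inlinable code, the 'reserved' and 'pad' sections, and
        // finally the code segments we are seeking to here.
        //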
	long file_offset = archive_offset
	                 + sizeof(Compiledfile_Header)
			 + header.bytes_of_import_tree
	                 + bytes_of_exports
	                 + header.bytes_of_dependency_info
			 + header.bytes_of_inlinable_code
			 + header.reserved
	                 + header.pad;

	if (fseek(file, file_offset, SEEK_SET) == -1) {
	    //
	    die ("cannot seek on .compiled file \"%s\": %s", filename, strerror(errno) );
        }
    }

    ////////////////////////////////////////////////////////////////
    // In principle, a .compiled file can contain any number of
    // code segments, so we track the number of bytes of code
    // left to process; when it hits zero, we've done all
    // the code segments.
    //
    // In practice, we currently always have exactly two
    // code segments, the first of which contains the byte-
    // coded logic constructing our literals (constants
    // -- see src/c/heapcleaner/make-package-literals-via-bytecode-interpreter.c)
    // and the second of which contains all our compiled
    // native code for the compiledfile, including that
    // which constructs our tree of exported (directly externally
    // visible) values.
    ////////////////////////////////////////////////////////////////
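    // On disk each code segment is framed as a segment_bytesize word
    // followed by an entrypoint_offset word (both Int1 values stored
    // big-endian), followed by the code bytes themselves; all of this
    // is charged against header.bytes_of_compiled_code.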

    bytes_of_code_remaining
	=
	header.bytes_of_compiled_code;

    // Read the size and the dummy entry point for the
    // first code segment (literal-constructing bytecodes).
    // The entrypoint offset of this first segment is always
    // zero, which is why we ignore it here:
    //
    read_n_bytes_from_file( file, &segment_bytesize, sizeof(Int1), filename );
    //
    segment_bytesize = BIGENDIAN_TO_HOST( segment_bytesize );
    //
    read_n_bytes_from_file( file, &entrypoint_offset_in_bytes, sizeof(Int1), filename );
    //	
    // entrypoint_offset_in_bytes = BIGENDIAN_TO_HOST( entrypoint_offset_in_bytes );

    bytes_of_code_remaining -=  segment_bytesize + 2 * sizeof(Int1);
    //
    if (bytes_of_code_remaining < 0) {
	//
	die ("format error (data size mismatch) in .compiled file \"%s\"", filename);
    }


    Val	    mythryl_result = HEAP_VOID;


    if (segment_bytesize > 0) {
	//
	Unt8* data_chunk =  MALLOC_VEC( Unt8, segment_bytesize );

	read_n_bytes_from_file( file, data_chunk, segment_bytesize, filename );

	mythryl_result = make_package_literals_via_bytecode_interpreter__may_heapclean (task, data_chunk, segment_bytesize, &roots1);

	FREE(data_chunk);
    }

    // Do a functional update of the last element of the import_record:
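    // (We copy the tagword and the import slots into a fresh nascent
    // record, overwrite the final dummy NIL slot with the literals
    // value just computed, and commit the copy; the original
    // import_record is dead after this point.)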
    //
    for (i = 0;  i < imports_record_slot_count;  i++) {
	//
	set_slot_in_nascent_heapchunk(task, i, PTR_CAST(Val*, import_record)[i-1]);	// <============ last use of import_record
    }
    set_slot_in_nascent_heapchunk( task, imports_record_slot_count, mythryl_result );
    mythryl_result = commit_nascent_heapchunk( task, imports_record_slot_count );

    Roots roots2 = { &mythryl_result, extra_roots };					// 'extra_roots' not '&roots1' because import_record is dead here.

    // Do a garbage collection, if necessary:
    //
    if (need_to_call_heapcleaner( task, PICKLEHASH_BYTES + REC_BYTESIZE(5)) ) {
        //
	call_heapcleaner_with_extra_roots (task, 0, &roots2 );
    }

    while (bytes_of_code_remaining > 0) {						// In practice, we always execute this loop exactly once.
	//
        // Read the size and entry point for this code chunk:

	read_n_bytes_from_file( file, &segment_bytesize, sizeof(Int1), filename );
      
	segment_bytesize =  BIGENDIAN_TO_HOST( segment_bytesize );

	read_n_bytes_from_file( file, &entrypoint_offset_in_bytes, sizeof(Int1), filename );

	entrypoint_offset_in_bytes =  BIGENDIAN_TO_HOST( entrypoint_offset_in_bytes );

        // How much more?
        //
	bytes_of_code_remaining -=  segment_bytesize + 2 * sizeof(Int1);
	//
	if (bytes_of_code_remaining < 0)   die ("format error (code size mismatch) in .compiled file \"%s\"", filename);

        // Allocate heap space and read code chunk:
	//
	Val code_chunk = allocate_nonempty_code_chunk (task, segment_bytesize);
	//
	read_n_bytes_from_file( file, PTR_CAST(char*, code_chunk), segment_bytesize, filename );

        // Flush the instruction cache, so the CPU will see
        // our newly loaded code.  (To gain speed, and to
        // simplify the hardware design, most modern CPUs
        // assume that code is never modified on the fly,
        // or at least not without manually flushing the
        // instruction cache this way.)
	//
	flush_instruction_cache (PTR_CAST(char*, code_chunk), segment_bytesize);
      
        // Create closure, taking entry point into account:
	//
	{   Val closure = make_one_slot_record(  task,  PTR_CAST( Val, PTR_CAST (char*, code_chunk) + entrypoint_offset_in_bytes)  );

	    // Apply the closure to the import picklehash vector.
	    //
	    // This actually executes all the top-level code for
	    // the compile unit, which is to say that if the
	    // source for our compiledfile looked something like
	    //
	    // package my_pkg {
	    //     my _ = file::print "Hello, world!\n";
	    // };
	    //
	    // then when we do the following 'apply' call, you'd see
	    //
	    // Hello, world!
	    //
	    // printed on the standard output.
	    //
	    // In addition, invisible compiler-generated code
	    // constructs and returns the tree of exports from
	    // our compiledfile.
	    //
	    save_c_state                                          (task, extra_roots);				// We do NOT want mythryl_result on the extra_roots list here.
	    mythryl_result =  run_mythryl_function__may_heapclean (task, closure, mythryl_result, TRUE, NULL); 	// run_mythryl_function__may_heapclean		def in   src/c/main/run-mythryl-code-and-runtime-eventloop.c
	    restore_c_state					  (task, extra_roots);
	}

	if (need_to_call_heapcleaner (task, PICKLEHASH_BYTES+REC_BYTESIZE(5))) {
	    //
	    call_heapcleaner_with_extra_roots (task, 0, &roots2 );
        }
    }

    // Publish this compiled_file's exported-values tree
    // for the benefit of compiled_files loaded later:
    //
    if (bytes_of_exports) {
	//
	register_compiled_file_exports__may_heapclean (
            task,
            &export_picklehash,     // key -- the 16-byte picklehash naming this compiledfile.
            mythryl_result,         // val -- the tree of exported Mythryl values.
	    extra_roots
        );
    }

    fclose( file );
}                                   // load_compiled_file__may_heapclean
Example 3
static Val   read_in_compiled_file_list__may_heapclean   (
    //       =========================================
    //
    Task*          task,
    const char*    compiled_files_to_load_filename,
    int*           return_max_boot_path_len,
    Roots*         extra_roots
){
    // Open given file and read from it the list of
    // filenames of the compiled_files to be loaded later.
    // Return them as a Mythryl list of Mythryl strings:
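    //
    // As the parsing logic below implies, that file consists of a header
    // made of lines such as
    //
    //     FILES=123
    //     MAX_LINE_LENGTH=80
    //
    // (plus optional '#' comment lines), terminated by the first empty
    // line, followed by one compiledfile pathname per line.  The numbers
    // shown here are only illustrative.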

    #define    BUF_LEN	1024		//  "This should be plenty for two numbers."   "640K should be enough for anyone."
    char  buf[ BUF_LEN ];

//  Val*   file_names = NULL;
    char*  name_buf   = NULL;

    int    max_num_boot_files = MAX_NUMBER_OF_BOOT_FILES;
    int    max_boot_path_len  = MAX_LENGTH_FOR_A_BOOTFILE_PATHNAME;

    int    file_count = 0;

    FILE*  list_fd =  open_file( compiled_files_to_load_filename, FALSE );

    fprintf (
        stderr,
        "                    load-compiledfiles.c:   Reading   file          %s\n",
        compiled_files_to_load_filename
    );

    if (log_fd) {
	//
	fprintf (
	    log_fd,
	    "                    load-compiledfiles.c:   Reading   file                    %s\n",
	    compiled_files_to_load_filename
	);
    }

    Val  file_list = LIST_NIL;			Roots roots1 = { &file_list, extra_roots };

    if (list_fd) {

        // Read header:
        //
        for (;;) {
	    //
	    if (!fgets (buf, BUF_LEN, list_fd)) {
                die (
                    "compiled_files_to_load file \"%s\" ends before end-of-header (first empty line)",
                    compiled_files_to_load_filename
                );
            }

	    {    char* p = buf;
                 while (*p == ' ' || *p == '\t')   ++p;		// Skip leading whitespace.

		if (p[0] == '\n')   break;			// Header ends at first empty line.

		if (p[0] == '#')   continue;			// Ignore comment lines.

                if (strstr( p,"FILES=") == p) {
		    //
		    max_num_boot_files = strtoul(p+6, NULL, 0);
                    continue;
                }

                if (strstr(p,"MAX_LINE_LENGTH=") == p) {
		    //
		    max_boot_path_len  = strtoul(p+16, NULL, 0) +2;
                    continue;
                }

                die (
                    "compiled_files_to_load file \"%s\" contains unrecognized header line \"%s\"",
                    compiled_files_to_load_filename,
                    p
                );
	    }
        }

        if (max_num_boot_files < 0)  {
	    //
            die("compiled_files_to_load file \"%s\" contains negative files count?! (%d)",
                compiled_files_to_load_filename,
                max_num_boot_files
            );
        } 

        if (max_boot_path_len  < 0) {
	    //
            die("compiled_files_to_load file \"%s\" contains negative boot path len?! (%d)",
                compiled_files_to_load_filename,
                max_boot_path_len
            );
        }


	*return_max_boot_path_len =   max_boot_path_len;		// Tell the calling function.

	if (!(name_buf = MALLOC( max_boot_path_len ))) {
	    //
	    die ("unable to allocate space for .compiled file filenames");
        }

//	if (!(file_names = MALLOC( max_num_boot_files * sizeof(char*) ))) {
//	    //
//	    die ("Unable to allocate space for compiledfiles-to-load name table");
//        }

        // Read in the file names, converting them to
	// Mythryl strings and saving them in a list:
        //
	while (fgets( name_buf, max_boot_path_len, list_fd )) {

	    // Skip leading whitespace:
	    //
	    char* p = name_buf;
            while (*p == ' ' || *p == '\t')   ++p;

	    // Ignore empty lines and comment lines:
	    //
	    if (*p == '\n')   continue;
	    if (*p ==  '#')   continue;

	    // Strip any trailing newline:
	    //
	    {   int j = strlen(p)-1;
		//
	        if (p[j] == '\n') p[j] = '\0';
	    }	

	    if (file_count >= max_num_boot_files)   die ("too many files\n");

	    // If our agegroup0 buffer is more than half full,
	    // empty it by doing a heapcleaning.  This is very
	    // conservative -- which is the way I like it. *grin*
	    //
	    if (agegroup0_freespace_in_bytes( task )
	      < agegroup0_usedspace_in_bytes( task )
	    ){
		call_heapcleaner_with_extra_roots( task,  0, &roots1 );
	    }

	    Val file_name = make_ascii_string_from_c_string__may_heapclean(task, p, &roots1 );

	    file_list = LIST_CONS(task, file_name, file_list);
	}

	if (name_buf)    FREE( name_buf );

	fclose( list_fd );
    }


    // Reverse filename list (to restore
    // original order) and return it:
    //
    {   Val file_list2 = LIST_NIL;			Roots roots2 = { &file_list2, &roots1 };
	//
	for (; file_list != LIST_NIL;  file_list = LIST_TAIL(file_list)) {
	    //
	    Val file_name = LIST_HEAD(file_list);
	    //
	    file_list2 = LIST_CONS(task, file_name, file_list2);

	    // Again, if our agegroup0 buffer is more than
	    // half full, empty it by doing a heapcleaning:
	    //
	    if (agegroup0_freespace_in_bytes( task )
	      < agegroup0_usedspace_in_bytes( task )
	    ){
		call_heapcleaner_with_extra_roots( task,  0, &roots2 );
	    }
	}

	return file_list2;
    }
}
Example 4
static Status   read_image  (Task* task,  Inbuf* bp,  Val* chunk_ref) {
    //          ==========
    //
    Pickle_Header	pickle_header;
    Val*		externs;

    Sib_Header*	sib_headers[ TOTAL_SIBS ];
    Sib_Header*	sib_headers_buffer;

    int sib_headers_size;

    Agegroup*  age1 =   task->heap->agegroup[ 0 ];

    if (heapio__read_block( bp, &pickle_header, sizeof(pickle_header) ) == FALSE
    ||  pickle_header.smallchunk_sibs_count > MAX_PLAIN_SIBS				// MAX_PLAIN_SIBS		def in    src/c/h/sibid.h
    ||  pickle_header.hugechunk_sibs_count  > MAX_HUGE_SIBS				// MAX_HUGE_SIBS		def in    src/c/h/sibid.h
    ){
	return FALSE;									// XXX BUGGO FIXME we gotta do better than this.
    }

    // Read the externals table:
    //
    externs = heapio__read_externs_table( bp );



    // Read the sib headers:
    //
    sib_headers_size =  (pickle_header.smallchunk_sibs_count + pickle_header.hugechunk_sibs_count)
		        *
                        sizeof( Sib_Header );
    //
    sib_headers_buffer =  (Sib_Header*) MALLOC (sib_headers_size);
    //
    if (heapio__read_block( bp, sib_headers_buffer, sib_headers_size ) == FALSE) {
	//
	FREE( sib_headers_buffer );
	return FALSE;
    }
    //
    for (int ilk = 0;  ilk < TOTAL_SIBS;  ilk++) {
        //
	sib_headers[ ilk ] =  NULL;
    }
    //
    for (int sib = 0;  sib < pickle_header.smallchunk_sibs_count;  sib++) {
        //
	Sib_Header* p =  &sib_headers_buffer[ sib ];
	//
	sib_headers[ p->chunk_ilk ] =  p;
    }



    // DO BIG CHUNK HEADERS TOO

    // Check the heap to see if there is
    // enough free space in agegroup 1:
    //
    {   Punt	agegroup0_buffer_bytesize =   agegroup0_buffer_size_in_bytes( task );
        //
	Bool needs_cleaning  =   FALSE;

	for (int ilk = 0;  ilk < MAX_PLAIN_SIBS;  ilk++) {
	    //
	    Sib* sib = age1->sib[ ilk ];

	    if (sib_headers[ilk] != NULL
		&&
               (!sib_is_active(sib)								// sib_is_active		def in    src/c/h/heap.h
	       || sib_freespace_in_bytes(sib) < sib_headers[ ilk ]->info.o.bytesize		// sib_freespace_in_bytes	def in    src/c/h/heap.h
                                               +
                                               agegroup0_buffer_bytesize
               )
            ){
		needs_cleaning = TRUE;
		sib->requested_extra_free_bytes = sib_headers[ ilk ]->info.o.bytesize;
	    }
	}

	if (needs_cleaning) {
	    //
	    if (bp->nbytes <= 0) {
		//
		call_heapcleaner( task, 1 );							// call_heapcleaner		def in   /src/c/heapcleaner/call-heapcleaner.c

	    } else {
		//
	        // The cleaning may move the buffer, so:
                
		Val buffer =  PTR_CAST( Val,  bp->base );

		{   Roots extra_roots = { &buffer, NULL };
		    //
		    call_heapcleaner_with_extra_roots (task, 1, &extra_roots );
		}

		if (buffer != PTR_CAST( Val,  bp->base )) {
		    //
		    // The buffer moved, so adjust the buffer pointers:

		    Unt8* new_base = PTR_CAST( Unt8*, buffer );

		    bp->buf  = new_base + (bp->buf - bp->base);
		    bp->base = new_base;
		}
            }
	}
    }
Val   make_package_literals_via_bytecode_interpreter   (Task* task,   Unt8* bytecode_vector,   int bytecode_vector_length_in_bytes)   {
    //==============
    //
    // NOTE: We allocate all of the chunks in agegroup 1,
    // but allocate the vector of literals in agegroup0.
    //
    // This fn gets exported to the Mythryl level as
    //
    //     make_package_literals_via_bytecode_interpreter
    // in
    //     src/lib/compiler/execution/code-segments/code-segment.pkg
    // via
    //     src/c/lib/heap/make-package-literals-via-bytecode-interpreter.c
    //
    // Our ultimate invocation is in
    //
    //     src/lib/compiler/execution/main/execute.pkg
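    //
    // The bytecode format interpreted below: a four-byte magic number
    // (V1_MAGIC), a four-byte maximum-stack-depth word (read but currently
    // unused), then a sequence of one-byte opcodes (I_INT, I_RAW32,
    // I_RAW32L, I_RAW64, I_RAW64L, I_STR, I_LIT, I_VECTOR, I_RECORD,
    // I_RETURN), most of which carry a four-byte operand (I_RAW64 carries
    // an eight-byte double instead, and I_RETURN carries none).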


    int pc = 0;

    // Check that sufficient space is available for the
    // literal chunk that we are about to allocate.
    // Note that the cons cell has already been accounted
    // for in space_available (but not in space_needed).
    //
    #define GC_CHECK										\
	do {											\
	    if (space_needed > space_available							\
            &&  need_to_call_heapcleaner( task, space_needed + LIST_CONS_CELL_BYTESIZE)		\
            ){											\
		call_heapcleaner_with_extra_roots (task, 0, (Val *)&bytecode_vector, &stk, NULL);	\
		space_available = 0;								\
												\
	    } else {										\
												\
		space_available -= space_needed;						\
	    }											\
	} while (0)
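
    // (After a cleaning, GC_CHECK resets space_available to zero so that
    //  the 64KB top-up check at the head of the main loop below will
    //  re-establish headroom before the next allocation.)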

    #ifdef DEBUG_LITERALS
	debug_say("make_package_literals_via_bytecode_interpreter: bytecode_vector = %#x, bytecode_vector_length_in_bytes = %d\n", bytecode_vector, bytecode_vector_length_in_bytes);
    #endif

    if (bytecode_vector_length_in_bytes <= 8)   return HEAP_NIL;

    Val_Sized_Unt  magic
	=
	GET32(bytecode_vector);   pc += 4;

    Val_Sized_Unt  max_depth							/* This variable is currently unused, so suppress 'unused var' compiler warning: */   __attribute__((unused))
	=
	GET32(bytecode_vector);   pc += 4;

    if (magic != V1_MAGIC) {
	die("bogus literal magic number %#x", magic);
    }

    Val	stk = HEAP_NIL;

    int space_available = 0;

    for (;;) {
	//
	ASSERT(pc < bytecode_vector_length_in_bytes);

	space_available -= LIST_CONS_CELL_BYTESIZE;	// Space for stack cons cell.

	if (space_available < ONE_K_BINARY) {
	    //
	    if (need_to_call_heapcleaner(task, 64*ONE_K_BINARY)) {
		//
		call_heapcleaner_with_extra_roots (task, 0, (Val *)&bytecode_vector, &stk, NULL);
            }
	    space_available = 64*ONE_K_BINARY;
	}


	switch (bytecode_vector[ pc++ ]) {
	    //
	case I_INT:
	    {	int i = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: INT(%d)\n", pc-5, i);
		#endif

		LIST_CONS(task, stk, TAGGED_INT_FROM_C_INT(i), stk);
	    }
	    break;

	case I_RAW32:
	    {
		int i = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: RAW32[%d]\n", pc-5, i);
		#endif

		Val               result;
		INT1_ALLOC(task, result, i);

		LIST_CONS(task, stk, result, stk);
		space_available -= 2*WORD_BYTESIZE;
	    }
	    break;

	case I_RAW32L:
	    {
		int n = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		debug_say("[%2d]: RAW32L(%d) [...]\n", pc-5, n);
		#endif

		ASSERT(n > 0);

		int space_needed = 4*(n+1);
		GC_CHECK;

		LIB7_AllocWrite (task, 0, MAKE_TAGWORD(n, FOUR_BYTE_ALIGNED_NONPOINTER_DATA_BTAG));

		for (int j = 1;  j <= n;  j++) {
		    //
		    int i = GET32(bytecode_vector);	pc += 4;

		    LIB7_AllocWrite (task, j, (Val)i);
		}

		Val result =  LIB7_Alloc(task, n );

		LIST_CONS(task, stk, result, stk);
	    }
	    break;

	case I_RAW64:
	    {
		double d = get_double(&(bytecode_vector[pc]));	pc += 8;

		Val	           result;
		REAL64_ALLOC(task, result, d);

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: RAW64[%f] @ %#x\n", pc-5, d, result);
		#endif

		LIST_CONS(task, stk, result, stk);

		space_available -= 4*WORD_BYTESIZE;		// Extra 4 bytes for alignment padding.
	    }
	    break;

	case I_RAW64L:
	    {
		int n = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: RAW64L(%d) [...]\n", pc-5, n);
		#endif

		ASSERT(n > 0);

		int space_needed = 8*(n+1);
		GC_CHECK;

		#ifdef ALIGN_FLOAT64S
		    // Force FLOAT64_BYTESIZE alignment (descriptor is off by one word)
		    //
		    task->heap_allocation_pointer = (Val*)((Punt)(task->heap_allocation_pointer) | WORD_BYTESIZE);
		#endif
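
		// (OR-ing WORD_BYTESIZE into the allocation pointer leaves it on an
		//  odd word boundary, so the eight-byte doubles following the one-word
		//  tagword written below start eight-byte aligned, assuming a
		//  four-byte word.)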

		int j = 2*n;							// Number of words.

		LIB7_AllocWrite (task, 0, MAKE_TAGWORD(j, EIGHT_BYTE_ALIGNED_NONPOINTER_DATA_BTAG));

		Val result =  LIB7_Alloc(task, j );

		for (int j = 0;  j < n;  j++) {
		    //
		    PTR_CAST(double*, result)[j] = get_double(&(bytecode_vector[pc]));	pc += 8;
		}
		LIST_CONS(task, stk, result, stk);
	    }
	    break;

	case I_STR:
	    {
		int n = GET32(bytecode_vector);		pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: STR(%d) [...]", pc-5, n);
		#endif

		if (n == 0) {
		    #ifdef DEBUG_LITERALS
			debug_say("\n");
		    #endif

		    LIST_CONS(task, stk, ZERO_LENGTH_STRING__GLOBAL, stk);

		    break;
		}

		int j = BYTES_TO_WORDS(n+1);								// '+1' to include space for '\0'.

		// The space request includes space for the data-chunk header word and
		// the sequence header chunk.
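		// (The '+1' is presumably the data chunk's tagword; the '+3' the
		//  three words of the sequence header built by SEQHDR_ALLOC below:
		//  its own tagword, data pointer, and length.)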
		//
		int space_needed = WORD_BYTESIZE*(j+1+3);
		GC_CHECK;

		// Allocate the data chunk:
		//
		LIB7_AllocWrite(task, 0, MAKE_TAGWORD(j, FOUR_BYTE_ALIGNED_NONPOINTER_DATA_BTAG));
		LIB7_AllocWrite (task, j, 0);								// So word-by-word string equality works.

		Val result = LIB7_Alloc (task, j);

		#ifdef DEBUG_LITERALS
		    debug_say(" @ %#x (%d words)\n", result, j);
		#endif
		memcpy (PTR_CAST(void*, result), &bytecode_vector[pc], n);		pc += n;

		// Allocate the header chunk:
		//
		SEQHDR_ALLOC(task, result, STRING_TAGWORD, result, n);

		// Push on stack:
		//
		LIST_CONS(task, stk, result, stk);
	    }
	    break;

	case I_LIT:
	    {
		int n = GET32(bytecode_vector);	pc += 4;

		Val result = stk;

		for (int j = 0;  j < n;  j++) {
		    //
		    result = LIST_TAIL(result);
		}

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: LIT(%d) = %#x\n", pc-5, n, LIST_HEAD(result));
		#endif

		LIST_CONS(task, stk, LIST_HEAD(result), stk);
	    }
	    break;

	  case I_VECTOR:
	    {
		int n = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: VECTOR(%d) [", pc-5, n);
		#endif

		if (n == 0) {
		    #ifdef DEBUG_LITERALS
			debug_say("]\n");
		    #endif
		    LIST_CONS(task, stk, ZERO_LENGTH_VECTOR__GLOBAL, stk);
		    break;
		}

		// The space request includes space
		// for the data-chunk header word and
		// the sequence header chunk.
		//
		int space_needed = WORD_BYTESIZE*(n+1+3);
		GC_CHECK;

		// Allocate the data chunk:
		//
		LIB7_AllocWrite(task, 0, MAKE_TAGWORD(n, RO_VECTOR_DATA_BTAG));

		// Top of stack is last element in vector:
		//
		for (int j = n;  j > 0;  j--) {
		    //
		    LIB7_AllocWrite(task, j, LIST_HEAD(stk));

		    stk = LIST_TAIL(stk);
		}

		Val result =  LIB7_Alloc(task, n );

		// Allocate the header chunk:
		//
		SEQHDR_ALLOC(task, result, TYPEAGNOSTIC_RO_VECTOR_TAGWORD, result, n);

		#ifdef DEBUG_LITERALS
		    debug_say("...] @ %#x\n", result);
		#endif

		LIST_CONS(task, stk, result, stk);
	    }
	    break;

	case I_RECORD:
	    {
		int n = GET32(bytecode_vector);	pc += 4;

		#ifdef DEBUG_LITERALS
		    debug_say("[%2d]: RECORD(%d) [", pc-5, n);
		#endif

		if (n == 0) {
		    #ifdef DEBUG_LITERALS
			debug_say("]\n");
		    #endif

		    LIST_CONS(task, stk, HEAP_VOID, stk);
		    break;

		} else {

		    int space_needed = 4*(n+1);
		    GC_CHECK;

		    LIB7_AllocWrite(task, 0, MAKE_TAGWORD(n, PAIRS_AND_RECORDS_BTAG));
		}

		// Top of stack is last element in record:
		//
		for (int j = n;  j > 0;  j--) {
		    //
		    LIB7_AllocWrite(task, j, LIST_HEAD(stk));

		    stk = LIST_TAIL(stk);
		}

		Val result = LIB7_Alloc(task, n );

		#ifdef DEBUG_LITERALS
		    debug_say("...] @ %#x\n", result);
		#endif

		LIST_CONS(task, stk, result, stk);
	    }
	    break;

	case I_RETURN:
	    ASSERT(pc == bytecode_vector_length_in_bytes);

	    #ifdef DEBUG_LITERALS
	        debug_say("[%2d]: RETURN(%#x)\n", pc-5, LIST_HEAD(stk));
	    #endif

	    return  LIST_HEAD( stk );
	    break;

	default:
	    die ("bogus literal opcode #%x @ %d", bytecode_vector[pc-1], pc-1);
	}								// switch
    }									// for (;;)
}									// fun make_package_literals_via_bytecode_interpreter