Example 1
static value_t List_remove( PREFUNC, value_t listObj,
	value_t indexObj )
{
	ARGCHECK_2( listObj, indexObj );
	value_t splits = METHOD_1( listObj, sym_partition, indexObj );
	value_t head_list = CALL_1( splits, num_zero );
	value_t tail_list = CALL_1( splits, num_one );
	tail_list = METHOD_0( tail_list, sym_pop );
	return METHOD_1( head_list, sym_concatenate, tail_list );
}
Example 2
static value_t List_insert( PREFUNC, value_t listObj,
	value_t indexObj, value_t value )
{
	ARGCHECK_3( listObj, indexObj, value );
	value_t splits = METHOD_1( listObj, sym_partition, indexObj );
	value_t head_list = CALL_1( splits, num_zero );
	value_t tail_list = CALL_1( splits, num_one );
	tail_list = METHOD_1( tail_list, sym_push, value );
	return METHOD_1( head_list, sym_concatenate, tail_list );
}
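Examples 1 and 2 share a single idiom: partition the list at the index, adjust the front of the tail (pop its first element to remove, or push the new value to insert), then concatenate the two halves again. The sketch below restates that idiom on a plain fixed-capacity C array; int_list, partition_at, and the other helpers are hypothetical illustrations, not part of the runtime shown above.

#include <string.h>

/* Hypothetical fixed-capacity list used only to illustrate the idiom. */
typedef struct { int items[16]; int count; } int_list;

/* Split src at index: head gets [0, index), tail gets [index, count). */
static void partition_at( const int_list *src, int index,
	int_list *head, int_list *tail )
{
	head->count = index;
	memcpy( head->items, src->items, (size_t)index * sizeof(int) );
	tail->count = src->count - index;
	memcpy( tail->items, src->items + index, (size_t)tail->count * sizeof(int) );
}

/* Join head and tail back into a single list. */
static int_list concatenate( const int_list *head, const int_list *tail )
{
	int_list out;
	out.count = head->count + tail->count;
	memcpy( out.items, head->items, (size_t)head->count * sizeof(int) );
	memcpy( out.items + head->count, tail->items,
		(size_t)tail->count * sizeof(int) );
	return out;
}

/* Remove: drop the first element of the tail, then rejoin.
 * Caller guarantees 0 <= index < src->count. */
static int_list remove_at( const int_list *src, int index )
{
	int_list head, tail;
	partition_at( src, index, &head, &tail );
	memmove( tail.items, tail.items + 1, (size_t)(tail.count - 1) * sizeof(int) );
	tail.count--;                                   /* the "pop" step */
	return concatenate( &head, &tail );
}

/* Insert: push the new value onto the front of the tail, then rejoin.
 * Caller guarantees 0 <= index <= src->count and src->count < 16. */
static int_list insert_at( const int_list *src, int index, int value )
{
	int_list head, tail;
	partition_at( src, index, &head, &tail );
	memmove( tail.items + 1, tail.items, (size_t)tail.count * sizeof(int) );
	tail.items[0] = value;                          /* the "push" step */
	tail.count++;
	return concatenate( &head, &tail );
}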
Example 3
static value_t List_partition( PREFUNC, value_t wrapper, value_t indexObj )
{
	ARGCHECK_2( wrapper, indexObj );
	value_t listObj = wrapper->slots[LIST_REVERSE_LIST_SLOT];
	indexObj = FlipIndex( zone, listObj, indexObj );
	value_t splits = METHOD_1( listObj, sym_partition, indexObj );
	// We have split the wrapped list - now we need to reverse the resulting
	// lists and return them in the opposite order.
	value_t head_list = CALL_1( splits, num_zero );
	value_t tail_list = CALL_1( splits, num_one );
	head_list = METHOD_0( head_list, sym_reverse );
	tail_list = METHOD_0( tail_list, sym_reverse );
	return AllocPair( zone, tail_list, head_list );
}
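Example 3 implements partition on a reversed view by delegating to the wrapped list. The key step is the index flip: for a wrapped list of n leaves, splitting the reversed view at i is the same as splitting the wrapped list at n - i, reversing each half, and returning the halves in the opposite order. The sketch below walks through that arithmetic on plain arrays, assuming FlipIndex computes size - index (an assumption; the real helper is not shown here).

#include <stdio.h>

/* Reverse n ints in place. */
static void reverse( int *a, int n )
{
	for (int i = 0, j = n - 1; i < j; i++, j--) {
		int t = a[i]; a[i] = a[j]; a[j] = t;
	}
}

int main( void )
{
	int base[] = { 1, 2, 3, 4, 5 };     /* wrapped list L; reversed view R = 5 4 3 2 1 */
	int n = 5, i = 2;                   /* split the reversed view at i = 2 */
	int flipped = n - i;                /* assumed FlipIndex: size - index */

	/* Partition the wrapped list at the flipped index. */
	int head[5], tail[5], hn = flipped, tn = n - flipped;
	for (int k = 0; k < hn; k++) head[k] = base[k];
	for (int k = 0; k < tn; k++) tail[k] = base[flipped + k];

	/* Reverse each half and return them in the opposite order:
	 * (reversed tail, reversed head) == (R[0..i), R[i..n)). */
	reverse( head, hn );
	reverse( tail, tn );
	for (int k = 0; k < tn; k++) printf( "%d ", tail[k] );   /* 5 4 */
	printf( "| " );
	for (int k = 0; k < hn; k++) printf( "%d ", head[k] );   /* 3 2 1 */
	printf( "\n" );
	return 0;
}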
Example 4
static value_t catch_exception_func( PREFUNC, value_t exp, value_t handler )
{
	ARGCHECK_2( NULL, handler );
	if (IsAnException( exp )) {
		value_t val = exp->slots[0];
		assert( handler );
		exp = CALL_1( handler, val );
	}
	return exp;
}
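Example 4 treats exceptions as ordinary values: when the incoming expression is an exception, its payload (slot 0) is handed to the handler, whose result replaces the expression; anything else passes through untouched. A minimal sketch of that control flow, using a hypothetical tagged result type and handler callback rather than the runtime's value_t machinery:

#include <stdio.h>

/* Hypothetical tagged value: either a plain int or an exception payload. */
typedef struct { int is_exception; int value; } result_t;

typedef result_t (*handler_fn)( int payload );

/* Mirror of catch_exception_func: pass non-exceptions through unchanged,
 * otherwise let the handler turn the payload into a replacement value. */
static result_t catch_result( result_t exp, handler_fn handler )
{
	if (exp.is_exception) {
		return handler( exp.value );
	}
	return exp;
}

static result_t recover( int payload )
{
	printf( "recovering from error %d\n", payload );
	return (result_t){ .is_exception = 0, .value = -1 };
}

int main( void )
{
	result_t ok  = { 0, 42 };
	result_t bad = { 1, 7 };
	printf( "%d\n", catch_result( ok, recover ).value );   /* 42 */
	printf( "%d\n", catch_result( bad, recover ).value );  /* -1 */
	return 0;
}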
Example 5
void interactive_kernel_loop() {
    char *buff = (char*) 0x10000;
    uint32_t len = 0x20000 - 0x10000;
    char *decodebuff = (char*) 0x20000;
    uint32_t decodebufflen = 0x30000 - 0x20000;
    int status = 0;
    while (1) {
        status = uart_getln(buff, len);
        if (status == 0) {
            uart_puts(uart_newline);
            if (str_startswith(buff, "b64 ")) {
                uint32_t bytes_decoded = b64_decode(buff+4, decodebuff, decodebufflen);
                uart_puts("base64 decoded #bytes: ");
                char tmp[32];
                uint32_t tmplen = ARR_LEN(tmp);
                uart_puts(str_int_to_str(bytes_decoded, tmp, tmplen));
                uart_puts(uart_newline);
                // Copy the code of bootstrap_decoded_binary somewhere safe.
                uint32_t func_len = 64; // wild guess
                mem_cpy((uint32_t)bootstrap_decoded_binary, 0x30000, func_len);
                // Call bootstrap_decoded_binary from that safe location
                BRANCHTO(0x30000);
            } else if (str_startswith(buff, "m ")) {
                inspect_memory(buff+2);
            } else if (str_startswith(buff, "r ")) {
                inspect_reg(buff+2);
            } else if (str_startswith(buff, "icky")) {
                uart_puts(yoo);
            } else if (str_startswith(buff, "usr0")) {
                if (pr0) {
                    switch_to_user_process(pr0);
                }
            } else if (str_startswith(buff, "freloc")) {
                char tmp[32];
                uint32_t tmplen = ARR_LEN(tmp);
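                // Relocation self-test: estimate str_len's size as the distance to
                // str_parse_int (which assumes str_parse_int is linked right after
                // it), copy that many bytes to 0x30000, then call the relocated
                // copy on "xyz" and print the returned length.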
                uint32_t func_len = ((uint32_t) str_parse_int) - ((uint32_t) str_len);
                mem_cpy((uint32_t)str_len, 0x30000, func_len);
                uart_puts(str_int_to_str(CALL_1(0x30000, "xyz"), tmp, tmplen));
            } else if (str_startswith(buff, "version")) {
                uart_puts(version);
                uart_puts("\r\n");
            } else {
                int strlen = str_len(buff) - 1;
                int j = 0;
                for (; strlen != -1; --strlen, ++j) {
                    decodebuff[j] = buff[strlen];
                }
                decodebuff[j] = 0;
                uart_puts(decodebuff);
            }
        }
        uart_puts(uart_newline);
    }
}
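The command dispatch in Example 5 hinges on str_startswith matching the line buffer against a command prefix. Its implementation is not shown in this listing; a minimal version consistent with how it is called here (nonzero when buff begins with prefix) might look like the following, offered as an assumption rather than the project's actual code.

/* Hypothetical prefix test, consistent with the calls above:
 * returns 1 if buff begins with prefix, 0 otherwise. */
static int str_startswith(const char *buff, const char *prefix) {
    while (*prefix) {
        if (*buff++ != *prefix++) {
            return 0;
        }
    }
    return 1;
}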
Example 6
static value_t List_constructor( PREFUNC, value_t exp )
{
	ARGCHECK_1( exp );
	// This intrinsic function represents the brackets syntax for list
	// construction. Our exp is whatever was enclosed in the brackets.
	// This ought to be a tuple, or something which looks like one; each
	// element of the tuple will become a new entry in the list.

	value_t out = &list_empty;
	if (exp) {
		value_t item_count = METHOD_0( exp, sym_size );
		if (IsAnException( item_count )) return item_count;
		unsigned int items = IntFromFixint( item_count );
		for (unsigned int i = 0; i < items; i++) {
			value_t item = CALL_1( exp, NumberFromInt( zone, i ) );
			if (IsAnException( item )) return item;
			out = METHOD_1( out, sym_append, item );
		}
	}
	return out;
}
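Example 6 folds the bracketed expression into a list one element at a time, starting from the empty list and appending each tuple entry in order, bailing out early if any step produces an exception. A stripped-down restatement of that fold, with a hypothetical fixed-capacity list standing in for the runtime's list object:

#include <stdio.h>

/* Hypothetical fixed-capacity list standing in for the runtime's list. */
typedef struct { int items[16]; int count; } int_list;

static int_list append( int_list out, int item )
{
	out.items[out.count++] = item;
	return out;
}

int main( void )
{
	/* The bracketed expression "[10, 20, 30]" behaves like a tuple:
	 * the constructor reads its size, then walks it by index. */
	int tuple[] = { 10, 20, 30 };
	int items = (int)( sizeof tuple / sizeof tuple[0] );

	int_list out = { {0}, 0 };              /* start from the empty list */
	for (int i = 0; i < items; i++) {
		out = append( out, tuple[i] );
	}
	for (int i = 0; i < out.count; i++) printf( "%d ", out.items[i] );
	printf( "\n" );                         /* 10 20 30 */
	return 0;
}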
Example 7
static value_t List_partition( PREFUNC, value_t listObj, value_t indexObj )
{
	ARGCHECK_2( listObj, indexObj );
	value_t head_chunk = listObj->slots[LIST_HEAD_CHUNK_SLOT];
	value_t cold_storage = listObj->slots[LIST_COLD_STORAGE_SLOT];
	value_t tail_chunk = listObj->slots[LIST_TAIL_CHUNK_SLOT];

	int index = IntFromFixint( indexObj );
	int size = IntFromFixint( METHOD_0( listObj, sym_size ) );
	if (index < 0 || index > size) {
		return ThrowCStr( zone, "index out of bounds" );
	}

	// Our job is to partition the list into a head list and a tail list, where
	// the head does not contain the leaf value identified by the index, and
	// the first item in the tail does contain that leaf value. Items are not
	// necessarily the same as leaves, because we may actually be an inner list
	// serving as cold storage for some outer list; all we need to do is get as
	// close to the requested split point as we can.
	value_t head_out = &list_empty;
	value_t tail_out = &list_empty;

	// If the index comes before the cold storage, we will call the list "tail"
	// and pop items from it onto a new, empty "head" list, stopping before the
	// next pop would move the requested leaf item onto the head.
	if (index < chunk_leaf_count( head_chunk )) {
		tail_out = listObj;
		while (index > 0) {
			value_t item = METHOD_0( tail_out, sym_head );
			int old_tail_size = IntFromFixint( METHOD_0( tail_out, sym_size ) );
			value_t temp_tail = METHOD_0( tail_out, sym_pop );
			int new_tail_size = IntFromFixint( METHOD_0( temp_tail, sym_size ) );
			int leaf_count = old_tail_size - new_tail_size;
			if (leaf_count <= index) {
				head_out = METHOD_1( head_out, sym_append, item );
				tail_out = temp_tail;
			}
			index -= leaf_count;
		}
	}

	// If the index comes after the cold storage, we will call the current list
	// "head" and chop items from it, pushing them onto a new, empty "tail"
	// list, until the tail contains the index item.
	else if (size - index <= chunk_leaf_count( tail_chunk )) {
		head_out = listObj;
		int target = size - index;
		while (IntFromFixint( METHOD_0( tail_out, sym_size ) ) < target) {
			value_t item = METHOD_0( head_out, sym_tail );
			head_out = METHOD_0( head_out, sym_chop );
			tail_out = METHOD_1( tail_out, sym_push, item );
		}
	}

	// If the leaf index lives in neither the head nor the tail chunk, it must
	// live in cold storage. We will split cold storage in half and use the two
	// new halves as our output lists.
	else {
		index -= chunk_leaf_count( head_chunk );
		indexObj = NumberFromInt( zone, index );
		value_t splits = METHOD_1( cold_storage, sym_partition, indexObj );
		value_t cold_head = CALL_1( splits, num_zero );
		value_t cold_tail = CALL_1( splits, num_one );

		// Make the output head list. We will use our head chunk and the head
		// portion of the cold list, which of course contains chunks. If the
		// cold_head is non-empty, we will simply pull our tail chunk from it.
		// Otherwise, we will have to turn the head chunk into its own list.
		if (BoolFromBoolean( zone, METHOD_0( cold_head, sym_is_empty ) )) {
			head_out = listify_chunk( zone, head_chunk );
		}
		else {
			value_t new_tail_chunk = METHOD_0( cold_head, sym_tail );
			cold_head = METHOD_0( cold_head, sym_chop );
			head_out = alloc_list( zone, head_chunk, cold_head, new_tail_chunk );
		}

		// Make the output tail list. We will combine the tail half of the
		// newly-partitioned cold storage list with our tail chunk. If the
		// cold tail is not empty, we will pop its first chunk as our new list
		// head chunk; otherwise we will turn our tail chunk into its own list.
		if (BoolFromBoolean( zone, METHOD_0( cold_tail, sym_is_empty ) )) {
			tail_out = listify_chunk( zone, tail_chunk );
		}
		else {
			value_t new_head_chunk = METHOD_0( cold_tail, sym_head );
			cold_tail = METHOD_0( cold_tail, sym_pop );
			tail_out = alloc_list( zone, new_head_chunk, cold_tail, tail_chunk );
		}

		// We've done the rough cut of the partition, but there may be a few
		// stragglers on the tail list, since our cold storage works in terms
		// of chunks and not of leaves. Move items from the head of our tail
		// list to the tail of our head list, until moving one more item would
		// put our index leaf onto the head list. We don't know, at this level,
		// whether we are moving leaves or chunks, so we will just have to try
		// it and see what we get.
		while (true) {
			value_t item = METHOD_0( tail_out, sym_head );
			value_t maybe_head = METHOD_1( head_out, sym_append, item );
			if (IntFromFixint( METHOD_0( maybe_head, sym_size ) ) > index) {
				break;
			}
			head_out = maybe_head;
			tail_out = METHOD_0( tail_out, sym_pop );
		}
	}

	// Assemble the head & tail lists we have created into a tuple, since that
	// is the idiom for returning multiple values from a function.
	return AllocPair( zone, head_out, tail_out );
}
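Example 7 chooses one of three strategies depending on where the requested leaf index falls: inside the head chunk, inside the tail chunk's range at the back, or somewhere in cold storage between them. The sketch below reproduces just that case analysis with concrete numbers; the chunk sizes are made up for illustration, and plain integers stand in for chunk_leaf_count.

#include <stdio.h>

/* Reproduce the case analysis from List_partition above. head_leaves and
 * tail_leaves stand in for chunk_leaf_count() of the two chunks; every
 * other leaf lives in cold storage. */
static const char *partition_case( int size, int head_leaves, int tail_leaves,
	int index )
{
	if (index < 0 || index > size) return "index out of bounds";
	if (index < head_leaves) return "pop items off the front onto a new head list";
	if (size - index <= tail_leaves) return "chop items off the back onto a new tail list";
	return "split cold storage and stitch the chunks back together";
}

int main( void )
{
	/* Hypothetical list: 4-leaf head chunk, 8 leaves of cold storage,
	 * 4-leaf tail chunk, 16 leaves in total. */
	int size = 16, head = 4, tail = 4;
	int probes[] = { 2, 6, 14 };
	for (int i = 0; i < 3; i++) {
		printf( "index %2d: %s\n", probes[i],
			partition_case( size, head, tail, probes[i] ) );
	}
	return 0;
}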