/* Remove the first node holding `item` from the collection, if present.
 * NULL collection / empty collection / absent item are all silent no-ops. */
void collection_remove(collection_t* collection, void* item)
{
    if (!collection || !collection->head) {
        return;
    }
    /* Head removal is delegated to pop, which already unlinks and discards. */
    if (collection->head->item == item) {
        collection_pop(collection);
        return;
    }
    /* Walk with a trailing pointer so the match can be unlinked in place. */
    for (collection_node_t *prev = collection->head; prev->next; prev = prev->next) {
        if (prev->next->item == item) {
            collection_node_t *victim = prev->next;
            prev->next = victim->next;
            collection->count--;
            _remove_node(collection, victim);
            return;
        }
    }
}
/*
 * Remove tar_node from this module.
 * Preconditions: tar_node must be non-NULL and fully disconnected (no input
 * or output nets attached); violations throw an exception. On success the
 * removal is appended to lRecordLst for the record/replay mechanism.
 * Returns *this to allow call chaining.
 */
module& module::remove_node(node* tar_node)
{
    /* Validate BEFORE dereferencing: the original code called
     * get_input_num()/get_output_num() on tar_node first, so a NULL
     * argument crashed before this guard could ever fire. */
    if (tar_node == NULL)
        throw exception("NULL node specified. (remove_node)");

    int input_num = tar_node->get_input_num();
    int output_num = tar_node->get_output_num();
    string node_name = tar_node->get_node_name();

    /* A node may only be removed once it is fully disconnected. */
    for (int i = 0; i < input_num; i++) {
        if (tar_node->get_input_net(i) != NULL)
            throw exception((node_name + " node to remove with input net connected. (remove_node)").c_str());
    }
    for (int i = 0; i < output_num; i++) {
        if (tar_node->get_output_net(i) != NULL)
            throw exception((node_name + " node to remove with output net connected. (remove_node)").c_str());
    }

    if (!_remove_node(tar_node))
        throw exception((node_name + " node to remove not found. (remove_node)").c_str());

    /* Log the removal so it can be replayed/undone later. */
    Record new_record;
    new_record.op_type = RG;
    new_record.op_info.node_ref = tar_node;
    lRecordLst.push_back(new_record);
    return *this;
}
/* Detach and discard the head node, returning the item it carried.
 * Returns 0 (NULL) for a NULL or empty collection.
 * Fix: the item is read BEFORE _remove_node() is called on the head node;
 * the original read head->item afterwards, which is a use-after-free if
 * _remove_node deallocates the node (as its use on fully unlinked nodes in
 * collection_remove suggests). */
void* collection_pop(collection_t* collection)
{
    if (!collection || !collection->head) {
        return 0;
    }
    collection_node_t *old_head = collection->head;
    void *payload = old_head->item;   /* capture before the node may be freed */
    collection->head = old_head->next;
    collection->count--;
    _remove_node(collection, old_head);
    return payload;
}
/* Remove the node `pn` (previously obtained from an inorder walk) from the
 * tree rooted at *rootp; the removed value is reported through *oldval.
 * Returns 1 on success, 0 for a NULL pn (debug builds assert instead).
 * With VERIFY_TREE defined, tree invariants are checked before and after. */
int ptree_inorder_walk_remove(ptree_node_t **rootp, void **oldval, void *pn,
                              int (*cmp)(const void *v1, const void *v2))
{
    assert(pn);
    if (pn == NULL) {
        return 0;
    }
#ifdef VERIFY_TREE
    assert(_verify_tree(rootp, cmp));
#endif
    _remove_node(rootp, pn, oldval);
#ifdef VERIFY_TREE
    assert(_verify_tree(rootp, cmp));
#endif
    return 1;
}
/* Return a freed allocation (identified by `cookie`, which is the node
 * pointer) to the split/buddy allocator.
 * If the node's buddy (sibling) can be merged with, both child nodes are
 * destroyed and the merged parent is recursively freed one size class up;
 * otherwise the node is simply re-inserted into its own free list.
 * NOTE(review): `!node->sibling->head` is treated here as "sibling is free
 * and mergeable" -- confirm against _insert_node/_remove_node semantics,
 * since a NULL `head` could equally mean "not on any free list". */
void _utspace_split_free(allocman_t *alloc, void *_split, seL4_Word cookie, size_t size_bits)
{
    utspace_split_t *split = (utspace_split_t*)_split;
    struct utspace_split_node *node = (struct utspace_split_node*)cookie;
    struct utspace_split_node *parent = node->parent;
    /* see if our sibling is also free */
    if (parent && !node->sibling->head) {
        /* remove sibling from free list */
        _remove_node(node->sibling->origin_head, node->sibling);
        /* delete both of us */
        _delete_node(alloc, node->sibling);
        _delete_node(alloc, node);
        /* put the parent back in (it covers both halves, hence size_bits + 1) */
        _utspace_split_free(alloc, split, (seL4_Word) parent, size_bits + 1);
    } else {
        /* just put ourselves back in */
        _insert_node(node->head, node);
    }
}
/* Handle a backspace keypress (KEY_BACKSPACE or ASCII DEL, 127): delete the
 * node at index *current from the list. If the list becomes empty, tear down
 * the ncurses window and return NULL. Otherwise clamp *current to the new
 * list length, refresh *longest_arg, and return the (possibly new) head.
 * Non-backspace input returns the list unchanged. */
t_node *_input_isbackspace(t_node *list_start, int input, int *current, int *longest_arg)
{
    int len;

    if (input != KEY_BACKSPACE && input != 127)
        return (list_start);
    list_start = _remove_node(list_start, *current);
    if (list_start == NULL) {
        /* Last element removed: shut ncurses down before reporting empty. */
        if (clear() == ERR || endwin() == ERR)
            my_exit(EXIT_FAILURE, "Failed to exit ncurses window!\n");
        return (NULL);
    }
    len = _count_elems_list(list_start);
    if (*current > len)
        *current = len;
    *longest_arg = _find_longest_arg(list_start);
    return (list_start);
}
/* Free one chunk (identified by `cookie`) back into the trickle allocator.
 * Set bits in a node's bitmap mark FREE chunks, with bit 31 == offset 0
 * (see the matching CLZ / ~BIT(31 - offset) logic in _utspace_trickle_alloc).
 * When a node becomes completely free and was itself carved from a parent,
 * the node is dissolved and the whole region is handed back to the parent
 * recursively; otherwise the node (re)enters its size-class free list. */
void _utspace_trickle_free(struct allocman *alloc, void *_trickle, uint32_t cookie, uint32_t size_bits)
{
    utspace_trickle_t *trickle = (utspace_trickle_t*)_trickle;
    struct utspace_trickle_node *node = _cookie_to_node(cookie);
    uint32_t offset = _cookie_to_offset(cookie);
    /* non-zero bitmap => node still had free chunks, so it is already on the
     * free list (alloc removes a node only once its bitmap hits 0) */
    int in_list = !(node->bitmap == 0);
    /* mark this chunk free */
    node->bitmap |= BIT(31 - offset);
    if (node->bitmap == _make_bitmap(node->bitmap_bits)) {
        /* every chunk of this node is now free */
        if (node->parent_cookie) {
            if (in_list) {
                _remove_node(&trickle->heads[size_bits], node);
            }
            /* give the whole region back to the parent allocation */
            _utspace_trickle_free(alloc, trickle, node->parent_cookie, size_bits + node->bitmap_bits - 1);
            _free_node(alloc, node);
        } else if (!in_list) {
            /* root node with no parent: keep it around on the free list */
            _insert_node(&trickle->heads[size_bits], node);
        }
    } else if (!in_list) {
        /* node went from exhausted to partially free: rejoin the free list */
        _insert_node(&trickle->heads[size_bits], node);
    }
}
/* Allocate one object of `type`/`size_bits` from the trickle allocator.
 * Picks the first node on the matching free list (refilling the pool if
 * empty), carves out the highest free chunk via a kernel retype into `slot`
 * (skipped when slot is NULL), and returns an opaque cookie encoding the
 * node and chunk offset. Returns 0 and sets *error on failure. */
uint32_t _utspace_trickle_alloc(struct allocman *alloc, void *_trickle, uint32_t size_bits, seL4_Word type, cspacepath_t *slot, int *error)
{
    uint32_t sel4_size_bits;
    int _error;
    utspace_trickle_t *trickle = (utspace_trickle_t*)_trickle;
    struct utspace_trickle_node *node;
    uint32_t offset;
    uint32_t mem_offset;
    /* get size of untyped call */
    sel4_size_bits = get_sel4_object_size(type, size_bits);
    /* reject inconsistent or zero sizes */
    if (size_bits != vka_get_object_size(type, sel4_size_bits) || size_bits == 0) {
        SET_ERROR(error, 1);
        return 0;
    }
    assert(size_bits < 32);
    /* refill this size class if its free list is empty */
    if (!trickle->heads[size_bits]) {
        _error = _refill_pool(alloc, trickle, size_bits);
        if (_error) {
            SET_ERROR(error, _error);
            return 0;
        }
    }
    node = trickle->heads[size_bits];
    /* set bits mark free chunks, bit 31 == offset 0, so CLZ finds the
     * first free chunk */
    offset = CLZ(node->bitmap);
    mem_offset = node->offset + (offset << size_bits);
    if (slot) {
        /* retype the chosen chunk of the untyped into the caller's slot */
        _error = seL4_Untyped_RetypeAtOffset(node->ut->capPtr, type, mem_offset, sel4_size_bits, slot->root, slot->dest, slot->destDepth, slot->offset, 1);
        if (_error != seL4_NoError) {
            SET_ERROR(error, 1);
            return 0;
        }
    }
    /* mark the chunk allocated; an all-zero bitmap means the node is
     * exhausted and must leave the free list */
    node->bitmap &= ~BIT(31 - offset);
    if (node->bitmap == 0) {
        _remove_node(&trickle->heads[size_bits], node);
    }
    SET_ERROR(error, 0);
    return _make_cookie(node, offset);
}
/* Look up the value `v` in the tree rooted at *rootp and remove its node.
 * The stored value is returned through *oldval.
 * Returns 1 when a matching node was removed, 0 when `v` is not present.
 * With VERIFY_TREE defined, invariants are checked before and after. */
int ptree_remove(void *v, ptree_node_t **rootp,
                 int (*cmp)(const void *v1, const void *v2), void **oldval)
{
#ifdef VERIFY_TREE
    assert(_verify_tree(rootp, cmp));
#endif
    struct ptree_node *target = *rootp;
    if (_walk_to(v, &target, NULL, cmp) != 0) {
        /* value not present in the tree */
        return 0;
    }
    _remove_node(rootp, target, oldval);
#ifdef VERIFY_TREE
    assert(_verify_tree(rootp, cmp));
#endif
    return 1;
}
/* Allocate one object of `type`/`size_bits` from the split/buddy allocator
 * (legacy 32-bit variant). Refills the matching pool if needed, retypes the
 * first free untyped into the caller's `slot`, removes the backing node from
 * its free list, and returns that node cast to a uint32_t cookie.
 * Returns 0 and sets *error on failure.
 * NOTE(review): casting the node pointer to uint32_t assumes a 32-bit
 * address space -- this would truncate on LP64 builds. Confirm target. */
uint32_t _utspace_split_alloc(allocman_t *alloc, void *_split, uint32_t size_bits, seL4_Word type, cspacepath_t *slot, int *error)
{
    utspace_split_t *split = (utspace_split_t*)_split;
    uint32_t sel4_size_bits;
    int sel4_error;
    struct utspace_split_node *node;
    /* get size of untyped call */
    sel4_size_bits = get_sel4_object_size(type, size_bits);
    /* reject inconsistent or zero sizes */
    if (size_bits != vka_get_object_size(type, sel4_size_bits) || size_bits == 0) {
        SET_ERROR(error, 1);
        return 0;
    }
    /* make sure we have an available untyped */
    if (_refill_pool(alloc, split, size_bits)) {
        /* out of memory? */
        SET_ERROR(error, 1);
        return 0;
    }
    /* use the first node for lack of a better one */
    node = split->heads[size_bits];
    /* Perform the untyped retype */
#if defined(CONFIG_KERNEL_STABLE)
    sel4_error = seL4_Untyped_RetypeAtOffset(node->ut.capPtr, type, 0, sel4_size_bits, slot->root, slot->dest, slot->destDepth, slot->offset, 1);
#else
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, type, sel4_size_bits, slot->root, slot->dest, slot->destDepth, slot->offset, 1);
#endif
    if (sel4_error != seL4_NoError) {
        /* Well this shouldn't happen */
        SET_ERROR(error, 1);
        return 0;
    }
    /* remove the node */
    _remove_node(&split->heads[size_bits], node);
    SET_ERROR(error, 0);
    /* return the node as a cookie */
    return (uint32_t)node;
}
/* Allocate one object of `type`/`size_bits` from the split/buddy allocator.
 * When `paddr` != ALLOCMAN_NO_PADDR the allocation must come from an untyped
 * covering that physical address; `canBeDev` additionally permits drawing
 * from the device (dev_heads) and device-memory (dev_mem_heads) pools, which
 * are preferred when no paddr is requested. On success the backing node is
 * removed from its free list and returned cast to a seL4_Word cookie.
 * Returns 0 and sets *error on failure. */
seL4_Word _utspace_split_alloc(allocman_t *alloc, void *_split, size_t size_bits, seL4_Word type, const cspacepath_t *slot, uintptr_t paddr, bool canBeDev, int *error)
{
    utspace_split_t *split = (utspace_split_t*)_split;
    size_t sel4_size_bits;
    int sel4_error;
    struct utspace_split_node *node;
    /* get size of untyped call */
    sel4_size_bits = get_sel4_object_size(type, size_bits);
    /* reject inconsistent or zero sizes */
    if (size_bits != vka_get_object_size(type, sel4_size_bits) || size_bits == 0) {
        SET_ERROR(error, 1);
        return 0;
    }
    struct utspace_split_node **head = NULL;
    /* if we're allocating at a particular paddr then we will just trawl through every pool
     * and see if we can find out which one has what we want */
    if (paddr != ALLOCMAN_NO_PADDR) {
        if (canBeDev) {
            head = find_head_for_paddr(split->dev_heads, paddr, size_bits);
            if (!head) {
                head = find_head_for_paddr(split->dev_mem_heads, paddr, size_bits);
            }
        }
        if (!head) {
            head = find_head_for_paddr(split->heads, paddr, size_bits);
        }
        if (!head) {
            SET_ERROR(error, 1);
            ZF_LOGE("Failed to find any untyped capable of creating an object at address %p", (void*)paddr);
            return 0;
        }
        if (_refill_pool(alloc, split, head, size_bits, paddr)) {
            /* out of memory? */
            SET_ERROR(error, 1);
            ZF_LOGV("Failed to refill pool to allocate object of size %zu", size_bits);
            return 0;
        }
        /* search for the node we want to use. We have the advantage of knowing that
         * due to objects being size aligned that the base paddr of the untyped will
         * be exactly the paddr we want */
        for (node = head[size_bits]; node && node->paddr != paddr; node = node->next);
        /* _refill_pool should not have returned if this wasn't possible */
        assert(node);
    } else {
        /* if we can use device memory then preference allocating from there */
        if (canBeDev) {
            if (_refill_pool(alloc, split, split->dev_mem_heads, size_bits, ALLOCMAN_NO_PADDR)) {
                /* out of memory?
                 * NOTE(review): a dev_mem refill failure errors out here rather
                 * than falling back to the regular heads -- confirm intended. */
                SET_ERROR(error, 1);
                ZF_LOGV("Failed to refill pool to allocate object of size %zu", size_bits);
                return 0;
            }
            head = split->dev_mem_heads;
        }
        if (!head) {
            /* fall back to the regular (non-device) pools */
            head = split->heads;
            if (_refill_pool(alloc, split, head, size_bits, ALLOCMAN_NO_PADDR)) {
                /* out of memory? */
                SET_ERROR(error, 1);
                ZF_LOGV("Failed to refill pool to allocate object of size %zu", size_bits);
                return 0;
            }
        }
        /* use the first node for lack of a better one */
        node = head[size_bits];
    }
    /* Perform the untyped retype */
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, type, sel4_size_bits, slot->root, slot->dest, slot->destDepth, slot->offset, 1);
    if (sel4_error != seL4_NoError) {
        /* Well this shouldn't happen */
        ZF_LOGE("Failed to retype untyped, error %d\n", sel4_error);
        SET_ERROR(error, 1);
        return 0;
    }
    /* remove the node */
    _remove_node(&head[size_bits], node);
    SET_ERROR(error, 0);
    /* return the node as a cookie */
    return (seL4_Word)node;
}
/* Ensure heads[size_bits] contains a usable untyped: either the pool already
 * has one (or, when `paddr` != ALLOCMAN_NO_PADDR, one covering paddr), or a
 * node one size class larger is recursively obtained and split into two
 * buddy halves via seL4 retypes. The parent node stays allocated (referenced
 * by the children's `parent` field) but leaves the free list.
 * Returns 0 on success, 1 on failure. */
static int _refill_pool(allocman_t *alloc, utspace_split_t *split, struct utspace_split_node **heads, size_t size_bits, uintptr_t paddr)
{
    struct utspace_split_node *node;
    struct utspace_split_node *left, *right;
    int sel4_error;
    if (paddr == ALLOCMAN_NO_PADDR) {
        /* see if pool is actually empty */
        if (heads[size_bits]) {
            return 0;
        }
    } else {
        /* see if the pool has the paddr we want */
        for (node = heads[size_bits]; node; node = node->next) {
            if (node->paddr <= paddr && paddr < node->paddr + BIT(size_bits)) {
                return 0;
            }
        }
    }
    /* ensure we are not the highest pool */
    if (size_bits >= sizeof(seL4_Word) * 8 - 2) {
        /* bugger, no untypeds bigger than us */
        ZF_LOGV("Failed to refill pool of size %zu, no larger pools", size_bits);
        return 1;
    }
    /* get something from the next pool up */
    if (_refill_pool(alloc, split, heads, size_bits + 1, paddr)) {
        /* could not fill higher pool */
        ZF_LOGV("Failed to refill pool of size %zu", size_bits);
        return 1;
    }
    if (paddr == ALLOCMAN_NO_PADDR) {
        /* use the first node for lack of a better one */
        node = heads[size_bits + 1];
    } else {
        /* find the parent that covers the requested paddr */
        for (node = heads[size_bits + 1]; node && !(node->paddr <= paddr && paddr < node->paddr + BIT(size_bits + 1)); node = node->next);
        /* _refill_pool should not have returned if this wasn't possible */
        assert(node);
    }
    /* allocate two new nodes */
    left = _new_node(alloc);
    if (!left) {
        ZF_LOGV("Failed to allocate left node");
        return 1;
    }
    right = _new_node(alloc);
    if (!right) {
        ZF_LOGV("Failed to allocate right node");
        _delete_node(alloc, left);
        return 1;
    }
    /* perform the first retype (lower half of the parent untyped) */
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, seL4_UntypedObject, size_bits, left->ut.root, left->ut.dest, left->ut.destDepth, left->ut.offset, 1);
    if (sel4_error != seL4_NoError) {
        _delete_node(alloc, left);
        _delete_node(alloc, right);
        /* Well this shouldn't happen */
        ZF_LOGE("Failed to retype untyped, error %d\n", sel4_error);
        return 1;
    }
    /* perform the second retype (upper half) */
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, seL4_UntypedObject, size_bits, right->ut.root, right->ut.dest, right->ut.destDepth, right->ut.offset, 1);
    if (sel4_error != seL4_NoError) {
        /* undo the first retype before bailing out */
        vka_cnode_delete(&left->ut);
        _delete_node(alloc, left);
        _delete_node(alloc, right);
        /* Well this shouldn't happen */
        ZF_LOGE("Failed to retype untyped, error %d\n", sel4_error);
        return 1;
    }
    /* all is done. remove the parent and insert the children */
    _remove_node(&heads[size_bits + 1], node);
    left->parent = right->parent = node;
    left->sibling = right;
    left->origin_head = &heads[size_bits];
    right->origin_head = &heads[size_bits];
    right->sibling = left;
    if (node->paddr != ALLOCMAN_NO_PADDR) {
        left->paddr = node->paddr;
        right->paddr = node->paddr + BIT(size_bits);
    } else {
        left->paddr = right->paddr = ALLOCMAN_NO_PADDR;
    }
    /* insert in this order so that we end up pulling the untypeds off in order of contiguous
     * physical address. This makes various allocation problems slightly less likely to happen */
    _insert_node(&heads[size_bits], right);
    _insert_node(&heads[size_bits], left);
    return 0;
}
/* Ensure split->heads[size_bits] is non-empty (legacy 32-bit variant; pools
 * are capped below 30 bits). If empty, recursively obtain a node one size
 * class larger and split it into two buddy halves via seL4 retypes. The
 * parent node stays allocated (referenced by the children's `parent` field)
 * but leaves the free list. Returns 0 on success, 1 on failure. */
static int _refill_pool(allocman_t *alloc, utspace_split_t *split, uint32_t size_bits)
{
    struct utspace_split_node *node;
    struct utspace_split_node *left, *right;
    int sel4_error;
    /* see if pool is actually empty */
    if (split->heads[size_bits]) {
        return 0;
    }
    /* ensure we are not the highest pool */
    if (size_bits >= 30) {
        /* bugger, no untypeds bigger than us */
        return 1;
    }
    /* get something from the next pool up */
    if (_refill_pool(alloc, split, size_bits + 1)) {
        /* could not fill higher pool */
        return 1;
    }
    /* use the first node for lack of a better one */
    node = split->heads[size_bits + 1];
    /* allocate two new nodes */
    left = _new_node(alloc);
    if (!left) {
        return 1;
    }
    right = _new_node(alloc);
    if (!right) {
        _delete_node(alloc, left);
        return 1;
    }
    /* perform the first retype (lower half of the parent untyped) */
#if defined(CONFIG_KERNEL_STABLE)
    sel4_error = seL4_Untyped_RetypeAtOffset(node->ut.capPtr, seL4_UntypedObject, 0, size_bits, left->ut.root, left->ut.dest, left->ut.destDepth, left->ut.offset, 1);
#else
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, seL4_UntypedObject, size_bits, left->ut.root, left->ut.dest, left->ut.destDepth, left->ut.offset, 1);
#endif
    if (sel4_error != seL4_NoError) {
        _delete_node(alloc, left);
        _delete_node(alloc, right);
        /* Well this shouldn't happen */
        return 1;
    }
    /* perform the second retype (upper half) */
#if defined(CONFIG_KERNEL_STABLE)
    sel4_error = seL4_Untyped_RetypeAtOffset(node->ut.capPtr, seL4_UntypedObject, BIT(size_bits), size_bits, right->ut.root, right->ut.dest, right->ut.destDepth, right->ut.offset, 1);
#else
    sel4_error = seL4_Untyped_Retype(node->ut.capPtr, seL4_UntypedObject, size_bits, right->ut.root, right->ut.dest, right->ut.destDepth, right->ut.offset, 1);
#endif
    if (sel4_error != seL4_NoError) {
        /* undo the first retype before bailing out */
        vka_cnode_delete(&left->ut);
        _delete_node(alloc, left);
        _delete_node(alloc, right);
        /* Well this shouldn't happen */
        return 1;
    }
    /* all is done. remove the parent and insert the children */
    _remove_node(&split->heads[size_bits + 1], node);
    left->parent = right->parent = node;
    left->sibling = right;
    right->sibling = left;
    if (node->paddr) {
        left->paddr = node->paddr;
        right->paddr = node->paddr + BIT(size_bits);
    } else {
        /* NOTE(review): paddr 0 doubles as "no paddr" here -- the newer
         * variant uses ALLOCMAN_NO_PADDR instead. */
        left->paddr = right->paddr = 0;
    }
    /* insert in this order so that we end up pulling the untypeds off in order of contiguous
     * physical address. This makes various allocation problems slightly less likely to happen */
    _insert_node(&split->heads[size_bits], right);
    _insert_node(&split->heads[size_bits], left);
    return 0;
}