/* * ======== un_register_gppsm_seg ======== * Purpose: * UnRegister the SM allocator by freeing all its resources and * nulling cmm mgr table entry. * Note: * This routine is always called within cmm lock crit sect. */ static void un_register_gppsm_seg(struct cmm_allocator *psma) { struct cmm_mnode *mnode_obj = NULL; struct cmm_mnode *next_node = NULL; DBC_REQUIRE(psma != NULL); if (psma->free_list_head != NULL) { /* free nodes on free list */ mnode_obj = (struct cmm_mnode *)lst_first(psma->free_list_head); while (mnode_obj) { next_node = (struct cmm_mnode *)lst_next(psma->free_list_head, (struct list_head *) mnode_obj); lst_remove_elem(psma->free_list_head, (struct list_head *)mnode_obj); kfree((void *)mnode_obj); /* next node. */ mnode_obj = next_node; } kfree(psma->free_list_head); /* delete freelist */ /* free nodes on InUse list */ mnode_obj = (struct cmm_mnode *)lst_first(psma->in_use_list_head); while (mnode_obj) { next_node = (struct cmm_mnode *)lst_next(psma->in_use_list_head, (struct list_head *) mnode_obj); lst_remove_elem(psma->in_use_list_head, (struct list_head *)mnode_obj); kfree((void *)mnode_obj); /* next node. */ mnode_obj = next_node; } kfree(psma->in_use_list_head); /* delete InUse list */ } if ((void *)psma->dw_vm_base != NULL) MEM_UNMAP_LINEAR_ADDRESS((void *)psma->dw_vm_base); /* Free allocator itself */ kfree(psma); }
/*
 *  ======== write_ext_dsp_data ========
 *  Purpose:
 *      Copies buffers to the external memory (trace section or EXTMEM).
 *
 *  Parameters:
 *      dev_context:  Bridge device context; supplies the cached external
 *                    base mapping and host resources.
 *      host_buff:    Source buffer for the copy; when ul_num_bytes is 0 it
 *                    instead receives the mapped GPP-side destination
 *                    address (written as a u32).
 *      dsp_addr:     Destination address in DSP external memory.
 *      ul_num_bytes: Number of bytes to copy (0 = just report the address).
 *      mem_type:     Unused; kept for interface compatibility with callers.
 *      dynamic_load: true for dynamic-loader writes; forces a fresh
 *                    remap/unmap of the external region.
 *  Returns:
 *      0 on success, -EPERM on symbol-lookup, range, or mapping failure.
 *
 *  Cleanup vs. the previous revision: the never-read locals temp_byte1,
 *  temp_byte2, remain_byte[], i, ul_tlb_base_virt and ul_shm_offset_virt
 *  were removed; all dev_get_symbol() calls are kept because they update
 *  file-scope globals used elsewhere.
 */
int write_ext_dsp_data(struct bridge_dev_context *dev_context,
		       u8 *host_buff, u32 dsp_addr, u32 ul_num_bytes,
		       u32 mem_type, bool dynamic_load)
{
	u32 dw_base_addr = dev_context->dsp_ext_base_addr;
	u32 dw_offset = 0;
	int ret = 0;
	u32 dw_ext_prog_virt_mem;
	struct cfg_hostres *host_res = dev_context->resources;
	bool trace_load = false;

	if (symbols_reloaded) {
		/* Check if it is a load to Trace section */
		ret = dev_get_symbol(dev_context->dev_obj,
				     DSP_TRACESEC_BEG, &ul_trace_sec_beg);
		if (!ret)
			ret = dev_get_symbol(dev_context->dev_obj,
					     DSP_TRACESEC_END,
					     &ul_trace_sec_end);
	}
	if (!ret) {
		if ((dsp_addr <= ul_trace_sec_end) &&
		    (dsp_addr >= ul_trace_sec_beg))
			trace_load = true;
	}

	/* If dynamic, force remap/unmap */
	if ((dynamic_load || trace_load) && dw_base_addr) {
		dw_base_addr = 0;
		MEM_UNMAP_LINEAR_ADDRESS((void *)
					 dev_context->dsp_ext_base_addr);
		dev_context->dsp_ext_base_addr = 0x0;
	}
	if (!dw_base_addr) {
		if (symbols_reloaded)
			/* Get SHM_BEG EXT_BEG and EXT_END. */
			ret = dev_get_symbol(dev_context->dev_obj,
					     SHMBASENAME, &ul_shm_base_virt);
		if (dynamic_load) {
			if (!ret) {
				if (symbols_reloaded)
					ret = dev_get_symbol
					    (dev_context->dev_obj, DYNEXTBASE,
					     &ul_ext_base);
			}
			if (!ret) {
				/* DR OMAPS00013235 : DLModules array may be
				 * in EXTMEM. It is expected that DYNEXTMEM
				 * and EXTMEM are contiguous, so checking for
				 * the upper bound at EXTEND should be Ok. */
				if (symbols_reloaded)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTEND,
					     &ul_ext_end);
			}
		} else {
			if (symbols_reloaded) {
				if (!ret)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTBASE,
					     &ul_ext_base);
				if (!ret)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTEND,
					     &ul_ext_end);
			}
		}
		/* Trace buffer is right after the shm SEG0, so set the
		 * base address to SHMBASE */
		if (trace_load)
			ul_ext_base = ul_shm_base_virt;

		if (ul_ext_end < ul_ext_base)
			ret = -EPERM;

		if (!ret) {
			/* These lookups update the file-scope globals
			 * shm0_end and ul_dyn_ext_base; ul_dyn_ext_base is
			 * also used below for the non-trace offset. */
			if (symbols_reloaded) {
				ret = dev_get_symbol
				    (dev_context->dev_obj,
				     DSP_TRACESEC_END, &shm0_end);
				if (!ret) {
					ret = dev_get_symbol
					    (dev_context->dev_obj, DYNEXTBASE,
					     &ul_dyn_ext_base);
				}
			}
			if (trace_load) {
				dw_ext_prog_virt_mem =
				    dev_context->atlb_entry[0].gpp_va;
			} else {
				dw_ext_prog_virt_mem = host_res->mem_base[1];
				dw_ext_prog_virt_mem +=
				    (ul_ext_base - ul_dyn_ext_base);
			}

			dev_context->dsp_ext_base_addr =
			    (u32) MEM_LINEAR_ADDRESS((void *)
						     dw_ext_prog_virt_mem,
						     ul_ext_end - ul_ext_base);
			dw_base_addr += dev_context->dsp_ext_base_addr;
			/* This dsp_ext_base_addr will get cleared only when
			 * the board is stopped. */
			if (!dev_context->dsp_ext_base_addr)
				ret = -EPERM;
		}
	}
	if (!dw_base_addr || !ul_ext_base || !ul_ext_end)
		ret = -EPERM;

	if (!ret) {
		dw_offset = dsp_addr - ul_ext_base;
		/* Also make sure the dsp_addr is < ul_ext_end and that the
		 * subtraction above did not wrap below ul_ext_base. */
		if (dsp_addr > ul_ext_end || dw_offset > dsp_addr)
			ret = -EPERM;
	}
	if (!ret) {
		if (ul_num_bytes)
			memcpy((u8 *) dw_base_addr + dw_offset,
			       host_buff, ul_num_bytes);
		else
			*((u32 *) host_buff) = dw_base_addr + dw_offset;
	}
	/* Unmap here to force remap for other Ext loads */
	if ((dynamic_load || trace_load) && dev_context->dsp_ext_base_addr) {
		MEM_UNMAP_LINEAR_ADDRESS((void *)
					 dev_context->dsp_ext_base_addr);
		dev_context->dsp_ext_base_addr = 0x0;
	}
	symbols_reloaded = false;
	return ret;
}
/*
 *  ======== write_ext_dsp_data ========
 *  Purpose:
 *      Copies buffers to the external memory (trace section or EXTMEM).
 *      When ul_num_bytes is 0, writes the mapped destination address back
 *      into host_buff instead of copying.
 *      Returns 0 on success, -EPERM on symbol/range/mapping failure.
 *
 *  NOTE(review): this is a token-for-token duplicate of the earlier
 *  write_ext_dsp_data definition in this file with its comments stripped —
 *  presumably the two copies belong to different build configurations;
 *  confirm, otherwise this is a redefinition.
 */
int write_ext_dsp_data(struct bridge_dev_context *dev_context,
		       u8 *host_buff, u32 dsp_addr, u32 ul_num_bytes,
		       u32 mem_type, bool dynamic_load)
{
	u32 dw_base_addr = dev_context->dsp_ext_base_addr;
	u32 dw_offset = 0;
	/* temp_byte1/2, remain_byte and i are set but never read here. */
	u8 temp_byte1, temp_byte2;
	u8 remain_byte[4];
	s32 i;
	int ret = 0;
	u32 dw_ext_prog_virt_mem;
	/* ul_tlb_base_virt / ul_shm_offset_virt are computed but unused. */
	u32 ul_tlb_base_virt = 0;
	u32 ul_shm_offset_virt = 0;
	struct cfg_hostres *host_res = dev_context->resources;
	bool trace_load = false;
	temp_byte1 = 0x0;
	temp_byte2 = 0x0;

	if (symbols_reloaded) {
		/* Check if it is a load to the Trace section. */
		ret = dev_get_symbol(dev_context->dev_obj,
				     DSP_TRACESEC_BEG, &ul_trace_sec_beg);
		if (!ret)
			ret = dev_get_symbol(dev_context->dev_obj,
					     DSP_TRACESEC_END,
					     &ul_trace_sec_end);
	}
	if (!ret) {
		if ((dsp_addr <= ul_trace_sec_end) &&
		    (dsp_addr >= ul_trace_sec_beg))
			trace_load = true;
	}

	/* If dynamic (or trace), force remap/unmap of the cached mapping. */
	if ((dynamic_load || trace_load) && dw_base_addr) {
		dw_base_addr = 0;
		MEM_UNMAP_LINEAR_ADDRESS((void *)
					 dev_context->dsp_ext_base_addr);
		dev_context->dsp_ext_base_addr = 0x0;
	}
	if (!dw_base_addr) {
		if (symbols_reloaded)
			/* Get SHM_BEG, EXT_BEG and EXT_END. */
			ret = dev_get_symbol(dev_context->dev_obj,
					     SHMBASENAME, &ul_shm_base_virt);
		if (dynamic_load) {
			if (!ret) {
				if (symbols_reloaded)
					ret = dev_get_symbol
					    (dev_context->dev_obj, DYNEXTBASE,
					     &ul_ext_base);
			}
			if (!ret) {
				/* DYNEXTMEM and EXTMEM are expected to be
				 * contiguous, so bounding at EXTEND is OK. */
				if (symbols_reloaded)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTEND,
					     &ul_ext_end);
			}
		} else {
			if (symbols_reloaded) {
				if (!ret)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTBASE,
					     &ul_ext_base);
				if (!ret)
					ret = dev_get_symbol
					    (dev_context->dev_obj, EXTEND,
					     &ul_ext_end);
			}
		}
		/* Trace buffer sits right after shm SEG0: base = SHMBASE. */
		if (trace_load)
			ul_ext_base = ul_shm_base_virt;
		if (ul_ext_end < ul_ext_base)
			ret = -EPERM;
		if (!ret) {
			ul_tlb_base_virt =
			    dev_context->atlb_entry[0].dsp_va * DSPWORDSIZE;
			/* These lookups update file-scope globals shm0_end
			 * and ul_dyn_ext_base (the latter is used below). */
			if (symbols_reloaded) {
				ret = dev_get_symbol
				    (dev_context->dev_obj,
				     DSP_TRACESEC_END, &shm0_end);
				if (!ret) {
					ret = dev_get_symbol
					    (dev_context->dev_obj, DYNEXTBASE,
					     &ul_dyn_ext_base);
				}
			}
			ul_shm_offset_virt =
			    ul_shm_base_virt - ul_tlb_base_virt;
			if (trace_load) {
				dw_ext_prog_virt_mem =
				    dev_context->atlb_entry[0].gpp_va;
			} else {
				dw_ext_prog_virt_mem = host_res->mem_base[1];
				dw_ext_prog_virt_mem +=
				    (ul_ext_base - ul_dyn_ext_base);
			}
			dev_context->dsp_ext_base_addr =
			    (u32) MEM_LINEAR_ADDRESS((void *)
						     dw_ext_prog_virt_mem,
						     ul_ext_end - ul_ext_base);
			dw_base_addr += dev_context->dsp_ext_base_addr;
			/* dsp_ext_base_addr is cleared only when the board
			 * is stopped. */
			if (!dev_context->dsp_ext_base_addr)
				ret = -EPERM;
		}
	}
	if (!dw_base_addr || !ul_ext_base || !ul_ext_end)
		ret = -EPERM;
	if (!ret) {
		for (i = 0; i < 4; i++)
			remain_byte[i] = 0x0;
		dw_offset = dsp_addr - ul_ext_base;
		/* Ensure dsp_addr <= ul_ext_end and the subtraction above
		 * did not wrap below ul_ext_base. */
		if (dsp_addr > ul_ext_end || dw_offset > dsp_addr)
			ret = -EPERM;
	}
	if (!ret) {
		if (ul_num_bytes)
			memcpy((u8 *) dw_base_addr + dw_offset,
			       host_buff, ul_num_bytes);
		else
			*((u32 *) host_buff) = dw_base_addr + dw_offset;
	}
	/* Unmap here to force remap for other Ext loads. */
	if ((dynamic_load || trace_load) && dev_context->dsp_ext_base_addr) {
		MEM_UNMAP_LINEAR_ADDRESS((void *)
					 dev_context->dsp_ext_base_addr);
		dev_context->dsp_ext_base_addr = 0x0;
	}
	symbols_reloaded = false;
	return ret;
}