void ccci_md_mem_reserve(void)
{
	void *ptr = NULL;
	int tmp_md_id = MD_SYS1;

	parse_ccci_dfo_setting(NULL, 0);

#if defined(CONFIG_MTK_ENABLE_MD1)
	ptr = (void *)arm_memblock_steal(modem_size_list[tmp_md_id], SZ_32M);
	if (ptr) {
		md_resv_mem_addr[tmp_md_id] = (unsigned int)ptr;
		printk("[ccci/ctl] (1)md mem reserve successfully,ptr=%p,size=%d\n",
		       ptr, modem_size_list[tmp_md_id]);
	} else {
		printk("[ccci/ctl] (1)md mem reserve fail.\n");
		md_resv_mem_addr[tmp_md_id] = 0;
	}
	tmp_md_id = MD_SYS2;
	ptr = NULL;
#endif // CONFIG_MTK_ENABLE_MD1

#if defined(MTK_ENABLE_MD2)
	ptr = (void *)arm_memblock_steal(modem_size_list[tmp_md_id], SZ_32M);
	if (ptr) {
		md_resv_mem_addr[tmp_md_id] = (unsigned int)ptr;
		printk("[ccci/ctl] (2)md mem reserve successfully,ptr=%p,size=%d\n",
		       ptr, modem_size_list[tmp_md_id]);
	} else {
		printk("[ccci/ctl] (2)md mem reserve fail.\n");
		md_resv_mem_addr[tmp_md_id] = 0;
	}
#endif // MTK_ENABLE_MD2
}
void eemcs_memory_reserve(void)
{
	unsigned int md5_en;

	parse_eemcs_dfo_setting(NULL, 0);

	if ((ext_md_usage_case & MD5_EN) == MD5_EN) {
		// Only MD5 enabled
		md5_en = 1;
		ext_md_mem_addr[MD_SYS5 - MD_EXT1] =
			(unsigned int)arm_memblock_steal(ext_md_size_list[MD_SYS5 - MD_EXT1], SZ_32M);
	} else {
		// No MD is enabled
		md5_en = 0;
		ext_md_mem_addr[MD_SYS5 - MD_EXT1] = 0;
	}

	if ((ext_md_mem_addr[MD_SYS5 - MD_EXT1] & (32 * 1024 * 1024 - 1)) != 0)
		printk("[EEMCS/PLAT] md5 memory addr is not 32M align!!!\n");

	printk("[EEMCS/PLAT] EN(%d):MemBase(0x%08X)\n",
	       md5_en, ext_md_mem_addr[MD_SYS5 - MD_EXT1]);
	printk("[EEMCS/PLAT] (0)MemStart(0x%08X):MemSize(0x%08X)\n",
	       ext_md_mem_addr[MD_SYS5 - MD_EXT1], ext_md_size_list[MD_SYS5 - MD_EXT1]);

	// If META was configured before DFO parsing, take the META value.
	if (meta_ext_md_support[MD_SYS5 - MD_EXT1]) {
		ext_md_support[MD_SYS5 - MD_EXT1] = meta_ext_md_support[MD_SYS5 - MD_EXT1];
		printk("[EEMCS/PLAT] (0) set md%d type with meta value:%d\n",
		       MD_SYS5 + 1, meta_ext_md_support[MD_SYS5 - MD_EXT1]);
		meta_ext_md_support[MD_SYS5 - MD_EXT1] = 0;
	}
}
void ccci_md_mem_reserve(void)
{
	int reserved_size = 0;
	phys_addr_t ptr = 0;
	int i;

	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===1.\n");
#if defined(CONFIG_OF)
	lk_meta_tag_info_collect();
	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===2.\n");
#if defined(FEATURE_DFO_EN)
	// DFO enabled and using device tree
	lk_dfo_tag_info_collect();
#endif
#endif
	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===3.\n");

	// Get MD memory requirements
	collect_md_settings();
	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===4.\n");

	// For internal MD
	for (i = 0; i < 4; i++) { // 0~3 for internal
		if (modem_size_list[i] == 0)
			continue;
		reserved_size = ALIGN(modem_size_list[MD_SYS1 + i], SZ_2M);
		memblock_set_current_limit(0xFFFFFFFF);
#ifdef CONFIG_ARM64
		ptr = arm64_memblock_steal(reserved_size, CCCI_MEM_ALIGN);
#else
		ptr = arm_memblock_steal(reserved_size, CCCI_MEM_ALIGN);
#endif
		memblock_set_current_limit(MEMBLOCK_ALLOC_ANYWHERE);
		if (ptr) {
			md_resv_mem_list[i] = ptr;
			CCCI_UTIL_INF_MSG("md%d mem reserve successfully, ptr=0x%pa, size=0x%x\n",
					  i + 1, &ptr, reserved_size);
		} else {
			CCCI_UTIL_INF_MSG("md%d mem reserve fail.\n", i + 1);
		}
	}

#if 0 // def CONFIG_ARM64
	memblock_set_current_limit(0xFFFFFFFF);
	ptr = arm64_memblock_steal(90 * 1024 * 1024, CCCI_MEM_ALIGN);
	md_resv_mem_list[0] = ptr;
	ptr = arm64_memblock_steal(32 * 1024 * 1024, CCCI_MEM_ALIGN);
	md_resv_mem_list[1] = ptr;
	memblock_set_current_limit(MEMBLOCK_ALLOC_ANYWHERE);
#endif

	// Parse META setting
	ccci_parse_meta_md_setting(md_info_tag_val);
	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===5.\n");

	// Calculate memory layout
	cal_md_mem_setting(MEM_LAY_OUT_VER);
	CCCI_UTIL_INF_MSG("ccci_md_mem_reserve===6.\n");
}
/* Allocate the memory to save secure ram */
int __init omap_secure_ram_reserve_memblock(void)
{
	u32 size = OMAP_SECURE_RAM_STORAGE;

	size = ALIGN(size, SZ_1M);
	omap_secure_memblock_base = arm_memblock_steal(size, SZ_1M);

	return 0;
}
void __init vdec_dvt_memory_reserve(void)
{
	mydriver_p = arm_memblock_steal(SZ_512M, SZ_1M);
	if (mydriver_p) {
		printk(KERN_ALERT "memblock done, p: %p\n", mydriver_p);
	} else {
		printk(KERN_ALERT "memblock fail\n");
	}
}
VOID __init mtk_wcn_consys_memory_reserve(VOID)
{
	gConEmiPhyBase = arm_memblock_steal(SZ_1M, SZ_1M);
	if (gConEmiPhyBase) {
		WMT_PLAT_INFO_FUNC("memblock done: 0x%x\n", gConEmiPhyBase);
	} else {
		WMT_PLAT_ERR_FUNC("memblock fail\n");
	}
}
// Reserve DRAM memory for MD from system
void ccci_md_mem_reserve(void)
{
	void *ptr;

	ptr = (void *)arm_memblock_steal(modem_size_list[0], SZ_32M);
	if (ptr) {
		md_resv_mem_addr[MD_SYS1] = (unsigned int)ptr;
		printk("[ccci/ctl] (0)md mem reserve successfully,ptr=%p,size=%d\n",
		       ptr, modem_size_list[0]);
	} else {
		printk("[ccci/ctl] (0)md mem reserve fail.\n");
		md_resv_mem_addr[MD_SYS1] = 0;
	}
}
void __init omap_dsp_reserve_sdram_memblock(void)
{
	phys_addr_t size = CONFIG_TIDSPBRIDGE_MEMPOOL_SIZE;
	phys_addr_t paddr;

	if (!size)
		return;

	paddr = arm_memblock_steal(size, SZ_1M);
	if (!paddr) {
		pr_err("%s: failed to reserve %llx bytes\n",
		       __func__, (unsigned long long)size);
		return;
	}

	omap_dsp_phys_mempool_base = paddr;
}
void eemcs_memory_reserve(void)
{
	unsigned int md5_en;

	if ((ext_md_usage_case & MD5_EN) == MD5_EN) {
		// Only MD5 enabled
		md5_en = 1;
		ext_md_mem_addr[MD_SYS5 - MD_EXT1] =
			(unsigned int)arm_memblock_steal(ext_md_size_list[MD_SYS5 - MD_EXT1], SZ_32M);
	} else {
		// No MD is enabled
		md5_en = 0;
		ext_md_mem_addr[MD_SYS5 - MD_EXT1] = 0;
	}

	if ((ext_md_mem_addr[MD_SYS5 - MD_EXT1] & (32 * 1024 * 1024 - 1)) != 0)
		printk("[EEMCS_HELPER] md5 memory addr is not 32M align!!!\n");

	printk("[EEMCS_HELPER] EN(%d):MemBase(0x%08X)\n",
	       md5_en, ext_md_mem_addr[MD_SYS5 - MD_EXT1]);
	printk("[EEMCS_HELPER] (0)MemStart(0x%08X):MemSize(0x%08X)\n",
	       ext_md_mem_addr[MD_SYS5 - MD_EXT1], ext_md_size_list[MD_SYS5 - MD_EXT1]);
}
static void __init mx31moboard_reserve(void)
{
	/* reserve 4 MiB for mx3-camera */
	mx3_camera_base = arm_memblock_steal(MX3_CAMERA_BUF_SIZE,
					     MX3_CAMERA_BUF_SIZE);
}
static void __init mx31_3ds_reserve(void)
{
	/* reserve MX31_3DS_CAMERA_BUF_SIZE bytes for mx3-camera */
	mx3_camera_base = arm_memblock_steal(MX31_3DS_CAMERA_BUF_SIZE,
					     MX31_3DS_CAMERA_BUF_SIZE);
}
static void __init pcm037_reserve(void)
{
	mx3_camera_base = arm_memblock_steal(MX3_CAMERA_BUF_SIZE,
					     MX3_CAMERA_BUF_SIZE);
}
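All of the callbacks above run at early boot, while memblock still owns all RAM; on 32-bit ARM, arm_memblock_steal() is intended to be called from the machine descriptor's .reserve hook, which is what makes these reservations legal. Below is a minimal sketch of that hook-up for the pcm037_reserve() example above. Only the .reserve line comes from the example; the other fields are illustrative placeholders for a typical i.MX31 board file and may not match the real mach-pcm037.c.

/* Sketch only: wire a reserve callback into an ARM machine descriptor so
 * that arm_memblock_steal() runs during early memblock setup.
 * Fields other than .reserve are assumed placeholders, not verified. */
MACHINE_START(PCM037, "Phytec Phycore pcm037")
	.map_io		= mx31_map_io,		/* assumed SoC helpers */
	.init_early	= imx31_init_early,
	.init_irq	= mx31_init_irq,
	.init_machine	= pcm037_init,
	.reserve	= pcm037_reserve,	/* steals the camera buffer before the page allocator takes over */
MACHINE_END

Once .reserve has returned, stealing is no longer permitted, so the reserved physical address must be stashed in a global (mx3_camera_base here) for the driver to pick up later.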