/*
 *  Actual allocator for DMAable memory, unlocked variant
 *  (no locking is done here, unlike __sym_calloc_dma below).
 */
void *__sym_calloc_dma_unlocked(m_pool_ident_t dev_dmat, int size, char *name)
{
	m_pool_p mp;
	void *m = NULL;

	mp = ___get_dma_pool(dev_dmat);
	if (!mp)
		mp = ___cre_dma_pool(dev_dmat);
	if (mp)
		m = __sym_calloc(mp, size, name);
#ifdef	SYM_MEM_FREE_UNUSED
	if (mp && !mp->nump)
		___del_dma_pool(mp);
#endif

	return m;
}
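/*
 * Illustrative only -- not part of the driver.  A minimal sketch of one way
 * the unlocked variant above might be used, assuming (from the _unlocked
 * suffix) that it is meant for callers that already serialize access to the
 * pool themselves, e.g. by holding sym53c8xx_lock around several operations.
 * The function name sym_example_alloc_pair, the buffer sizes and the name
 * strings are hypothetical; the caller is expected to check both returned
 * pointers for NULL.
 */
#if 0
static void *sym_example_alloc_pair(m_pool_ident_t dev_dmat, void **second)
{
	unsigned long flags;
	void *first;

	spin_lock_irqsave(&sym53c8xx_lock, flags);
	first   = __sym_calloc_dma_unlocked(dev_dmat, 32, "EXAMPLE_A");
	*second = __sym_calloc_dma_unlocked(dev_dmat, 32, "EXAMPLE_B");
	spin_unlock_irqrestore(&sym53c8xx_lock, flags);

	return first;
}
#endif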
/*
 *  Free DMAable memory previously obtained from the allocators above.
 */
void __sym_mfree_dma(m_pool_ident_t dev_dmat, void *m, int size, char *name)
{
	unsigned long flags;
	m_pool_p mp;

	spin_lock_irqsave(&sym53c8xx_lock, flags);
	mp = ___get_dma_pool(dev_dmat);
	if (!mp)
		goto out;
	__sym_mfree(mp, m, size, name);
#ifdef	SYM_MEM_FREE_UNUSED
	if (!mp->nump)
		___del_dma_pool(mp);
#endif
out:
	spin_unlock_irqrestore(&sym53c8xx_lock, flags);
}
/*
 *  Actual virtual to bus physical address translator
 *  for 32 bit addressable DMAable memory.
 */
dma_addr_t __vtobus(m_pool_ident_t dev_dmat, void *m)
{
	unsigned long flags;
	m_pool_p mp;
	int hc = VTOB_HASH_CODE(m);
	m_vtob_p vp = NULL;
	void *a = (void *)((unsigned long)m & ~SYM_MEM_CLUSTER_MASK);
	dma_addr_t b;

	spin_lock_irqsave(&sym53c8xx_lock, flags);
	mp = ___get_dma_pool(dev_dmat);
	if (mp) {
		vp = mp->vtob[hc];
		while (vp && vp->vaddr != a)
			vp = vp->next;
	}
	if (!vp)
		panic("sym: VTOBUS FAILED!\n");
	b = vp->baddr + (m - a);
	spin_unlock_irqrestore(&sym53c8xx_lock, flags);

	return b;
}
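/*
 * Worked example of the address arithmetic above, with hypothetical numbers
 * and assuming, for the sake of illustration, an 8 KiB cluster so that
 * SYM_MEM_CLUSTER_MASK == 0x1fff (the real value depends on the build):
 * for m == 0xffff888012345678 the cluster base is
 * a == m & ~0x1fff == 0xffff888012344000.  The hash chain at
 * mp->vtob[VTOB_HASH_CODE(m)] is walked until an entry with
 * vp->vaddr == a is found; if that cluster was mapped at bus address
 * vp->baddr == 0x7e340000, the result is
 * b == 0x7e340000 + (m - a) == 0x7e340000 + 0x1678 == 0x7e341678.
 * The offset of m within its cluster is preserved because each vtob entry
 * describes one whole, cluster-aligned DMA mapping.
 */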
/*
 *  Actual allocator for DMAable memory.
 */
void *__sym_calloc_dma(m_pool_ident_t dev_dmat, int size, char *name)
{
	unsigned long flags;
	m_pool_p mp;
	void *m = NULL;

	spin_lock_irqsave(&sym53c8xx_lock, flags);
	mp = ___get_dma_pool(dev_dmat);
	if (!mp)
		mp = ___cre_dma_pool(dev_dmat);
	if (!mp)
		goto out;
	m = __sym_calloc(mp, size, name);
#ifdef	SYM_MEM_FREE_UNUSED
	if (!mp->nump)
		___del_dma_pool(mp);
#endif
out:
	spin_unlock_irqrestore(&sym53c8xx_lock, flags);

	return m;
}
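/*
 * Illustrative only -- not part of the driver.  A minimal sketch of how a
 * caller might pair the helpers above: allocate a DMAable buffer, translate
 * its virtual address to a bus address for the chip, then release it.  The
 * function name sym_example_dma_roundtrip, the 64-byte size and the name
 * string are hypothetical; dev_dmat is assumed to identify the device that
 * owns (or will own) the DMA pool.
 */
#if 0
static int sym_example_dma_roundtrip(m_pool_ident_t dev_dmat)
{
	void *m;
	dma_addr_t ba;

	m = __sym_calloc_dma(dev_dmat, 64, "EXAMPLE_BUF");
	if (!m)
		return -ENOMEM;

	ba = __vtobus(dev_dmat, m);	/* bus address handed to the chip */

	/* ... program the controller with 'ba' ... */

	__sym_mfree_dma(dev_dmat, m, 64, "EXAMPLE_BUF");
	return 0;
}
#endif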