void iser_task_rdma_finalize(struct iscsi_iser_task *iser_task)
{
	int is_rdma_aligned = 1;
	struct iser_regd_buf *regd;

	/* if we were reading, copy back to the unaligned sglist;
	 * in any case dma_unmap and free the copy
	 */
	if (iser_task->data_copy[ISER_DIR_IN].copy_buf != NULL) {
		is_rdma_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task, ISER_DIR_IN);
	}
	if (iser_task->data_copy[ISER_DIR_OUT].copy_buf != NULL) {
		is_rdma_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task, ISER_DIR_OUT);
	}

	/* unregister the memory region only if it was registered via FMR */
	if (iser_task->dir[ISER_DIR_IN]) {
		regd = &iser_task->rdma_regd[ISER_DIR_IN];
		if (regd->reg.is_fmr)
			iser_unreg_mem(&regd->reg);
	}

	if (iser_task->dir[ISER_DIR_OUT]) {
		regd = &iser_task->rdma_regd[ISER_DIR_OUT];
		if (regd->reg.is_fmr)
			iser_unreg_mem(&regd->reg);
	}

	/* if the data was unaligned, it was already unmapped and then copied */
	if (is_rdma_aligned)
		iser_dma_unmap_task_data(iser_task);
}
void iser_task_rdma_finalize(struct iscsi_iser_task *iser_task)
{
	int deferred;
	int is_rdma_aligned = 1;
	struct iser_regd_buf *regd;

	/* if we were reading, copy back to the unaligned sglist;
	 * in any case dma_unmap and free the copy
	 */
	if (iser_task->data_copy[ISER_DIR_IN].copy_buf != NULL) {
		is_rdma_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task, ISER_DIR_IN);
	}
	if (iser_task->data_copy[ISER_DIR_OUT].copy_buf != NULL) {
		is_rdma_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task, ISER_DIR_OUT);
	}

	/* drop our reference on each registered buffer; a non-zero return
	 * means other references remain and the release is deferred
	 */
	if (iser_task->dir[ISER_DIR_IN]) {
		regd = &iser_task->rdma_regd[ISER_DIR_IN];
		deferred = iser_regd_buff_release(regd);
		if (deferred) {
			iser_err("%d references remain for BUF-IN rdma reg\n",
				 atomic_read(&regd->ref_count));
		}
	}

	if (iser_task->dir[ISER_DIR_OUT]) {
		regd = &iser_task->rdma_regd[ISER_DIR_OUT];
		deferred = iser_regd_buff_release(regd);
		if (deferred) {
			iser_err("%d references remain for BUF-OUT rdma reg\n",
				 atomic_read(&regd->ref_count));
		}
	}

	/* if the data was unaligned, it was already unmapped and then copied */
	if (is_rdma_aligned)
		iser_dma_unmap_task_data(iser_task);
}
void iser_task_rdma_finalize(struct iscsi_iser_task *iser_task)
{
	int is_rdma_data_aligned = 1;
	int is_rdma_prot_aligned = 1;
	int prot_count = scsi_prot_sg_count(iser_task->sc);

	/* if we were reading, copy back to the unaligned sglist;
	 * in any case dma_unmap and free the copy
	 */
	if (iser_task->data[ISER_DIR_IN].orig_sg) {
		is_rdma_data_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->data[ISER_DIR_IN],
						ISER_DIR_IN);
	}

	if (iser_task->data[ISER_DIR_OUT].orig_sg) {
		is_rdma_data_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->data[ISER_DIR_OUT],
						ISER_DIR_OUT);
	}

	/* protection (T10-PI) buffers get the same bounce-copy treatment */
	if (iser_task->prot[ISER_DIR_IN].orig_sg) {
		is_rdma_prot_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->prot[ISER_DIR_IN],
						ISER_DIR_IN);
	}

	if (iser_task->prot[ISER_DIR_OUT].orig_sg) {
		is_rdma_prot_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->prot[ISER_DIR_OUT],
						ISER_DIR_OUT);
	}

	if (iser_task->dir[ISER_DIR_IN]) {
		iser_unreg_rdma_mem(iser_task, ISER_DIR_IN);
		if (is_rdma_data_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->data[ISER_DIR_IN],
						 DMA_FROM_DEVICE);
		if (prot_count && is_rdma_prot_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->prot[ISER_DIR_IN],
						 DMA_FROM_DEVICE);
	}

	if (iser_task->dir[ISER_DIR_OUT]) {
		iser_unreg_rdma_mem(iser_task, ISER_DIR_OUT);
		if (is_rdma_data_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->data[ISER_DIR_OUT],
						 DMA_TO_DEVICE);
		if (prot_count && is_rdma_prot_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->prot[ISER_DIR_OUT],
						 DMA_TO_DEVICE);
	}
}
void iser_task_rdma_finalize(struct iscsi_iser_task *iser_task)
{
	struct iser_device *device = iser_task->iser_conn->ib_conn.device;
	int is_rdma_data_aligned = 1;
	int is_rdma_prot_aligned = 1;
	int prot_count = scsi_prot_sg_count(iser_task->sc);

	/* if we were reading, copy back to the unaligned sglist;
	 * in any case dma_unmap and free the copy
	 */
	if (iser_task->data_copy[ISER_DIR_IN].copy_buf != NULL) {
		is_rdma_data_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->data[ISER_DIR_IN],
						&iser_task->data_copy[ISER_DIR_IN],
						ISER_DIR_IN);
	}

	if (iser_task->data_copy[ISER_DIR_OUT].copy_buf != NULL) {
		is_rdma_data_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->data[ISER_DIR_OUT],
						&iser_task->data_copy[ISER_DIR_OUT],
						ISER_DIR_OUT);
	}

	/* protection (T10-PI) buffers get the same bounce-copy treatment */
	if (iser_task->prot_copy[ISER_DIR_IN].copy_buf != NULL) {
		is_rdma_prot_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->prot[ISER_DIR_IN],
						&iser_task->prot_copy[ISER_DIR_IN],
						ISER_DIR_IN);
	}

	if (iser_task->prot_copy[ISER_DIR_OUT].copy_buf != NULL) {
		is_rdma_prot_aligned = 0;
		iser_finalize_rdma_unaligned_sg(iser_task,
						&iser_task->prot[ISER_DIR_OUT],
						&iser_task->prot_copy[ISER_DIR_OUT],
						ISER_DIR_OUT);
	}

	if (iser_task->dir[ISER_DIR_IN]) {
		device->iser_unreg_rdma_mem(iser_task, ISER_DIR_IN);
		if (is_rdma_data_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->data[ISER_DIR_IN]);
		if (prot_count && is_rdma_prot_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->prot[ISER_DIR_IN]);
	}

	if (iser_task->dir[ISER_DIR_OUT]) {
		device->iser_unreg_rdma_mem(iser_task, ISER_DIR_OUT);
		if (is_rdma_data_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->data[ISER_DIR_OUT]);
		if (prot_count && is_rdma_prot_aligned)
			iser_dma_unmap_task_data(iser_task,
						 &iser_task->prot[ISER_DIR_OUT]);
	}
}