/*
 * Runs a DES/3DES update over nbBlocks 8-byte blocks in polled (CPU) mode,
 * or hands the whole request to the DMA path when dmaUse asks for it.
 *
 * pDesDes3Ctx: operation context (forwarded to the DMA variant).
 * pSrc/pDest:  input and output byte streams, nbBlocks * 8 bytes each.
 * dmaUse:      PUBLIC_CRYPTO_DMA_USE_* selector.
 *
 * Returns PUBLIC_CRYPTO_OPERATION_SUCCESS, or PUBLIC_CRYPTO_ERR_TIMEOUT if
 * the accelerator never raises its input/output ready bit.
 */
U32 PDrvCryptoUpdateDES(PUBLIC_CRYPTO_DES_DES3_CONTEXT * pDesDes3Ctx, U8 * pSrc, U8 * pDest, U32 nbBlocks, U32 dmaUse)
{
	U32 blockIndex;
	U32 word;
	U8 *pIn = pSrc;
	U8 *pOut = pDest;

	/* DMA-eligible requests (polling or IRQ mode) are delegated. */
	if (dmaUse == PUBLIC_CRYPTO_DMA_USE_POLLING ||
	    dmaUse == PUBLIC_CRYPTO_DMA_USE_IRQ)
		return PDrvCryptoUpdateDESWithDMA(pDesDes3Ctx, pSrc, pDest,
						  nbBlocks, dmaUse);

	for (blockIndex = 0; blockIndex < nbBlocks; blockIndex++) {
		/* (2) Wait until the accelerator accepts input. */
		if (scxPublicCryptoWaitForReadyBit(
				(VU32 *) &g_pDESReg_t->DES_CTRL,
				DES_CTRL_INPUT_READY_BIT)
			!= PUBLIC_CRYPTO_OPERATION_SUCCESS)
			return PUBLIC_CRYPTO_ERR_TIMEOUT;

		/* (3) Feed one 8-byte block: src -> data registers. */
		word = (U32) BYTES_TO_LONG(pIn);
		OUTREG32(&g_pDESReg_t->DES_DATA_L, word);
		pIn += 4;
		word = (U32) BYTES_TO_LONG(pIn);
		OUTREG32(&g_pDESReg_t->DES_DATA_H, word);
		pIn += 4;

		/* (4) Wait until the processed block is available. */
		if (scxPublicCryptoWaitForReadyBit(
				(VU32 *) &g_pDESReg_t->DES_CTRL,
				DES_CTRL_OUTPUT_READY_BIT)
			!= PUBLIC_CRYPTO_OPERATION_SUCCESS)
			return PUBLIC_CRYPTO_ERR_TIMEOUT;

		/* (5) Drain one 8-byte block: data registers -> dest. */
		word = INREG32(&g_pDESReg_t->DES_DATA_L);
		LONG_TO_BYTE(pOut, word);
		pOut += 4;
		word = INREG32(&g_pDESReg_t->DES_DATA_H);
		LONG_TO_BYTE(pOut, word);
		pOut += 4;
	}

	return PUBLIC_CRYPTO_OPERATION_SUCCESS;
}
/*
 * Prepares a DES/3DES operation context: zeroes the context, maps the DES
 * accelerator register bank into the global pointer, and seeds the shadow
 * control and IV registers from the init parameters.
 *
 * pDesDes3InitCtx: init parameters (hwa_config and an 8-byte IV).
 * pDesDes3Ctx:     operation context to initialize (fully overwritten).
 */
void PDrvCryptoInitDES(CryptoInitContext* pDesDes3InitCtx, PUBLIC_CRYPTO_DES_DES3_CONTEXT * pDesDes3Ctx)
{
	U8 *pIv = pDesDes3InitCtx->iv;

	/* Start from a clean operation context. */
	memset(pDesDes3Ctx, 0, sizeof(*pDesDes3Ctx));

	/* (1) Map the registers with the global variable. */
	g_pDESReg_t = (Des3DesReg_t *) IO_ADDRESS(DES1_REGS_HW_ADDR);

	/* (2) Initialize the shadow registers from the init parameters:
	 * control word first, then the IV split into low/high 32-bit words. */
	pDesDes3Ctx->registers.DES_CTRL = pDesDes3InitCtx->hwa_config;
	pDesDes3Ctx->registers.DES_IV_L = (U32) BYTES_TO_LONG(pIv);
	pIv += 4;
	pDesDes3Ctx->registers.DES_IV_H = (U32) BYTES_TO_LONG(pIv);
}
/*
 * Performs an AES update of nb_blocks 16-byte blocks from src to dest.
 *
 * Large requests (nb_blocks * AES_BLOCK_SIZE >= DMA_TRIGGER_IRQ_AES) are
 * processed through the DMA path; smaller ones are fed block-by-block to the
 * accelerator registers by the CPU. src/dest are kernel pointers when
 * aes_state->key_is_public is set, user-space pointers otherwise (they are
 * then staged through a bounded stack buffer with copy_from_user/
 * copy_to_user).
 *
 * Returns true on success, false on copy failure, DMA failure, or when the
 * accelerator is configured for the opposite direction. Panics if the
 * accelerator input-ready bit never rises in the CPU path.
 */
bool tf_aes_update(struct tf_crypto_aes_operation_state *aes_state,
	u8 *src, u8 *dest, u32 nb_blocks)
{
	u32 nbr_of_blocks;
	u32 temp;
	u8 *process_src;
	u8 *process_dest;
	u32 dma_use = PUBLIC_CRYPTO_DMA_USE_NONE;
	bool is_kernel = false;

	/*
	 * Choice of the processing type: DMA with IRQ completion once the
	 * payload reaches the DMA_TRIGGER_IRQ_AES threshold.
	 */
	if (nb_blocks * AES_BLOCK_SIZE >= DMA_TRIGGER_IRQ_AES)
		dma_use = PUBLIC_CRYPTO_DMA_USE_IRQ;

	dprintk(KERN_INFO "tf_aes_update: "
		"src=0x%08x, dest=0x%08x, nb_blocks=0x%08x, dma_use=0x%08x\n",
		(unsigned int)src,
		(unsigned int)dest,
		(unsigned int)nb_blocks,
		(unsigned int)dma_use);

	/* A public key means the buffers come from the synchronous kernel
	 * crypto API, i.e. they are already mapped in kernel space. */
	if (aes_state->key_is_public)
		is_kernel = true;

	/* Empty update: nothing to feed to the accelerator. */
	if (nb_blocks == 0) {
		dprintk(KERN_INFO "tf_aes_update: Nothing to process\n");
		return true;
	}

	/* Refuse to run if the HWA is currently set up for the opposite
	 * encrypt/decrypt direction (only enforced for secure keys). */
	if ((AES_CTRL_GET_DIRECTION(INREG32(&paes_reg->AES_CTRL)) !=
		AES_CTRL_GET_DIRECTION(aes_state->CTRL)) &&
			!aes_state->key_is_public) {
		dprintk(KERN_WARNING "HWA configured for another direction\n");
		return false;
	}

	/*Restore the registers of the accelerator from the operation state */
	tf_aes_restore_registers(aes_state, 0);

	if (dma_use == PUBLIC_CRYPTO_DMA_USE_IRQ) {
		/* Perform the update with DMA */
		if (!tf_aes_update_dma(src, dest, nb_blocks,
			aes_state->CTRL, is_kernel))
			return false;
	} else {
		/* CPU path: buf is large enough because this branch only
		 * runs when nb_blocks * AES_BLOCK_SIZE < DMA_TRIGGER_IRQ_AES
		 * (see the dma_use selection above). */
		u8 buf[DMA_TRIGGER_IRQ_AES];

		/*
		 * Synchronous Linux crypto API buffers are mapped in kernel
		 * space; user buffers are staged through buf (in-place).
		 */
		if (is_kernel) {
			process_src = src;
			process_dest = dest;
		} else {
			if (copy_from_user(buf, src,
				nb_blocks * AES_BLOCK_SIZE))
				return false;

			process_src = process_dest = buf;
		}

		for (nbr_of_blocks = 0;
			nbr_of_blocks < nb_blocks; nbr_of_blocks++) {

			/*We wait for the input ready */
			/*Crash the system as this should never occur */
			if (tf_crypto_wait_for_ready_bit(
				(u32 *)&paes_reg->AES_CTRL,
				AES_CTRL_INPUT_READY_BIT) !=
					PUBLIC_CRYPTO_OPERATION_SUCCESS)
				panic("Wait too long for AES hardware "
					"accelerator Input data to be ready\n");

			/* We copy the 16 bytes of data src->reg */
			temp = (u32) BYTES_TO_LONG(process_src);
			OUTREG32(&paes_reg->AES_DATA_IN_0, temp);
			process_src += 4;
			temp = (u32) BYTES_TO_LONG(process_src);
			OUTREG32(&paes_reg->AES_DATA_IN_1, temp);
			process_src += 4;
			temp = (u32) BYTES_TO_LONG(process_src);
			OUTREG32(&paes_reg->AES_DATA_IN_2, temp);
			process_src += 4;
			temp = (u32) BYTES_TO_LONG(process_src);
			OUTREG32(&paes_reg->AES_DATA_IN_3, temp);
			process_src += 4;

			/* We wait for the output ready */
			tf_crypto_wait_for_ready_bit_infinitely(
				(u32 *)&paes_reg->AES_CTRL,
				AES_CTRL_OUTPUT_READY_BIT);

			/* We copy the 16 bytes of data reg->dest.
			 * NOTE(review): output is read back through the
			 * AES_DATA_IN_* registers — presumably the data
			 * registers are bidirectional on this HWA; confirm
			 * against the accelerator TRM. */
			temp = INREG32(&paes_reg->AES_DATA_IN_0);
			LONG_TO_BYTE(process_dest, temp);
			process_dest += 4;
			temp = INREG32(&paes_reg->AES_DATA_IN_1);
			LONG_TO_BYTE(process_dest, temp);
			process_dest += 4;
			temp = INREG32(&paes_reg->AES_DATA_IN_2);
			LONG_TO_BYTE(process_dest, temp);
			process_dest += 4;
			temp = INREG32(&paes_reg->AES_DATA_IN_3);
			LONG_TO_BYTE(process_dest, temp);
			process_dest += 4;
		}

#ifdef CONFIG_TF_DRIVER_FAULT_INJECTION
		tf_aes_fault_injection(paes_reg->AES_CTRL, buf);
#endif

		/* Copy the staged result back to the user buffer. */
		if (!is_kernel)
			if (copy_to_user(dest, buf,
				nb_blocks * AES_BLOCK_SIZE))
				return false;
	}

	/* Save the accelerator registers into the operation state */
	tf_aes_save_registers(aes_state);

	dprintk(KERN_INFO "tf_aes_update: Done\n");

	return true;
}