Example #1
void LoadCipherState(int qid, struct sep_ctx_cipher *pCtx, uint8_t is_zero_iv)
{
	SepCipherPrivateContext_s *pAesPrivateCtx = (SepCipherPrivateContext_s *)pCtx->reserved;
	HwDesc_s desc;
	uint32_t block_size;

	HW_DESC_INIT(&desc);

	switch (ReadContextWord(&pCtx->mode)) {
	case SEP_CIPHER_ECB:
		return;
	case SEP_CIPHER_CTR:
	case SEP_CIPHER_XTS:
	case SEP_CIPHER_OFB:
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_STATE1);
		break;
	case SEP_CIPHER_CMAC:
		HW_DESC_SET_CIPHER_DO(&desc, AES_CMAC_INIT);
		/* fall through */
	default:
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_STATE0);
	}

	HW_DESC_SET_CIPHER_MODE(&desc, ReadContextWord(&pCtx->mode));
	if (ReadContextWord(&pCtx->alg) == SEP_CRYPTO_ALG_AES) {
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp)?ReadContextWord(&pAesPrivateCtx->tunnetDir):ReadContextWord(&pCtx->direction));
		block_size = SEP_AES_BLOCK_SIZE;
		HW_DESC_SET_KEY_SIZE_AES(&desc, ReadContextWord(&pCtx->key_size));
		HW_DESC_SET_CIPHER_CONFIG1(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp));
		if (ReadContextWord(&pCtx->crypto_key_type) == DX_XOR_HDCP_KEY) {
			HW_DESC_SET_AES_XOR_CRYPTO_KEY(&desc);
		}
		if (ReadContextWord(&pAesPrivateCtx->engineCore) == SEP_AES_ENGINE2) {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES2);
		} else {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES);
		}
	} else { /* DES */
		block_size = SEP_DES_IV_SIZE;
		HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_DES);
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pCtx->direction));
	}
	/* If is_zero_iv is set, use a zero block as the IV */
	if (is_zero_iv == 1) {
		HW_DESC_SET_DIN_CONST(&desc, 0, block_size);
	} else {
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->block_state, block_size);
	}
	AddHWDescSequence(qid, &desc);

	#ifdef DX_CONFIG_HW_RESET_CONST_SUPPORT
	    HW_DESC_INIT(&desc);
	    HW_DESC_RESET_CONST_INPUT(&desc);
	    AddHWDescSequence(qid, &desc);
	#endif

}
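/*
 * Illustrative ordering sketch (not from the original sources; the function
 * name is hypothetical): as done in ProcessCipher() further below, XTS expects
 * the key to be loaded before the cipher state, while other modes load the
 * state first.
 */
static void ExampleLoadAesContext(int qid, struct sep_ctx_cipher *pCtx)
{
	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XTS) {
		LoadCipherKey(qid, pCtx);
		LoadCipherState(qid, pCtx, 0 /* use block_state as IV */);
	} else {
		LoadCipherState(qid, pCtx, 0);
		LoadCipherKey(qid, pCtx);
	}
}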
/*!
 * Generates the initial pool in SRAM.
 * This function should be invoked when resuming the DX driver.
 *
 * \param drvdata
 *
 * \return int Zero for success, negative value otherwise.
 */
int dx_ivgen_init_sram_pool(struct dx_drvdata *drvdata)
{
	struct dx_ivgen_ctx *ivgen_ctx = drvdata->ivgen_handle;
	HwDesc_s iv_seq[DX_IVPOOL_SEQ_LEN];
	struct dx_crypto_req dx_req = {0};
	unsigned int iv_seq_len = 0;

	/* Generate initial enc. key/iv */
	get_random_bytes(ivgen_ctx->pool_meta, DX_IVPOOL_META_SIZE);

	/* The first 32B reserved for the enc. Key/IV */
	ivgen_ctx->ctr_key = ivgen_ctx->pool;
	ivgen_ctx->ctr_iv = ivgen_ctx->pool + AES_KEYSIZE_128;

	/* Copy initial enc. key and IV to SRAM at a single descriptor */
	HW_DESC_INIT(&iv_seq[iv_seq_len]);
	HW_DESC_SET_DIN_TYPE(&iv_seq[iv_seq_len], DMA_DLLI,
		ivgen_ctx->pool_meta_dma, DX_IVPOOL_META_SIZE,
		AXI_ID, NS_BIT);
	HW_DESC_SET_DOUT_SRAM(&iv_seq[iv_seq_len], ivgen_ctx->pool,
		DX_IVPOOL_META_SIZE);
	HW_DESC_SET_FLOW_MODE(&iv_seq[iv_seq_len], BYPASS);
	iv_seq_len++;

	/* Generate initial pool */
	dx_ivgen_generate_pool(ivgen_ctx, iv_seq, &iv_seq_len);

	/* Fire-and-forget */
	return send_request(drvdata, &dx_req, iv_seq, iv_seq_len, 0);
}
Example #3
/*!
 * This function initiates reading of an MLLI table in given host memory to
 * the MLLI buffer in SRAM. It pushes a DLLI-to-SRAM BYPASS descriptor.
 *
 * \param qid [in] - The queue Id.
 * \param pMlliData [in] - Host DMA address of a structure which represents the
 *			MLLI table as follows:
 *		     1. A pointer to the first input MLLI table in system RAM
 *		     	and its size.
 *		     2. The total number of MLLI tables.
 *		     3. The table direction (can be either MLLI_INPUT_TABLE or
 *		     	MLLI_OUTPUT_TABLE).
 * \param size The size in bytes of the pointed MLLI table.
 * \param axiNs The AXI NS bit.
 * \param direction Denotes whether this is MLLI for input or for output.
*/
void FetchMlliTable(int qid, uint32_t pMlliData, uint32_t size, uint8_t axiNs, MLLIDirection_t direction)
{
	uint32_t mlliAdr;
	HwDesc_s desc;

	/* Check if already allocated by external module */
	if ( DX_GetIsMlliExternalAlloc(qid) == 1 ) {
		DX_PAL_Abort("MLLI workspace is already allocated by external module");
	}
	
	if (size > (MLLI_BUF_SIZE - SEP_LLI_ENTRY_BYTE_SIZE)) {
		DX_PAL_LOG_ERR("Given MLLI size=%u B is too large!\n", (unsigned int)size);
		DX_PAL_Abort("Given MLLI size is too large!");
	}

	mlliAdr = (uint32_t)(DX_GetMLLIWorkspace() + qid * MLLI_IN_OUT_BUF_SIZE);
	mlliAdr += ( direction == MLLI_INPUT_TABLE ? 0 : MLLI_BUF_SIZE );

	/* prepare the first MLLI table from the host */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_DIN_TYPE(&desc, DMA_DLLI, pMlliData, size, QID_TO_AXI_ID(qid), axiNs);
	HW_DESC_SET_DOUT_SRAM(&desc, mlliAdr, size);
	HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
	AddHWDescSequence(qid, &desc);
}
/*!
 * Generates DX_IVPOOL_SIZE of random bytes by
 * encrypting 0's using AES128-CTR.
 *
 * \param ivgen iv-pool context
 * \param iv_seq IN/OUT array to the descriptors sequence
 * \param iv_seq_len IN/OUT pointer to the sequence length
 */
static void dx_ivgen_generate_pool(
	struct dx_ivgen_ctx *ivgen_ctx,
	HwDesc_s iv_seq[],
	unsigned int *iv_seq_len)
{
	unsigned int idx = *iv_seq_len;

	/* Setup key */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_SRAM(&iv_seq[idx], ivgen_ctx->ctr_key, AES_KEYSIZE_128);
	HW_DESC_SET_SETUP_MODE(&iv_seq[idx], SETUP_LOAD_KEY0);
	HW_DESC_SET_CIPHER_CONFIG0(&iv_seq[idx], DESC_DIRECTION_ENCRYPT_ENCRYPT);
	HW_DESC_SET_FLOW_MODE(&iv_seq[idx], S_DIN_to_AES);
	HW_DESC_SET_KEY_SIZE_AES(&iv_seq[idx], SEP_AES_128_BIT_KEY_SIZE);
	HW_DESC_SET_CIPHER_MODE(&iv_seq[idx], SEP_CIPHER_CTR);
	idx++;

	/* Setup cipher state */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_SRAM(&iv_seq[idx], ivgen_ctx->ctr_iv, SEP_AES_IV_SIZE);
	HW_DESC_SET_CIPHER_CONFIG0(&iv_seq[idx], DESC_DIRECTION_ENCRYPT_ENCRYPT);
	HW_DESC_SET_FLOW_MODE(&iv_seq[idx], S_DIN_to_AES);
	HW_DESC_SET_SETUP_MODE(&iv_seq[idx], SETUP_LOAD_STATE1);
	HW_DESC_SET_KEY_SIZE_AES(&iv_seq[idx], SEP_AES_128_BIT_KEY_SIZE);
	HW_DESC_SET_CIPHER_MODE(&iv_seq[idx], SEP_CIPHER_CTR);
	idx++;

	/* Perform dummy encrypt to skip first block */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_CONST(&iv_seq[idx], 0, SEP_AES_IV_SIZE);
	HW_DESC_SET_DOUT_SRAM(&iv_seq[idx], ivgen_ctx->pool, SEP_AES_IV_SIZE);
	HW_DESC_SET_FLOW_MODE(&iv_seq[idx], DIN_AES_DOUT);
	idx++;

	/* Generate IV pool */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_CONST(&iv_seq[idx], 0, DX_IVPOOL_SIZE);
	HW_DESC_SET_DOUT_SRAM(&iv_seq[idx], ivgen_ctx->pool, DX_IVPOOL_SIZE);
	HW_DESC_SET_FLOW_MODE(&iv_seq[idx], DIN_AES_DOUT);
	idx++;

	*iv_seq_len = idx; /* Update sequence length */

	/* queue ordering assures pool readiness */
	ivgen_ctx->next_iv_ofs = DX_IVPOOL_META_SIZE;
}
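/*
 * Assumed SRAM pool layout implied by the code above (a sketch, not taken
 * from the driver headers):
 *
 *   pool + 0                   : AES-128 CTR key   (AES_KEYSIZE_128 bytes)
 *   pool + AES_KEYSIZE_128     : CTR IV            (SEP_AES_IV_SIZE bytes)
 *   pool + DX_IVPOOL_META_SIZE : first IV handed out by dx_ivgen_getiv()
 *   ...
 *   pool + DX_IVPOOL_SIZE      : end of pool; it is regenerated once fewer
 *                                than SEP_AES_IV_SIZE bytes remain.
 */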
Example #5
/*!
 * Reverts the operation of the last MAC block processing.
 * This function is used for AES-XCBC-MAC and AES-CMAC when finalize has no
 * data. It reverts the last block operation in order to allow redoing it as
 * final.
 *
 * \param qid
 * \param pCtx
 *
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
static int RevertLastMacBlock(int qid, struct sep_ctx_cipher *pCtx)
{
	HwDesc_s desc;
	XcbcMacRfcKeys_s *XcbcKeys = (XcbcMacRfcKeys_s*)pCtx->key;

	/* Relevant only for AES-CMAC and AES-XCBC-MAC */
	if ((ReadContextWord(&pCtx->mode) != SEP_CIPHER_XCBC_MAC) && (ReadContextWord(&pCtx->mode) != SEP_CIPHER_CMAC)) {
		DX_PAL_LOG_ERR("Wrong mode for this function (mode %d)\n", ReadContextWord(&pCtx->mode));
		return DX_RET_UNSUPP_ALG_MODE;
	}
	if (ReadContextWord(&pCtx->crypto_key_type) == DX_ROOT_KEY) {
		DX_PAL_LOG_ERR("RKEK not allowed for XCBC-MAC/CMAC\n");
		return DX_RET_UNSUPP_ALG_MODE;
	}
	/* CMAC and XCBC must use 128b keys */
	if ((ReadContextWord(&pCtx->mode) == SEP_CIPHER_XCBC_MAC) && (ReadContextWord(&pCtx->key_size) != SEP_AES_128_BIT_KEY_SIZE)) {
		DX_PAL_LOG_ERR("Bad key for XCBC-MAC %x\n", (unsigned int)ReadContextWord(&pCtx->key_size));
		return DX_RET_INVARG_KEY_SIZE;
	}

	/* Load key for ECB decryption */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, SEP_CIPHER_ECB);
	HW_DESC_SET_CIPHER_CONFIG0(&desc, SEP_CRYPTO_DIRECTION_DECRYPT);
	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XCBC_MAC) { /* XCBC K1 key is used (always 128b) */
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)XcbcKeys->K1, SEP_AES_128_BIT_KEY_SIZE);
		HW_DESC_SET_KEY_SIZE_AES(&desc, SEP_AES_128_BIT_KEY_SIZE);
	} else  {/* CMAC */
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->key,
			(ReadContextWord(&pCtx->key_size) == 24) ? SEP_AES_KEY_SIZE_MAX : ReadContextWord(&pCtx->key_size));
		HW_DESC_SET_KEY_SIZE_AES(&desc, ReadContextWord(&pCtx->key_size));
	}
	HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_KEY0);
	AddHWDescSequence(qid, &desc);

	/* Initiate decryption of block state to previous block_state-XOR-M[n] */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->block_state, SEP_AES_BLOCK_SIZE);
	HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)pCtx->block_state, SEP_AES_BLOCK_SIZE);
	HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
	AddHWDescSequence(qid, &desc);

	return DX_RET_OK;
}
/*!
 * Acquires 16 Bytes IV from the iv-pool
 *
 * \param drvdata Driver private context
 * \param iv_out_dma The phys. IV out address
 * \param iv_out_size May be 8 or 16 bytes long
 * \param iv_seq IN/OUT array to the descriptors sequence
 * \param iv_seq_len IN/OUT pointer to the sequence length
 *
 * \return int Zero for success, negative value otherwise.
 */
int dx_ivgen_getiv(
	struct dx_drvdata *drvdata,
	dma_addr_t iv_out_dma,
	unsigned int iv_out_size,
	HwDesc_s iv_seq[],
	unsigned int *iv_seq_len)
{
	struct dx_ivgen_ctx *ivgen_ctx = drvdata->ivgen_handle;
	unsigned int idx = *iv_seq_len;

	if ((iv_out_size != SEP_AES_IV_SIZE) &&
	    (iv_out_size != CTR_RFC3686_IV_SIZE)) {
		return -EINVAL;
	}

	/* Acquire IV from pool */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_SRAM(&iv_seq[idx],
		ivgen_ctx->pool + ivgen_ctx->next_iv_ofs,
		iv_out_size);
	HW_DESC_SET_DOUT_DLLI(&iv_seq[idx], iv_out_dma,
		iv_out_size, AXI_ID, NS_BIT, 0);
	HW_DESC_SET_FLOW_MODE(&iv_seq[idx], BYPASS);
	idx++;

	/* The bypass operation is followed by the crypto sequence, hence we must
	*  assure completion of the bypass write transaction by a memory barrier */
	HW_DESC_INIT(&iv_seq[idx]);
	HW_DESC_SET_DIN_NO_DMA(&iv_seq[idx], 0, 0xfffff0);
	HW_DESC_SET_DOUT_NO_DMA(&iv_seq[idx], 0, 0, 1);
	idx++;

	*iv_seq_len = idx; /* update seq length */

	/* Update iv index */
	ivgen_ctx->next_iv_ofs += iv_out_size;

	if ((DX_IVPOOL_SIZE - ivgen_ctx->next_iv_ofs) < SEP_AES_IV_SIZE) {
		DX_LOG_DEBUG("Pool exhausted, regenerating iv-pool\n");
		/* pool is drained -regenerate it! */
		dx_ivgen_generate_pool(ivgen_ctx, iv_seq, iv_seq_len);
	}

	return 0;
}
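/*
 * Minimal usage sketch for dx_ivgen_getiv() (illustrative only; the function
 * name below is hypothetical). The caller owns the descriptor array and is
 * expected to submit it ahead of its crypto sequence, e.g. via send_request()
 * as in dx_ivgen_init_sram_pool() above.
 */
static int example_acquire_iv(struct dx_drvdata *drvdata, dma_addr_t iv_dma)
{
	HwDesc_s iv_seq[DX_IVPOOL_SEQ_LEN];
	unsigned int iv_seq_len = 0;
	int rc;

	/* Appends the IV-fetch descriptors (and a pool regeneration sequence
	   if the pool is nearly exhausted) */
	rc = dx_ivgen_getiv(drvdata, iv_dma, SEP_AES_IV_SIZE, iv_seq, &iv_seq_len);
	if (rc != 0)
		return rc;

	/* iv_seq[0..iv_seq_len-1] is now ready to be queued before the
	   dependent crypto descriptors */
	return 0;
}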
Example #7
static void CalcXcbcKeys(int qid, struct sep_ctx_cipher *pCtx)
{
	int i;
	HwDesc_s setup_desc;
	HwDesc_s data_desc;
	/* Overload key+xex_key fields with Xcbc keys */
	XcbcMacRfcKeys_s *XcbcKeys = (XcbcMacRfcKeys_s*)pCtx->key;
	//const uint8_t *keyConst = XcbcKeysConst.K1;
	uint8_t *derivedKey = XcbcKeys->K1;
	uint32_t constKey = 0x01010101;

	/* Prepare key setup descriptor (same for all XCBC-MAC keys) */
	HW_DESC_INIT(&setup_desc);
	HW_DESC_SET_CIPHER_MODE(&setup_desc, SEP_CIPHER_ECB);
	HW_DESC_SET_CIPHER_CONFIG0(&setup_desc, SEP_CRYPTO_DIRECTION_ENCRYPT);
	HW_DESC_SET_STATE_DIN_PARAM(&setup_desc, (uint32_t)XcbcKeys->K, SEP_AES_128_BIT_KEY_SIZE);
	HW_DESC_SET_KEY_SIZE_AES(&setup_desc, SEP_AES_128_BIT_KEY_SIZE);
	HW_DESC_SET_FLOW_MODE(&setup_desc, S_DIN_to_AES);
	HW_DESC_SET_SETUP_MODE(&setup_desc, SETUP_LOAD_KEY0);

	/* load user key */
	AddHWDescSequence(qid, &setup_desc);

	HW_DESC_INIT(&data_desc);
	HW_DESC_SET_FLOW_MODE(&data_desc, DIN_AES_DOUT);

	for (i = 0; i < AES_XCBC_MAC_NUM_KEYS ; i++) {

		/* encrypt each XCBC constant with the user given key to get K1, K2, K3 */
		HW_DESC_SET_DIN_CONST(&data_desc, (constKey * (i+1)), SEP_AES_128_BIT_KEY_SIZE);
		HW_DESC_SET_STATE_DOUT_PARAM(&data_desc, (uint32_t)derivedKey, SEP_AES_128_BIT_KEY_SIZE);
		HW_DESC_LOCK_QUEUE(&data_desc, 1); /* Lock until RESET_CONST */
		AddHWDescSequence(qid, &data_desc);
		/* Proceed to next derived key calculation */
		derivedKey += SEP_AES_128_BIT_KEY_SIZE;
	}
#ifdef DX_CONFIG_HW_RESET_CONST_SUPPORT
	HW_DESC_INIT(&data_desc);
	HW_DESC_RESET_CONST_INPUT(&data_desc);
	AddHWDescSequence(qid, &data_desc);
#endif
}
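/*
 * Software reference of what the descriptor loop above computes (RFC 3566):
 * the DIN constants 0x01010101, 0x02020202 and 0x03030303 are repeated to a
 * full 16-byte block and ECB-encrypted under the user key K, yielding K1, K2
 * and K3. aes128_ecb_encrypt() is a hypothetical helper, not part of this
 * driver; this is only a sketch of the derivation.
 */
static void XcbcDeriveKeysSw(const uint8_t K[16],
	uint8_t K1[16], uint8_t K2[16], uint8_t K3[16])
{
	uint8_t block[16];
	uint8_t *out[3] = { K1, K2, K3 };
	int i, j;

	for (i = 0; i < 3; i++) {
		for (j = 0; j < 16; j++)
			block[j] = (uint8_t)(i + 1); /* 0x01..01, 0x02..02, 0x03..03 */
		aes128_ecb_encrypt(K, block, out[i]); /* Kn = AES-128-ECB(K, const_n) */
	}
}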
/*!
 * Writes the hash digest and hash length back to the Hash context.
 * 
 * \param qid 
 * \param pCtx Hash context
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int StoreHashState(int qid, struct sep_ctx_hash *pCtx)
{
	HwDesc_s desc;
	SepHashPrivateContext_s *PrivateContext = (SepHashPrivateContext_s *)pCtx;
	uint32_t hw_mode;
	uint32_t DigestSize;
	int drvRc = DX_RET_OK;

	drvRc = GetHashHwMode(ReadContextWord(&pCtx->mode), &hw_mode);
	if (drvRc != DX_RET_OK) {
		return drvRc; 
	}

	/* SHA224 uses SHA256 HW mode with different init. val. */
	drvRc = GetHashHwDigestSize(ReadContextWord(&pCtx->mode), &DigestSize);
	if (drvRc != DX_RET_OK) {
		return drvRc; 
	}

	/* store the hash digest result in the context */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);
	HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)pCtx->digest, DigestSize);
	HW_DESC_SET_FLOW_MODE(&desc, S_HASH_to_DOUT);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE0);
	AddHWDescSequence(qid, &desc);

	/* store current hash length in the private context */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);
	HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)PrivateContext->CurrentDigestedLength, sizeof(PrivateContext->CurrentDigestedLength));
	HW_DESC_SET_FLOW_MODE(&desc, S_HASH_to_DOUT);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE1);
	AddHWDescSequence(qid, &desc);

	return drvRc;
}
/*!
 * Copy data buffer indirectly using CC HW descriptors.
 * 
 * \param inType DMA type of the source buffer.
 * \param inAddr Input address of the source buffer, must be word aligned.
 * \param inSize Size in octets of the source buffer, must be multiple of word.
 * \param inAxiNs The AXI bus secure mode of the source buffer.
 * \param outType DMA type of the destination buffer.
 * \param outAddr Output address of the destination buffer, must be word aligned.
 * \param outSize Size in octets of the destination buffer, must be multiple of word.
 * \param outAxiNs The AXI bus secure mode of the destination buffer.
 */
static void DescBypass(
	DmaMode_t inType, 
	uint32_t  inAddr, 
	uint32_t inSize,
	uint32_t inAxiNs,
	DmaMode_t outType, 
	uint32_t outAddr,
	uint32_t outSize,
	uint32_t outAxiNs )
{
	HwDesc_s desc;
	
	/* Execute BYPASS operation */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_DIN_TYPE(&desc, inType, inAddr, inSize, QID_TO_AXI_ID(NO_OS_QUEUE_ID), inAxiNs);
	HW_DESC_SET_DOUT_TYPE(&desc, outType, outAddr, outSize, QID_TO_AXI_ID(NO_OS_QUEUE_ID), outAxiNs);
	HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
	AddHWDescSequence(NO_OS_QUEUE_ID, &desc);
}
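/*
 * Usage sketch (illustrative; the function name and addresses are placeholders
 * supplied by the caller): copy a word-aligned host buffer into SRAM with a
 * single BYPASS descriptor, respecting the alignment/size rules stated above.
 */
static void ExampleCopyHostToSram(uint32_t hostAddr, uint32_t sramAddr, uint32_t sizeBytes)
{
	DescBypass(DMA_DLLI, hostAddr, sizeBytes, NS_BIT,
		   DMA_SRAM, sramAddr, sizeBytes, NS_BIT);
}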
Example #10
void StoreCipherState(int qid, struct sep_ctx_cipher *pCtx)
{
	SepCipherPrivateContext_s *pAesPrivateCtx = (SepCipherPrivateContext_s *)pCtx->reserved;
	HwDesc_s desc;
	uint32_t block_size;

	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_ECB) {
		return;
	}

	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, ReadContextWord(&pCtx->mode));
	switch (ReadContextWord(&pCtx->mode)) {
	case SEP_CIPHER_XTS:
	case SEP_CIPHER_CTR:
	case SEP_CIPHER_OFB:
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE1);
		break;
	default:
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE0);
	}

	if (ReadContextWord(&pCtx->alg) == SEP_CRYPTO_ALG_AES) {
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp)?ReadContextWord(&pAesPrivateCtx->tunnetDir):ReadContextWord(&pCtx->direction));
		block_size = SEP_AES_BLOCK_SIZE;
		HW_DESC_SET_CIPHER_CONFIG1(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp));
	
		if (ReadContextWord(&pAesPrivateCtx->engineCore) == SEP_AES_ENGINE2) {
			HW_DESC_SET_FLOW_MODE(&desc, S_AES2_to_DOUT);
		} else {
			HW_DESC_SET_FLOW_MODE(&desc, S_AES_to_DOUT);
		}
	} else {
		block_size = SEP_DES_IV_SIZE;
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pCtx->direction));
		HW_DESC_SET_FLOW_MODE(&desc, S_DES_to_DOUT);
	}
	HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)pCtx->block_state, block_size);

	AddHWDescSequence(qid, &desc);
}
Example #11
/*!
 * This function prepares the next MLLI table. It looks for the source
 * address and size of the next LLI table in the first entry of the
 * populated MLLI table in SRAM and overwrites it in the SRAM using
 * "MLLI" transfer.
 *
 * \param qid [in] - The queue Id.
 * \param axiNs [in] - axi secure transactions
 * \param dir [in] - Refers to INPUT or OUTPUT MLLI table in SeP RAM.
 */
void PrepareNextMLLITable(int qid, uint8_t axiNs, MLLIDirection_t direction)
{
	uint32_t mlliAdr;
	HwDesc_s desc;

	/* Check if already allocated by external module */
	if ( DX_GetIsMlliExternalAlloc(qid) == 1 ) {
		DX_PAL_Abort("MLLI workspace is already allocated by external module");
	}
	
	mlliAdr = (uint32_t)(DX_GetMLLIWorkspace() + qid * MLLI_IN_OUT_BUF_SIZE);
	mlliAdr += ( direction == MLLI_INPUT_TABLE ? 0 : MLLI_BUF_SIZE );
	
	/* prepare the next MLLI table. The next table host address already
	   resides in SRAM and needs to be transferred to the same address
	   in SRAM */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_DIN_TYPE(&desc, DMA_MLLI, mlliAdr, 0, QID_TO_AXI_ID(qid), axiNs);
	HW_DESC_SET_DOUT_SRAM(&desc, mlliAdr, MLLI_BUF_SIZE - SEP_LLI_ENTRY_BYTE_SIZE);
	HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
	AddHWDescSequence(qid, &desc);
}
Example #12
/*!
 * This function is used as the finish operation of the AES CTS mode.
 * The function may either be called after "InitCipher" or "ProcessCipher".
 * 
 * \param pCtx A pointer to the AES context buffer in SRAM.
 * \param pDmaInputBuffer A structure which represents the DMA input buffer.
 * \param pDmaOutputBuffer A structure which represents the DMA output buffer.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int ProcessCTSFinalizeCipher(struct sep_ctx_cipher *pCtx, DmaBuffer_s *pDmaInputBuffer, DmaBuffer_s *pDmaOutputBuffer)
{
	uint32_t numOfBlocks = 0;
	uint32_t DataInSize = 0;
	uint32_t lastBlockSize = 0;
	uint32_t dataSizeForDLLI = 0;
	DmaMode_t dmaMode = NO_DMA;
	uint8_t *pInputData = NULL, *pOutputData = NULL;
	uint32_t lastBlockOffset = 0,nextToLastBlkOffset = 0;
	HwDesc_s desc;
	uint8_t inAxiNs = pDmaInputBuffer->axiNs;
	uint8_t outAxiNs = pDmaOutputBuffer->axiNs;
	int qid = CURR_QUEUE_ID();
	dmaMode = DMA_BUF_TYPE_TO_MODE(pDmaInputBuffer->dmaBufType);
	int drvRc = DX_RET_OK;
	uint8_t *tempBuff= NULL;


	/* Use the context buffer as a temp buffer for internal operations */
	tempBuff = pCtx->xex_key;
 
	/* Set the data pointers and size according to the DMA mode */
	switch (dmaMode) {
	case DMA_MLLI:
	case DMA_SRAM:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		return drvRc;
	case DMA_DLLI:
		DataInSize = pDmaInputBuffer->size;		
		pInputData = (uint8_t*)pDmaInputBuffer->pData;
		pOutputData =(uint8_t*)pDmaOutputBuffer->pData;
		break;
	default:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		return drvRc;
	}

	/* Calculate the last block size */
	lastBlockSize = pDmaInputBuffer->size & AES_BLOCK_MASK;

	if (lastBlockSize == 0) {
		lastBlockSize = SEP_AES_BLOCK_SIZE;
	}
	/* Calculate dataSizeForDLLI for the ProcessCipher operation */
	if (pDmaInputBuffer->size > SEP_AES_BLOCK_SIZE) {
		dataSizeForDLLI = pDmaInputBuffer->size - (lastBlockSize + SEP_AES_BLOCK_SIZE);
	} else if (pDmaInputBuffer->size == SEP_AES_BLOCK_SIZE) {
		dataSizeForDLLI = SEP_AES_BLOCK_SIZE;
	}
	/* Process data with ProcessCipher */
	if (dataSizeForDLLI > 0) {
		/* Update the data size for the ProcessCipher operation */
		pDmaInputBuffer->size = dataSizeForDLLI;
		pDmaOutputBuffer->size = dataSizeForDLLI;

		/* Call ProcessCipher */
		drvRc = ProcessCipher(pCtx, pDmaInputBuffer, pDmaOutputBuffer);
		if (drvRc != DX_RET_OK) {
			goto EndWithErr;
		}
		/* Restore the original data size */
		pDmaInputBuffer->size = DataInSize;
		pDmaOutputBuffer->size = DataInSize;

		if (DataInSize == SEP_AES_BLOCK_SIZE) {
			return drvRc;
		}
	}
 
	/*Calculate offsets of two last blocks */
	numOfBlocks = (DataInSize + SEP_AES_BLOCK_SIZE -1)/SEP_AES_BLOCK_SIZE;
	lastBlockOffset = (numOfBlocks - 1)*SEP_AES_BLOCK_SIZE;
	nextToLastBlkOffset = lastBlockOffset - SEP_AES_BLOCK_SIZE;
	lastBlockSize = DataInSize - lastBlockOffset;
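	/* Worked example (illustrative): for DataInSize = 40 bytes,
	   numOfBlocks = 3, lastBlockOffset = 32, nextToLastBlkOffset = 16
	   and lastBlockSize = 8 (the partial last block). */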
    
	/* Change the mode to SEP_CIPHER_CBC */
	WriteContextWord(&pCtx->mode, SEP_CIPHER_CBC);
	LoadCipherState(qid, pCtx, 0);
	LoadCipherKey(qid, pCtx);

	/*Initialize  context's buffer for internal use*/
	HW_DESC_INIT(&desc);
	HW_DESC_SET_DIN_CONST(&desc, 0, SEP_AES_BLOCK_SIZE*2);
	HW_DESC_SET_DOUT_SRAM(&desc, (uint32_t)pCtx->xex_key, SEP_AES_BLOCK_SIZE*2);
	HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
	AddHWDescSequence(qid, &desc);

	#ifdef DX_CONFIG_HW_RESET_CONST_SUPPORT
	    HW_DESC_INIT(&desc);
	    HW_DESC_RESET_CONST_INPUT(&desc);
	    AddHWDescSequence(qid, &desc);
	#endif

	/*Encrypt mode */
	if(ReadContextWord(&pCtx->direction) == SEP_CRYPTO_DIRECTION_ENCRYPT){
    
		/* process regular AES CBC flow on next to last block */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData+nextToLastBlkOffset,SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode, (uint32_t)pOutputData+nextToLastBlkOffset,SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		AddHWDescSequence(qid, &desc);

		/* 1. Copy next to last block to temp SRAM buff to save it*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc,dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_SRAM(&desc,(uint32_t)tempBuff, SEP_AES_BLOCK_SIZE); 
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);

 		/*2. Copy zero buff to place of second to last block to create zero padding */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff + SEP_AES_BLOCK_SIZE, SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_TYPE(&desc,dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);	
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);
        
		/*3. Copy last block to place of second to last */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc,dmaMode, (uint32_t)pInputData + lastBlockOffset, lastBlockSize, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc,dmaMode,(uint32_t)pOutputData + nextToLastBlkOffset,lastBlockSize, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);


  		/*4.Encrypt padded last block to temp2 buff in sram*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode,(uint32_t)pOutputData + nextToLastBlkOffset,SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_SRAM(&desc,(uint32_t)tempBuff + SEP_AES_BLOCK_SIZE, SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		AddHWDescSequence(qid, &desc);
		/* Perform the CTS swapping operation */
  

		/*5. Copy the block saved in temp1 to the place of the second-to-last output block */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff , SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_TYPE(&desc,dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);	
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);

		/*6. Copy, using BYPASS, the next-to-last block to the last block and truncate it to the last block size*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc,dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, lastBlockSize, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode,(uint32_t)pOutputData + lastBlockOffset, lastBlockSize, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);

		/*7. Copy the block saved in temp2 to the place of the second-to-last block */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff + SEP_AES_BLOCK_SIZE , SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_TYPE(&desc,dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);	
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);
	
	} else { /*decrypt operation*/
		
		/* Descriptor flow for Decrypt CBC CTS operation (by AlonZ):
		Decrypt the data up to block Cn-2 using regular AES-CBC; save the chaining value (output IV = Cn-2)
		Process the last one-plus-reminder blocks:
		1. BYPASS DMA blocks (Cn-1, Cn) into SRAM (Cn is a partial block of length k)
		2. Decrypt Cn-1 using AES-ECB into Dn-1
		3. BYPASS DMA 16-k last bytes of Dn-1 to the end of Cn producing Cn'
		4. Decrypt Cn' using AES-CBC (IV = Cn-2) to produce Pn-1 (in SRAM)
		5. Decrypt Cn-1 (again) using AES-CBC (IV = Cn') to produce padded Pn (in SRAM)
		6. BYPASS DMA Pn-1, Pn to output */
		
		/* 1 Save Cn-1 in temp buff (in SRAM) */		
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_SRAM(&desc,(uint32_t)tempBuff,SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);	
		 
		/* Decrypt the next-to-last block with an ECB operation:
		   loading the cipher state with a zero IV makes the single-block
		   CBC decryption equivalent to ECB */
		/* load the key for the ECB operation */
		LoadCipherState(qid, pCtx,1);
		LoadCipherKey(qid, pCtx);
		
		/*2 perform ECB decrypt of next to last block*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		AddHWDescSequence(qid, &desc);


		/*3. Copy Cn, using BYPASS, over the first k bytes of the ECB-decrypted next-to-last block
		     (keeping its last 16-k bytes) to produce Cn'*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData + lastBlockOffset, lastBlockSize, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, lastBlockSize, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);
		
		
		/*4. Decrypt Cn' using AES-CBC and save it to the temp buff (IV = Cn-2,
		     which was saved in the ctx by the TST_StoreCipherState operation) to produce Dn-1 */
		/*restore aes mode to cbc for next operations*/
		LoadCipherState(qid, pCtx,0);
		LoadCipherKey(qid, pCtx);

		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_SRAM(&desc,(uint32_t)tempBuff + SEP_AES_BLOCK_SIZE,SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		AddHWDescSequence(qid, &desc);


		/*5. Decrypt Cn-1(was saved in temp buff) again using AES-CBC (IV = Cn') to produce padded Dn (in SRAM)*/		
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff, SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_SRAM(&desc,(uint32_t)tempBuff,SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		AddHWDescSequence(qid, &desc);

		/*6. Copy the decrypted data to output*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff, SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode,(uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);

		/* Copy the partial block to the last block */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc,dmaMode,(uint32_t)pOutputData + nextToLastBlkOffset, lastBlockSize, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode,(uint32_t)pOutputData + lastBlockOffset, lastBlockSize, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);

		/*Copy the last blocks Dn-1 from temp SRAM buffer to output data*/
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_SRAM(&desc,(uint32_t)tempBuff +SEP_AES_BLOCK_SIZE, SEP_AES_BLOCK_SIZE);
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode,(uint32_t)pOutputData + nextToLastBlkOffset, SEP_AES_BLOCK_SIZE, QID_TO_AXI_ID(qid), outAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, BYPASS);
		AddHWDescSequence(qid, &desc);
	} 
EndWithErr:
	return drvRc;
}
Example #13
/*!
 * This function is used as the finish operation of AES for the XCBC, CMAC, CBC
 * and other modes, besides the XTS mode.
 * The function may either be called after "InitCipher" or "ProcessCipher".
 * 
 * \param pCtx A pointer to the AES context buffer in SRAM.
 * \param pDmaInputBuffer A structure which represents the DMA input buffer.
 * \param pDmaOutputBuffer A structure which represents the DMA output buffer.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int FinalizeCipher(struct sep_ctx_cipher *pCtx, DmaBuffer_s *pDmaInputBuffer, DmaBuffer_s *pDmaOutputBuffer)
{
	uint32_t isRemainingData = 0;
	uint32_t DataInSize = 0;
	uint8_t *pInputData = NULL;
	HwDesc_s desc;
	DmaMode_t dmaMode = NO_DMA;
	uint8_t inAxiNs = pDmaInputBuffer->axiNs;
	int qid = CURR_QUEUE_ID(); /* qid is stored in pxTaskTag field */
	int drvRc = DX_RET_OK;
	SepCipherPrivateContext_s *pAesPrivateCtx = (SepCipherPrivateContext_s *)pCtx->reserved;

	HW_DESC_INIT(&desc);

	dmaMode = DMA_BUF_TYPE_TO_MODE(pDmaInputBuffer->dmaBufType);

	/* check if we have remaining data to process */
	switch (dmaMode) {
	case DMA_MLLI:
		isRemainingData = pDmaInputBuffer->nTables;
		DataInSize = 0;
		break;
	case DMA_DLLI:
	case DMA_SRAM:
		isRemainingData = (pDmaInputBuffer->size > 0) ? 1 : 0;
		DataInSize = pDmaInputBuffer->size;
		break;
	case DMA_MODE_NULL:
		break;
	default:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		goto EndWithErr;
	}	
	
	switch(ReadContextWord(&pCtx->mode)) {
	case SEP_CIPHER_CMAC:
	case SEP_CIPHER_XCBC_MAC:
	{
		if (isRemainingData > 1) {
			/* this case only applies to DMA_MLLI mode! */
			pDmaInputBuffer->nTables--;
			ProcessCipher(pCtx, pDmaInputBuffer, pDmaOutputBuffer);
			PrepareNextMLLITable(qid, inAxiNs, MLLI_INPUT_TABLE);
			pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);
			DataInSize = 0;
		} else if (isRemainingData == 1) {
			if (dmaMode == DMA_MLLI) {
				PrepareFirstMLLITable(qid, pDmaInputBuffer, MLLI_INPUT_TABLE);
				pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);
				DataInSize = 0;
			} else {
				pInputData = (uint8_t *)pDmaInputBuffer->pData;
				DataInSize = pDmaInputBuffer->size;
			}
		}


		/* Prepare processing descriptor to be pushed after loading state+key */
		HW_DESC_INIT(&desc);
		if (isRemainingData == 0) {
			if (ReadContextWord(&pAesPrivateCtx->isDataBlockProcessed) == 0) {
				/* MAC for 0 bytes */
				HW_DESC_SET_CIPHER_MODE(&desc, ReadContextWord(&pCtx->mode));
				HW_DESC_SET_KEY_SIZE_AES(&desc, ReadContextWord(&pCtx->key_size));
				HW_DESC_SET_CMAC_SIZE0_MODE(&desc);
				HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES);
			} else {
				/* final with 0 data but MAC total data size > 0 */
				drvRc = RevertLastMacBlock(qid, pCtx); /* Get C[n-1]-xor-M[n] */
				if (drvRc != DX_RET_OK) {
					goto EndWithErr;
				}
				/* Finish with data==0 is identical to "final"
				   op. on the last (prev.) block (XOR with 0) */
				HW_DESC_SET_DIN_CONST(&desc, 0x00, SEP_AES_BLOCK_SIZE);
				HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
			}
		} else {
			HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData, DataInSize, QID_TO_AXI_ID(qid), inAxiNs);
			HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
		}

		/* load AES key and iv length and digest */
		LoadCipherState(qid, pCtx,0);
		LoadCipherKey(qid, pCtx);

		/* Process last block */
		AddHWDescSequence(qid, &desc);
#ifdef DX_CONFIG_HW_RESET_CONST_SUPPORT
		HW_DESC_INIT(&desc);
		HW_DESC_RESET_CONST_INPUT(&desc);
		AddHWDescSequence(qid, &desc);
#endif
		/* get machine state */
		StoreCipherState(qid, pCtx);
		break;
	}
	case SEP_CIPHER_CBC_CTS:
	{ 
		/* If the data size equals SEP_AES_BLOCK_SIZE, check that no blocks were processed before */
		if ((pDmaInputBuffer->size == SEP_AES_BLOCK_SIZE) &&
		    (ReadContextWord(&pAesPrivateCtx->isDataBlockProcessed) == 1)) {
			DX_PAL_LOG_ERR("Invalid dataIn size\n");
			drvRc = DX_RET_INVARG;
			goto EndWithErr;
		}
		/*Call ProcessCTSFinalizeCipher to process AES CTS finalize operation */
		drvRc = ProcessCTSFinalizeCipher(pCtx, pDmaInputBuffer, pDmaOutputBuffer);
		if (drvRc != DX_RET_OK) {
			goto EndWithErr;
		}
		break;
	}
	default:
		if (isRemainingData) {
			/* process all tables and get state from the AES machine */
			drvRc = ProcessCipher(pCtx, pDmaInputBuffer, pDmaOutputBuffer);
			if (drvRc != DX_RET_OK) {
				goto EndWithErr;
			}
		} else if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_CBC_MAC) {
			/* in case zero data has been processed, the output would be the encrypted IV */
			if (ReadContextWord(&pAesPrivateCtx->isDataBlockProcessed) == 0) {
				/* load AES key and iv length and digest */
				LoadCipherState(qid, pCtx,0);
				LoadCipherKey(qid, pCtx);

				HW_DESC_INIT(&desc);
				HW_DESC_SET_DIN_CONST(&desc, 0x00, SEP_AES_BLOCK_SIZE);
				HW_DESC_SET_FLOW_MODE(&desc, DIN_AES_DOUT);
				AddHWDescSequence(qid, &desc);
#ifdef DX_CONFIG_HW_RESET_CONST_SUPPORT
				HW_DESC_INIT(&desc);
				HW_DESC_RESET_CONST_INPUT(&desc);
				AddHWDescSequence(qid, &desc);
#endif
				/* get mac result */
				StoreCipherState(qid, pCtx);
			}
		}
	}

EndWithErr:
	return drvRc;
}
/*!
 * Loads the hash digest and hash length to the Hash HW machine.
 * 
 * \param qid 
 * \param pCtx Hash context
 * \param paddingSelection enable/disable Hash block padding by the Hash machine,
 *	  should be either HASH_PADDING_DISABLED or HASH_PADDING_ENABLED.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int LoadHashState(int qid, struct sep_ctx_hash *pCtx, enum HashConfig1Padding paddingSelection)
{
	HwDesc_s desc;
	SepHashPrivateContext_s *PrivateContext = (SepHashPrivateContext_s *)pCtx;
	struct sep_ctx_hmac *pCtxHmac = (struct sep_ctx_hmac *)pCtx;
	uint32_t hw_mode;
	uint32_t DigestSize;
	uint32_t tmpSrc = (uint32_t)pCtx->digest;
	int drvRc = DX_RET_OK;

	drvRc = GetHashHwMode(ReadContextWord(&pCtx->mode), &hw_mode);
	if (drvRc != DX_RET_OK) {
		return drvRc; 
	}

	/* SHA224 uses SHA256 HW mode with different init. val. */
	drvRc = GetHashHwDigestSize(ReadContextWord(&pCtx->mode), &DigestSize);
	if (drvRc != DX_RET_OK) {
		return drvRc; 
	}
    
	/* load intermediate hash digest */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);

	if (ReadContextWord(&PrivateContext->hmacFinalization) == 1) {
		tmpSrc = (uint32_t)pCtxHmac->k0;
	} 

	HW_DESC_SET_STATE_DIN_PARAM(&desc, tmpSrc, DigestSize);

	HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_HASH);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_STATE0);
	AddHWDescSequence(qid, &desc);

	/* load the hash current length, should be greater than zero */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);
	HW_DESC_SET_CIPHER_CONFIG1(&desc, paddingSelection);
	HW_DESC_SET_CIPHER_DO(&desc, DO_NOT_PAD);

	tmpSrc = (uint32_t)PrivateContext->CurrentDigestedLength;
	/* The global array is used to set the HASH current length for HMAC finalization */
	if (ReadContextWord(&PrivateContext->hmacFinalization) == 1) {
#ifdef DX_CC_SEP
		tmpSrc = (uint32_t)gOpadCurrentLength;
#else
		HwDesc_s tdesc;
		uint32_t blockSize;

		/* In non-SEP products the OPAD digest length constant is not in the SRAM     */
		/* and it might be non-contiguous. In order to overcome this problem the FW   */
		/* copies the values into the CurrentDigestLength field. The copy operation   */
		/* must be done with constant descriptors to keep the asynchronous mode working */
		HW_DESC_INIT(&tdesc);
		/* clear the current digested length */
		HW_DESC_SET_DIN_CONST(&tdesc, 0x00, sizeof(PrivateContext->CurrentDigestedLength));
		HW_DESC_SET_STATE_DOUT_PARAM(&tdesc, tmpSrc, sizeof(PrivateContext->CurrentDigestedLength));
		AddHWDescSequence(qid, &tdesc);

		/* set the current length */
		HW_DESC_INIT(&tdesc);
		/* set the first word of the current length to the hash block size */
		GetHashBlockSize(ReadContextWord(&pCtx->mode), &blockSize);
		HW_DESC_SET_DIN_CONST(&tdesc, blockSize, sizeof(uint32_t));
		HW_DESC_SET_STATE_DOUT_PARAM(&tdesc, tmpSrc, sizeof(uint32_t));
		AddHWDescSequence(qid, &tdesc);
#endif
	}

	HW_DESC_SET_STATE_DIN_PARAM(&desc, tmpSrc, sizeof(PrivateContext->CurrentDigestedLength));
	HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_HASH);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_KEY0);
	AddHWDescSequence(qid, &desc);

	return drvRc;
}
Example #15
/*!
 * This function is used to process block(s) of data using the AES machine.
 * 
 * \param pCtx A pointer to the AES context buffer in SRAM.
 * \param pDmaInputBuffer A structure which represents the DMA input buffer.
 * \param pDmaOutputBuffer A structure which represents the DMA output buffer.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int ProcessCipher(struct sep_ctx_cipher *pCtx, DmaBuffer_s *pDmaInputBuffer, DmaBuffer_s *pDmaOutputBuffer)
{
	uint8_t *pInputData = NULL, *pOutputData = NULL;
	uint32_t isNotLastDescriptor = 0;
	uint32_t DataInSize = 0, DataOutSize = 0;
	uint32_t flowMode;
	HwDesc_s desc;
	DmaMode_t dmaMode = NO_DMA;
	uint8_t inAxiNs = pDmaInputBuffer->axiNs;
	uint8_t outAxiNs = pDmaOutputBuffer->axiNs;
	SepCipherPrivateContext_s *pAesPrivateCtx = (SepCipherPrivateContext_s *)pCtx->reserved;
	int nMlliTables = pDmaInputBuffer->nTables;
	int qid = CURR_QUEUE_ID(); /* qid is stored in pxTaskTag field */
	int drvRc = DX_RET_OK;
	const int isInplaceOp = (pDmaInputBuffer->pData == pDmaOutputBuffer->pData ||
				ReadContextWord(&pCtx->mode) == SEP_CIPHER_CBC_MAC ||
				ReadContextWord(&pCtx->mode) == SEP_CIPHER_XCBC_MAC ||
				ReadContextWord(&pCtx->mode) == SEP_CIPHER_CMAC);



	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XTS) {
		/* in XTS the key must be loaded first */
		LoadCipherKey(qid, pCtx);
		LoadCipherState(qid, pCtx, 0);
	} else if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_CBC_CTS) {
		WriteContextWord(&pCtx->mode, SEP_CIPHER_CBC);
		LoadCipherState(qid, pCtx, 0);
		LoadCipherKey(qid, pCtx);
		WriteContextWord(&pCtx->mode, SEP_CIPHER_CBC_CTS);
	} else {
		LoadCipherState(qid, pCtx,0);
		LoadCipherKey(qid, pCtx);
	}

	/* set the input/output pointers according to the DMA mode */
	if ((!isInplaceOp) && pDmaInputBuffer->dmaBufType != pDmaOutputBuffer->dmaBufType) {
		DX_PAL_LOG_ERR("Inconsistent DMA mode for in/out buffers");
		drvRc = DX_RET_INVARG;
		goto EndWithErr;
	}

	dmaMode = DMA_BUF_TYPE_TO_MODE(pDmaInputBuffer->dmaBufType);

	switch (dmaMode) {
	case DMA_MLLI:	
		pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);
		PrepareFirstMLLITable(qid, pDmaInputBuffer, MLLI_INPUT_TABLE);

		/* get OUT MLLI tables pointer in SRAM (if not inplace operation) */
		if (isInplaceOp == 0) {
			pOutputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_OUTPUT_TABLE);
			PrepareFirstMLLITable(qid, pDmaOutputBuffer, MLLI_OUTPUT_TABLE);
		} else {
			pOutputData = pInputData;
		}

		/* data size is meaningless in DMA-MLLI mode */
		DataInSize = 0;
		DataOutSize = 0;

		break;
	case DMA_DLLI:
	case DMA_SRAM:
		pInputData = (uint8_t *)pDmaInputBuffer->pData;
		if (isInplaceOp == 0) {
			pOutputData  = (uint8_t *)pDmaOutputBuffer->pData;
		} else {
			pOutputData = pInputData;
		}

		/* data processing is done */
		nMlliTables = 0;

		/* set the data size */
		DataInSize = pDmaInputBuffer->size;
		DataOutSize = pDmaOutputBuffer->size; 

		break;
	case DMA_MODE_NULL:
		pInputData = 0;
		pOutputData = 0;

		/* data processing is done */
		nMlliTables = 0;

		/* data size is meaningless in DMA-MLLI mode */
		DataInSize = 0;
		DataOutSize = 0;

		break;
	default:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		goto EndWithErr;
	}

	if ((ReadContextWord(&pCtx->mode) == SEP_CIPHER_CMAC) || (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XCBC_MAC)) {
		isNotLastDescriptor = 1; 
	}

	/* process the AES flow */
	HW_DESC_INIT(&desc);
	
	HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData, DataInSize, QID_TO_AXI_ID(qid), inAxiNs);
	if (isNotLastDescriptor) {
		HW_DESC_SET_DIN_NOT_LAST_INDICATION(&desc);
	}

	switch (ReadContextWord(&pCtx->mode)) {
	case SEP_CIPHER_CBC_MAC:
	case SEP_CIPHER_CMAC:
	case SEP_CIPHER_XCBC_MAC:
		break;
	default:
		HW_DESC_SET_DOUT_TYPE(&desc, dmaMode, (uint32_t)pOutputData, DataOutSize, QID_TO_AXI_ID(qid), outAxiNs);
	}

	flowMode = (ReadContextWord(&pCtx->alg) == SEP_CRYPTO_ALG_AES) ? DIN_AES_DOUT : DIN_DES_DOUT;

	HW_DESC_SET_FLOW_MODE(&desc, flowMode);

#ifdef SEP_PERFORMANCE_TEST
	/* For testing exact HW time */
	HW_QUEUE_WAIT_UNTIL_EMPTY(qid);
	TIMING_MARK(1);
	TIMING_MARK(2);
#endif
	AddHWDescSequence(qid, &desc);
	
#ifdef SEP_PERFORMANCE_TEST
	TIMING_MARK(2);
	HW_QUEUE_WAIT_UNTIL_EMPTY(qid);
	TIMING_MARK(1);
#endif

	/* process each MLLI table (MLLI processing loop only) */
	while (--nMlliTables > 0) {
		/* prepare next input MLLI table in SRAM */
		PrepareNextMLLITable(qid, inAxiNs, MLLI_INPUT_TABLE);
		
		/* prepare next output MLLI table in SRAM */
		if (isInplaceOp == 0) {
			PrepareNextMLLITable(qid, outAxiNs, MLLI_OUTPUT_TABLE);
		}
		
		/* process the AES flow */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, DMA_MLLI, (uint32_t)pInputData, 0, QID_TO_AXI_ID(qid), inAxiNs);
		if (isNotLastDescriptor) {
			HW_DESC_SET_DIN_NOT_LAST_INDICATION(&desc);
		}

		switch (ReadContextWord(&pCtx->mode)) {
		case SEP_CIPHER_CMAC:
		case SEP_CIPHER_CBC_MAC:
		case SEP_CIPHER_XCBC_MAC:
			break;
		default:
			HW_DESC_SET_DOUT_TYPE(&desc, DMA_MLLI, (uint32_t)pOutputData, 0, QID_TO_AXI_ID(qid), outAxiNs);
		}
		HW_DESC_SET_FLOW_MODE(&desc, flowMode);

		AddHWDescSequence(qid, &desc);
	}

	/* at least one block of data processed */
	WriteContextWord(&pAesPrivateCtx->isDataBlockProcessed,1);

	/* get machine state */
	StoreCipherState(qid, pCtx);

EndWithErr:
	return drvRc;
}
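/*
 * Call-sequence sketch (illustrative; buffer setup and context initialization
 * are assumed to happen elsewhere, and the function name is hypothetical):
 * block-aligned data goes through ProcessCipher(), the remainder through
 * FinalizeCipher().
 */
static int ExampleCipherFlow(struct sep_ctx_cipher *pCtx,
	DmaBuffer_s *pBulkIn, DmaBuffer_s *pBulkOut,
	DmaBuffer_s *pLastIn, DmaBuffer_s *pLastOut)
{
	int rc;

	rc = ProcessCipher(pCtx, pBulkIn, pBulkOut);
	if (rc != DX_RET_OK)
		return rc;

	return FinalizeCipher(pCtx, pLastIn, pLastOut);
}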
/*!
 * This function is used to process a block(s) of data on HASH machine.
 * It accepts input data aligned to the hash block size; any remainder which is
 * not aligned should be passed when calling "FinalizeHash".
 * 
 * \param pCtx A pointer to the Hash context buffer in SRAM.
 * \param pDmaInputBuffer A structure which represents the DMA input buffer.
 * \param pDmaOutputBuffer A structure which represents the DMA output buffer.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int ProcessHash(struct sep_ctx_hash *pCtx, DmaBuffer_s *pDmaInputBuffer, DmaBuffer_s *pDmaOutputBuffer)
{
	uint8_t *pInputData = NULL;
	HwDesc_s desc;
	uint32_t DataInSize = 0;
	DmaMode_t dmaMode = NO_DMA;
	int nMlliTables = pDmaInputBuffer->nTables;
	uint8_t inAxiNs = pDmaInputBuffer->axiNs;
	int qid = CURR_QUEUE_ID(); /* qid is stored in pxTaskTag field */
	int drvRc = DX_RET_OK;


	HW_DESC_INIT(&desc);

	/* load hash length and digest */
	drvRc = LoadHashState(qid, pCtx, HASH_PADDING_DISABLED);
	if (drvRc != DX_RET_OK) {
		goto EndWithErr; 
	}

	dmaMode = DMA_BUF_TYPE_TO_MODE(pDmaInputBuffer->dmaBufType);

	/* set the input pointer according to the DMA mode */
	switch (dmaMode) {
	case DMA_MLLI:
		pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);
		PrepareFirstMLLITable(qid, pDmaInputBuffer, MLLI_INPUT_TABLE);

		/* data size is meaningless in DMA-MLLI mode */
		DataInSize = 0;

		break;
	case DMA_DLLI:
	case DMA_SRAM:
		pInputData = (uint8_t *)pDmaInputBuffer->pData;

		/* data processing is done */
		nMlliTables = 0;

		/* set the data size */
		DataInSize = pDmaInputBuffer->size;

		break;
	default:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		goto EndWithErr;
	}

	/* process the HASH flow */
	HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData, DataInSize, QID_TO_AXI_ID(qid), inAxiNs);
	HW_DESC_SET_FLOW_MODE(&desc, DIN_HASH);
	AddHWDescSequence(qid, &desc);

	/* process the rest of MLLI tables (MLLI processing loop only) */
	while (--nMlliTables > 0) {

		/* prepare next input MLLI table in SRAM */
		PrepareNextMLLITable(qid, inAxiNs, MLLI_INPUT_TABLE);

		/* process the HASH flow */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, DMA_MLLI, (uint32_t)pInputData, 0, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_HASH);
		AddHWDescSequence(qid, &desc);
	}

	/* write back digest and hash length */
	StoreHashState(qid, pCtx);

EndWithErr:
	return drvRc;
}
/*!
 * This function is used as the finish operation of the HASH machine.
 * The function may either be called after "InitHash" or "ProcessHash".
 * 
 * \param pCtx A pointer to the Hash context buffer in SRAM.
 * \param pDmaInputBuffer A structure which represents the DMA input buffer.
 * \param pDmaOutputBuffer A structure which represents the DMA output buffer.
 * 
 * \return int One of DX_SYM_* error codes defined in dx_error.h.
 */
int FinalizeHash(struct sep_ctx_hash *pCtx, DmaBuffer_s *pDmaInputBuffer, DmaBuffer_s *pDmaOutputBuffer)
{
	HwDesc_s desc;
	uint32_t isRemainingData = 0;
	uint32_t DataInSize = 0;
	DmaMode_t dmaMode = NO_DMA;
	uint8_t *pInputData = NULL;
	uint32_t hw_mode;

	uint32_t DigestSize;
	uint8_t inAxiNs = pDmaInputBuffer->axiNs;

	/* qid is stored in pxTaskTag field */
	int qid = CURR_QUEUE_ID();
	int drvRc = DX_RET_OK;

	HW_DESC_INIT(&desc);

	drvRc = GetHashHwMode(ReadContextWord(&pCtx->mode), &hw_mode);
	if (drvRc != DX_RET_OK) {
		return drvRc; 
	}

	/* SHA224 uses SHA256 HW mode with different init. val. */
	/*same for SHA384 with SHA512*/ 
	drvRc = GetHashHwDigestSize(ReadContextWord(&pCtx->mode), &DigestSize);
	if (drvRc != DX_RET_OK) {
		goto EndWithErr; 
	}

	dmaMode = DMA_BUF_TYPE_TO_MODE(pDmaInputBuffer->dmaBufType);

	/* check if we have remaining data to process */
	switch (dmaMode) {
	case DMA_MLLI:
		isRemainingData = pDmaInputBuffer->nTables;
		DataInSize = 0;
		break;
	case DMA_DLLI:
	case DMA_SRAM:
		isRemainingData = (pDmaInputBuffer->size > 0) ? 1 : 0;
		DataInSize = pDmaInputBuffer->size;
		break;
	case DMA_MODE_NULL:
		break;
	default:
		DX_PAL_LOG_ERR("Invalid DMA mode\n");
		drvRc = DX_RET_INVARG;
		goto EndWithErr;
	}	

	/* check if there is a remainder */
	if (isRemainingData > 1) {
		/* this case only applies to DMA_MLLI mode! */
		pDmaInputBuffer->nTables--;
		ProcessHash(pCtx, pDmaInputBuffer, NULL);

		/* process the last MLLI table */
		PrepareNextMLLITable(qid, inAxiNs, MLLI_INPUT_TABLE);

		/* load hash length and digest */
		drvRc = LoadHashState(qid, pCtx, HASH_PADDING_ENABLED);
		if (drvRc != DX_RET_OK) {
			goto EndWithErr; 
		}

		/* get the pointer of the input MLLI in SRAM */
		pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);

		/* clobber remaining HASH data */
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData, 0, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_HASH);
		AddHWDescSequence(qid, &desc);
	} else if (isRemainingData == 1) {
		/* load hash length and digest */
		drvRc = LoadHashState(qid, pCtx, HASH_PADDING_ENABLED);
		if (drvRc != DX_RET_OK) {
			goto EndWithErr; 
		}

		/* we have a single MLLI table */
		if (dmaMode == DMA_MLLI) {
			pInputData = (uint8_t *)GetFirstLliPtr(qid, MLLI_INPUT_TABLE);
			PrepareFirstMLLITable(qid, pDmaInputBuffer, MLLI_INPUT_TABLE);
		} else {
			pInputData = (uint8_t *)pDmaInputBuffer->pData;

			//* check sram!
		}

		/* clobber remaining HASH data */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_DIN_TYPE(&desc, dmaMode, (uint32_t)pInputData, DataInSize, QID_TO_AXI_ID(qid), inAxiNs);
		HW_DESC_SET_FLOW_MODE(&desc, DIN_HASH);
		AddHWDescSequence(qid, &desc);
	} else {
		/* (isRemainingData == 0) */

		SepHashPrivateContext_s *PrivateContext = (SepHashPrivateContext_s *)pCtx;
		/* load hash length and digest */
		drvRc = LoadHashState(qid, pCtx, HASH_PADDING_DISABLED);
		if (drvRc != DX_RET_OK) {
			goto EndWithErr; 
		}

		/* Workaround: do-pad must be enabled only when writing current length to HW */
		HW_DESC_INIT(&desc);
		HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);
		HW_DESC_SET_CIPHER_CONFIG1(&desc, HASH_PADDING_DISABLED);
		HW_DESC_SET_CIPHER_DO(&desc, DO_PAD);
		HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)PrivateContext->CurrentDigestedLength,
				sizeof(PrivateContext->CurrentDigestedLength));
		HW_DESC_SET_FLOW_MODE(&desc, S_HASH_to_DOUT);
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE1);
		AddHWDescSequence(qid, &desc);
	}



	/* store the hash digest result in the context */
	HW_DESC_INIT(&desc);
	HW_DESC_SET_CIPHER_MODE(&desc, hw_mode);

	HW_DESC_SET_STATE_DOUT_PARAM(&desc, (uint32_t)pCtx->digest, DigestSize);
	HW_DESC_SET_CIPHER_CONFIG0(&desc, HASH_DIGEST_RESULT_LITTLE_ENDIAN);
	HW_DESC_SET_CIPHER_CONFIG1(&desc, HASH_PADDING_DISABLED);
	HW_DESC_SET_CIPHER_DO(&desc, DO_NOT_PAD);
	HW_DESC_SET_FLOW_MODE(&desc, S_HASH_to_DOUT);
	HW_DESC_SET_SETUP_MODE(&desc, SETUP_WRITE_STATE0);
	AddHWDescSequence(qid, &desc);
#ifdef DX_CONFIG_HASH_SHA_512_SUPPORTED
{
	uint32_t tempUint,i;
	if ((ReadContextWord(&pCtx->mode) == SEP_HASH_SHA512) ||
	    (ReadContextWord(&pCtx->mode) == SEP_HASH_SHA384)) {
		WaitForSequenceCompletion();
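		/* Swap each pair of 32-bit words in the digest: SHA-384/512 state
		   words are 64 bits wide, so the two halves written back by the HW
		   are reordered here (assumed endianness fix-up, inferred from the
		   loop below). */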
		for( i = 0 ; i < DigestSize/sizeof(uint32_t); i++ )
		{
			if(i%2){

				tempUint = ReadContextWord(&((uint32_t*)(pCtx->digest))[i - 1]);
				WriteContextWord(&((uint32_t*)(pCtx->digest))[i - 1],ReadContextWord(&((uint32_t*)(pCtx->digest))[i]));
				WriteContextWord(&((uint32_t*)(pCtx->digest))[i],tempUint);
			}
		}

	}
}
#endif


EndWithErr:
	return drvRc;
}
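/*
 * Call-sequence sketch (illustrative; the function name is hypothetical):
 * hash the block-aligned portion with ProcessHash() and pass the unaligned
 * remainder to FinalizeHash(), as required by the ProcessHash() documentation
 * above. The output-buffer argument is not used by these listings, so NULL is
 * passed here.
 */
static int ExampleHashFlow(struct sep_ctx_hash *pCtx,
	DmaBuffer_s *pAlignedIn, DmaBuffer_s *pRemainderIn)
{
	int rc;

	rc = ProcessHash(pCtx, pAlignedIn, NULL);
	if (rc != DX_RET_OK)
		return rc;

	return FinalizeHash(pCtx, pRemainderIn, NULL);
}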
Example #18
void LoadCipherKey(int qid, struct sep_ctx_cipher *pCtx)
{
	HwDesc_s desc;
	uint32_t keySize = ReadContextWord(&pCtx->key_size);
	XcbcMacRfcKeys_s *XcbcKeys = (XcbcMacRfcKeys_s*)pCtx->key;
	SepCipherPrivateContext_s *pAesPrivateCtx = (SepCipherPrivateContext_s *)pCtx->reserved;
	enum sep_crypto_direction encDecFlag = ReadContextWord(&pCtx->direction);

	HW_DESC_INIT(&desc);

	/* A key size of 24 bytes counts as 32 bytes; make sure to zeroize the upper 8 bytes */
	if (keySize == 24) {
		keySize	= SEP_AES_KEY_SIZE_MAX;
		ClearCtxField(&pCtx->key[24], SEP_AES_KEY_SIZE_MAX - 24);
	}

	HW_DESC_SET_CIPHER_MODE(&desc, ReadContextWord(&pCtx->mode));
	if (ReadContextWord(&pCtx->alg) == SEP_CRYPTO_ALG_AES) {
		if ((ReadContextWord(&pCtx->crypto_key_type) == DX_XOR_HDCP_KEY) &&
		    (ReadContextWord(&pCtx->direction) == SEP_CRYPTO_DIRECTION_DECRYPT)) {
			/* If the crypto operation is DECRYPT we still order the HW for an ENCRYPT operation
			when using the HDCP key. The next descriptor, which loads the HDCP XOR key, will direct
			the DECRYPT operation. */
			encDecFlag = SEP_CRYPTO_DIRECTION_ENCRYPT;
		}
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp) ? ReadContextWord(&pAesPrivateCtx->tunnetDir) : encDecFlag);
		HW_DESC_SET_CIPHER_CONFIG1(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp));
		switch (ReadContextWord(&pCtx->mode)) {
		case SEP_CIPHER_XCBC_MAC:
			HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)XcbcKeys->K1, SEP_AES_128_BIT_KEY_SIZE);
			HW_DESC_SET_KEY_SIZE_AES(&desc, SEP_AES_128_BIT_KEY_SIZE);
			if (ReadContextWord(&pCtx->crypto_key_type) == DX_XOR_HDCP_KEY) {
				HW_DESC_SET_AES_XOR_CRYPTO_KEY(&desc);
			}
			break;
		default:
			switch (ReadContextWord(&pCtx->crypto_key_type)) {
			case DX_ROOT_KEY:
				HW_DESC_SET_CIPHER_DO(&desc, ReadContextWord(&pCtx->crypto_key_type));
				break;
			case DX_SESSION_KEY:
				HW_DESC_SET_CIPHER_DO(&desc, SESSION_KEY); //value to be written to DO when session key is used
				break;
			case DX_XOR_HDCP_KEY:
				HW_DESC_SET_AES_XOR_CRYPTO_KEY(&desc);
				/*FALLTHROUGH*/
			case DX_USER_KEY:
			case DX_APPLET_KEY:
			default:
				HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->key, keySize);
			}
			HW_DESC_SET_KEY_SIZE_AES(&desc, ReadContextWord(&pCtx->key_size));
		}

		if (ReadContextWord(&pAesPrivateCtx->engineCore) == SEP_AES_ENGINE2) {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES2);
		} else {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES);
		}
	} else {
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->key, ReadContextWord(&pCtx->key_size));
		HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_DES);
		HW_DESC_SET_KEY_SIZE_DES(&desc, ReadContextWord(&pCtx->key_size));
		HW_DESC_SET_CIPHER_CONFIG0(&desc, encDecFlag);
	}

	HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_KEY0);
	AddHWDescSequence(qid, &desc);

	if ((ReadContextWord(&pCtx->alg) == SEP_CRYPTO_ALG_AES) &&
	    (ReadContextWord(&pCtx->crypto_key_type) == DX_XOR_HDCP_KEY)) {
		/* In HDCP, the user key is XORed with the SEP_HDCP_CONST registers on the fly.
		   We reuse the descriptor and overwrite the necessary bits, so DO NOT
		   clear the descriptor beforehand. */
		HW_DESC_SET_DIN_NO_DMA(&desc, NO_DMA, 0);
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pCtx->direction)); /* user direction */
		HW_DESC_SET_CIPHER_DO(&desc, ReadContextWord(&pCtx->crypto_key_type));
		AddHWDescSequence(qid, &desc);
	}

	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XTS) {
		HW_DESC_INIT(&desc);

		/* load XEX key */
		HW_DESC_SET_CIPHER_MODE(&desc, ReadContextWord(&pCtx->mode));
		HW_DESC_SET_CIPHER_CONFIG0(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp) ? ReadContextWord(&pAesPrivateCtx->tunnetDir) : encDecFlag);
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)pCtx->xex_key, keySize);
		HW_DESC_SET_XEX_DATA_UNIT_SIZE(&desc, ReadContextWord(&pCtx->data_unit_size));
		HW_DESC_SET_CIPHER_CONFIG1(&desc, ReadContextWord(&pAesPrivateCtx->isTunnelOp));
		if (ReadContextWord(&pAesPrivateCtx->engineCore) == SEP_AES_ENGINE2) {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES2);
		} else {
			HW_DESC_SET_FLOW_MODE(&desc, S_DIN_to_AES);
		}
		HW_DESC_SET_KEY_SIZE_AES(&desc, keySize);
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_XEX_KEY);
		AddHWDescSequence(qid, &desc);
	}

	if (ReadContextWord(&pCtx->mode) == SEP_CIPHER_XCBC_MAC) {

		/* load K2 key */
		/* NO init - reuse previous descriptor settings */
		HW_DESC_SET_STATE_DIN_PARAM(&desc,(uint32_t)XcbcKeys->K2, SEP_AES_128_BIT_KEY_SIZE);
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_STATE1);
		AddHWDescSequence(qid, &desc);

		/* load K3 key */
		/* NO init - reuse previous descriptor settings */
		HW_DESC_SET_STATE_DIN_PARAM(&desc, (uint32_t)XcbcKeys->K3, SEP_AES_128_BIT_KEY_SIZE);
		HW_DESC_SET_SETUP_MODE(&desc, SETUP_LOAD_STATE2);
		AddHWDescSequence(qid, &desc);
	}
}