Example No. 1
/* Descriptor for RSA Private operation Form3 */
void *caam_rsa_priv_f3_desc(struct rsa_edesc *edesc)
{
	u32 *desc = edesc->hw_desc;
	struct rsa_priv_frm3_edesc_s *priv_edesc =
			&edesc->dma_u.rsa_priv_f3_edesc;
#ifdef CAAM_DEBUG
	u32 i;
#endif

	init_job_desc_pdb(desc, 0, sizeof(struct rsa_priv_frm3_desc_s) -
			  2 * CAAM_CMD_SZ);
	append_cmd(desc, priv_edesc->sg_flgs.n_len);
	append_ptr(desc, priv_edesc->g_dma);
	append_ptr(desc, priv_edesc->f_dma);
	append_ptr(desc, priv_edesc->c_dma);
	append_ptr(desc, priv_edesc->p_dma);
	append_ptr(desc, priv_edesc->q_dma);
	append_ptr(desc, priv_edesc->dp_dma);
	append_ptr(desc, priv_edesc->dq_dma);
	append_ptr(desc, priv_edesc->tmp1_dma);
	append_ptr(desc, priv_edesc->tmp2_dma);
	append_cmd(desc, (priv_edesc->q_len << RSA_PDB_Q_SHIFT) |
		   priv_edesc->p_len);
	append_operation(desc, OP_TYPE_UNI_PROTOCOL | OP_PCLID_RSADEC_PRVKEY |
			 RSA_PRIV_KEY_FRM_3);

#ifdef CAAM_DEBUG
	for (i = 0; i < desc_len(desc); i++)
		pr_debug("[%d] %x\n", i, desc[i]);
#endif
	return desc;
}
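A minimal sketch of how a caller might hand the finished job descriptor to a CAAM job ring, assuming the standard caam_jr_enqueue() interface and a hypothetical rsa_priv_done() completion callback (neither is part of the example above):

/*
 * Hedged sketch: submit the descriptor built above to a CAAM job ring.
 * caam_jr_enqueue() and caam_jr_strstatus() are assumed to be available;
 * rsa_priv_done() is a hypothetical callback, not part of the example.
 */
static void rsa_priv_done(struct device *dev, u32 *desc, u32 err, void *context)
{
	if (err)
		caam_jr_strstatus(dev, err);	/* decode the CAAM status word */
	/* ... unmap DMA buffers held in context and complete the request ... */
}

static int submit_rsa_priv_f3(struct device *jrdev, struct rsa_edesc *edesc)
{
	u32 *desc = caam_rsa_priv_f3_desc(edesc);

	return caam_jr_enqueue(jrdev, desc, rsa_priv_done, edesc);
}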
Example No. 2
/* Descriptor for RSA Public operation */
void *caam_rsa_pub_desc(struct rsa_edesc *edesc)
{
	struct rsa_pub_edesc_s *pub_edesc = &edesc->dma_u.rsa_pub_edesc;
	u32 *desc = edesc->hw_desc;
#ifdef CAAM_DEBUG
	u32 i;
#endif

	init_job_desc_pdb(desc, 0, sizeof(struct rsa_pub_desc_s) -
			  2 * CAAM_CMD_SZ);
	append_cmd(desc, (pub_edesc->sg_flgs.e_len << RSA_PDB_E_SHIFT) |
		   pub_edesc->sg_flgs.n_len);
	append_ptr(desc, pub_edesc->f_dma);
	append_ptr(desc, pub_edesc->g_dma);
	append_ptr(desc, pub_edesc->n_dma);
	append_ptr(desc, pub_edesc->e_dma);
	append_cmd(desc, pub_edesc->f_len);
	append_operation(desc, OP_TYPE_UNI_PROTOCOL | OP_PCLID_RSAENC_PUBKEY);

#ifdef CAAM_DEBUG
	for (i = 0; i < desc_len(desc); i++)
		pr_debug("[%d] %x\n", i, desc[i]);
#endif
	return desc;
}
char *close_cmd(char *cmd)
{
	cmd = append_cmd(cmd, "(run)");
	cmd = append_cmd(cmd, MARKER);
	cmd = append_cmd(cmd, "(facts)");
	cmd = append_cmd(cmd, "(exit)");
	return cmd;
}
char *build_initial_cmd(void)
{
	char *s = empty_cmd();

	s = append_cmd(s, "(clear)");
	s = append_cmd(s, "(load templates.clp)");
	s = append_cmd(s, "(load phutball.clp)");
	s = append_cmd(s, "(reset)");
	return s;
}
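build_initial_cmd() and close_cmd() rely on empty_cmd() and a string-growing append_cmd(); a minimal sketch of plausible implementations follows (an assumption, the original project may implement them differently):

/*
 * Hedged sketch of the string helpers assumed by the command builders above;
 * the original project may implement them differently.
 */
#include <stdlib.h>
#include <string.h>

char *empty_cmd(void)
{
	return calloc(1, 1);		/* heap-allocated empty string */
}

char *append_cmd(char *cmd, const char *part)
{
	size_t len = strlen(cmd) + strlen(part) + 2;
	char *new_cmd = realloc(cmd, len);

	if (!new_cmd)
		return cmd;		/* keep the old buffer on failure */
	strcat(new_cmd, part);
	strcat(new_cmd, "\n");		/* one CLIPS command per line */
	return new_cmd;
}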
/* Descriptor for RSA Public operation */
void init_rsa_pub_desc(u32 *desc, struct rsa_pub_pdb *pdb)
{
	init_job_desc_pdb(desc, 0, sizeof(*pdb));
	append_cmd(desc, pdb->sgf);
	append_ptr(desc, pdb->f_dma);
	append_ptr(desc, pdb->g_dma);
	append_ptr(desc, pdb->n_dma);
	append_ptr(desc, pdb->e_dma);
	append_cmd(desc, pdb->f_len);
	append_operation(desc, OP_TYPE_UNI_PROTOCOL | OP_PCLID_RSAENC_PUBKEY);
}
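A short sketch of how a caller might fill the PDB before building the descriptor; the sgf packing mirrors the public-key example above, while the helper itself and its DMA handling are assumptions:

/*
 * Hedged sketch: populate an RSA public-key PDB and build the job descriptor.
 * Field names follow the code above; the caller is assumed to have DMA-mapped
 * f (input), g (output), n (modulus) and e (public exponent) already.
 */
static void build_rsa_pub_job(u32 *desc, struct rsa_pub_pdb *pdb,
			      dma_addr_t f_dma, u32 f_len,
			      dma_addr_t g_dma,
			      dma_addr_t n_dma, u32 n_len,
			      dma_addr_t e_dma, u32 e_len)
{
	memset(pdb, 0, sizeof(*pdb));
	pdb->sgf = (e_len << RSA_PDB_E_SHIFT) | n_len;	/* no S/G tables */
	pdb->f_dma = f_dma;
	pdb->g_dma = g_dma;
	pdb->n_dma = n_dma;
	pdb->e_dma = e_dma;
	pdb->f_len = f_len;

	init_rsa_pub_desc(desc, pdb);
}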
Example No. 6
/* DSA verify CAAM descriptor */
void *caam_dsa_verify_desc(struct dsa_edesc_s *edesc)
{
	u32 *desc = edesc->hw_desc;
	u32 op = OP_TYPE_UNI_PROTOCOL | OP_PCLID_DSAVERIFY;
#ifdef CAAM_DEBUG
	u32 i;
#endif

	if (edesc->req_type == ECDSA_VERIFY) {
		op |= OP_PCL_PKPROT_ECC;
		if (edesc->curve_type == ECC_BINARY)
			op |= OP_PCL_PKPROT_F2M;

		init_job_desc_pdb(desc, 0, sizeof(struct ecdsa_verify_desc_s) -
				  2 * CAAM_CMD_SZ);
		append_cmd(desc, (edesc->l_len << DSA_PDB_L_SHIFT) |
				 (edesc->n_len & DSA_PDB_N_MASK));
		append_ptr(desc, edesc->q_dma);
		append_ptr(desc, edesc->r_dma);
		append_ptr(desc, edesc->g_dma);
		append_ptr(desc, edesc->key_dma);
		append_ptr(desc, edesc->f_dma);
		append_ptr(desc, edesc->c_dma);
		append_ptr(desc, edesc->d_dma);
		append_ptr(desc, edesc->tmp_dma);
		append_ptr(desc, edesc->ab_dma);
		append_operation(desc, op);
	} else {
		init_job_desc_pdb(desc, 0, sizeof(struct dsa_verify_desc_s) -
				  2 * CAAM_CMD_SZ);
		append_cmd(desc, (edesc->l_len << DSA_PDB_L_SHIFT) |
				 (edesc->n_len & DSA_PDB_N_MASK));
		append_ptr(desc, edesc->q_dma);
		append_ptr(desc, edesc->r_dma);
		append_ptr(desc, edesc->g_dma);
		append_ptr(desc, edesc->key_dma);
		append_ptr(desc, edesc->f_dma);
		append_ptr(desc, edesc->c_dma);
		append_ptr(desc, edesc->d_dma);
		append_ptr(desc, edesc->tmp_dma);
		append_operation(desc, op);
	}

#ifdef CAAM_DEBUG
	pr_debug("DSA Descriptor:\n");
	for (i = 0; i < desc_len(desc); i++)
		pr_debug("[%d] %x\n", i, desc[i]);
#endif
	return desc;
}
Example No. 7
/* DSA/ECDSA/DH/ECDH keygen CAAM descriptor */
void *caam_keygen_desc(struct dsa_edesc_s *edesc)
{
	u32 *desc = edesc->hw_desc;
	u32 sgf_len = (edesc->l_len << DSA_PDB_L_SHIFT) |
		      (edesc->n_len & DSA_PDB_N_MASK);
	u32 op = OP_TYPE_UNI_PROTOCOL | OP_PCLID_PUBLICKEYPAIR;
	dma_addr_t g_dma = edesc->g_dma;
#ifdef CAAM_DEBUG
	u32 i;
#endif

	if (edesc->req_type == ECC_KEYGEN) {
		if (edesc->erratum_A_006899) {
			sgf_len |= DSA_PDB_SGF_G;
			g_dma = edesc->g_sg_dma;
		}

		op |= OP_PCL_PKPROT_ECC;
		if (edesc->curve_type == ECC_BINARY)
			op |= OP_PCL_PKPROT_F2M;

		init_job_desc_pdb(desc, 0, sizeof(struct ecc_keygen_desc_s) -
				  2 * CAAM_CMD_SZ);
		append_cmd(desc, sgf_len);
		append_ptr(desc, edesc->q_dma);
		append_ptr(desc, edesc->r_dma);
		append_ptr(desc, g_dma);
		append_ptr(desc, edesc->s_dma);
		append_ptr(desc, edesc->key_dma);
		append_ptr(desc, edesc->ab_dma);
		append_operation(desc, op);
	} else {
		init_job_desc_pdb(desc, 0, sizeof(struct dlc_keygen_desc_s) -
				  2 * CAAM_CMD_SZ);
		append_cmd(desc, sgf_len);
		append_ptr(desc, edesc->q_dma);
		append_ptr(desc, edesc->r_dma);
		append_ptr(desc, g_dma);
		append_ptr(desc, edesc->s_dma);
		append_ptr(desc, edesc->key_dma);
		append_operation(desc, op);
	}

#ifdef CAAM_DEBUG
	pr_debug("DSA Keygen Descriptor:\n");
	for (i = 0; i < desc_len(desc); i++)
		pr_debug("[%d] %x\n", i, desc[i]);
#endif
	return desc;
}
Example No. 8
/* DH/ECDH shared secret (compute key) CAAM descriptor */
void *caam_dh_key_desc(struct dh_edesc_s *edesc)
{
	u32 *desc = edesc->hw_desc;
	u32 op = OP_TYPE_UNI_PROTOCOL | OP_PCLID_DH;
#ifdef CAAM_DEBUG
	u32 i;
#endif

	init_job_desc_pdb(desc, 0, sizeof(struct dh_key_desc_s) -
			  2 * CAAM_CMD_SZ);
	append_cmd(desc, (edesc->l_len << DH_PDB_L_SHIFT) |
			 (edesc->n_len & DH_PDB_N_MASK));
	append_ptr(desc, edesc->q_dma);
	/* pointer to r (unused) */
	append_ptr(desc, 0);
	append_ptr(desc, edesc->w_dma);
	append_ptr(desc, edesc->s_dma);
	append_ptr(desc, edesc->z_dma);
	if (edesc->req_type == ECDH_COMPUTE_KEY) {
		append_ptr(desc, edesc->ab_dma);
		op |= OP_PCL_PKPROT_ECC;
		if (edesc->curve_type == ECC_BINARY)
			op |= OP_PCL_PKPROT_F2M;
	}
	append_operation(desc, op);

#ifdef CAAM_DEBUG
	pr_debug("DH Descriptor:\n");
	for (i = 0; i < desc_len(desc); i++)
		pr_debug("[%d] %x\n", i, desc[i]);
#endif
	return desc;
}
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
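A brief usage sketch, assuming an inline AES key and a caller-provided descriptor buffer; the wrapper function and the 16-byte ICV choice are illustrative assumptions:

/*
 * Hedged sketch: build an rfc4543 (AES-GMAC) encapsulation shared descriptor
 * with an inline key; descriptor buffer allocation is left to the caller.
 */
static void build_rfc4543_enc_shdesc(u32 *desc, const u8 *key, unsigned int keylen)
{
	struct alginfo cdata = {
		.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		.key_virt = key,
		.keylen = keylen,
		.key_inline = true,
	};

	cnstr_shdsc_rfc4543_encap(desc, &cdata, 16);	/* full 16-byte ICV */
}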
char *add_world(char *cmd, struct world *w)
{
	char *new_cmd = NULL;
	char *men = NULL;
	char *tmp;
	int i, j;

	asprintf(&men, "(men");
	for (i = 0; i < w->lx; i++)
		for (j = 0; j < w->ly; j++)
			if (w->world[i][j] == MAN) {
				tmp = strdup(men);
				free(men);
				asprintf(&men, "%s %d %d -", tmp, i, j);
				free(tmp);
			}
	tmp = strdup(men);
	free(men);
	asprintf(&men, "%s)", tmp);
	free(tmp);

	asprintf(&new_cmd, "(assert (world (id %s) (limit %d %d) (ball %d %d) %s))",
			w->id, w->lx, w->ly, w->bx, w->by, men);
	free(men);

	cmd = append_cmd(cmd, new_cmd);
	free(new_cmd);

	return cmd;
}
Example No. 11
void inline_cnstr_jobdesc_hash(uint32_t *desc,
			  const uint8_t *msg, uint32_t msgsz, uint8_t *digest,
			  u32 alg_type, uint32_t alg_size, int sg_tbl)
{
	/* Digest length in bytes, e.g. 32 for SHA-256 */
	uint32_t storelen = alg_size;
	u32 options;
	dma_addr_t dma_addr_in, dma_addr_out;

	dma_addr_in = virt_to_phys((void *)msg);
	dma_addr_out = virt_to_phys((void *)digest);

	init_job_desc(desc, 0);
	append_operation(desc, OP_TYPE_CLASS2_ALG |
			 OP_ALG_AAI_HASH | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT | OP_ALG_ICV_OFF | alg_type);

	options = LDST_CLASS_2_CCB | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST2;
	if (sg_tbl)
		options |= FIFOLDST_SGF;
	if (msgsz > 0xffff) {
		options |= FIFOLDST_EXT;
		append_fifo_load(desc, dma_addr_in, 0, options);
		append_cmd(desc, msgsz);
	} else {
		append_fifo_load(desc, dma_addr_in, msgsz, options);
	}

	append_store(desc, dma_addr_out, storelen,
		     LDST_CLASS_2_CCB | LDST_SRCDST_BYTE_CONTEXT);
}
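A brief sketch of a one-shot SHA-256 hash built with this helper; the 64-word descriptor buffer, OP_ALG_ALGSEL_SHA256 and the run_descriptor_jr() call are assumptions based on the usual U-Boot CAAM setup:

/*
 * Hedged sketch: hash a buffer with SHA-256 via the helper above.
 * Descriptor buffer size and the run_descriptor_jr() call are assumptions.
 */
static int sha256_hash_one_shot(const uint8_t *msg, uint32_t msgsz,
				uint8_t digest[32])
{
	uint32_t desc[64] __aligned(ARCH_DMA_MINALIGN);

	inline_cnstr_jobdesc_hash(desc, msg, msgsz, digest,
				  OP_ALG_ALGSEL_SHA256, 32, 0);
	return run_descriptor_jr(desc);		/* submit to the job ring */
}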
/* Descriptor for RSA Private operation - Private Key Form #1 */
void init_rsa_priv_f1_desc(u32 *desc, struct rsa_priv_f1_pdb *pdb)
{
	init_job_desc_pdb(desc, 0, sizeof(*pdb));
	append_cmd(desc, pdb->sgf);
	append_ptr(desc, pdb->g_dma);
	append_ptr(desc, pdb->f_dma);
	append_ptr(desc, pdb->n_dma);
	append_ptr(desc, pdb->d_dma);
	append_operation(desc, OP_TYPE_UNI_PROTOCOL | OP_PCLID_RSADEC_PRVKEY |
			 RSA_PRIV_KEY_FRM_1);
}
Example No. 13
/* Descriptor for RSA Private operation Form2 */
void *caam_rsa_priv_f2_desc(struct rsa_edesc *edesc)
{
	u32 *desc = edesc->hw_desc;
	struct rsa_priv_frm2_edesc_s *priv_edesc =
			&edesc->dma_u.rsa_priv_f2_edesc;

	init_job_desc_pdb(desc, 0, sizeof(struct rsa_priv_frm2_desc_s) -
			  2 * CAAM_CMD_SZ);
	append_cmd(desc, (priv_edesc->sg_flgs.d_len << RSA_PDB_D_SHIFT) |
			 priv_edesc->sg_flgs.n_len);
	append_ptr(desc, priv_edesc->g_dma);
	append_ptr(desc, priv_edesc->f_dma);
	append_ptr(desc, priv_edesc->d_dma);
	append_ptr(desc, priv_edesc->p_dma);
	append_ptr(desc, priv_edesc->q_dma);
	append_ptr(desc, priv_edesc->tmp1_dma);
	append_ptr(desc, priv_edesc->tmp2_dma);
	append_cmd(desc, (priv_edesc->q_len << RSA_PDB_Q_SHIFT) |
			 priv_edesc->p_len);
	append_operation(desc, OP_TYPE_UNI_PROTOCOL | OP_PCLID_RSADEC_PRVKEY |
			 RSA_PRIV_KEY_FRM_2);
	return desc;
}
/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
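A short sketch of a plain AES-CBC call (inline key, no rfc3686 wrapping, IV at CONTEXT1 offset 0); the wrapper and parameter choices are illustrative assumptions:

/*
 * Hedged sketch: AES-CBC givencrypt shared descriptor with an inline key.
 * The 16-byte IV and zero context offset are illustrative assumptions.
 */
static void build_cbc_aes_givenc_shdesc(u32 *desc, const u8 *key,
					unsigned int keylen)
{
	struct alginfo cdata = {
		.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
		.key_virt = key,
		.keylen = keylen,
	};

	cnstr_shdsc_ablkcipher_givencap(desc, &cdata, 16, false, 0);
}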
/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
				  KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
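A short sketch of building this descriptor for hmac(sha256) with an inline MDHA split key; generating the split key itself is outside the scope of the example and the ICV length is an assumption:

/*
 * Hedged sketch: "null encryption" encap descriptor for hmac(sha256) with an
 * inline MDHA split key. Split-key generation and ICV truncation length are
 * assumptions for illustration.
 */
static void build_aead_null_enc_shdesc(u32 *desc, const u8 *split_key,
				       unsigned int keylen,
				       unsigned int keylen_pad)
{
	struct alginfo adata = {
		.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		.key_virt = split_key,
		.keylen = keylen,
		.keylen_pad = keylen_pad,
		.key_inline = true,
	};

	cnstr_shdsc_aead_null_encap(desc, &adata, 16);	/* truncated ICV */
}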
Example No. 16
int inline_cnstr_jobdesc_blob_dek(uint32_t *desc, const uint8_t *plain_txt,
				       uint8_t *dek_blob, uint32_t in_sz)
{
	ccsr_sec_t *sec = (void *)CONFIG_SYS_FSL_SEC_ADDR;
	uint32_t sm_vid = SM_VERSION(sec_in32(&sec->smvid));
	uint32_t jr_id = 0;

	uint32_t ret = 0;
	u32 aad_w1, aad_w2;
	/* The output blob carries a 32-byte key blob at the beginning and a
	 * 16-byte HMAC identifier at the end of the data blob. */
	uint32_t out_sz = in_sz + KEY_BLOB_SIZE + MAC_SIZE;
	/* Setting HDR for blob */
	uint8_t wrapped_key_hdr[8] = {HDR_TAG, 0x00, WRP_HDR_SIZE + out_sz,
			     HDR_PAR, HAB_MOD, HAB_ALG, in_sz, HAB_FLG};

	/* initialize the blob array */
	memset(dek_blob, 0, out_sz + 8);
	/* Copy the header into the DEK blob buffer */
	memcpy(dek_blob, wrapped_key_hdr, sizeof(wrapped_key_hdr));

	/* allocating secure memory */
	ret = caam_page_alloc(PAGE_1, PARTITION_1);
	if (ret)
		return ret;

	/* Write DEK to secure memory */
	memcpy((uint32_t *)SEC_MEM_PAGE1, (uint32_t *)plain_txt, in_sz);

	unsigned long start = (unsigned long)SEC_MEM_PAGE1 &
				~(ARCH_DMA_MINALIGN - 1);
	unsigned long end = ALIGN(start + 0x1000, ARCH_DMA_MINALIGN);
	flush_dcache_range(start, end);

	/* Now configure the access rights of the partition */
	sec_out32(CAAM_SMAG1JR(sm_vid, jr_id, PARTITION_1), KS_G1);
	sec_out32(CAAM_SMAG2JR(sm_vid, jr_id, PARTITION_1), 0);
	sec_out32(CAAM_SMAPJR(sm_vid, jr_id, PARTITION_1), PERM);

	/* construct aad for AES */
	aad_w1 = (in_sz << OP_ALG_ALGSEL_SHIFT) | KEY_AES_SRC | LD_CCM_MODE;
	aad_w2 = 0x0;

	init_job_desc(desc, 0);

	append_cmd(desc, CMD_LOAD | CLASS_2 | KEY_IMM | KEY_ENC |
				(0x0c << LDST_OFFSET_SHIFT) | 0x08);

	append_u32(desc, aad_w1);

	append_u32(desc, aad_w2);

	append_cmd_ptr(desc, (dma_addr_t)SEC_MEM_PAGE1, in_sz, CMD_SEQ_IN_PTR);

	append_cmd_ptr(desc, (dma_addr_t)dek_blob + 8, out_sz, CMD_SEQ_OUT_PTR);

	append_operation(desc, OP_TYPE_ENCAP_PROTOCOL | OP_PCLID_BLOB |
						OP_PCLID_SECMEM);

	return ret;
}
/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi)
{
	u32 geniv, moveiv;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload the IV */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
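A final sketch showing how the parameters map for an authenc(hmac(sha256),cbc(aes)) transform on the non-QI path; the cdata/adata key material is assumed to be prepared by the caller as in the previous examples:

/*
 * Hedged sketch: givencrypt shared descriptor for authenc(hmac(sha256),cbc(aes))
 * on the non-QI path. cdata (cipher key) and adata (MDHA split key) are assumed
 * to be set up by the caller; sizes below are illustrative.
 */
static void build_authenc_givenc_shdesc(u32 *desc, struct alginfo *cdata,
					struct alginfo *adata)
{
	cnstr_shdsc_aead_givencap(desc, cdata, adata,
				  16,		/* ivsize: AES block */
				  16,		/* icvsize: truncated HMAC tag */
				  false, NULL,	/* no rfc3686 nonce */
				  0,		/* IV at CONTEXT1 offset 0 */
				  false);	/* not the caam/qi path */
}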