@@ -880,11 +880,26 @@ static inline int build_auth_sg_fd(
struct qbman_fle *fle, *sge, *ip_fle, *op_fle;
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
+ int data_len, data_offset;
uint8_t *old_digest;
struct rte_mbuf *mbuf;
PMD_INIT_FUNC_TRACE();
+ data_len = sym_op->auth.data.length;
+ data_offset = sym_op->auth.data.offset;
+
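+	/*
+	 * SNOW3G/ZUC auth length and offset are specified in bits; SEC
+	 * works on bytes, so enforce byte alignment and convert.
+	 */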
+ if (sess->auth_alg == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
+ sess->auth_alg == RTE_CRYPTO_AUTH_ZUC_EIA3) {
+ if ((data_len & 7) || (data_offset & 7)) {
+ DPAA2_SEC_ERR("AUTH: len/offset must be full bytes");
+ return -1;
+ }
+
+ data_len = data_len >> 3;
+ data_offset = data_offset >> 3;
+ }
+
mbuf = sym_op->m_src;
fle = (struct qbman_fle *)rte_malloc(NULL, FLE_SG_MEM_SIZE,
RTE_CACHE_LINE_SIZE);
@@ -914,25 +929,51 @@ static inline int build_auth_sg_fd(
/* i/p fle */
DPAA2_SET_FLE_SG_EXT(ip_fle);
DPAA2_SET_FLE_ADDR(ip_fle, DPAA2_VADDR_TO_IOVA(sge));
- /* i/p 1st seg */
- DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
- DPAA2_SET_FLE_OFFSET(sge, sym_op->auth.data.offset + mbuf->data_off);
- sge->length = mbuf->data_len - sym_op->auth.data.offset;
+ ip_fle->length = data_len;
- /* i/p segs */
- mbuf = mbuf->next;
- while (mbuf) {
+ if (sess->iv.length) {
+ uint8_t *iv_ptr;
+
+ iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sess->iv.offset);
+
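+		/* SEC takes a compacted IV: 12 bytes for SNOW f9, 8 for ZUC */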
+ if (sess->auth_alg == RTE_CRYPTO_AUTH_SNOW3G_UIA2) {
+ iv_ptr = conv_to_snow_f9_iv(iv_ptr);
+ sge->length = 12;
+ } else if (sess->auth_alg == RTE_CRYPTO_AUTH_ZUC_EIA3) {
+ iv_ptr = conv_to_zuc_eia_iv(iv_ptr);
+ sge->length = 8;
+ } else {
+ sge->length = sess->iv.length;
+ }
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
+ ip_fle->length += sge->length;
sge++;
- DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
- DPAA2_SET_FLE_OFFSET(sge, mbuf->data_off);
- sge->length = mbuf->data_len;
- mbuf = mbuf->next;
}
- if (sess->dir == DIR_ENC) {
- /* Digest calculation case */
- sge->length -= sess->digest_length;
- ip_fle->length = sym_op->auth.data.length;
+ /* i/p 1st seg */
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
+ DPAA2_SET_FLE_OFFSET(sge, data_offset + mbuf->data_off);
+
+ if (data_len <= (mbuf->data_len - data_offset)) {
+ sge->length = data_len;
+ data_len = 0;
} else {
+ sge->length = mbuf->data_len - data_offset;
+
+ /* remaining i/p segs */
+ while ((data_len = data_len - sge->length) &&
+ (mbuf = mbuf->next)) {
+ sge++;
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
+ DPAA2_SET_FLE_OFFSET(sge, mbuf->data_off);
+ if (data_len > mbuf->data_len)
+ sge->length = mbuf->data_len;
+ else
+ sge->length = data_len;
+ }
+ }
+
+ if (sess->dir == DIR_DEC) {
/* Digest verification case */
sge++;
old_digest = (uint8_t *)(sge + 1);
@@ -940,8 +981,7 @@ static inline int build_auth_sg_fd(
sess->digest_length);
DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(old_digest));
sge->length = sess->digest_length;
- ip_fle->length = sym_op->auth.data.length +
- sess->digest_length;
+ ip_fle->length += sess->digest_length;
}
DPAA2_SET_FLE_FIN(sge);
DPAA2_SET_FLE_FIN(ip_fle);
@@ -958,11 +998,26 @@ build_auth_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
struct qbman_fle *fle, *sge;
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
+ int data_len, data_offset;
uint8_t *old_digest;
int retval;
PMD_INIT_FUNC_TRACE();
+ data_len = sym_op->auth.data.length;
+ data_offset = sym_op->auth.data.offset;
+
+ if (sess->auth_alg == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
+ sess->auth_alg == RTE_CRYPTO_AUTH_ZUC_EIA3) {
+ if ((data_len & 7) || (data_offset & 7)) {
+ DPAA2_SEC_ERR("AUTH: len/offset must be full bytes");
+ return -1;
+ }
+
+ data_len = data_len >> 3;
+ data_offset = data_offset >> 3;
+ }
+
retval = rte_mempool_get(priv->fle_pool, (void **)(&fle));
if (retval) {
DPAA2_SEC_ERR("AUTH Memory alloc failed for SGE");
@@ -978,64 +1033,72 @@ build_auth_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
DPAA2_SET_FLE_ADDR(fle, (size_t)op);
DPAA2_FLE_SAVE_CTXT(fle, (ptrdiff_t)priv);
fle = fle + 1;
+ sge = fle + 2;
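+	/* SG entries start after the output (fle) and input (fle + 1) FLEs */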
if (likely(bpid < MAX_BPID)) {
DPAA2_SET_FD_BPID(fd, bpid);
DPAA2_SET_FLE_BPID(fle, bpid);
DPAA2_SET_FLE_BPID(fle + 1, bpid);
+ DPAA2_SET_FLE_BPID(sge, bpid);
+ DPAA2_SET_FLE_BPID(sge + 1, bpid);
} else {
DPAA2_SET_FD_IVP(fd);
DPAA2_SET_FLE_IVP(fle);
DPAA2_SET_FLE_IVP((fle + 1));
+ DPAA2_SET_FLE_IVP(sge);
+ DPAA2_SET_FLE_IVP((sge + 1));
}
+
flc = &priv->flc_desc[DESC_INITFINAL].flc;
DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc));
+ DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
+ DPAA2_SET_FD_COMPOUND_FMT(fd);
DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sym_op->auth.digest.data));
fle->length = sess->digest_length;
-
- DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
- DPAA2_SET_FD_COMPOUND_FMT(fd);
fle++;
- if (sess->dir == DIR_ENC) {
- DPAA2_SET_FLE_ADDR(fle,
- DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
- DPAA2_SET_FLE_OFFSET(fle, sym_op->auth.data.offset +
- sym_op->m_src->data_off);
- DPAA2_SET_FD_LEN(fd, sym_op->auth.data.length);
- fle->length = sym_op->auth.data.length;
- } else {
- sge = fle + 2;
- DPAA2_SET_FLE_SG_EXT(fle);
- DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
+ /* Setting input FLE */
+ DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
+ DPAA2_SET_FLE_SG_EXT(fle);
+ fle->length = data_len;
+
+ if (sess->iv.length) {
+ uint8_t *iv_ptr;
+
+ iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sess->iv.offset);
- if (likely(bpid < MAX_BPID)) {
- DPAA2_SET_FLE_BPID(sge, bpid);
- DPAA2_SET_FLE_BPID(sge + 1, bpid);
+		if (sess->auth_alg == RTE_CRYPTO_AUTH_SNOW3G_UIA2) {
+			iv_ptr = conv_to_snow_f9_iv(iv_ptr);
+			sge->length = 12;
+		} else if (sess->auth_alg == RTE_CRYPTO_AUTH_ZUC_EIA3) {
+			iv_ptr = conv_to_zuc_eia_iv(iv_ptr);
+			sge->length = 8;
		} else {
- DPAA2_SET_FLE_IVP(sge);
- DPAA2_SET_FLE_IVP((sge + 1));
+ sge->length = sess->iv.length;
}
- DPAA2_SET_FLE_ADDR(sge,
- DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
- DPAA2_SET_FLE_OFFSET(sge, sym_op->auth.data.offset +
- sym_op->m_src->data_off);
- DPAA2_SET_FD_LEN(fd, sym_op->auth.data.length +
- sess->digest_length);
- sge->length = sym_op->auth.data.length;
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
+ fle->length = fle->length + sge->length;
+ sge++;
+ }
+
+ /* Setting data to authenticate */
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
+ DPAA2_SET_FLE_OFFSET(sge, data_offset + sym_op->m_src->data_off);
+ sge->length = data_len;
+
+ if (sess->dir == DIR_DEC) {
sge++;
old_digest = (uint8_t *)(sge + 1);
rte_memcpy(old_digest, sym_op->auth.digest.data,
sess->digest_length);
DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(old_digest));
sge->length = sess->digest_length;
- fle->length = sym_op->auth.data.length +
- sess->digest_length;
- DPAA2_SET_FLE_FIN(sge);
+ fle->length = fle->length + sess->digest_length;
}
+
+ DPAA2_SET_FLE_FIN(sge);
DPAA2_SET_FLE_FIN(fle);
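+	/* input FLE length already counts data, IV and digest as applicable */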
+ DPAA2_SET_FD_LEN(fd, fle->length);
return 0;
}
@@ -1046,6 +1109,7 @@ build_cipher_sg_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
{
struct rte_crypto_sym_op *sym_op = op->sym;
struct qbman_fle *ip_fle, *op_fle, *sge, *fle;
+ int data_len, data_offset;
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
struct rte_mbuf *mbuf;
@@ -1054,6 +1118,20 @@ build_cipher_sg_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
PMD_INIT_FUNC_TRACE();
+ data_len = sym_op->cipher.data.length;
+ data_offset = sym_op->cipher.data.offset;
+
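+	/* as in the auth path, SNOW3G/ZUC len/offset are in bits; convert */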
+ if (sess->cipher_alg == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
+ sess->cipher_alg == RTE_CRYPTO_CIPHER_ZUC_EEA3) {
+ if ((data_len & 7) || (data_offset & 7)) {
+ DPAA2_SEC_ERR("CIPHER: len/offset must be full bytes");
+ return -1;
+ }
+
+ data_len = data_len >> 3;
+ data_offset = data_offset >> 3;
+ }
+
if (sym_op->m_dst)
mbuf = sym_op->m_dst;
else
@@ -1079,20 +1157,20 @@ build_cipher_sg_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
DPAA2_SEC_DP_DEBUG(
"CIPHER SG: cipher_off: 0x%x/length %d, ivlen=%d"
" data_off: 0x%x\n",
- sym_op->cipher.data.offset,
- sym_op->cipher.data.length,
+ data_offset,
+ data_len,
sess->iv.length,
sym_op->m_src->data_off);
/* o/p fle */
DPAA2_SET_FLE_ADDR(op_fle, DPAA2_VADDR_TO_IOVA(sge));
- op_fle->length = sym_op->cipher.data.length;
+ op_fle->length = data_len;
DPAA2_SET_FLE_SG_EXT(op_fle);
/* o/p 1st seg */
DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
- DPAA2_SET_FLE_OFFSET(sge, sym_op->cipher.data.offset + mbuf->data_off);
- sge->length = mbuf->data_len - sym_op->cipher.data.offset;
+ DPAA2_SET_FLE_OFFSET(sge, data_offset + mbuf->data_off);
+ sge->length = mbuf->data_len - data_offset;
mbuf = mbuf->next;
/* o/p segs */
@@ -1114,7 +1192,7 @@ build_cipher_sg_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
mbuf = sym_op->m_src;
sge++;
DPAA2_SET_FLE_ADDR(ip_fle, DPAA2_VADDR_TO_IOVA(sge));
- ip_fle->length = sess->iv.length + sym_op->cipher.data.length;
+ ip_fle->length = sess->iv.length + data_len;
DPAA2_SET_FLE_SG_EXT(ip_fle);
/* i/p IV */
@@ -1126,9 +1204,8 @@ build_cipher_sg_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
/* i/p 1st seg */
DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(mbuf));
- DPAA2_SET_FLE_OFFSET(sge, sym_op->cipher.data.offset +
- mbuf->data_off);
- sge->length = mbuf->data_len - sym_op->cipher.data.offset;
+ DPAA2_SET_FLE_OFFSET(sge, data_offset + mbuf->data_off);
+ sge->length = mbuf->data_len - data_offset;
mbuf = mbuf->next;
/* i/p segs */
@@ -1165,7 +1242,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
{
struct rte_crypto_sym_op *sym_op = op->sym;
struct qbman_fle *fle, *sge;
- int retval;
+ int retval, data_len, data_offset;
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
@@ -1174,6 +1251,20 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
PMD_INIT_FUNC_TRACE();
+ data_len = sym_op->cipher.data.length;
+ data_offset = sym_op->cipher.data.offset;
+
+ if (sess->cipher_alg == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
+ sess->cipher_alg == RTE_CRYPTO_CIPHER_ZUC_EEA3) {
+ if ((data_len & 7) || (data_offset & 7)) {
+ DPAA2_SEC_ERR("CIPHER: len/offset must be full bytes");
+ return -1;
+ }
+
+ data_len = data_len >> 3;
+ data_offset = data_offset >> 3;
+ }
+
if (sym_op->m_dst)
dst = sym_op->m_dst;
else
@@ -1212,24 +1303,22 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
flc = &priv->flc_desc[0].flc;
DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
- DPAA2_SET_FD_LEN(fd, sym_op->cipher.data.length +
- sess->iv.length);
+ DPAA2_SET_FD_LEN(fd, data_len + sess->iv.length);
DPAA2_SET_FD_COMPOUND_FMT(fd);
DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc));
DPAA2_SEC_DP_DEBUG(
"CIPHER: cipher_off: 0x%x/length %d, ivlen=%d,"
" data_off: 0x%x\n",
- sym_op->cipher.data.offset,
- sym_op->cipher.data.length,
+ data_offset,
+ data_len,
sess->iv.length,
sym_op->m_src->data_off);
DPAA2_SET_FLE_ADDR(fle, DPAA2_MBUF_VADDR_TO_IOVA(dst));
- DPAA2_SET_FLE_OFFSET(fle, sym_op->cipher.data.offset +
- dst->data_off);
+ DPAA2_SET_FLE_OFFSET(fle, data_offset + dst->data_off);
- fle->length = sym_op->cipher.data.length + sess->iv.length;
+ fle->length = data_len + sess->iv.length;
DPAA2_SEC_DP_DEBUG(
"CIPHER: 1 - flc = %p, fle = %p FLEaddr = %x-%x, length %d\n",
@@ -1239,7 +1328,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
fle++;
DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
- fle->length = sym_op->cipher.data.length + sess->iv.length;
+ fle->length = data_len + sess->iv.length;
DPAA2_SET_FLE_SG_EXT(fle);
@@ -1248,10 +1337,9 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
sge++;
DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
- DPAA2_SET_FLE_OFFSET(sge, sym_op->cipher.data.offset +
- sym_op->m_src->data_off);
+ DPAA2_SET_FLE_OFFSET(sge, data_offset + sym_op->m_src->data_off);
- sge->length = sym_op->cipher.data.length;
+ sge->length = data_len;
DPAA2_SET_FLE_FIN(sge);
DPAA2_SET_FLE_FIN(fle);
@@ -1762,32 +1850,60 @@ dpaa2_sec_cipher_init(struct rte_cryptodev *dev,
/* Set IV parameters */
session->iv.offset = xform->cipher.iv.offset;
session->iv.length = xform->cipher.iv.length;
+ session->dir = (xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) ?
+ DIR_ENC : DIR_DEC;
switch (xform->cipher.algo) {
case RTE_CRYPTO_CIPHER_AES_CBC:
cipherdata.algtype = OP_ALG_ALGSEL_AES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CBC;
+ bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0,
+ SHR_NEVER, &cipherdata, NULL,
+ session->iv.length,
+ session->dir);
break;
case RTE_CRYPTO_CIPHER_3DES_CBC:
cipherdata.algtype = OP_ALG_ALGSEL_3DES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_3DES_CBC;
+ bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0,
+ SHR_NEVER, &cipherdata, NULL,
+ session->iv.length,
+ session->dir);
break;
case RTE_CRYPTO_CIPHER_AES_CTR:
cipherdata.algtype = OP_ALG_ALGSEL_AES;
cipherdata.algmode = OP_ALG_AAI_CTR;
session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CTR;
+ bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0,
+ SHR_NEVER, &cipherdata, NULL,
+ session->iv.length,
+ session->dir);
break;
case RTE_CRYPTO_CIPHER_3DES_CTR:
+ cipherdata.algtype = OP_ALG_ALGSEL_3DES;
+ cipherdata.algmode = OP_ALG_AAI_CTR;
+ session->cipher_alg = RTE_CRYPTO_CIPHER_3DES_CTR;
+ bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0,
+ SHR_NEVER, &cipherdata, NULL,
+ session->iv.length,
+ session->dir);
+ break;
+ case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
+ cipherdata.algtype = OP_ALG_ALGSEL_SNOW_F8;
+ session->cipher_alg = RTE_CRYPTO_CIPHER_SNOW3G_UEA2;
+ bufsize = cnstr_shdsc_snow_f8(priv->flc_desc[0].desc, 1, 0,
+ &cipherdata,
+ session->dir);
+ break;
+ case RTE_CRYPTO_CIPHER_KASUMI_F8:
+ case RTE_CRYPTO_CIPHER_ZUC_EEA3:
+ case RTE_CRYPTO_CIPHER_AES_F8:
case RTE_CRYPTO_CIPHER_AES_ECB:
case RTE_CRYPTO_CIPHER_3DES_ECB:
case RTE_CRYPTO_CIPHER_AES_XTS:
- case RTE_CRYPTO_CIPHER_AES_F8:
case RTE_CRYPTO_CIPHER_ARC4:
- case RTE_CRYPTO_CIPHER_KASUMI_F8:
- case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
- case RTE_CRYPTO_CIPHER_ZUC_EEA3:
case RTE_CRYPTO_CIPHER_NULL:
DPAA2_SEC_ERR("Crypto: Unsupported Cipher alg %u",
xform->cipher.algo);
@@ -1797,12 +1913,7 @@ dpaa2_sec_cipher_init(struct rte_cryptodev *dev,
xform->cipher.algo);
goto error_out;
}
- session->dir = (xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) ?
- DIR_ENC : DIR_DEC;
- bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0, SHR_NEVER,
- &cipherdata, NULL, session->iv.length,
- session->dir);
if (bufsize < 0) {
DPAA2_SEC_ERR("Crypto: Descriptor build failed");
goto error_out;
@@ -1865,40 +1976,77 @@ dpaa2_sec_auth_init(struct rte_cryptodev *dev,
authdata.key_type = RTA_DATA_IMM;
session->digest_length = xform->auth.digest_length;
+ session->dir = (xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE) ?
+ DIR_ENC : DIR_DEC;
switch (xform->auth.algo) {
case RTE_CRYPTO_AUTH_SHA1_HMAC:
authdata.algtype = OP_ALG_ALGSEL_SHA1;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_SHA1_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
case RTE_CRYPTO_AUTH_MD5_HMAC:
authdata.algtype = OP_ALG_ALGSEL_MD5;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_MD5_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
case RTE_CRYPTO_AUTH_SHA256_HMAC:
authdata.algtype = OP_ALG_ALGSEL_SHA256;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
case RTE_CRYPTO_AUTH_SHA384_HMAC:
authdata.algtype = OP_ALG_ALGSEL_SHA384;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_SHA384_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
case RTE_CRYPTO_AUTH_SHA512_HMAC:
authdata.algtype = OP_ALG_ALGSEL_SHA512;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
case RTE_CRYPTO_AUTH_SHA224_HMAC:
authdata.algtype = OP_ALG_ALGSEL_SHA224;
authdata.algmode = OP_ALG_AAI_HMAC;
session->auth_alg = RTE_CRYPTO_AUTH_SHA224_HMAC;
+ bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, SHR_NEVER, &authdata,
+ !session->dir,
+ session->digest_length);
break;
- case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
+ authdata.algtype = OP_ALG_ALGSEL_SNOW_F9;
+ authdata.algmode = OP_ALG_AAI_F9;
+ session->auth_alg = RTE_CRYPTO_AUTH_SNOW3G_UIA2;
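+		/* unlike the HMAC cases, f9 takes a per-operation IV */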
+ session->iv.offset = xform->auth.iv.offset;
+ session->iv.length = xform->auth.iv.length;
+ bufsize = cnstr_shdsc_snow_f9(priv->flc_desc[DESC_INITFINAL].desc,
+ 1, 0, &authdata,
+ !session->dir,
+ session->digest_length);
+ break;
+ case RTE_CRYPTO_AUTH_KASUMI_F9:
+ case RTE_CRYPTO_AUTH_ZUC_EIA3:
case RTE_CRYPTO_AUTH_NULL:
case RTE_CRYPTO_AUTH_SHA1:
case RTE_CRYPTO_AUTH_SHA256:
@@ -1907,10 +2055,9 @@ dpaa2_sec_auth_init(struct rte_cryptodev *dev,
case RTE_CRYPTO_AUTH_SHA384:
case RTE_CRYPTO_AUTH_MD5:
case RTE_CRYPTO_AUTH_AES_GMAC:
- case RTE_CRYPTO_AUTH_KASUMI_F9:
+ case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
case RTE_CRYPTO_AUTH_AES_CMAC:
case RTE_CRYPTO_AUTH_AES_CBC_MAC:
- case RTE_CRYPTO_AUTH_ZUC_EIA3:
-		DPAA2_SEC_ERR("Crypto: Unsupported auth alg %un",
+		DPAA2_SEC_ERR("Crypto: Unsupported auth alg %u",
xform->auth.algo);
goto error_out;
@@ -1919,12 +2066,7 @@ dpaa2_sec_auth_init(struct rte_cryptodev *dev,
xform->auth.algo);
goto error_out;
}
- session->dir = (xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE) ?
- DIR_ENC : DIR_DEC;
- bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
- 1, 0, SHR_NEVER, &authdata, !session->dir,
- session->digest_length);
if (bufsize < 0) {
DPAA2_SEC_ERR("Crypto: Invalid buffer length");
goto error_out;
@@ -410,7 +410,51 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
}, }
}, }
},
-
+ { /* SNOW 3G (UIA2) */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+ {.auth = {
+ .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .digest_size = {
+ .min = 4,
+ .max = 4,
+ .increment = 0
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* SNOW 3G (UEA2) */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
@@ -57,6 +57,36 @@ cnstr_shdsc_snow_f8(uint32_t *descbuf, bool ps, bool swap,
return PROGRAM_FINALIZE(p);
}
+/**
+ * conv_to_snow_f9_iv - convert a 16-byte SNOW/f9 (UIA2) IV for 3G into
+ * the 12-byte form expected by the SEC hardware.
+ * @iv: 16-byte original IV data, rearranged in place
+ *
+ * Return: pointer to the 12-byte IV data as understood by SEC HW
+ */
+static inline uint8_t *conv_to_snow_f9_iv(uint8_t *iv)
+{
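+	/*
+	 * Bytes 0..3 and 8..11 of the 16-byte IV both derive from COUNT
+	 * and differ only in the direction bit, so this comparison is
+	 * assumed to recover the direction indicator (0 or 4) for SEC.
+	 */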
+ uint8_t temp = (iv[8] == iv[0]) ? 0 : 4;
+
+ iv[12] = iv[4];
+ iv[13] = iv[5];
+ iv[14] = iv[6];
+ iv[15] = iv[7];
+
+ iv[8] = temp;
+ iv[9] = 0x00;
+ iv[10] = 0x00;
+ iv[11] = 0x00;
+
+ iv[4] = iv[0];
+ iv[5] = iv[1];
+ iv[6] = iv[2];
+ iv[7] = iv[3];
+
+ return (iv + 4);
+}
+
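+/*
+ * Worked illustration (hypothetical byte values): an input IV laid out as
+ *   c0 c1 c2 c3 | f0 f1 f2 f3 | c0' c1' c2' c3' | .. .. .. ..
+ * leaves the 12 bytes at the returned pointer (iv + 4) reading
+ *   c0 c1 c2 c3 | t 00 00 00 | f0 f1 f2 f3
+ * where t is 0 when c0' == c0 and 4 otherwise.
+ */
+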
/**
* cnstr_shdsc_snow_f9 - SNOW/f9 (UIA2) as a shared descriptor
* @descbuf: pointer to descriptor-under-construction buffer