/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *      Manoj Malviya (manojmalviya@chelsio.com)
 *      Atul Gupta (atul.gupta@chelsio.com)
 *      Jitendra Lulla (jlulla@chelsio.com)
 *      Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *      Harsh Jain (harsh@chelsio.com)
 */
#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"
static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
        return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
        return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
        return ctx->dev->u_ctx;
}

static inline int is_ofld_imm(const struct sk_buff *skb)
{
        return (skb->len <= CRYPTO_MAX_IMM_TX_PKT_LEN);
}
/*
 *      sgl_len - calculates the size of an SGL of the given capacity
 *      @n: the number of SGL entries
 *      Calculates the number of flits needed for a scatter/gather list
 *      that can hold the given number of entries.
 */
static inline unsigned int sgl_len(unsigned int n)
{
        n--;
        return (3 * n) / 2 + (n & 1) + 2;
}
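/*
 * Worked example (illustrative): each 3-flit pair of the SGL carries two
 * 8-byte addresses and two 4-byte lengths. For n = 5 entries, sgl_len()
 * does n-- (n = 4), then (3 * 4) / 2 + (4 & 1) + 2 = 8 flits.
 */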
static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
        u8 temp[SHA512_DIGEST_SIZE];
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        int authsize = crypto_aead_authsize(tfm);
        struct cpl_fw6_pld *fw6_pld;
        int cmp = 0;

        fw6_pld = (struct cpl_fw6_pld *)input;
        if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
            (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
                cmp = memcmp(&fw6_pld->data[2], (fw6_pld + 1), authsize);
        } else {
                sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
                                   authsize, req->assoclen +
                                   req->cryptlen - authsize);
                cmp = memcmp(temp, (fw6_pld + 1), authsize);
        }
        if (cmp)
                *err = -EBADMSG;
        else
                *err = 0;
}
/*
 *      chcr_handle_resp - Unmap the DMA buffers associated with the request
 *      @req: crypto request
 */
int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
                     int err)
{
        struct crypto_tfm *tfm = req->tfm;
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct chcr_req_ctx ctx_req;
        struct cpl_fw6_pld *fw6_pld;
        unsigned int digestsize, updated_digestsize;

        switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AEAD:
                ctx_req.req.aead_req = (struct aead_request *)req;
                ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
                dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.ctx.reqctx->dst,
                             ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
                if (ctx_req.ctx.reqctx->skb) {
                        kfree_skb(ctx_req.ctx.reqctx->skb);
                        ctx_req.ctx.reqctx->skb = NULL;
                }
                if (ctx_req.ctx.reqctx->verify == VERIFY_SW) {
                        chcr_verify_tag(ctx_req.req.aead_req, input,
                                        &err);
                        ctx_req.ctx.reqctx->verify = VERIFY_HW;
                }
                break;
        case CRYPTO_ALG_TYPE_ABLKCIPHER:
                ctx_req.req.ablk_req = (struct ablkcipher_request *)req;
                ctx_req.ctx.ablk_ctx =
                        ablkcipher_request_ctx(ctx_req.req.ablk_req);
                if (!err) {
                        fw6_pld = (struct cpl_fw6_pld *)input;
                        memcpy(ctx_req.req.ablk_req->info, &fw6_pld->data[2],
                               AES_BLOCK_SIZE);
                }
                dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.ablk_req->dst,
                             ctx_req.ctx.ablk_ctx->dst_nents, DMA_FROM_DEVICE);
                if (ctx_req.ctx.ablk_ctx->skb) {
                        kfree_skb(ctx_req.ctx.ablk_ctx->skb);
                        ctx_req.ctx.ablk_ctx->skb = NULL;
                }
                break;
        case CRYPTO_ALG_TYPE_AHASH:
                ctx_req.req.ahash_req = (struct ahash_request *)req;
                ctx_req.ctx.ahash_ctx =
                        ahash_request_ctx(ctx_req.req.ahash_req);
                digestsize =
                        crypto_ahash_digestsize(crypto_ahash_reqtfm(
                                                        ctx_req.req.ahash_req));
                updated_digestsize = digestsize;
                if (digestsize == SHA224_DIGEST_SIZE)
                        updated_digestsize = SHA256_DIGEST_SIZE;
                else if (digestsize == SHA384_DIGEST_SIZE)
                        updated_digestsize = SHA512_DIGEST_SIZE;
                if (ctx_req.ctx.ahash_ctx->skb) {
                        kfree_skb(ctx_req.ctx.ahash_ctx->skb);
                        ctx_req.ctx.ahash_ctx->skb = NULL;
                }
                if (ctx_req.ctx.ahash_ctx->result == 1) {
                        ctx_req.ctx.ahash_ctx->result = 0;
                        memcpy(ctx_req.req.ahash_req->result, input +
                               sizeof(struct cpl_fw6_pld),
                               updated_digestsize);
                } else {
                        memcpy(ctx_req.ctx.ahash_ctx->partial_hash, input +
                               sizeof(struct cpl_fw6_pld),
                               updated_digestsize);
                }
                break;
        }
        return err;
}
/*
 *      calc_tx_flits_ofld - calculate # of flits for an offload packet
 *      @skb: the packet
 *
 *      Returns the number of flits needed for the given offload packet.
 *      These packets are already fully constructed and no additional headers
 *      will be added.
 */
static inline unsigned int calc_tx_flits_ofld(const struct sk_buff *skb)
{
        unsigned int flits, cnt;

        if (is_ofld_imm(skb))
                return DIV_ROUND_UP(skb->len, 8);

        flits = skb_transport_offset(skb) / 8;  /* headers */
        cnt = skb_shinfo(skb)->nr_frags;
        if (skb_tail_pointer(skb) != skb_transport_header(skb))
                cnt++;
        return flits + sgl_len(cnt);
}
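/*
 * Example (illustrative): a 48-byte immediate packet takes
 * DIV_ROUND_UP(48, 8) = 6 flits; a non-immediate skb with 40 bytes of
 * headers, 3 frags and trailing linear data (cnt = 4) takes
 * 40 / 8 + sgl_len(4) = 5 + 7 = 12 flits.
 */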
static inline void get_aes_decrypt_key(unsigned char *dec_key,
                                       const unsigned char *key,
                                       unsigned int keylength)
{
        u32 temp;
        u32 w_ring[MAX_NK];
        int i, j, k;
        u8 nr, nk;

        switch (keylength) {
        case AES_KEYLENGTH_128BIT:
                nk = KEYLENGTH_4BYTES;
                nr = NUMBER_OF_ROUNDS_10;
                break;
        case AES_KEYLENGTH_192BIT:
                nk = KEYLENGTH_6BYTES;
                nr = NUMBER_OF_ROUNDS_12;
                break;
        case AES_KEYLENGTH_256BIT:
                nk = KEYLENGTH_8BYTES;
                nr = NUMBER_OF_ROUNDS_14;
                break;
        default:
                return;
        }
        for (i = 0; i < nk; i++)
                w_ring[i] = be32_to_cpu(*(u32 *)&key[4 * i]);

        i = 0;
        temp = w_ring[nk - 1];
        while (i + nk < (nr + 1) * 4) {
                if (!(i % nk)) {
                        /* RotWord(temp) */
                        temp = (temp << 8) | (temp >> 24);
                        temp = aes_ks_subword(temp);
                        temp ^= round_constant[i / nk];
                } else if (nk == 8 && (i % 4 == 0)) {
                        temp = aes_ks_subword(temp);
                }
                w_ring[i % nk] ^= temp;
                temp = w_ring[i % nk];
                i++;
        }
        i--;
        for (k = 0, j = i % nk; k < nk; k++) {
                *((u32 *)dec_key + k) = htonl(w_ring[j]);
                j--;
                if (j < 0)
                        j += nk;
        }
}
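/*
 * Example (illustrative): for AES-128, nk = 4 and nr = 10, so the loop
 * expands the key to 44 words and the final copy emits the last four
 * words, i.e. the round-10 subkey, in big-endian order. That subkey is
 * what lets the hardware run the key schedule in reverse for decryption.
 */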
static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
        struct crypto_shash *base_hash = ERR_PTR(-EINVAL);

        switch (ds) {
        case SHA1_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha1", 0, 0);
                break;
        case SHA224_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha224", 0, 0);
                break;
        case SHA256_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha256", 0, 0);
                break;
        case SHA384_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha384", 0, 0);
                break;
        case SHA512_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha512", 0, 0);
                break;
        }

        return base_hash;
}
static int chcr_compute_partial_hash(struct shash_desc *desc,
                                     char *iopad, char *result_hash,
                                     int digest_size)
{
        struct sha1_state sha1_st;
        struct sha256_state sha256_st;
        struct sha512_state sha512_st;
        int error;

        if (digest_size == SHA1_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha1_st);
                memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
        } else if (digest_size == SHA224_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha256_st);
                memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
        } else if (digest_size == SHA256_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha256_st);
                memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
        } else if (digest_size == SHA384_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha512_st);
                memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
        } else if (digest_size == SHA512_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha512_st);
                memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
        } else {
                error = -EINVAL;
                pr_err("Unknown digest size %d\n", digest_size);
        }
        return error;
}
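/*
 * Note (illustrative): crypto_shash_export() after exactly one block of
 * input captures the raw chaining state (e.g. eight 32-bit words for
 * SHA-256) rather than a finalized digest; that state is what is later
 * loaded into the key context so the hardware resumes from block two.
 */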
static void chcr_change_order(char *buf, int ds)
{
        int i;

        if (ds == SHA512_DIGEST_SIZE) {
                for (i = 0; i < (ds / sizeof(u64)); i++)
                        *((__be64 *)buf + i) =
                                cpu_to_be64(*((u64 *)buf + i));
        } else {
                for (i = 0; i < (ds / sizeof(u32)); i++)
                        *((__be32 *)buf + i) =
                                cpu_to_be32(*((u32 *)buf + i));
        }
}
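/*
 * Example: for SHA-256 (ds = 32) the buffer holds eight u32 state words;
 * on a little-endian host the initial word 0x6a09e667 is rewritten as
 * the byte sequence 6a 09 e6 67, the order the hardware expects for
 * partial-hash state.
 */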
static inline int is_hmac(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
                             alg.hash);
        if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
                return 1;
        return 0;
}
static void write_phys_cpl(struct cpl_rx_phys_dsgl *phys_cpl,
                           struct scatterlist *sg,
                           struct phys_sge_parm *sg_param)
{
        struct phys_sge_pairs *to;
        int out_buf_size = sg_param->obsize;
        unsigned int nents = sg_param->nents, i, j = 0;

        phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
                                    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
        phys_cpl->pcirlxorder_to_noofsgentr =
                htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
                      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
                      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
                      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
                      CPL_RX_PHYS_DSGL_DCAID_V(0) |
                      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(nents));
        phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
        phys_cpl->rss_hdr_int.qid = htons(sg_param->qid);
        phys_cpl->rss_hdr_int.hash_val = 0;
        to = (struct phys_sge_pairs *)((unsigned char *)phys_cpl +
                                       sizeof(struct cpl_rx_phys_dsgl));

        for (i = 0; nents; to++) {
                for (j = 0; j < 8 && nents; j++, nents--) {
                        out_buf_size -= sg_dma_len(sg);
                        to->len[j] = htons(sg_dma_len(sg));
                        to->addr[j] = cpu_to_be64(sg_dma_address(sg));
                        sg = sg_next(sg);
                }
        }
        if (out_buf_size) {
                j--;
                to--;
                to->len[j] = htons(ntohs(to->len[j]) + (out_buf_size));
        }
}
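/*
 * Trimming example (illustrative): with obsize = 32 and two DMA segments
 * of 16 and 24 bytes, out_buf_size ends at 32 - 40 = -8, so the final
 * entry's length is rewritten as 24 + (-8) = 16 and the DSGL totals
 * exactly the 32-byte output buffer.
 */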
static inline int map_writesg_phys_cpl(struct device *dev,
                                        struct cpl_rx_phys_dsgl *phys_cpl,
                                        struct scatterlist *sg,
                                        struct phys_sge_parm *sg_param)
{
        if (!sg || !sg_param->nents)
                return 0;

        sg_param->nents = dma_map_sg(dev, sg, sg_param->nents, DMA_FROM_DEVICE);
        if (sg_param->nents == 0) {
                pr_err("CHCR : DMA mapping failed\n");
                return -EINVAL;
        }
        write_phys_cpl(phys_cpl, sg, sg_param);
        return 0;
}
static inline int get_aead_subtype(struct crypto_aead *aead)
{
        struct aead_alg *alg = crypto_aead_alg(aead);
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(alg, struct chcr_alg_template, alg.aead);
        return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(alg, struct chcr_alg_template, alg.crypto);

        return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}
static inline void write_buffer_to_skb(struct sk_buff *skb,
                                        unsigned int *frags,
                                        char *bfr,
                                        u8 bfr_len)
{
        skb->len += bfr_len;
        skb->data_len += bfr_len;
        skb->truesize += bfr_len;
        get_page(virt_to_page(bfr));
        skb_fill_page_desc(skb, *frags, virt_to_page(bfr),
                           offset_in_page(bfr), bfr_len);
        (*frags)++;
}
static inline void
write_sg_to_skb(struct sk_buff *skb, unsigned int *frags,
                struct scatterlist *sg, unsigned int count)
{
        struct page *spage;
        unsigned int page_len;

        skb->len += count;
        skb->data_len += count;
        skb->truesize += count;

        while (count > 0) {
                if (!sg || (!(sg->length)))
                        break;
                spage = sg_page(sg);
                get_page(spage);
                page_len = min(sg->length, count);
                skb_fill_page_desc(skb, *frags, spage, sg->offset, page_len);
                (*frags)++;
                count -= page_len;
                sg = sg_next(sg);
        }
}
static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
                               struct _key_ctx *key_ctx)
{
        if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
                memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
        } else {
                memcpy(key_ctx->key,
                       ablkctx->key + (ablkctx->enckey_len >> 1),
                       ablkctx->enckey_len >> 1);
                memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
                       ablkctx->rrkey, ablkctx->enckey_len >> 1);
        }
        return 0;
}
static inline void create_wreq(struct chcr_context *ctx,
                               struct chcr_wr *chcr_req,
                               void *req, struct sk_buff *skb,
                               int kctx_len, int hash_sz,
                               int is_iv,
                               unsigned int sc_len)
{
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        int iv_loc = IV_DSGL;
        int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];
        unsigned int immdatalen = 0, nr_frags = 0;

        if (is_ofld_imm(skb)) {
                immdatalen = skb->data_len;
                iv_loc = IV_IMMEDIATE;
        } else {
                nr_frags = skb_shinfo(skb)->nr_frags;
        }

        chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE(immdatalen,
                        ((sizeof(chcr_req->key_ctx) + kctx_len) >> 4));
        chcr_req->wreq.pld_size_hash_size =
                htonl(FW_CRYPTO_LOOKASIDE_WR_PLD_SIZE_V(sgl_lengths[nr_frags]) |
                      FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
        chcr_req->wreq.len16_pkd =
                htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(
                                    (calc_tx_flits_ofld(skb) * 8), 16)));
        chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
        chcr_req->wreq.rx_chid_to_rx_q_id =
                FILL_WR_RX_Q_ID(ctx->dev->rx_channel_id, qid,
                                is_iv ? iv_loc : IV_NOP, ctx->tx_qidx);

        chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->dev->tx_channel_id,
                                                       qid);
        chcr_req->ulptx.len = htonl((DIV_ROUND_UP((calc_tx_flits_ofld(skb) * 8),
                                        16) - ((sizeof(chcr_req->wreq)) >> 4)));

        chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(immdatalen);
        chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
                                           sizeof(chcr_req->key_ctx) +
                                           kctx_len + sc_len + immdatalen);
}
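/*
 * Sizing note (illustrative): LEN16 is the whole WR in 16-byte units,
 * DIV_ROUND_UP(calc_tx_flits_ofld(skb) * 8, 16), while the ULPTX length
 * covers everything after the FW_CRYPTO_LOOKASIDE_WR header, hence the
 * sizeof(chcr_req->wreq) >> 4 subtraction.
 */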
/**
 *      create_cipher_wr - form the WR for cipher operations
 *      @req: cipher req.
 *      @ctx: crypto driver context of the request.
 *      @qid: ingress qid where response of this WR should be received.
 *      @op_type: encryption or decryption
 */
static struct sk_buff
*create_cipher_wr(struct ablkcipher_request *req,
                  unsigned short qid,
                  unsigned short op_type)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        struct cpl_rx_phys_dsgl *phys_cpl;
        struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
        struct phys_sge_parm sg_param;
        unsigned int frags = 0, transhdr_len, phys_dsgl;
        unsigned int ivsize = crypto_ablkcipher_ivsize(tfm), kctx_len;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                        GFP_ATOMIC;

        if (!req->info)
                return ERR_PTR(-EINVAL);
        reqctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);
        if (reqctx->dst_nents <= 0) {
                pr_err("AES:Invalid Destination sg lists\n");
                return ERR_PTR(-EINVAL);
        }
        if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
            (req->nbytes <= 0) || (req->nbytes % AES_BLOCK_SIZE)) {
                pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
                       ablkctx->enckey_len, req->nbytes, ivsize);
                return ERR_PTR(-EINVAL);
        }

        phys_dsgl = get_space_for_phys_dsgl(reqctx->dst_nents);

        kctx_len = (DIV_ROUND_UP(ablkctx->enckey_len, 16) * 16);
        transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                return ERR_PTR(-ENOMEM);
        skb_reserve(skb, sizeof(struct sge_opaque_hdr));
        chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);
        chcr_req->sec_cpl.op_ivinsrtofst =
                FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2, 1);

        chcr_req->sec_cpl.pldlen = htonl(ivsize + req->nbytes);
        chcr_req->sec_cpl.aadstart_cipherstop_hi =
                        FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, ivsize + 1, 0);

        chcr_req->sec_cpl.cipherstop_lo_authinsert =
                        FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
        chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type, 0,
                                                        ablkctx->ciph_mode,
                                                        0, 0, ivsize >> 1);
        chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
                                                        0, 1, phys_dsgl);

        chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
        if (op_type == CHCR_DECRYPT_OP) {
                generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
        } else {
                if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
                        memcpy(chcr_req->key_ctx.key, ablkctx->key,
                               ablkctx->enckey_len);
                } else {
                        memcpy(chcr_req->key_ctx.key, ablkctx->key +
                               (ablkctx->enckey_len >> 1),
                               ablkctx->enckey_len >> 1);
                        memcpy(chcr_req->key_ctx.key +
                               (ablkctx->enckey_len >> 1),
                               ablkctx->key,
                               ablkctx->enckey_len >> 1);
                }
        }
        phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
        sg_param.nents = reqctx->dst_nents;
        sg_param.obsize = req->nbytes;
        sg_param.qid = qid;
        sg_param.align = 1;
        if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, req->dst,
                                 &sg_param))
                goto map_fail1;

        skb_set_transport_header(skb, transhdr_len);
        memcpy(reqctx->iv, req->info, ivsize);
        write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
        write_sg_to_skb(skb, &frags, req->src, req->nbytes);
        create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
                    sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl);
        reqctx->skb = skb;
        skb_get(skb);
        return skb;
map_fail1:
        kfree_skb(skb);
        return ERR_PTR(-ENOMEM);
}
static int chcr_aes_cbc_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        unsigned int ck_size, context_size;
        u16 alignment = 0;

        if (keylen == AES_KEYSIZE_128) {
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
        } else if (keylen == AES_KEYSIZE_192) {
                alignment = 8;
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
        } else if (keylen == AES_KEYSIZE_256) {
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
        } else {
                goto badkey_err;
        }
        memcpy(ablkctx->key, key, keylen);
        ablkctx->enckey_len = keylen;
        get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
        context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
                        keylen + alignment) >> 4;

        ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
                                                0, 0, context_size);
        ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
        return 0;
badkey_err:
        crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        ablkctx->enckey_len = 0;
        return -EINVAL;
}
static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
        struct adapter *adap = netdev2adap(dev);
        struct sge_uld_txq_info *txq_info =
                adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
        struct sge_uld_txq *txq;
        int ret = 0;

        local_bh_disable();
        txq = &txq_info->uldtxq[idx];
        spin_lock(&txq->sendq.lock);
        if (txq->full)
                ret = -1;
        spin_unlock(&txq->sendq.lock);
        local_bh_enable();
        return ret;
}
static int chcr_aes_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct sk_buff *skb;

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_qidx))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        skb = create_cipher_wr(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx],
                               CHCR_ENCRYPT_OP);
        if (IS_ERR(skb)) {
                pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
                return PTR_ERR(skb);
        }
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}
static int chcr_aes_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct sk_buff *skb;

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_qidx))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        skb = create_cipher_wr(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx],
                               CHCR_DECRYPT_OP);
        if (IS_ERR(skb)) {
                pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
                return PTR_ERR(skb);
        }
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}
static int chcr_device_init(struct chcr_context *ctx)
{
        struct uld_ctx *u_ctx;
        struct adapter *adap;
        unsigned int id;
        int txq_perchan, txq_idx, ntxq;
        int err = 0, rxq_perchan, rxq_idx;

        id = smp_processor_id();

        err = assign_chcr_device(&ctx->dev);
        if (err) {
                pr_err("chcr device assignment fails\n");
                goto out;
        }
        u_ctx = ULD_CTX(ctx);
        adap = padap(ctx->dev);
        ntxq = min_not_zero((unsigned int)u_ctx->lldi.nrxq,
                            adap->vres.ncrypto_fc);
        rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
        txq_perchan = ntxq / u_ctx->lldi.nchan;
        rxq_idx = ctx->dev->tx_channel_id * rxq_perchan;
        rxq_idx += id % rxq_perchan;
        txq_idx = ctx->dev->tx_channel_id * txq_perchan;
        txq_idx += id % txq_perchan;
        spin_lock(&ctx->dev->lock_chcr_dev);
        ctx->rx_qidx = rxq_idx;
        ctx->tx_qidx = txq_idx;
        ctx->dev->tx_channel_id = !ctx->dev->tx_channel_id;
        ctx->dev->rx_channel_id = 0;
        spin_unlock(&ctx->dev->lock_chcr_dev);
out:
        return err;
}
static int chcr_cra_init(struct crypto_tfm *tfm)
{
        tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
        return chcr_device_init(crypto_tfm_ctx(tfm));
}
static int get_alg_config(struct algo_param *params,
                          unsigned int auth_size)
{
        switch (auth_size) {
        case SHA1_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
                params->result_size = SHA1_DIGEST_SIZE;
                break;
        case SHA224_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
                params->result_size = SHA256_DIGEST_SIZE;
                break;
        case SHA256_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
                params->result_size = SHA256_DIGEST_SIZE;
                break;
        case SHA384_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
                params->result_size = SHA512_DIGEST_SIZE;
                break;
        case SHA512_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
                params->result_size = SHA512_DIGEST_SIZE;
                break;
        default:
                pr_err("chcr : ERROR, unsupported digest size\n");
                return -EINVAL;
        }
        return 0;
}
static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
        crypto_free_shash(base_hash);
}
/**
 *      create_hash_wr - Create hash work request
 *      @req - Cipher req base
 */
static struct sk_buff *create_hash_wr(struct ahash_request *req,
                                      struct hash_wr_param *param)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        unsigned int frags = 0, transhdr_len, iopad_alignment = 0;
        unsigned int digestsize = crypto_ahash_digestsize(tfm);
        unsigned int kctx_len = 0;
        u8 hash_size_in_response = 0;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                GFP_ATOMIC;

        iopad_alignment = KEYCTX_ALIGN_PAD(digestsize);
        kctx_len = param->alg_prm.result_size + iopad_alignment;
        if (param->opad_needed)
                kctx_len += param->alg_prm.result_size + iopad_alignment;

        if (req_ctx->result)
                hash_size_in_response = digestsize;
        else
                hash_size_in_response = param->alg_prm.result_size;
        transhdr_len = HASH_TRANSHDR_SIZE(kctx_len);
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                return skb;

        skb_reserve(skb, sizeof(struct sge_opaque_hdr));
        chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);

        chcr_req->sec_cpl.op_ivinsrtofst =
                FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2, 0);
        chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

        chcr_req->sec_cpl.aadstart_cipherstop_hi =
                FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
        chcr_req->sec_cpl.cipherstop_lo_authinsert =
                FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
        chcr_req->sec_cpl.seqno_numivs =
                FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
                                         param->opad_needed, 0);

        chcr_req->sec_cpl.ivgen_hdrlen =
                FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

        memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
               param->alg_prm.result_size);

        if (param->opad_needed)
                memcpy(chcr_req->key_ctx.key +
                       ((param->alg_prm.result_size <= 32) ? 32 :
                        CHCR_HASH_MAX_DIGEST_SIZE),
                       hmacctx->opad, param->alg_prm.result_size);

        chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
                                            param->alg_prm.mk_size, 0,
                                            param->opad_needed,
                                            ((kctx_len +
                                             sizeof(chcr_req->key_ctx)) >> 4));
        chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);

        skb_set_transport_header(skb, transhdr_len);
        if (param->bfr_len != 0)
                write_buffer_to_skb(skb, &frags, req_ctx->reqbfr,
                                    param->bfr_len);
        if (param->sg_len != 0)
                write_sg_to_skb(skb, &frags, req->src, param->sg_len);

        create_wreq(ctx, chcr_req, req, skb, kctx_len, hash_size_in_response, 0,
                    DUMMY_BYTES);
        req_ctx->skb = skb;
        skb_get(skb);
        return skb;
}
static int chcr_ahash_update(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        u8 remainder = 0, bs;
        unsigned int nbytes = req->nbytes;
        struct hash_wr_param params;

        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_qidx))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (nbytes + req_ctx->reqlen >= bs) {
                remainder = (nbytes + req_ctx->reqlen) % bs;
                nbytes = nbytes + req_ctx->reqlen - remainder;
        } else {
                sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
                                   + req_ctx->reqlen, nbytes, 0);
                req_ctx->reqlen += nbytes;
                return 0;
        }

        params.opad_needed = 0;
        params.more = 1;
        params.last = 0;
        params.sg_len = nbytes - req_ctx->reqlen;
        params.bfr_len = req_ctx->reqlen;
        params.scmd1 = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 0;
        req_ctx->data_len += params.sg_len + params.bfr_len;
        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        if (remainder) {
                u8 *temp;
                /* Swap buffers */
                temp = req_ctx->reqbfr;
                req_ctx->reqbfr = req_ctx->skbfr;
                req_ctx->skbfr = temp;
                sg_pcopy_to_buffer(req->src, sg_nents(req->src),
                                   req_ctx->reqbfr, remainder, req->nbytes -
                                   remainder);
        }
        req_ctx->reqlen = remainder;
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);

        return -EINPROGRESS;
}
static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
        memset(bfr_ptr, 0, bs);
        *bfr_ptr = 0x80;
        if (bs == 64)
                *(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
        else
                *(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}
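/*
 * Layout example: for a 64-byte block (SHA-1/224/256) the result is
 * 0x80 followed by zeroes, with the message length in bits (scmd1 << 3)
 * stored big-endian in bytes 56-63; 128-byte blocks (SHA-384/512) place
 * it at bytes 120-127. This is standard Merkle-Damgard padding for a
 * message that ends exactly on a block boundary.
 */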
static int chcr_ahash_final(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct hash_wr_param params;
        struct sk_buff *skb;
        struct uld_ctx *u_ctx = NULL;
        u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;
        params.sg_len = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 1;
        params.bfr_len = req_ctx->reqlen;
        req_ctx->data_len += params.bfr_len + params.sg_len;
        if (req_ctx->reqlen == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
                params.last = 0;
                params.more = 1;
                params.scmd1 = 0;
                params.bfr_len = bs;
        } else {
                params.scmd1 = req_ctx->data_len;
                params.last = 1;
                params.more = 0;
        }
        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}
static int chcr_ahash_finup(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        struct hash_wr_param params;
        u8 bs;

        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
        u_ctx = ULD_CTX(ctx);

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_qidx))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;

        params.sg_len = req->nbytes;
        params.bfr_len = req_ctx->reqlen;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->data_len += params.bfr_len + params.sg_len;
        req_ctx->result = 1;
        if ((req_ctx->reqlen + req->nbytes) == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
                params.last = 0;
                params.more = 1;
                params.scmd1 = 0;
                params.bfr_len = bs;
        } else {
                params.scmd1 = req_ctx->data_len;
                params.last = 1;
                params.more = 0;
        }

        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);

        return -EINPROGRESS;
}
static int chcr_ahash_digest(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        struct hash_wr_param params;
        u8 bs;

        rtfm->init(req);
        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_qidx))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;

        params.last = 0;
        params.more = 0;
        params.sg_len = req->nbytes;
        params.bfr_len = 0;
        params.scmd1 = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 1;
        req_ctx->data_len += params.bfr_len + params.sg_len;

        if (req->nbytes == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, 0);
                params.more = 1;
                params.bfr_len = bs;
        }

        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}
static int chcr_ahash_export(struct ahash_request *areq, void *out)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct chcr_ahash_req_ctx *state = out;

        state->reqlen = req_ctx->reqlen;
        state->data_len = req_ctx->data_len;
        memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
        memcpy(state->partial_hash, req_ctx->partial_hash,
               CHCR_HASH_MAX_DIGEST_SIZE);
        return 0;
}
static int chcr_ahash_import(struct ahash_request *areq, const void *in)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;

        req_ctx->reqlen = state->reqlen;
        req_ctx->data_len = state->data_len;
        req_ctx->reqbfr = req_ctx->bfr1;
        req_ctx->skbfr = req_ctx->bfr2;
        memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
        memcpy(req_ctx->partial_hash, state->partial_hash,
               CHCR_HASH_MAX_DIGEST_SIZE);
        return 0;
}
static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize = crypto_ahash_digestsize(tfm);
        unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
        unsigned int i, err = 0, updated_digestsize;

        SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);

        /* use the key to calculate the ipad and opad. ipad will be sent with
         * the first request's data. opad will be sent with the final hash
         * result. ipad is in hmacctx->ipad and opad in hmacctx->opad location.
         */
        shash->tfm = hmacctx->base_hash;
        shash->flags = crypto_shash_get_flags(hmacctx->base_hash);
        if (keylen > bs) {
                err = crypto_shash_digest(shash, key, keylen,
                                          hmacctx->ipad);
                if (err)
                        goto out;
                keylen = digestsize;
        } else {
                memcpy(hmacctx->ipad, key, keylen);
        }
        memset(hmacctx->ipad + keylen, 0, bs - keylen);
        memcpy(hmacctx->opad, hmacctx->ipad, bs);

        for (i = 0; i < bs / sizeof(int); i++) {
                *((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
                *((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
        }

        updated_digestsize = digestsize;
        if (digestsize == SHA224_DIGEST_SIZE)
                updated_digestsize = SHA256_DIGEST_SIZE;
        else if (digestsize == SHA384_DIGEST_SIZE)
                updated_digestsize = SHA512_DIGEST_SIZE;
        err = chcr_compute_partial_hash(shash, hmacctx->ipad,
                                        hmacctx->ipad, digestsize);
        if (err)
                goto out;
        chcr_change_order(hmacctx->ipad, updated_digestsize);

        err = chcr_compute_partial_hash(shash, hmacctx->opad,
                                        hmacctx->opad, digestsize);
        if (err)
                goto out;
        chcr_change_order(hmacctx->opad, updated_digestsize);
out:
        return err;
}
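/*
 * Example (illustrative): for hmac(sha256) the key is zero-padded to the
 * 64-byte block, XORed with repeated 0x36 bytes (IPAD_DATA as u32 words)
 * to form the ipad block and with repeated 0x5c bytes (OPAD_DATA) for the
 * opad block; one compression round over each block yields the partial
 * hashes the hardware resumes from, stored byte-swapped by
 * chcr_change_order().
 */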
static int chcr_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                               unsigned int key_len)
{
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        unsigned short context_size = 0;

        if ((key_len != (AES_KEYSIZE_128 << 1)) &&
            (key_len != (AES_KEYSIZE_256 << 1))) {
                crypto_tfm_set_flags((struct crypto_tfm *)tfm,
                                     CRYPTO_TFM_RES_BAD_KEY_LEN);
                ablkctx->enckey_len = 0;
                return -EINVAL;
        }

        memcpy(ablkctx->key, key, key_len);
        ablkctx->enckey_len = key_len;
        get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
        context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
        ablkctx->key_ctx_hdr =
                FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
                                 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
                                 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
                                 CHCR_KEYCTX_NO_KEY, 1,
                                 0, context_size);
        ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
        return 0;
}
static int chcr_sha_init(struct ahash_request *areq)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        int digestsize = crypto_ahash_digestsize(tfm);

        req_ctx->data_len = 0;
        req_ctx->reqlen = 0;
        req_ctx->reqbfr = req_ctx->bfr1;
        req_ctx->skbfr = req_ctx->bfr2;
        req_ctx->skb = NULL;
        req_ctx->result = 0;
        copy_hash_init_values(req_ctx->partial_hash, digestsize);
        return 0;
}
static int chcr_sha_cra_init(struct crypto_tfm *tfm)
{
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct chcr_ahash_req_ctx));
        return chcr_device_init(crypto_tfm_ctx(tfm));
}
static int chcr_hmac_init(struct ahash_request *areq)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize = crypto_ahash_digestsize(rtfm);
        unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        chcr_sha_init(areq);
        req_ctx->data_len = bs;
        if (is_hmac(crypto_ahash_tfm(rtfm))) {
                if (digestsize == SHA224_DIGEST_SIZE)
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               SHA256_DIGEST_SIZE);
                else if (digestsize == SHA384_DIGEST_SIZE)
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               SHA512_DIGEST_SIZE);
                else
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               digestsize);
        }
        return 0;
}
static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
{
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize =
                crypto_ahash_digestsize(__crypto_ahash_cast(tfm));

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct chcr_ahash_req_ctx));
        hmacctx->base_hash = chcr_alloc_shash(digestsize);
        if (IS_ERR(hmacctx->base_hash))
                return PTR_ERR(hmacctx->base_hash);
        return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
{
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);

        if (hmacctx->base_hash) {
                chcr_free_shash(hmacctx->base_hash);
                hmacctx->base_hash = NULL;
        }
}
static int chcr_copy_assoc(struct aead_request *req,
                           struct chcr_aead_ctx *ctx)
{
        SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

        skcipher_request_set_tfm(skreq, ctx->null);
        skcipher_request_set_callback(skreq, aead_request_flags(req),
                                      NULL, NULL);
        skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
                                   NULL);

        return crypto_skcipher_encrypt(skreq);
}
static int chcr_aead_need_fallback(struct aead_request *req, int src_nent,
                                   int aadmax, int wrlen,
                                   unsigned short op_type)
{
        unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

        if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
            (req->assoclen > aadmax) ||
            (src_nent > MAX_SKB_FRAGS) ||
            (wrlen > MAX_WR_SIZE))
                return 1;
        return 0;
}
static int chcr_aead_fallback(struct aead_request *req, unsigned short op_type)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
        struct aead_request *subreq = aead_request_ctx(req);

        aead_request_set_tfm(subreq, aeadctx->sw_cipher);
        aead_request_set_callback(subreq, req->base.flags,
                                  req->base.complete, req->base.data);
        aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
                               req->iv);
        aead_request_set_ad(subreq, req->assoclen);
        return op_type ? crypto_aead_decrypt(subreq) :
                crypto_aead_encrypt(subreq);
}
static struct sk_buff *create_authenc_wr(struct aead_request *req,
                                         unsigned short qid,
                                         int size,
                                         unsigned short op_type)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
        struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        struct cpl_rx_phys_dsgl *phys_cpl;
        struct phys_sge_parm sg_param;
        struct scatterlist *src;
        unsigned int frags = 0, transhdr_len;
        unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
        unsigned int kctx_len = 0;
        unsigned short stop_offset = 0;
        unsigned int assoclen = req->assoclen;
        unsigned int authsize = crypto_aead_authsize(tfm);
        int err = -EINVAL, src_nent;
        int null = 0;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                GFP_ATOMIC;

        if (aeadctx->enckey_len == 0 || (req->cryptlen == 0))
                goto err;

        if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
                goto err;
        src_nent = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
        if (src_nent < 0)
                goto err;
        src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
        reqctx->dst = src;

        if (req->src != req->dst) {
                err = chcr_copy_assoc(req, aeadctx);
                if (err)
                        return ERR_PTR(err);
                reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
                                               req->assoclen);
        }
        if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
                null = 1;
                assoclen = 0;
        }
        reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
                                             (op_type ? -authsize : authsize));
        if (reqctx->dst_nents < 0) {
                pr_err("AUTHENC:Invalid Destination sg entries\n");
                goto err;
        }
        dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
        kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
                - sizeof(chcr_req->key_ctx);
        transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
        if (chcr_aead_need_fallback(req, src_nent + MIN_AUTH_SG,
                        T6_MAX_AAD_SIZE,
                        transhdr_len + (sgl_len(src_nent + MIN_AUTH_SG) * 8),
                        op_type)) {
                return ERR_PTR(chcr_aead_fallback(req, op_type));
        }
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                goto err;

        /* LLD is going to write the sge hdr. */
        skb_reserve(skb, sizeof(struct sge_opaque_hdr));

        /* Write WR */
        chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);

        stop_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;

        /*
         * Input order is AAD, IV and Payload, where IV should be included as
         * the part of authdata. All other fields should be filled according
         * to the hardware spec.
         */
        chcr_req->sec_cpl.op_ivinsrtofst =
                FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2,
                                       (ivsize ? (assoclen + 1) : 0));
        chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + req->cryptlen);
        chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
                                        assoclen ? 1 : 0, assoclen,
                                        assoclen + ivsize + 1,
                                        (stop_offset & 0x1F0) >> 4);
        chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
                                        stop_offset & 0xF,
                                        null ? 0 : assoclen + ivsize + 1,
                                        stop_offset, stop_offset);
        chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
                                        (op_type == CHCR_ENCRYPT_OP) ? 1 : 0,
                                        CHCR_SCMD_CIPHER_MODE_AES_CBC,
                                        actx->auth_mode, aeadctx->hmac_ctrl,
                                        ivsize >> 1);
        chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
                                        0, 1, dst_size);

        chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
        if (op_type == CHCR_ENCRYPT_OP)
                memcpy(chcr_req->key_ctx.key, aeadctx->key,
                       aeadctx->enckey_len);
        else
                memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
                       aeadctx->enckey_len);

        memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) <<
                                        4), actx->h_iopad, kctx_len -
                                (DIV_ROUND_UP(aeadctx->enckey_len, 16) << 4));

        phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
        sg_param.nents = reqctx->dst_nents;
        sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
        sg_param.qid = qid;
        sg_param.align = 0;
        if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
                                 &sg_param))
                goto dstmap_fail;

        skb_set_transport_header(skb, transhdr_len);

        if (assoclen) {
                /* AAD buffer in */
                write_sg_to_skb(skb, &frags, req->src, assoclen);
        }
        write_buffer_to_skb(skb, &frags, req->iv, ivsize);
        write_sg_to_skb(skb, &frags, src, req->cryptlen);
        create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
                    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
        reqctx->skb = skb;
        skb_get(skb);

        return skb;
dstmap_fail:
        /* ivmap_fail: */
        kfree_skb(skb);
err:
        return ERR_PTR(-EINVAL);
}
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
        __be32 data;

        memset(block, 0, csize);
        block += csize;

        if (csize >= 4)
                csize = 4;
        else if (msglen > (unsigned int)(1 << (8 * csize)))
                return -EOVERFLOW;

        data = cpu_to_be32(msglen);
        memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

        return 0;
}
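/*
 * Encoding example: msglen = 0x1234 with csize = 4 stores 00 00 12 34 in
 * the four zeroed bytes; with csize = 2 only 12 34 is copied, and a
 * length too large to represent in csize bytes fails the -EOVERFLOW
 * check first.
 */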
static void generate_b0(struct aead_request *req,
                        struct chcr_aead_ctx *aeadctx,
                        unsigned short op_type)
{
        unsigned int l, lp, m;
        int rc;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        u8 *b0 = reqctx->scratch_pad;

        m = crypto_aead_authsize(aead);

        memcpy(b0, reqctx->iv, 16);

        lp = b0[0];
        l = lp + 1;

        /* set m, bits 3-5 */
        *b0 |= (8 * ((m - 2) / 2));

        /* set adata, bit 6, if associated data is used */
        if (req->assoclen)
                *b0 |= 64;
        rc = set_msg_len(b0 + 16 - l,
                         (op_type == CHCR_DECRYPT_OP) ?
                         req->cryptlen - m : req->cryptlen, l);
}
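/*
 * B0 example (RFC 3610): with adata present, a 4-byte tag (m = 4) and
 * iv[0] = 3 (so l = 4), the flags byte becomes
 * 0x40 | 8 * ((4 - 2) / 2) | 3 = 0x4b, and set_msg_len() writes the
 * payload length into the last four bytes of the 16-byte block.
 */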
static inline int crypto_ccm_check_iv(const u8 *iv)
{
        /* 2 <= L <= 8, so 1 <= L' <= 7. */
        if (iv[0] < 1 || iv[0] > 7)
                return -EINVAL;

        return 0;
}
static int ccm_format_packet(struct aead_request *req,
                             struct chcr_aead_ctx *aeadctx,
                             unsigned int sub_type,
                             unsigned short op_type)
{
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        int rc = 0;

        if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
                reqctx->iv[0] = 3;
                memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
                memcpy(reqctx->iv + 4, req->iv, 8);
                memset(reqctx->iv + 12, 0, 4);
                *((unsigned short *)(reqctx->scratch_pad + 16)) =
                        htons(req->assoclen - 8);
        } else {
                memcpy(reqctx->iv, req->iv, 16);
                *((unsigned short *)(reqctx->scratch_pad + 16)) =
                        htons(req->assoclen);
        }
        generate_b0(req, aeadctx, op_type);
        /* zero the ctr value */
        memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
        return rc;
}
static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
                                  unsigned int dst_size,
                                  struct aead_request *req,
                                  unsigned short op_type,
                                  struct chcr_context *chcrctx)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
        unsigned int ivsize = AES_BLOCK_SIZE;
        unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
        unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
        unsigned int c_id = chcrctx->dev->rx_channel_id;
        unsigned int ccm_xtra;
        unsigned char tag_offset = 0, auth_offset = 0;
        unsigned int assoclen;

        if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
                assoclen = req->assoclen - 8;
        else
                assoclen = req->assoclen;
        ccm_xtra = CCM_B0_SIZE +
                ((assoclen) ? CCM_AAD_FIELD_SIZE : 0);

        auth_offset = req->cryptlen ?
                (assoclen + ivsize + 1 + ccm_xtra) : 0;
        if (op_type == CHCR_DECRYPT_OP) {
                if (crypto_aead_authsize(tfm) != req->cryptlen)
                        tag_offset = crypto_aead_authsize(tfm);
                else
                        auth_offset = 0;
        }

        sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
                                        2, (ivsize ? (assoclen + 1) : 0) +
                                        ccm_xtra);
        sec_cpl->pldlen =
                htonl(assoclen + ivsize + req->cryptlen + ccm_xtra);
        /* For CCM there will be b0 always. So AAD start will be 1 always */
        sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
                                        1, assoclen + ccm_xtra, assoclen
                                        + ivsize + 1 + ccm_xtra, 0);

        sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
                                        auth_offset, tag_offset,
                                        (op_type == CHCR_ENCRYPT_OP) ? 0 :
                                        crypto_aead_authsize(tfm));
        sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
                                        (op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
                                        cipher_mode, mac_mode,
                                        aeadctx->hmac_ctrl, ivsize >> 1);

        sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
                                        1, dst_size);
}
int aead_ccm_validate_input(unsigned short op_type,
                            struct aead_request *req,
                            struct chcr_aead_ctx *aeadctx,
                            unsigned int sub_type)
{
        if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
                if (crypto_ccm_check_iv(req->iv)) {
                        pr_err("CCM: IV check fails\n");
                        return -EINVAL;
                }
        } else {
                if (req->assoclen != 16 && req->assoclen != 20) {
                        pr_err("RFC4309: Invalid AAD length %d\n",
                               req->assoclen);
                        return -EINVAL;
                }
        }
        if (aeadctx->enckey_len == 0) {
                pr_err("CCM: Encryption key not set\n");
                return -EINVAL;
        }
        return 0;
}
unsigned int fill_aead_req_fields(struct sk_buff *skb,
                                  struct aead_request *req,
                                  struct scatterlist *src,
                                  unsigned int ivsize,
                                  struct chcr_aead_ctx *aeadctx)
{
        unsigned int frags = 0;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        /* b0 and aad length(if available) */

        write_buffer_to_skb(skb, &frags, reqctx->scratch_pad, CCM_B0_SIZE +
                            (req->assoclen ? CCM_AAD_FIELD_SIZE : 0));
        if (req->assoclen) {
                if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
                        write_sg_to_skb(skb, &frags, req->src,
                                        req->assoclen - 8);
                else
                        write_sg_to_skb(skb, &frags, req->src, req->assoclen);
        }
        write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
        write_sg_to_skb(skb, &frags, src, req->cryptlen);

        return frags;
}
static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
                                          unsigned short qid,
                                          int size,
                                          unsigned short op_type)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        struct cpl_rx_phys_dsgl *phys_cpl;
        struct phys_sge_parm sg_param;
        struct scatterlist *src;
        unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
        unsigned int dst_size = 0, kctx_len;
        unsigned int sub_type;
        unsigned int authsize = crypto_aead_authsize(tfm);
        int err = -EINVAL, src_nent;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                GFP_ATOMIC;

        if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
                goto err;
        src_nent = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
        if (src_nent < 0)
                goto err;

        sub_type = get_aead_subtype(tfm);
        src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
        reqctx->dst = src;

        if (req->src != req->dst) {
                err = chcr_copy_assoc(req, aeadctx);
                if (err) {
                        pr_err("AAD copy to destination buffer fails\n");
                        return ERR_PTR(err);
                }
                reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
                                               req->assoclen);
        }
        reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
                                             (op_type ? -authsize : authsize));
        if (reqctx->dst_nents < 0) {
                pr_err("CCM:Invalid Destination sg entries\n");
                goto err;
        }

        if (aead_ccm_validate_input(op_type, req, aeadctx, sub_type))
                goto err;

        dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
        kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) * 2;
        transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
        if (chcr_aead_need_fallback(req, src_nent + MIN_CCM_SG,
                            T6_MAX_AAD_SIZE - 18,
                            transhdr_len + (sgl_len(src_nent + MIN_CCM_SG) * 8),
                            op_type)) {
                return ERR_PTR(chcr_aead_fallback(req, op_type));
        }

        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                goto err;

        skb_reserve(skb, sizeof(struct sge_opaque_hdr));

        chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);

        fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, op_type, ctx);

        chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
        memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
        memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
                                        16), aeadctx->key, aeadctx->enckey_len);

        phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
        if (ccm_format_packet(req, aeadctx, sub_type, op_type))
                goto dstmap_fail;

        sg_param.nents = reqctx->dst_nents;
        sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
        sg_param.qid = qid;
        sg_param.align = 0;
        if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
                                 &sg_param))
                goto dstmap_fail;

        skb_set_transport_header(skb, transhdr_len);
        frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
        create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
                    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
        reqctx->skb = skb;
        skb_get(skb);
        return skb;
dstmap_fail:
        kfree_skb(skb);
        skb = NULL;
err:
        return ERR_PTR(-EINVAL);
}
static struct sk_buff *create_gcm_wr(struct aead_request *req,
                                     unsigned short qid,
                                     int size,
                                     unsigned short op_type)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
        struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        struct cpl_rx_phys_dsgl *phys_cpl;
        struct phys_sge_parm sg_param;
        struct scatterlist *src;
        unsigned int frags = 0, transhdr_len;
        unsigned int ivsize = AES_BLOCK_SIZE;
        unsigned int dst_size = 0, kctx_len;
        unsigned char tag_offset = 0;
        unsigned int crypt_len = 0;
        unsigned int authsize = crypto_aead_authsize(tfm);
        int err = -EINVAL, src_nent;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                GFP_ATOMIC;

        /* validate key size */
        if (aeadctx->enckey_len == 0)
                goto err;

        if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
                goto err;
        src_nent = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
        if (src_nent < 0)
                goto err;

        src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
        reqctx->dst = src;
        if (req->src != req->dst) {
                err = chcr_copy_assoc(req, aeadctx);
                if (err)
                        return ERR_PTR(err);
                reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
                                               req->assoclen);
        }

        if (!req->cryptlen)
                /* null-payload is not supported in the hardware.
                 * software is sending block size
                 */
                crypt_len = AES_BLOCK_SIZE;
        else
                crypt_len = req->cryptlen;
        reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
                                             (op_type ? -authsize : authsize));
        if (reqctx->dst_nents < 0) {
                pr_err("GCM:Invalid Destination sg entries\n");
                goto err;
        }

        dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
        kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) +
                AEAD_H_SIZE;
        transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
        if (chcr_aead_need_fallback(req, src_nent + MIN_GCM_SG,
                            T6_MAX_AAD_SIZE,
                            transhdr_len + (sgl_len(src_nent + MIN_GCM_SG) * 8),
                            op_type)) {
                return ERR_PTR(chcr_aead_fallback(req, op_type));
        }
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                goto err;

        /* NIC driver is going to write the sge hdr. */
        skb_reserve(skb, sizeof(struct sge_opaque_hdr));

        chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);

        if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
                req->assoclen -= 8;

        tag_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
        chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
                                        ctx->dev->rx_channel_id, 2, (ivsize ?
                                        (req->assoclen + 1) : 0));
        chcr_req->sec_cpl.pldlen =
                htonl(req->assoclen + ivsize + req->cryptlen);
        chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
                                        req->assoclen ? 1 : 0, req->assoclen,
                                        req->assoclen + ivsize + 1, 0);
        chcr_req->sec_cpl.cipherstop_lo_authinsert =
                        FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + ivsize + 1,
                                                tag_offset, tag_offset);
        chcr_req->sec_cpl.seqno_numivs =
                        FILL_SEC_CPL_SCMD0_SEQNO(op_type, (op_type ==
                                        CHCR_ENCRYPT_OP) ? 1 : 0,
                                        CHCR_SCMD_CIPHER_MODE_AES_GCM,
                                        CHCR_SCMD_AUTH_MODE_GHASH,
                                        aeadctx->hmac_ctrl, ivsize >> 1);
        chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
                                        0, 1, dst_size);
        chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
        memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
        memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
                                16), GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);

        /* prepare a 16 byte iv */
        /* S   A   L  T |  IV | 0x00000001 */
        if (get_aead_subtype(tfm) ==
            CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
                memcpy(reqctx->iv, aeadctx->salt, 4);
                memcpy(reqctx->iv + 4, req->iv, 8);
        } else {
                memcpy(reqctx->iv, req->iv, 12);
        }
        *((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);

        phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
        sg_param.nents = reqctx->dst_nents;
        sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
        sg_param.qid = qid;
        sg_param.align = 0;
        if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
                                 &sg_param))
                goto dstmap_fail;

        skb_set_transport_header(skb, transhdr_len);

        write_sg_to_skb(skb, &frags, req->src, req->assoclen);

        write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
        write_sg_to_skb(skb, &frags, src, req->cryptlen);
        create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
                    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
        reqctx->skb = skb;
        skb_get(skb);
        return skb;

dstmap_fail:
        /* ivmap_fail: */
        kfree_skb(skb);
        skb = NULL;
err:
        return ERR_PTR(-EINVAL);
}
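/*
 * IV layout example: rfc4106(gcm(aes)) builds the 16-byte block as
 * 4 salt bytes (from setkey) | 8 IV bytes (from the request) |
 * 0x00000001, while plain gcm(aes) copies the caller's 12-byte IV and
 * appends the same 32-bit counter of 1 - the J0 block GCM starts
 * counting from.
 */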
static int chcr_aead_cra_init(struct crypto_aead *tfm)
{
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
        struct aead_alg *alg = crypto_aead_alg(tfm);

        aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
                                               CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(aeadctx->sw_cipher))
                return PTR_ERR(aeadctx->sw_cipher);
        crypto_aead_set_reqsize(tfm, max(sizeof(struct chcr_aead_reqctx),
                                sizeof(struct aead_request) +
                                crypto_aead_reqsize(aeadctx->sw_cipher)));
        aeadctx->null = crypto_get_default_null_skcipher();
        if (IS_ERR(aeadctx->null))
                return PTR_ERR(aeadctx->null);
        return chcr_device_init(ctx);
}
static void chcr_aead_cra_exit(struct crypto_aead *tfm)
{
        struct chcr_context *ctx = crypto_aead_ctx(tfm);
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);

        crypto_put_default_null_skcipher();
        crypto_free_aead(aeadctx->sw_cipher);
}

static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
                                         unsigned int authsize)
{
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

        aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
        aeadctx->mayverify = VERIFY_HW;
        return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}
static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
                                    unsigned int authsize)
{
        struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
        u32 maxauth = crypto_aead_maxauthsize(tfm);

        /* SHA1 authsize in ipsec is 12 instead of 10 i.e maxauthsize / 2 is
         * not true for sha1. authsize == 12 condition should be before
         * authsize == (maxauth >> 1)
         */
        if (authsize == ICV_4) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == ICV_6) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == ICV_10) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == ICV_12) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == ICV_14) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == (maxauth >> 1)) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
                aeadctx->mayverify = VERIFY_HW;
        } else if (authsize == maxauth) {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
                aeadctx->mayverify = VERIFY_HW;
        } else {
                aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
                aeadctx->mayverify = VERIFY_SW;
        }
        return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_13:
	case ICV_15:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
				unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_6:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_10:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_ccm_common_setkey(struct crypto_aead *aead,
				  const u8 *key,
				  unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	unsigned char ck_size, mk_size;
	int key_ctx_size = 0;

	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) * 2;
	if (keylen == AES_KEYSIZE_128) {
		mk_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
	} else {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
						key_ctx_size >> 4);
	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;

	return 0;
}

static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
				const u8 *key,
				unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	int error;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
			      CRYPTO_TFM_REQ_MASK);
	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
			      CRYPTO_TFM_RES_MASK);
	if (error)
		return error;
	return chcr_ccm_common_setkey(aead, key, keylen);
}

static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
				    unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);

	if (keylen < 3) {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	/* the last 3 bytes of the key are the nonce/salt */
	keylen -= 3;
	memcpy(aeadctx->salt, key + keylen, 3);
	return chcr_ccm_common_setkey(aead, key, keylen);
}

static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			   unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
	struct crypto_cipher *cipher;
	unsigned int ck_size;
	int ret = 0, key_ctx_size = 0;

	aeadctx->enckey_len = 0;
	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
			      & CRYPTO_TFM_REQ_MASK);
	ret = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
			      CRYPTO_TFM_RES_MASK);
	if (ret)
		goto out;

	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
	    keylen > 3) {
		keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
		memcpy(aeadctx->salt, key + keylen, 4);
	}
	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		crypto_tfm_set_flags((struct crypto_tfm *)aead,
				     CRYPTO_TFM_RES_BAD_KEY_LEN);
		pr_err("GCM: Invalid key length %d\n", keylen);
		ret = -EINVAL;
		goto out;
	}

	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;
	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) +
		AEAD_H_SIZE;
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
						CHCR_KEYCTX_MAC_KEY_SIZE_128,
						0, 0,
						key_ctx_size >> 4);
	/* Calculate the hash key H = CIPH(K, 0 repeated 16 times);
	 * it is stored in the key context.
	 */
	cipher = crypto_alloc_cipher("aes-generic", 0, 0);
	if (IS_ERR(cipher)) {
		aeadctx->enckey_len = 0;
		ret = -ENOMEM;
		goto out;
	}

	ret = crypto_cipher_setkey(cipher, key, keylen);
	if (ret) {
		aeadctx->enckey_len = 0;
		goto out1;
	}
	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
	crypto_cipher_encrypt_one(cipher, gctx->ghash_h, gctx->ghash_h);

out1:
	crypto_free_cipher(cipher);
out:
	return ret;
}
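
/*
 * Illustrative sketch (not called by the driver): size of the GCM key
 * context assembled above.  The AES key is padded up to a 16-byte
 * boundary and the 16-byte GHASH key H = AES_K(0^128) is stored right
 * behind it.  The helper name is hypothetical.
 */
static inline int chcr_example_gcm_key_ctx_size(unsigned int keylen)
{
	/* header + cipher key rounded to 16 bytes + hash key H */
	return sizeof(struct _key_ctx) +
	       (DIV_ROUND_UP(keylen, 16) * 16) + AEAD_H_SIZE;
}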

static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* holds both the authentication and the cipher key */
	struct crypto_authenc_keys keys;
	unsigned int bs;
	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
	int err = 0, i, key_ctx_len = 0;
	unsigned char ck_size = 0;
	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);
	struct algo_param param;
	int align;
	u8 *o_ptr = NULL;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
			      & CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}

	if (get_alg_config(&param, max_authsize)) {
		pr_err("chcr : Unsupported digest size\n");
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}

	/* Copy only the encryption key. The auth key is consumed here to
	 * generate h(ipad) and h(opad), so it is not needed afterwards;
	 * authkeylen is capped at the hash digest size below.
	 */
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);

	base_hash = chcr_alloc_shash(max_authsize);
	if (IS_ERR(base_hash)) {
		pr_err("chcr : Base driver cannot be loaded\n");
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	{
		SHASH_DESC_ON_STACK(shash, base_hash);
		shash->tfm = base_hash;
		shash->flags = crypto_shash_get_flags(base_hash);
		bs = crypto_shash_blocksize(base_hash);
		align = KEYCTX_ALIGN_PAD(max_authsize);
		o_ptr = actx->h_iopad + param.result_size + align;

		/* An oversized auth key is first reduced to digest size */
		if (keys.authkeylen > bs) {
			err = crypto_shash_digest(shash, keys.authkey,
						  keys.authkeylen,
						  o_ptr);
			if (err) {
				pr_err("chcr : Hashing of the auth key failed\n");
				goto out;
			}
			keys.authkeylen = max_authsize;
		} else
			memcpy(o_ptr, keys.authkey, keys.authkeylen);

		/* Compute the ipad-digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= IPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
					      max_authsize))
			goto out;
		/* Compute the opad-digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= OPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
			goto out;

		/* convert the ipad and opad digest to network order */
		chcr_change_order(actx->h_iopad, param.result_size);
		chcr_change_order(o_ptr, param.result_size);
		key_ctx_len = sizeof(struct _key_ctx) +
			((DIV_ROUND_UP(keys.enckeylen, 16)) << 4) +
			(param.result_size + align) * 2;
		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
							0, 1, key_ctx_len >> 4);
		actx->auth_mode = param.auth_mode;
		chcr_free_shash(base_hash);

		return 0;
	}
out:
	aeadctx->enckey_len = 0;
	if (!IS_ERR(base_hash))
		chcr_free_shash(base_hash);
	return -EINVAL;
}
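
/*
 * Illustrative sketch (not called by the driver): the pad construction
 * used twice above is the standard HMAC key schedule.  The key, already
 * reduced to at most the digest size, is zero-padded to the hash block
 * size and XORed word-by-word with the ipad (0x36...) or opad (0x5c...)
 * pattern before the partial hash is computed.  The helper name and
 * signature are hypothetical.
 */
static inline void chcr_example_hmac_pad(u8 *pad, const u8 *key,
					 unsigned int keylen,
					 unsigned int bs, u32 pattern)
{
	unsigned int i;

	memcpy(pad, key, keylen);
	memset(pad + keylen, 0, bs - keylen);
	for (i = 0; i < bs / sizeof(u32); i++)
		((u32 *)pad)[i] ^= pattern;	/* IPAD_DATA or OPAD_DATA */
}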

static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
					const u8 *key, unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* holds both the authentication and the cipher key */
	struct crypto_authenc_keys keys;
	int err;
	int key_ctx_len = 0;
	unsigned char ck_size = 0;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
			      & CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);
	key_ctx_len = sizeof(struct _key_ctx)
		+ ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4);

	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
						0, key_ctx_len >> 4);
	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
	return 0;
out:
	aeadctx->enckey_len = 0;
	return -EINVAL;
}

static int chcr_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	reqctx->verify = VERIFY_HW;

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_gcm_wr);
	}
}

static int chcr_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int size;

	if (aeadctx->mayverify == VERIFY_SW) {
		size = crypto_aead_maxauthsize(tfm);
		reqctx->verify = VERIFY_SW;
	} else {
		size = 0;
		reqctx->verify = VERIFY_HW;
	}

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_gcm_wr);
	}
}

static int chcr_aead_op(struct aead_request *req,
			unsigned short op_type,
			int size,
			create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx;
	struct sk_buff *skb;

	if (!ctx->dev) {
		pr_err("chcr : %s : No crypto device.\n", __func__);
		return -ENXIO;
	}
	u_ctx = ULD_CTX(ctx);
	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
				   ctx->tx_qidx)) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	/* Form a WR from req */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx], size,
			   op_type);

	if (IS_ERR(skb) || !skb)
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static struct chcr_alg_template driver_algs[] = {
	/* AES-CBC */
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "cbc-aes-chcr",
			.cra_priority		= CHCR_CRA_PRIORITY,
			.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				CRYPTO_ALG_ASYNC,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct chcr_context)
				+ sizeof(struct ablk_ctx),
			.cra_alignmask		= 0,
			.cra_type		= &crypto_ablkcipher_type,
			.cra_module		= THIS_MODULE,
			.cra_init		= chcr_cra_init,
			.cra_exit		= NULL,
			.cra_u.ablkcipher	= {
				.min_keysize	= AES_MIN_KEY_SIZE,
				.max_keysize	= AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_cbc_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			}
		}
	},
	/* AES-XTS */
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "xts(aes)",
			.cra_driver_name	= "xts-aes-chcr",
			.cra_priority		= CHCR_CRA_PRIORITY,
			.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				CRYPTO_ALG_ASYNC,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct chcr_context) +
				sizeof(struct ablk_ctx),
			.cra_alignmask		= 0,
			.cra_type		= &crypto_ablkcipher_type,
			.cra_module		= THIS_MODULE,
			.cra_init		= chcr_cra_init,
			.cra_exit		= NULL,
			.cra_u.ablkcipher	= {
				/* XTS takes two AES keys concatenated */
				.min_keysize	= 2 * AES_MIN_KEY_SIZE,
				.max_keysize	= 2 * AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_xts_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			}
		}
	},
	/* SHA */
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* HMAC */
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* Add AEAD Algorithms */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 12,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};

/*
 *	chcr_unregister_alg - Deregister crypto algorithms with
 *	kernel framework.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			if (driver_algs[i].is_registered)
				crypto_unregister_alg(
						&driver_algs[i].alg.crypto);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered)
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered)
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
			break;
		}
		driver_algs[i].is_registered = 0;
	}
	return 0;
}

#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)
#define AHASH_CRA_FLAGS (CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC)

/*
 *	chcr_register_alg - Register crypto algorithms with kernel framework.
 */
static int chcr_register_alg(void)
{
	struct crypto_alg ai;
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			err = crypto_register_alg(&driver_algs[i].alg.crypto);
			name = driver_algs[i].alg.crypto.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags = AHASH_CRA_FLAGS;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;
			a_hash->halg.base.cra_type = &crypto_ahash_type;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			ai = driver_algs[i].alg.hash.halg.base;
			name = ai.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("chcr : %s : Algorithm registration failed\n",
			       name);
			goto register_err;
		}
		driver_algs[i].is_registered = 1;
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}

/*
 *	start_crypto - Register the crypto algorithms.
 *	This should be called once, when the first device comes up. After this
 *	the kernel will start calling the driver's APIs for crypto operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}

/*
 *	stop_crypto - Deregister all the crypto algorithms with the kernel.
 *	This should be called once, when the last device goes down. After this
 *	the kernel will not call the driver's APIs for crypto operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}
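
/*
 * Illustrative sketch (not part of the driver): once start_crypto() has
 * registered the table above, any kernel consumer reaches the hardware
 * through the generic crypto API; the driver wins the "gcm(aes)" lookup
 * whenever its CHCR_AEAD_PRIORITY is the highest registered priority.
 * The function below only demonstrates the lookup and is hypothetical.
 */
static inline int chcr_example_lookup_gcm(void)
{
	struct crypto_aead *tfm;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	/* ... set the key, build requests, then release the transform ... */
	crypto_free_aead(tfm);
	return 0;
}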