2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
13 * You should have received a copy of the GNU General Public License
14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/semaphore.h>
21 #include <crypto/algapi.h>
22 #include <crypto/internal/skcipher.h>
23 #include <crypto/aes.h>
24 #include <crypto/ctr.h>
25 #include <crypto/des.h>
27 #include "ssi_config.h"
28 #include "ssi_driver.h"
29 #include "cc_lli_defs.h"
30 #include "ssi_buffer_mgr.h"
31 #include "ssi_cipher.h"
32 #include "ssi_request_mgr.h"
33 #include "ssi_sysfs.h"
34 #include "ssi_fips_local.h"
/* Maximum number of HW descriptors built for one blkcipher operation
 * (sized to hold the setup descriptors plus the data descriptors below).
 */
36 #define MAX_ABLKCIPHER_SEQ_LEN 6
/* Shorthand accessor for the ablkcipher member of the alg template union */
38 #define template_ablkcipher template_u.ablkcipher
/* AES-XTS request size limits enforced in validate_data_size(), in bytes */
40 #define SSI_MIN_AES_XTS_SIZE 0x10
41 #define SSI_MAX_AES_XTS_SIZE 0x2000
/* Driver-global handle: anchors the list of registered blkcipher algs.
 * NOTE(review): the closing brace and any further members are missing
 * from this extracted view.
 */
42 struct ssi_blkcipher_handle {
43 struct list_head blkcipher_alg_list;
/* Software (user-provided) key buffer and its DMA mapping.
 * NOTE(review): the key pointer member itself is not visible here, but
 * setkey/init below reference ctx_p->user.key.
 */
46 struct cc_user_key_info {
48 dma_addr_t key_dma_addr;
/* HW key slots; two slots are needed for XTS/ESSIV/BITLOCKER (key1+key2) */
50 struct cc_hw_key_info {
51 enum cc_hw_crypto_key key1_slot;
52 enum cc_hw_crypto_key key2_slot;
/* Per-transform cipher context (crypto_tfm_ctx payload).
 * NOTE(review): several members used below (keylen, cipher_mode, flow_mode,
 * key_round_number) are missing from this extracted view.
 */
55 struct ssi_ablkcipher_ctx {
56 struct ssi_drvdata *drvdata;
62 struct blkcipher_req_ctx *sync_ctx;
63 struct cc_user_key_info user;
64 struct cc_hw_key_info hw;
65 struct crypto_shash *shash_tfm;
68 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
/*
 * validate_keys_sizes() - check that @size is a legal key length for the
 * context's flow mode (AES/DES/MULTI2) and cipher mode.
 * Returns 0 for a valid size, non-zero otherwise.
 * NOTE(review): several case labels, return statements and closing braces
 * are missing from this extracted view; comments describe only what is
 * visible.
 */
71 static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, u32 size) {
72 switch (ctx_p->flow_mode) {
/* Single-length AES keys (128/192-bit) are valid only for non-two-key
 * modes; XTS/ESSIV/BITLOCKER need a double-length key.
 */
75 case CC_AES_128_BIT_KEY_SIZE:
76 case CC_AES_192_BIT_KEY_SIZE:
77 if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
78 (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
79 (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
82 case CC_AES_256_BIT_KEY_SIZE:
/* Double-length keys: only valid for the two-key modes */
84 case (CC_AES_192_BIT_KEY_SIZE * 2):
85 case (CC_AES_256_BIT_KEY_SIZE * 2):
86 if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
87 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
88 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
/* DES flow: accept single-DES or triple-DES key lengths */
95 if (likely(size == DES3_EDE_KEY_SIZE ||
96 size == DES_KEY_SIZE))
/* MULTI2 flow: one fixed system+data key size */
100 case S_DIN_to_MULTI2:
101 if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
/*
 * validate_data_size() - check that the request length @size is legal for
 * the context's flow/cipher mode (alignment and, for XTS, min/max bounds).
 * Returns 0 for a valid size, non-zero otherwise.
 * NOTE(review): case labels, returns and closing braces are missing from
 * this extracted view.
 */
112 static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
113 switch (ctx_p->flow_mode) {
115 switch (ctx_p->cipher_mode) {
/* XTS: bounded by SSI_MIN/MAX_AES_XTS_SIZE and block-aligned */
117 if ((size >= SSI_MIN_AES_XTS_SIZE) &&
118 (size <= SSI_MAX_AES_XTS_SIZE) &&
119 IS_ALIGNED(size, AES_BLOCK_SIZE))
/* CTS needs at least one full AES block */
122 case DRV_CIPHER_CBC_CTS:
123 if (likely(size >= AES_BLOCK_SIZE))
131 case DRV_CIPHER_ESSIV:
132 case DRV_CIPHER_BITLOCKER:
133 if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
/* DES flow: DES-block alignment required */
141 if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
144 #if SSI_CC_HAS_MULTI2
145 case S_DIN_to_MULTI2:
146 switch (ctx_p->cipher_mode) {
148 if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
157 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * get_max_keysize() - return the registered max_keysize for @tfm's alg,
 * for either the ablkcipher or blkcipher template.
 * Used to size/map the context key buffer.
 * NOTE(review): the fallthrough return for other alg types is missing
 * from this extracted view.
 */
164 static unsigned int get_max_keysize(struct crypto_tfm *tfm)
166 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
168 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER)
169 return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
171 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER)
172 return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
/*
 * ssi_blkcipher_init() - tfm init: copy alg parameters into the context,
 * allocate a DMA-able key buffer, map it for device access, and (for
 * ESSIV) allocate a sha256 shash for key2 derivation.
 * Returns 0 on success or a negative errno.
 * NOTE(review): error-path returns and closing braces are missing from
 * this extracted view.
 */
177 static int ssi_blkcipher_init(struct crypto_tfm *tfm)
179 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
180 struct crypto_alg *alg = tfm->__crt_alg;
181 struct ssi_crypto_alg *ssi_alg =
182 container_of(alg, struct ssi_crypto_alg, crypto_alg);
185 unsigned int max_key_buf_size = get_max_keysize(tfm);
187 SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p,
188 crypto_tfm_alg_name(tfm));
190 CHECK_AND_RETURN_UPON_FIPS_ERROR();
191 ctx_p->cipher_mode = ssi_alg->cipher_mode;
192 ctx_p->flow_mode = ssi_alg->flow_mode;
193 ctx_p->drvdata = ssi_alg->drvdata;
194 dev = &ctx_p->drvdata->plat_dev->dev;
196 /* Allocate key buffer, cache line aligned */
197 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL | GFP_DMA);
198 if (!ctx_p->user.key) {
199 SSI_LOG_ERR("Allocating key buffer in context failed\n");
202 SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
/* Map the key buffer once here; setkey only syncs it (see below) */
206 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
207 max_key_buf_size, DMA_TO_DEVICE);
208 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
209 SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
210 max_key_buf_size, ctx_p->user.key);
213 SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
214 max_key_buf_size, ctx_p->user.key,
215 (unsigned long long)ctx_p->user.key_dma_addr);
217 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
218 /* Alloc hash tfm for essiv */
219 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
220 if (IS_ERR(ctx_p->shash_tfm)) {
221 SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
222 return PTR_ERR(ctx_p->shash_tfm);
/*
 * ssi_blkcipher_exit() - tfm teardown: free the ESSIV shash (if any),
 * unmap the key buffer DMA mapping made in ssi_blkcipher_init(), and
 * free the key buffer itself.
 */
229 static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
231 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
232 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
233 unsigned int max_key_buf_size = get_max_keysize(tfm);
235 SSI_LOG_DEBUG("Clearing context @%p for %s\n",
236 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
238 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
239 /* Free hash tfm for essiv */
240 crypto_free_shash(ctx_p->shash_tfm);
241 ctx_p->shash_tfm = NULL;
244 /* Unmap key buffer */
245 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
247 SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n",
248 (unsigned long long)ctx_p->user.key_dma_addr);
250 /* Free key buffer in context */
251 kfree(ctx_p->user.key);
252 SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
/* Triple-DES key laid out as three consecutive single-DES keys; used by
 * ssi_fips_verify_3des_keys() to compare key halves.
 */
256 typedef struct tdes_keys {
257 u8 key1[DES_KEY_SIZE];
258 u8 key2[DES_KEY_SIZE];
259 u8 key3[DES_KEY_SIZE];
/* 32 bytes of zeros; NOTE(review): no user of this table is visible in
 * this extracted view.
 */
262 static const u8 zero_buff[] = { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
263 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
264 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
265 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
/* The function verifies that tdes keys are not weak.*/
/*
 * FIPS check: reject 3DES keys where key1 == key2 or key3 == key2 (which
 * would degenerate 3DES to single DES). Compiled out without
 * CCREE_FIPS_SUPPORT. Returns 0 when the key is acceptable.
 * NOTE(review): the failure return and closing brace are missing from
 * this extracted view.
 */
268 static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
270 #ifdef CCREE_FIPS_SUPPORT
271 tdes_keys_t *tdes_key = (tdes_keys_t*)key;
273 /* verify key1 != key2 and key3 != key2*/
274 if (unlikely((memcmp((u8*)tdes_key->key1, (u8*)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
275 (memcmp((u8*)tdes_key->key3, (u8*)tdes_key->key2, sizeof(tdes_key->key3)) == 0))) {
278 #endif /* CCREE_FIPS_SUPPORT */
/* The function verifies that xts keys are not weak.*/
/*
 * FIPS check: reject an XTS key whose two halves are identical (a weak
 * key per FIPS XTS requirements). Compiled out without
 * CCREE_FIPS_SUPPORT. Returns 0 when the key is acceptable.
 */
284 static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
286 #ifdef CCREE_FIPS_SUPPORT
287 /* Weak key is define as key that its first half (128/256 lsb) equals its second half (128/256 msb) */
288 int singleKeySize = keylen >> 1;
290 if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0))
292 #endif /* CCREE_FIPS_SUPPORT */
/*
 * hw_key_to_cc_hw_key() - map a numeric HW key slot to enum cc_hw_crypto_key.
 * NOTE(review): the function body is entirely absent from this extracted
 * view; presumably it returns END_OF_KEYS for unsupported slot numbers,
 * since callers in ssi_blkcipher_setkey() compare the result against
 * END_OF_KEYS — confirm against the full source.
 */
297 static enum cc_hw_crypto_key hw_key_to_cc_hw_key(int slot_num)
/*
 * ssi_blkcipher_setkey() - set the cipher key.
 * Validates key size, handles HW-key-slot keys (AES only), runs weak-key
 * checks (DES parity via des_ekey, FIPS XTS/3DES checks), then copies the
 * key into the pre-mapped context buffer with CPU/device DMA syncs around
 * the copy. For ESSIV, key2 is derived as sha256(key1) in software.
 * Returns 0 on success or a negative errno / sets tfm result flags.
 * NOTE(review): several error returns, braces and intermediate lines are
 * missing from this extracted view.
 */
312 static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
316 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
317 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
318 u32 tmp[DES_EXPKEY_WORDS];
319 unsigned int max_key_buf_size = get_max_keysize(tfm);
321 SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
322 ctx_p, crypto_tfm_alg_name(tfm), keylen);
323 dump_byte_array("key", (u8 *)key, keylen);
325 CHECK_AND_RETURN_UPON_FIPS_ERROR();
327 SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");
329 /* STAT_PHASE_0: Init and sanity checks */
331 #if SSI_CC_HAS_MULTI2
332 /*last byte of key buffer is round number and should not be a part of key size*/
333 if (ctx_p->flow_mode == S_DIN_to_MULTI2)
335 #endif /*SSI_CC_HAS_MULTI2*/
337 if (unlikely(validate_keys_sizes(ctx_p, keylen) != 0)) {
338 SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
339 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
343 if (ssi_is_hw_key(tfm)) {
344 /* setting HW key slots */
345 struct arm_hw_key_info *hki = (struct arm_hw_key_info*)key;
347 if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
348 SSI_LOG_ERR("HW key not supported for non-AES flows\n");
352 ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
353 if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
354 SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
/* Two-key modes need a second, distinct HW slot */
358 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
359 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
360 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
361 if (unlikely(hki->hw_key1 == hki->hw_key2)) {
362 SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
365 ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
366 if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
367 SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
372 ctx_p->keylen = keylen;
373 SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");
/* Software key path: weak-key checks before accepting the key */
379 if (ctx_p->flow_mode == S_DIN_to_DES) {
380 if (unlikely(!des_ekey(tmp, key)) &&
381 (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
382 tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
383 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak DES key");
387 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
388 ssi_fips_verify_xts_keys(key, keylen) != 0) {
389 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
392 if ((ctx_p->flow_mode == S_DIN_to_DES) &&
393 (keylen == DES3_EDE_KEY_SIZE) &&
394 ssi_fips_verify_3des_keys(key, keylen) != 0) {
395 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
401 /* STAT_PHASE_1: Copy key to ctx */
/* Hand the buffer back to the CPU before writing the new key */
402 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
403 max_key_buf_size, DMA_TO_DEVICE);
405 if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
406 #if SSI_CC_HAS_MULTI2
407 memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
408 ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
409 if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
410 ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
411 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
412 SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
414 #endif /*SSI_CC_HAS_MULTI2*/
416 memcpy(ctx_p->user.key, key, keylen);
/* Zero-pad the remainder of the key buffer past 24 bytes */
418 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
420 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
421 /* sha256 for key2 - use sw implementation */
422 int key_len = keylen >> 1;
424 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
425 desc->tfm = ctx_p->shash_tfm;
/* key2 = sha256(key1), written into the second half of the buffer */
427 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
429 SSI_LOG_ERR("Failed to hash ESSIV key.\n");
/* Publish the updated key buffer to the device */
434 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
435 max_key_buf_size, DMA_TO_DEVICE);
436 ctx_p->keylen = keylen;
439 SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
/*
 * ssi_blkcipher_create_setup_desc() - append the HW setup descriptors
 * (cipher state / key load) for the current mode into @desc, advancing
 * *seq_size. For XTS/ESSIV/BITLOCKER both key halves are loaded (key1
 * via LOAD_KEY0, key2 via LOAD_XEX_KEY on the AES2 flow) plus the IV via
 * LOAD_STATE1. For CTR/OFB the IV loads via LOAD_STATE1, other IV modes
 * via LOAD_STATE0.
 * NOTE(review): case labels, *seq_size increments, break statements and
 * closing braces are missing from this extracted view.
 */
444 ssi_blkcipher_create_setup_desc(
445 struct crypto_tfm *tfm,
446 struct blkcipher_req_ctx *req_ctx,
449 struct cc_hw_desc desc[],
450 unsigned int *seq_size)
452 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
453 int cipher_mode = ctx_p->cipher_mode;
454 int flow_mode = ctx_p->flow_mode;
455 int direction = req_ctx->gen_ctx.op_type;
456 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
457 unsigned int key_len = ctx_p->keylen;
458 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
459 unsigned int du_size = nbytes;
461 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
/* XTS data-unit size override for the du512/du4096 alg variants */
463 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
465 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
468 switch (cipher_mode) {
470 case DRV_CIPHER_CBC_CTS:
473 /* Load cipher state */
474 hw_desc_init(&desc[*seq_size]);
475 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
477 set_cipher_config0(&desc[*seq_size], direction);
478 set_flow_mode(&desc[*seq_size], flow_mode);
479 set_cipher_mode(&desc[*seq_size], cipher_mode);
480 if ((cipher_mode == DRV_CIPHER_CTR) ||
481 (cipher_mode == DRV_CIPHER_OFB)) {
482 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
484 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
/* Load key descriptor (HW slot or DMA from the context buffer) */
490 hw_desc_init(&desc[*seq_size]);
491 set_cipher_mode(&desc[*seq_size], cipher_mode);
492 set_cipher_config0(&desc[*seq_size], direction);
493 if (flow_mode == S_DIN_to_AES) {
494 if (ssi_is_hw_key(tfm)) {
495 set_hw_crypto_key(&desc[*seq_size],
496 ctx_p->hw.key1_slot);
498 set_din_type(&desc[*seq_size], DMA_DLLI,
499 key_dma_addr, ((key_len == 24) ?
503 set_key_size_aes(&desc[*seq_size], key_len);
506 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
508 set_key_size_des(&desc[*seq_size], key_len);
510 set_flow_mode(&desc[*seq_size], flow_mode);
511 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
515 case DRV_CIPHER_ESSIV:
516 case DRV_CIPHER_BITLOCKER:
/* Two-key modes: load key1 (first half of buffer) */
518 hw_desc_init(&desc[*seq_size]);
519 set_cipher_mode(&desc[*seq_size], cipher_mode);
520 set_cipher_config0(&desc[*seq_size], direction);
521 if (ssi_is_hw_key(tfm)) {
522 set_hw_crypto_key(&desc[*seq_size],
523 ctx_p->hw.key1_slot);
525 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
526 (key_len / 2), NS_BIT);
528 set_key_size_aes(&desc[*seq_size], (key_len / 2));
529 set_flow_mode(&desc[*seq_size], flow_mode);
530 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Load key2 (second half) as the XEX tweak key on the AES2 engine */
534 hw_desc_init(&desc[*seq_size]);
535 set_cipher_mode(&desc[*seq_size], cipher_mode);
536 set_cipher_config0(&desc[*seq_size], direction);
537 if (ssi_is_hw_key(tfm)) {
538 set_hw_crypto_key(&desc[*seq_size],
539 ctx_p->hw.key2_slot);
541 set_din_type(&desc[*seq_size], DMA_DLLI,
542 (key_dma_addr + (key_len / 2)),
543 (key_len / 2), NS_BIT);
545 set_xex_data_unit_size(&desc[*seq_size], du_size);
546 set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
547 set_key_size_aes(&desc[*seq_size], (key_len / 2));
548 set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
/* Load the IV/tweak into STATE1 */
552 hw_desc_init(&desc[*seq_size]);
553 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
554 set_cipher_mode(&desc[*seq_size], cipher_mode);
555 set_cipher_config0(&desc[*seq_size], direction);
556 set_key_size_aes(&desc[*seq_size], (key_len / 2));
557 set_flow_mode(&desc[*seq_size], flow_mode);
558 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
559 CC_AES_BLOCK_SIZE, NS_BIT);
563 SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
568 #if SSI_CC_HAS_MULTI2
/*
 * ssi_blkcipher_create_multi2_setup_desc() - append MULTI2 setup
 * descriptors: system key (LOAD_KEY0), data key with round count
 * (LOAD_STATE0), then the IV (LOAD_STATE1), advancing *seq_size.
 * NOTE(review): the *seq_size increments and the trailing lines of the
 * IV descriptor are missing from this extracted view.
 */
569 static inline void ssi_blkcipher_create_multi2_setup_desc(
570 struct crypto_tfm *tfm,
571 struct blkcipher_req_ctx *req_ctx,
573 struct cc_hw_desc desc[],
574 unsigned int *seq_size)
576 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
578 int direction = req_ctx->gen_ctx.op_type;
579 /* Load system key */
580 hw_desc_init(&desc[*seq_size]);
581 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
582 set_cipher_config0(&desc[*seq_size], direction);
583 set_din_type(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
584 CC_MULTI2_SYSTEM_KEY_SIZE, NS_BIT);
585 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
586 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Load data key (stored after the system key) with its round count */
590 hw_desc_init(&desc[*seq_size]);
591 set_din_type(&desc[*seq_size], DMA_DLLI,
592 (ctx_p->user.key_dma_addr + CC_MULTI2_SYSTEM_KEY_SIZE),
593 CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
594 set_multi2_num_rounds(&desc[*seq_size], ctx_p->key_round_number);
595 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
596 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
597 set_cipher_config0(&desc[*seq_size], direction);
598 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
/* Load the IV */
603 hw_desc_init(&desc[*seq_size]);
604 set_din_type(&desc[*seq_size], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr,
606 set_cipher_config0(&desc[*seq_size], direction);
607 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
608 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
609 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
612 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * ssi_blkcipher_create_data_desc() - append the data-processing
 * descriptors. For DLLI (single contiguous mapping) one src->dst
 * descriptor suffices; for MLLI, a BYPASS descriptor first copies the
 * MLLI table into SRAM, then a DMA_MLLI descriptor processes the data
 * (dst table placed after src table in SRAM when out_nents != 0).
 * set_queue_last_ind() is applied only for async requests (@areq).
 * NOTE(review): *seq_size increments, break statements and closing
 * braces are missing from this extracted view.
 */
615 ssi_blkcipher_create_data_desc(
616 struct crypto_tfm *tfm,
617 struct blkcipher_req_ctx *req_ctx,
618 struct scatterlist *dst, struct scatterlist *src,
621 struct cc_hw_desc desc[],
622 unsigned int *seq_size)
624 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
625 unsigned int flow_mode = ctx_p->flow_mode;
/* Translate setup flow mode to the matching DIN->DOUT data flow */
627 switch (ctx_p->flow_mode) {
629 flow_mode = DIN_AES_DOUT;
632 flow_mode = DIN_DES_DOUT;
634 #if SSI_CC_HAS_MULTI2
635 case S_DIN_to_MULTI2:
636 flow_mode = DIN_MULTI2_DOUT;
638 #endif /*SSI_CC_HAS_MULTI2*/
640 SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
644 if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)) {
645 SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
646 (unsigned long long)sg_dma_address(src),
648 SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
649 (unsigned long long)sg_dma_address(dst),
651 hw_desc_init(&desc[*seq_size]);
652 set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
654 set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
655 nbytes, NS_BIT, (!areq ? 0 : 1));
657 set_queue_last_ind(&desc[*seq_size]);
659 set_flow_mode(&desc[*seq_size], flow_mode);
/* MLLI path: stage the MLLI table into SRAM via a BYPASS descriptor */
663 SSI_LOG_DEBUG(" bypass params addr 0x%llX "
664 "length 0x%X addr 0x%08X\n",
665 (unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
666 req_ctx->mlli_params.mlli_len,
667 (unsigned int)ctx_p->drvdata->mlli_sram_addr;
668 hw_desc_init(&desc[*seq_size]);
669 set_din_type(&desc[*seq_size], DMA_DLLI,
670 req_ctx->mlli_params.mlli_dma_addr,
671 req_ctx->mlli_params.mlli_len, NS_BIT);
672 set_dout_sram(&desc[*seq_size],
673 ctx_p->drvdata->mlli_sram_addr,
674 req_ctx->mlli_params.mlli_len);
675 set_flow_mode(&desc[*seq_size], BYPASS);
/* Process data from the SRAM-resident MLLI table */
678 hw_desc_init(&desc[*seq_size]);
679 set_din_type(&desc[*seq_size], DMA_MLLI,
680 ctx_p->drvdata->mlli_sram_addr,
681 req_ctx->in_mlli_nents, NS_BIT);
682 if (req_ctx->out_nents == 0) {
/* In-place: dout uses the same MLLI table as din */
683 SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
685 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
686 (unsigned int)ctx_p->drvdata->mlli_sram_addr);
687 set_dout_mlli(&desc[*seq_size],
688 ctx_p->drvdata->mlli_sram_addr,
689 req_ctx->in_mlli_nents, NS_BIT,
/* Distinct dst: its MLLI entries follow the src entries in SRAM */
692 SSI_LOG_DEBUG(" din/dout params "
693 "addr 0x%08X addr 0x%08X\n",
694 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
695 (unsigned int)ctx_p->drvdata->mlli_sram_addr +
696 (u32)LLI_ENTRY_BYTE_SIZE *
698 set_dout_mlli(&desc[*seq_size],
699 (ctx_p->drvdata->mlli_sram_addr +
700 (LLI_ENTRY_BYTE_SIZE *
701 req_ctx->in_mlli_nents)),
702 req_ctx->out_mlli_nents, NS_BIT,
706 set_queue_last_ind(&desc[*seq_size]);
708 set_flow_mode(&desc[*seq_size], flow_mode);
/*
 * ssi_blkcipher_complete() - common completion path: unmap the request
 * buffers, decrement the driver inflight counter for BYPASS flows, and
 * complete the async request (if any) toward the crypto API.
 * Returns the completion error code (0 here; completion_error is never
 * set in the visible lines).
 */
713 static int ssi_blkcipher_complete(struct device *dev,
714 struct ssi_ablkcipher_ctx *ctx_p,
715 struct blkcipher_req_ctx *req_ctx,
716 struct scatterlist *dst,
717 struct scatterlist *src,
720 void __iomem *cc_base)
722 int completion_error = 0;
723 u32 inflight_counter;
725 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
728 /*Set the inflight couter value to local variable*/
729 inflight_counter = ctx_p->drvdata->inflight_counter;
730 /*Decrease the inflight counter*/
731 if (ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
732 ctx_p->drvdata->inflight_counter--;
735 ablkcipher_request_complete(areq, completion_error);
738 return completion_error;
/*
 * ssi_blkcipher_process() - main request path shared by encrypt/decrypt:
 * validate the data size, temporarily switch CTS to plain CBC for
 * block-aligned input, map the request buffers, build the setup + data
 * descriptor sequence, and push it to the HW queue via send_request().
 * Synchronous completion (rc != -EINPROGRESS) is finalized inline.
 * Returns -EINPROGRESS for queued async work, 0/negative otherwise.
 * NOTE(review): several returns, braces and argument lines are missing
 * from this extracted view.
 */
741 static int ssi_blkcipher_process(
742 struct crypto_tfm *tfm,
743 struct blkcipher_req_ctx *req_ctx,
744 struct scatterlist *dst, struct scatterlist *src,
746 void *info, //req info
749 enum drv_crypto_direction direction)
751 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
752 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
753 struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
754 struct ssi_crypto_req ssi_req = {};
755 int rc, seq_len = 0, cts_restore_flag = 0;
757 SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
758 ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? "Encrypt" : "Decrypt"),
761 CHECK_AND_RETURN_UPON_FIPS_ERROR();
762 /* STAT_PHASE_0: Init and sanity checks */
764 /* TODO: check data length according to mode */
765 if (unlikely(validate_data_size(ctx_p, nbytes))) {
766 SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
767 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
771 /* No data to process is valid */
774 /*For CTS in case of data size aligned to 16 use CBC mode*/
775 if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)) {
776 ctx_p->cipher_mode = DRV_CIPHER_CBC;
777 cts_restore_flag = 1;
780 /* Setup DX request structure */
781 ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
782 ssi_req.user_arg = (void *)areq;
784 #ifdef ENABLE_CYCLE_COUNT
785 ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
786 STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;
790 /* Setup request context */
791 req_ctx->gen_ctx.op_type = direction;
794 /* STAT_PHASE_1: Map buffers */
796 rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
797 if (unlikely(rc != 0)) {
798 SSI_LOG_ERR("map_request() failed\n");
803 /* STAT_PHASE_2: Create sequence */
805 /* Setup processing */
806 #if SSI_CC_HAS_MULTI2
807 if (ctx_p->flow_mode == S_DIN_to_MULTI2)
808 ssi_blkcipher_create_multi2_setup_desc(tfm, req_ctx, ivsize,
811 #endif /*SSI_CC_HAS_MULTI2*/
812 ssi_blkcipher_create_setup_desc(tfm, req_ctx, ivsize, nbytes,
814 /* Data processing */
815 ssi_blkcipher_create_data_desc(tfm,
822 /* do we need to generate IV? */
823 if (req_ctx->is_giv) {
824 ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
825 ssi_req.ivgen_dma_addr_len = 1;
826 /* set the IV size (8/16 B long)*/
827 ssi_req.ivgen_size = ivsize;
830 /* STAT_PHASE_3: Lock HW and push sequence */
832 rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (!areq) ? 0 : 1);
834 if (unlikely(rc != -EINPROGRESS)) {
835 /* Failed to send the request or request completed synchronously */
836 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
/* Synchronous path: finish the request inline */
841 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
843 rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst,
845 ctx_p->drvdata->cc_base);
/* Restore CTS mode if it was temporarily switched to CBC above */
850 if (cts_restore_flag != 0)
851 ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;
/*
 * ssi_ablkcipher_complete() - HW completion callback (registered as
 * ssi_req.user_cb in ssi_blkcipher_process); recovers the ablkcipher
 * request from @ssi_req and finishes it via ssi_blkcipher_complete().
 */
856 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
858 struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
859 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
860 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
861 struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
862 unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
864 CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
866 ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src,
867 ivsize, areq, cc_base);
870 /* Async wrap functions */
/*
 * ssi_ablkcipher_init() - async wrapper for tfm init: set the per-request
 * context size, then delegate to the common ssi_blkcipher_init().
 */
872 static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
874 struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
876 ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
878 return ssi_blkcipher_init(tfm);
/* Async setkey wrapper: delegate to the common ssi_blkcipher_setkey() */
882 static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
886 return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
/*
 * ssi_ablkcipher_encrypt() - async encrypt entry point: stash req->info
 * in the request context and hand off to ssi_blkcipher_process() in the
 * encrypt direction.
 */
889 static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
891 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
892 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
893 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
894 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
896 req_ctx->backup_info = req->info;
897 req_ctx->is_giv = false;
899 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
/*
 * ssi_ablkcipher_decrypt() - async decrypt entry point; mirrors
 * ssi_ablkcipher_encrypt() but runs ssi_blkcipher_process() in the
 * decrypt direction.
 */
902 static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
904 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
905 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
906 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
907 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
909 req_ctx->backup_info = req->info;
910 req_ctx->is_giv = false;
911 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
915 /* DX Block cipher alg */
916 static struct ssi_alg_template blkcipher_algs[] = {
918 #if SSI_CC_HAS_AES_XTS
921 .driver_name = "xts-aes-dx",
922 .blocksize = AES_BLOCK_SIZE,
923 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
924 .template_ablkcipher = {
925 .setkey = ssi_ablkcipher_setkey,
926 .encrypt = ssi_ablkcipher_encrypt,
927 .decrypt = ssi_ablkcipher_decrypt,
928 .min_keysize = AES_MIN_KEY_SIZE * 2,
929 .max_keysize = AES_MAX_KEY_SIZE * 2,
930 .ivsize = AES_BLOCK_SIZE,
933 .cipher_mode = DRV_CIPHER_XTS,
934 .flow_mode = S_DIN_to_AES,
938 .driver_name = "xts-aes-du512-dx",
939 .blocksize = AES_BLOCK_SIZE,
940 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
941 .template_ablkcipher = {
942 .setkey = ssi_ablkcipher_setkey,
943 .encrypt = ssi_ablkcipher_encrypt,
944 .decrypt = ssi_ablkcipher_decrypt,
945 .min_keysize = AES_MIN_KEY_SIZE * 2,
946 .max_keysize = AES_MAX_KEY_SIZE * 2,
947 .ivsize = AES_BLOCK_SIZE,
949 .cipher_mode = DRV_CIPHER_XTS,
950 .flow_mode = S_DIN_to_AES,
954 .driver_name = "xts-aes-du4096-dx",
955 .blocksize = AES_BLOCK_SIZE,
956 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
957 .template_ablkcipher = {
958 .setkey = ssi_ablkcipher_setkey,
959 .encrypt = ssi_ablkcipher_encrypt,
960 .decrypt = ssi_ablkcipher_decrypt,
961 .min_keysize = AES_MIN_KEY_SIZE * 2,
962 .max_keysize = AES_MAX_KEY_SIZE * 2,
963 .ivsize = AES_BLOCK_SIZE,
965 .cipher_mode = DRV_CIPHER_XTS,
966 .flow_mode = S_DIN_to_AES,
968 #endif /*SSI_CC_HAS_AES_XTS*/
969 #if SSI_CC_HAS_AES_ESSIV
971 .name = "essiv(aes)",
972 .driver_name = "essiv-aes-dx",
973 .blocksize = AES_BLOCK_SIZE,
974 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
975 .template_ablkcipher = {
976 .setkey = ssi_ablkcipher_setkey,
977 .encrypt = ssi_ablkcipher_encrypt,
978 .decrypt = ssi_ablkcipher_decrypt,
979 .min_keysize = AES_MIN_KEY_SIZE * 2,
980 .max_keysize = AES_MAX_KEY_SIZE * 2,
981 .ivsize = AES_BLOCK_SIZE,
983 .cipher_mode = DRV_CIPHER_ESSIV,
984 .flow_mode = S_DIN_to_AES,
987 .name = "essiv(aes)",
988 .driver_name = "essiv-aes-du512-dx",
989 .blocksize = AES_BLOCK_SIZE,
990 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
991 .template_ablkcipher = {
992 .setkey = ssi_ablkcipher_setkey,
993 .encrypt = ssi_ablkcipher_encrypt,
994 .decrypt = ssi_ablkcipher_decrypt,
995 .min_keysize = AES_MIN_KEY_SIZE * 2,
996 .max_keysize = AES_MAX_KEY_SIZE * 2,
997 .ivsize = AES_BLOCK_SIZE,
999 .cipher_mode = DRV_CIPHER_ESSIV,
1000 .flow_mode = S_DIN_to_AES,
1003 .name = "essiv(aes)",
1004 .driver_name = "essiv-aes-du4096-dx",
1005 .blocksize = AES_BLOCK_SIZE,
1006 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1007 .template_ablkcipher = {
1008 .setkey = ssi_ablkcipher_setkey,
1009 .encrypt = ssi_ablkcipher_encrypt,
1010 .decrypt = ssi_ablkcipher_decrypt,
1011 .min_keysize = AES_MIN_KEY_SIZE * 2,
1012 .max_keysize = AES_MAX_KEY_SIZE * 2,
1013 .ivsize = AES_BLOCK_SIZE,
1015 .cipher_mode = DRV_CIPHER_ESSIV,
1016 .flow_mode = S_DIN_to_AES,
1018 #endif /*SSI_CC_HAS_AES_ESSIV*/
1019 #if SSI_CC_HAS_AES_BITLOCKER
1021 .name = "bitlocker(aes)",
1022 .driver_name = "bitlocker-aes-dx",
1023 .blocksize = AES_BLOCK_SIZE,
1024 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1025 .template_ablkcipher = {
1026 .setkey = ssi_ablkcipher_setkey,
1027 .encrypt = ssi_ablkcipher_encrypt,
1028 .decrypt = ssi_ablkcipher_decrypt,
1029 .min_keysize = AES_MIN_KEY_SIZE * 2,
1030 .max_keysize = AES_MAX_KEY_SIZE * 2,
1031 .ivsize = AES_BLOCK_SIZE,
1033 .cipher_mode = DRV_CIPHER_BITLOCKER,
1034 .flow_mode = S_DIN_to_AES,
1037 .name = "bitlocker(aes)",
1038 .driver_name = "bitlocker-aes-du512-dx",
1039 .blocksize = AES_BLOCK_SIZE,
1040 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1041 .template_ablkcipher = {
1042 .setkey = ssi_ablkcipher_setkey,
1043 .encrypt = ssi_ablkcipher_encrypt,
1044 .decrypt = ssi_ablkcipher_decrypt,
1045 .min_keysize = AES_MIN_KEY_SIZE * 2,
1046 .max_keysize = AES_MAX_KEY_SIZE * 2,
1047 .ivsize = AES_BLOCK_SIZE,
1049 .cipher_mode = DRV_CIPHER_BITLOCKER,
1050 .flow_mode = S_DIN_to_AES,
1053 .name = "bitlocker(aes)",
1054 .driver_name = "bitlocker-aes-du4096-dx",
1055 .blocksize = AES_BLOCK_SIZE,
1056 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1057 .template_ablkcipher = {
1058 .setkey = ssi_ablkcipher_setkey,
1059 .encrypt = ssi_ablkcipher_encrypt,
1060 .decrypt = ssi_ablkcipher_decrypt,
1061 .min_keysize = AES_MIN_KEY_SIZE * 2,
1062 .max_keysize = AES_MAX_KEY_SIZE * 2,
1063 .ivsize = AES_BLOCK_SIZE,
1065 .cipher_mode = DRV_CIPHER_BITLOCKER,
1066 .flow_mode = S_DIN_to_AES,
1068 #endif /*SSI_CC_HAS_AES_BITLOCKER*/
1071 .driver_name = "ecb-aes-dx",
1072 .blocksize = AES_BLOCK_SIZE,
1073 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1074 .template_ablkcipher = {
1075 .setkey = ssi_ablkcipher_setkey,
1076 .encrypt = ssi_ablkcipher_encrypt,
1077 .decrypt = ssi_ablkcipher_decrypt,
1078 .min_keysize = AES_MIN_KEY_SIZE,
1079 .max_keysize = AES_MAX_KEY_SIZE,
1082 .cipher_mode = DRV_CIPHER_ECB,
1083 .flow_mode = S_DIN_to_AES,
1087 .driver_name = "cbc-aes-dx",
1088 .blocksize = AES_BLOCK_SIZE,
1089 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1090 .template_ablkcipher = {
1091 .setkey = ssi_ablkcipher_setkey,
1092 .encrypt = ssi_ablkcipher_encrypt,
1093 .decrypt = ssi_ablkcipher_decrypt,
1094 .min_keysize = AES_MIN_KEY_SIZE,
1095 .max_keysize = AES_MAX_KEY_SIZE,
1096 .ivsize = AES_BLOCK_SIZE,
1098 .cipher_mode = DRV_CIPHER_CBC,
1099 .flow_mode = S_DIN_to_AES,
1103 .driver_name = "ofb-aes-dx",
1104 .blocksize = AES_BLOCK_SIZE,
1105 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1106 .template_ablkcipher = {
1107 .setkey = ssi_ablkcipher_setkey,
1108 .encrypt = ssi_ablkcipher_encrypt,
1109 .decrypt = ssi_ablkcipher_decrypt,
1110 .min_keysize = AES_MIN_KEY_SIZE,
1111 .max_keysize = AES_MAX_KEY_SIZE,
1112 .ivsize = AES_BLOCK_SIZE,
1114 .cipher_mode = DRV_CIPHER_OFB,
1115 .flow_mode = S_DIN_to_AES,
1117 #if SSI_CC_HAS_AES_CTS
1119 .name = "cts1(cbc(aes))",
1120 .driver_name = "cts1-cbc-aes-dx",
1121 .blocksize = AES_BLOCK_SIZE,
1122 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1123 .template_ablkcipher = {
1124 .setkey = ssi_ablkcipher_setkey,
1125 .encrypt = ssi_ablkcipher_encrypt,
1126 .decrypt = ssi_ablkcipher_decrypt,
1127 .min_keysize = AES_MIN_KEY_SIZE,
1128 .max_keysize = AES_MAX_KEY_SIZE,
1129 .ivsize = AES_BLOCK_SIZE,
1131 .cipher_mode = DRV_CIPHER_CBC_CTS,
1132 .flow_mode = S_DIN_to_AES,
1137 .driver_name = "ctr-aes-dx",
1139 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1140 .template_ablkcipher = {
1141 .setkey = ssi_ablkcipher_setkey,
1142 .encrypt = ssi_ablkcipher_encrypt,
1143 .decrypt = ssi_ablkcipher_decrypt,
1144 .min_keysize = AES_MIN_KEY_SIZE,
1145 .max_keysize = AES_MAX_KEY_SIZE,
1146 .ivsize = AES_BLOCK_SIZE,
1148 .cipher_mode = DRV_CIPHER_CTR,
1149 .flow_mode = S_DIN_to_AES,
1152 .name = "cbc(des3_ede)",
1153 .driver_name = "cbc-3des-dx",
1154 .blocksize = DES3_EDE_BLOCK_SIZE,
1155 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1156 .template_ablkcipher = {
1157 .setkey = ssi_ablkcipher_setkey,
1158 .encrypt = ssi_ablkcipher_encrypt,
1159 .decrypt = ssi_ablkcipher_decrypt,
1160 .min_keysize = DES3_EDE_KEY_SIZE,
1161 .max_keysize = DES3_EDE_KEY_SIZE,
1162 .ivsize = DES3_EDE_BLOCK_SIZE,
1164 .cipher_mode = DRV_CIPHER_CBC,
1165 .flow_mode = S_DIN_to_DES,
1168 .name = "ecb(des3_ede)",
1169 .driver_name = "ecb-3des-dx",
1170 .blocksize = DES3_EDE_BLOCK_SIZE,
1171 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1172 .template_ablkcipher = {
1173 .setkey = ssi_ablkcipher_setkey,
1174 .encrypt = ssi_ablkcipher_encrypt,
1175 .decrypt = ssi_ablkcipher_decrypt,
1176 .min_keysize = DES3_EDE_KEY_SIZE,
1177 .max_keysize = DES3_EDE_KEY_SIZE,
1180 .cipher_mode = DRV_CIPHER_ECB,
1181 .flow_mode = S_DIN_to_DES,
1185 .driver_name = "cbc-des-dx",
1186 .blocksize = DES_BLOCK_SIZE,
1187 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1188 .template_ablkcipher = {
1189 .setkey = ssi_ablkcipher_setkey,
1190 .encrypt = ssi_ablkcipher_encrypt,
1191 .decrypt = ssi_ablkcipher_decrypt,
1192 .min_keysize = DES_KEY_SIZE,
1193 .max_keysize = DES_KEY_SIZE,
1194 .ivsize = DES_BLOCK_SIZE,
1196 .cipher_mode = DRV_CIPHER_CBC,
1197 .flow_mode = S_DIN_to_DES,
1201 .driver_name = "ecb-des-dx",
1202 .blocksize = DES_BLOCK_SIZE,
1203 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1204 .template_ablkcipher = {
1205 .setkey = ssi_ablkcipher_setkey,
1206 .encrypt = ssi_ablkcipher_encrypt,
1207 .decrypt = ssi_ablkcipher_decrypt,
1208 .min_keysize = DES_KEY_SIZE,
1209 .max_keysize = DES_KEY_SIZE,
1212 .cipher_mode = DRV_CIPHER_ECB,
1213 .flow_mode = S_DIN_to_DES,
1215 #if SSI_CC_HAS_MULTI2
1217 .name = "cbc(multi2)",
1218 .driver_name = "cbc-multi2-dx",
1219 .blocksize = CC_MULTI2_BLOCK_SIZE,
1220 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1221 .template_ablkcipher = {
1222 .setkey = ssi_ablkcipher_setkey,
1223 .encrypt = ssi_ablkcipher_encrypt,
1224 .decrypt = ssi_ablkcipher_decrypt,
1225 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1226 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1227 .ivsize = CC_MULTI2_IV_SIZE,
1229 .cipher_mode = DRV_MULTI2_CBC,
1230 .flow_mode = S_DIN_to_MULTI2,
1233 .name = "ofb(multi2)",
1234 .driver_name = "ofb-multi2-dx",
1236 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1237 .template_ablkcipher = {
1238 .setkey = ssi_ablkcipher_setkey,
1239 .encrypt = ssi_ablkcipher_encrypt,
1240 .decrypt = ssi_ablkcipher_encrypt,
1241 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1242 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1243 .ivsize = CC_MULTI2_IV_SIZE,
1245 .cipher_mode = DRV_MULTI2_OFB,
1246 .flow_mode = S_DIN_to_MULTI2,
1248 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * ssi_ablkcipher_create_alg() - build one registerable crypto_alg wrapper
 * from a driver template entry.
 *
 * Allocates a zeroed struct ssi_crypto_alg, copies the template's
 * algorithm name, driver name, block size and ablkcipher operation
 * callbacks into the embedded crypto_alg, marks it as an async,
 * kernel-driver-only cipher, and records the template's cipher/flow
 * mode for later programming of the HW engine.
 *
 * Return: pointer to the new wrapper on success, ERR_PTR(-ENOMEM) on
 * allocation failure. The caller owns the allocation (it is released
 * when the alg is unregistered in ssi_ablkcipher_free()).
 *
 * NOTE(review): this excerpt appears to have lines elided by the paste:
 * the opening brace, the "if (!t_alg)" guard around the error log/return,
 * the tail of the cra_flags expression (line 1278 ends in '|'), and the
 * final "return t_alg;" / closing brace. Verify against the full file.
 */
1252 struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1254 struct ssi_crypto_alg *t_alg;
1255 struct crypto_alg *alg;
/* Zeroed allocation: any crypto_alg field not set below stays 0/NULL. */
1257 t_alg = kzalloc(sizeof(struct ssi_crypto_alg), GFP_KERNEL);
1259 SSI_LOG_ERR("failed to allocate t_alg\n");
1260 return ERR_PTR(-ENOMEM);
1263 alg = &t_alg->crypto_alg;
/* Copy names from the template, bounded by CRYPTO_MAX_ALG_NAME. */
1265 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1266 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1267 template->driver_name);
1268 alg->cra_module = THIS_MODULE;
1269 alg->cra_priority = SSI_CRA_PRIO;
1270 alg->cra_blocksize = template->blocksize;
1271 alg->cra_alignmask = 0;
/* Per-tfm context carries the key material and HW state. */
1272 alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1274 alg->cra_init = ssi_ablkcipher_init;
1275 alg->cra_exit = ssi_blkcipher_exit;
1276 alg->cra_type = &crypto_ablkcipher_type;
/* Whole ablkcipher ops struct (setkey/encrypt/decrypt, key/IV sizes). */
1277 alg->cra_ablkcipher = template->template_ablkcipher;
/* Async HW offload, not selectable by generic (non-driver) requests.
 * NOTE(review): expression continues on an elided line -- confirm the
 * remaining flag(s) in the full file.
 */
1278 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
/* Stash HW programming parameters alongside the registered alg. */
1281 t_alg->cipher_mode = template->cipher_mode;
1282 t_alg->flow_mode = template->flow_mode;
/*
 * ssi_ablkcipher_free() - tear down everything ssi_ablkcipher_alloc() set up.
 *
 * Unregisters every cipher algorithm queued on the driver's
 * blkcipher_alg_list, frees each wrapper, then frees the handle itself
 * and clears drvdata->blkcipher_handle so a repeated call is a no-op
 * (the NULL check below guards against that).
 *
 * NOTE(review): this excerpt appears to have lines elided by the paste:
 * the opening brace, the "struct device *dev;" declaration that
 * "dev = &drvdata->plat_dev->dev;" assigns to, the list_for_each_entry_safe
 * member argument, the per-entry kfree(t_alg), and the closing braces /
 * "return 0;". Verify against the full file.
 */
1287 int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1289 struct ssi_crypto_alg *t_alg, *n;
1290 struct ssi_blkcipher_handle *blkcipher_handle =
1291 drvdata->blkcipher_handle;
1293 dev = &drvdata->plat_dev->dev;
/* Nothing to do if alloc never ran (or a prior free already ran). */
1295 if (blkcipher_handle) {
1296 /* Remove registered algs */
/* _safe variant: entries are list_del'd while walking the list. */
1297 list_for_each_entry_safe(t_alg, n,
1298 &blkcipher_handle->blkcipher_alg_list,
1300 crypto_unregister_alg(&t_alg->crypto_alg);
1301 list_del(&t_alg->entry);
1304 kfree(blkcipher_handle);
/* Clear the handle so a double free()/re-entry is harmless. */
1305 drvdata->blkcipher_handle = NULL;
1312 int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1314 struct ssi_blkcipher_handle *ablkcipher_handle;
1315 struct ssi_crypto_alg *t_alg;
1319 ablkcipher_handle = kmalloc(sizeof(struct ssi_blkcipher_handle),
1321 if (!ablkcipher_handle)
1324 drvdata->blkcipher_handle = ablkcipher_handle;
1326 INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1329 SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1330 for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1331 SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1332 t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1333 if (IS_ERR(t_alg)) {
1334 rc = PTR_ERR(t_alg);
1335 SSI_LOG_ERR("%s alg allocation failed\n",
1336 blkcipher_algs[alg].driver_name);
1339 t_alg->drvdata = drvdata;
1341 SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1342 rc = crypto_register_alg(&t_alg->crypto_alg);
1343 SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1344 t_alg->crypto_alg.cra_driver_name, rc);
1345 if (unlikely(rc != 0)) {
1346 SSI_LOG_ERR("%s alg registration failed\n",
1347 t_alg->crypto_alg.cra_driver_name);
1351 list_add_tail(&t_alg->entry,
1352 &ablkcipher_handle->blkcipher_alg_list);
1353 SSI_LOG_DEBUG("Registered %s\n",
1354 t_alg->crypto_alg.cra_driver_name);
1360 ssi_ablkcipher_free(drvdata);