2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
13 * You should have received a copy of the GNU General Public License
14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/semaphore.h>
21 #include <crypto/algapi.h>
22 #include <crypto/internal/skcipher.h>
23 #include <crypto/aes.h>
24 #include <crypto/ctr.h>
25 #include <crypto/des.h>
27 #include "ssi_config.h"
28 #include "ssi_driver.h"
29 #include "cc_lli_defs.h"
30 #include "ssi_buffer_mgr.h"
31 #include "ssi_cipher.h"
32 #include "ssi_request_mgr.h"
33 #include "ssi_sysfs.h"
34 #include "ssi_fips_local.h"
/* Max number of HW descriptors built for one cipher request sequence. */
36 #define MAX_ABLKCIPHER_SEQ_LEN 6
/* Shorthand for the template union members (async vs. sync templates). */
38 #define template_ablkcipher template_u.ablkcipher
39 #define template_sblkcipher template_u.blkcipher
/* Request data-size bounds accepted for AES-XTS, in bytes. */
41 #define SSI_MIN_AES_XTS_SIZE 0x10
42 #define SSI_MAX_AES_XTS_SIZE 0x2000
/*
 * Driver-level handle: list of block-cipher algs registered by this driver.
 * NOTE(review): interior lines are missing from this extract; the struct
 * bodies below are incomplete as shown.
 */
43 struct ssi_blkcipher_handle {
44 struct list_head blkcipher_alg_list;
/* DMA-mapped copy of the caller-supplied key (mapped DMA_TO_DEVICE). */
47 struct cc_user_key_info {
49 dma_addr_t key_dma_addr;
/* HW key-slot numbers used when the key lives in CryptoCell HW slots. */
51 struct cc_hw_key_info {
52 enum cc_hw_crypto_key key1_slot;
/* second slot — used by the two-key modes (XTS/ESSIV/BITLOCKER) */
53 enum cc_hw_crypto_key key2_slot;
/* Per-tfm cipher context. */
56 struct ssi_ablkcipher_ctx {
57 struct ssi_drvdata *drvdata;
/* request context buffer for the synchronous (blkcipher) wrappers */
63 struct blkcipher_req_ctx *sync_ctx;
64 struct cc_user_key_info user;
65 struct cc_hw_key_info hw;
/* sw sha256 tfm used to derive the ESSIV second key (see setkey) */
66 struct crypto_shash *shash_tfm;
/* Completion callback passed to the request manager; defined below. */
69 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
/*
 * Check that key length @size is legal for the context's flow/cipher mode.
 * NOTE(review): this extract is missing interior lines (case labels,
 * return/break statements); the visible body is incomplete.
 */
72 static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, u32 size) {
73 switch (ctx_p->flow_mode){
/* AES 128/192-bit keys: valid only for the single-key cipher modes. */
76 case CC_AES_128_BIT_KEY_SIZE:
77 case CC_AES_192_BIT_KEY_SIZE:
78 if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
79 (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
80 (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
83 case CC_AES_256_BIT_KEY_SIZE:
/* Double-length keys: only for the two-key modes (XTS/ESSIV/BITLOCKER). */
85 case (CC_AES_192_BIT_KEY_SIZE*2):
86 case (CC_AES_256_BIT_KEY_SIZE*2):
87 if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
88 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
89 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
/* DES flow: accept single-DES or 3DES key lengths. */
96 if (likely(size == DES3_EDE_KEY_SIZE ||
97 size == DES_KEY_SIZE))
100 #if SSI_CC_HAS_MULTI2
101 case S_DIN_to_MULTI2:
102 if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
/*
 * Check that request length @size is legal for the configured mode.
 * NOTE(review): interior lines (case labels, returns) are missing from
 * this extract; the visible body is incomplete.
 */
114 static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
115 switch (ctx_p->flow_mode){
117 switch (ctx_p->cipher_mode){
/* XTS: bounded by SSI_MIN/MAX_AES_XTS_SIZE and AES-block aligned. */
119 if ((size >= SSI_MIN_AES_XTS_SIZE) &&
120 (size <= SSI_MAX_AES_XTS_SIZE) &&
121 IS_ALIGNED(size, AES_BLOCK_SIZE))
/* CTS needs at least one full AES block. */
124 case DRV_CIPHER_CBC_CTS:
125 if (likely(size >= AES_BLOCK_SIZE))
133 case DRV_CIPHER_ESSIV:
134 case DRV_CIPHER_BITLOCKER:
135 if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
/* DES flow: must be DES-block aligned. */
143 if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
146 #if SSI_CC_HAS_MULTI2
147 case S_DIN_to_MULTI2:
148 switch (ctx_p->cipher_mode) {
150 if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
159 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * Return the registered max_keysize for this tfm's alg, dispatching on
 * whether it was registered as an async (ablkcipher) or sync (blkcipher)
 * template.  Used to size/map the context key buffer.
 */
167 static unsigned int get_max_keysize(struct crypto_tfm *tfm)
169 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
171 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER) {
172 return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
175 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER) {
176 return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
/*
 * Common tfm init: copy mode/flow config from the registered alg into the
 * context, allocate a DMA-able key buffer sized for max_keysize, map it
 * DMA_TO_DEVICE, and for ESSIV allocate a sw sha256 tfm for key derivation.
 * NOTE(review): error-path lines (returns on alloc/map failure) are missing
 * from this extract.
 */
182 static int ssi_blkcipher_init(struct crypto_tfm *tfm)
184 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
185 struct crypto_alg *alg = tfm->__crt_alg;
186 struct ssi_crypto_alg *ssi_alg =
187 container_of(alg, struct ssi_crypto_alg, crypto_alg);
190 unsigned int max_key_buf_size = get_max_keysize(tfm);
192 SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p,
193 crypto_tfm_alg_name(tfm));
195 CHECK_AND_RETURN_UPON_FIPS_ERROR();
196 ctx_p->cipher_mode = ssi_alg->cipher_mode;
197 ctx_p->flow_mode = ssi_alg->flow_mode;
198 ctx_p->drvdata = ssi_alg->drvdata;
199 dev = &ctx_p->drvdata->plat_dev->dev;
201 /* Allocate key buffer, cache line aligned */
202 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL|GFP_DMA);
203 if (!ctx_p->user.key) {
204 SSI_LOG_ERR("Allocating key buffer in context failed\n");
207 SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
/* Map the key buffer once at init; setkey only syncs it for CPU/device. */
211 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
212 max_key_buf_size, DMA_TO_DEVICE);
213 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
214 SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
215 max_key_buf_size, ctx_p->user.key);
218 SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr, max_key_buf_size);
219 SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
220 max_key_buf_size, ctx_p->user.key,
221 (unsigned long long)ctx_p->user.key_dma_addr);
223 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
224 /* Alloc hash tfm for essiv */
225 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
226 if (IS_ERR(ctx_p->shash_tfm)) {
227 SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
/* NOTE(review): returning here leaks the mapped key buffer — the
 * unmap/free cleanup lines are not visible in this extract; verify
 * against the full file before changing. */
228 return PTR_ERR(ctx_p->shash_tfm);
/*
 * Common tfm teardown: free the ESSIV hash tfm (if any), unmap the key
 * buffer DMA mapping made in ssi_blkcipher_init(), then free the buffer.
 */
235 static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
237 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
238 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
239 unsigned int max_key_buf_size = get_max_keysize(tfm);
241 SSI_LOG_DEBUG("Clearing context @%p for %s\n",
242 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
244 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
245 /* Free hash tfm for essiv */
246 crypto_free_shash(ctx_p->shash_tfm);
247 ctx_p->shash_tfm = NULL;
250 /* Unmap key buffer */
251 SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
252 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
254 SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n",
255 (unsigned long long)ctx_p->user.key_dma_addr);
257 /* Free key buffer in context */
/* NOTE(review): key material is not zeroized before kfree — the full
 * file may differ; consider kzfree upstream. */
258 kfree(ctx_p->user.key);
259 SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
/* Layout of a 3DES key blob as three consecutive single-DES keys. */
263 typedef struct tdes_keys{
264 u8 key1[DES_KEY_SIZE];
265 u8 key2[DES_KEY_SIZE];
266 u8 key3[DES_KEY_SIZE];
/* 32 bytes of zeros; presumably used for comparisons elsewhere in the
 * full file — usage is not visible in this extract. */
269 static const u8 zero_buff[] = {0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
270 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
271 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
272 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
274 /* The function verifies that tdes keys are not weak.*/
/* FIPS check: reject 3DES keys where key1==key2 or key3==key2 (which would
 * degrade 3DES to single DES).  Compiled out without CCREE_FIPS_SUPPORT. */
275 static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
277 #ifdef CCREE_FIPS_SUPPORT
278 tdes_keys_t *tdes_key = (tdes_keys_t*)key;
280 /* verify key1 != key2 and key3 != key2*/
281 if (unlikely( (memcmp((u8*)tdes_key->key1, (u8*)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
282 (memcmp((u8*)tdes_key->key3, (u8*)tdes_key->key2, sizeof(tdes_key->key3)) == 0) )) {
285 #endif /* CCREE_FIPS_SUPPORT */
290 /* The function verifies that xts keys are not weak.*/
/* FIPS check: an XTS key whose two halves are identical is rejected.
 * Compiled out without CCREE_FIPS_SUPPORT. */
291 static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
293 #ifdef CCREE_FIPS_SUPPORT
294 /* Weak key is define as key that its first half (128/256 lsb) equals its second half (128/256 msb) */
295 int singleKeySize = keylen >> 1;
297 if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0)) {
300 #endif /* CCREE_FIPS_SUPPORT */
/* Map a user-supplied HW key slot number to the cc_hw_crypto_key enum;
 * body not visible in this extract — presumably returns END_OF_KEYS for
 * unsupported slot numbers (see callers in setkey). */
305 static enum cc_hw_crypto_key hw_key_to_cc_hw_key(int slot_num)
/*
 * Common setkey path for both sync and async wrappers.
 * Phase 0: validate key size/weakness (DES parity, FIPS XTS/3DES checks)
 *          or, for HW keys, resolve the key-slot numbers.
 * Phase 1: copy the key into the pre-mapped context buffer (deriving the
 *          ESSIV second key via sw sha256) and sync it to the device.
 * NOTE(review): many interior lines (returns, braces, #else branches) are
 * missing from this extract; the visible body is incomplete.
 */
320 static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
324 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
325 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
326 u32 tmp[DES_EXPKEY_WORDS];
327 unsigned int max_key_buf_size = get_max_keysize(tfm);
328 DECL_CYCLE_COUNT_RESOURCES;
330 SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
331 ctx_p, crypto_tfm_alg_name(tfm), keylen);
/* NOTE(review): dumping raw key material to logs is a security concern;
 * presumably compiled out in production builds — verify. */
332 dump_byte_array("key", (u8 *)key, keylen);
334 CHECK_AND_RETURN_UPON_FIPS_ERROR();
336 SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");
338 /* STAT_PHASE_0: Init and sanity checks */
341 #if SSI_CC_HAS_MULTI2
342 /*last byte of key buffer is round number and should not be a part of key size*/
343 if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
346 #endif /*SSI_CC_HAS_MULTI2*/
348 if (unlikely(validate_keys_sizes(ctx_p,keylen) != 0)) {
349 SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
350 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
/* HW-key path: the "key" is actually a pair of slot numbers, not key data. */
354 if (ssi_is_hw_key(tfm)) {
355 /* setting HW key slots */
356 struct arm_hw_key_info *hki = (struct arm_hw_key_info*)key;
358 if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
359 SSI_LOG_ERR("HW key not supported for non-AES flows\n");
363 ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
364 if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
365 SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
/* Two-key modes need a second, distinct slot. */
369 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
370 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
371 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
372 if (unlikely(hki->hw_key1 == hki->hw_key2)) {
373 SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
376 ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
377 if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
378 SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
383 ctx_p->keylen = keylen;
384 END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
385 SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");
/* Software-key path: weak-key checks. */
391 if (ctx_p->flow_mode == S_DIN_to_DES) {
/* des_ekey() returning 0 indicates a weak DES key; honor REQ_WEAK_KEY. */
392 if (unlikely(!des_ekey(tmp, key)) &&
393 (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
394 tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
395 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak DES key");
399 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
400 ssi_fips_verify_xts_keys(key, keylen) != 0) {
401 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
404 if ((ctx_p->flow_mode == S_DIN_to_DES) &&
405 (keylen == DES3_EDE_KEY_SIZE) &&
406 ssi_fips_verify_3des_keys(key, keylen) != 0) {
407 SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
412 END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
414 /* STAT_PHASE_1: Copy key to ctx */
/* Hand the buffer back to the CPU before writing the new key into it. */
416 SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
417 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
418 max_key_buf_size, DMA_TO_DEVICE);
419 #if SSI_CC_HAS_MULTI2
420 if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
421 memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
/* Round count rides in the byte just past the key material. */
422 ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
423 if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
424 ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
425 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
426 SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
430 #endif /*SSI_CC_HAS_MULTI2*/
432 memcpy(ctx_p->user.key, key, keylen);
/* Zero-pad past a 192-bit key up to the max AES key size; the guarding
 * condition for this line is not visible in this extract. */
434 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
436 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
437 /* sha256 for key2 - use sw implementation */
438 int key_len = keylen >> 1;
440 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
441 desc->tfm = ctx_p->shash_tfm;
/* ESSIV: second half of the buffer = sha256(first half). */
443 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
445 SSI_LOG_ERR("Failed to hash ESSIV key.\n");
/* Flush the finished key image out to the device. */
450 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
451 max_key_buf_size, DMA_TO_DEVICE);
452 SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr ,max_key_buf_size);
453 ctx_p->keylen = keylen;
455 END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_1);
457 SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
/*
 * Build the HW descriptor sequence that loads cipher state (IV) and key(s)
 * into the engine, appending to desc[] and advancing *seq_size.
 * Single-key modes load state then KEY0; the two-key modes (XTS/ESSIV/
 * BITLOCKER) load KEY0, the XEX key, then STATE1 with the IV.
 * NOTE(review): interior lines (case labels, *seq_size increments, some
 * arguments) are missing from this extract.
 */
462 ssi_blkcipher_create_setup_desc(
463 struct crypto_tfm *tfm,
464 struct blkcipher_req_ctx *req_ctx,
467 struct cc_hw_desc desc[],
468 unsigned int *seq_size)
470 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
471 int cipher_mode = ctx_p->cipher_mode;
472 int flow_mode = ctx_p->flow_mode;
473 int direction = req_ctx->gen_ctx.op_type;
474 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
475 unsigned int key_len = ctx_p->keylen;
476 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
/* XEX data-unit size defaults to the request size unless the alg was
 * registered with a fixed DU (512/4096) bulk flag. */
477 unsigned int du_size = nbytes;
479 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
481 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
483 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
486 switch (cipher_mode) {
488 case DRV_CIPHER_CBC_CTS:
491 /* Load cipher state */
492 HW_DESC_INIT(&desc[*seq_size]);
493 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
496 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
497 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
498 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
/* CTR/OFB keep the counter/feedback in STATE1; other modes use STATE0. */
499 if ((cipher_mode == DRV_CIPHER_CTR) ||
500 (cipher_mode == DRV_CIPHER_OFB) ) {
501 HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
504 HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
/* Load key0 (full key for single-key modes). */
511 HW_DESC_INIT(&desc[*seq_size]);
512 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
513 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
514 if (flow_mode == S_DIN_to_AES) {
516 if (ssi_is_hw_key(tfm)) {
517 HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
519 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
/* 192-bit keys were zero-padded to the max size in setkey; load that. */
521 ((key_len == 24) ? AES_MAX_KEY_SIZE : key_len),
524 HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len);
527 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
528 key_dma_addr, key_len,
530 HW_DESC_SET_KEY_SIZE_DES(&desc[*seq_size], key_len);
532 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
533 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
537 case DRV_CIPHER_ESSIV:
538 case DRV_CIPHER_BITLOCKER:
/* Two-key modes: first half of the buffer is key1 -> KEY0. */
540 HW_DESC_INIT(&desc[*seq_size]);
541 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
542 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
543 if (ssi_is_hw_key(tfm)) {
544 HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
546 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
547 key_dma_addr, key_len/2,
550 HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
551 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
552 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Second half of the buffer is key2 -> XEX key via the AES2 flow. */
556 HW_DESC_INIT(&desc[*seq_size]);
557 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
558 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
559 if (ssi_is_hw_key(tfm)) {
560 HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key2_slot);
562 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
563 (key_dma_addr+key_len/2), key_len/2,
566 HW_DESC_SET_XEX_DATA_UNIT_SIZE(&desc[*seq_size], du_size);
567 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], S_DIN_to_AES2);
568 HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
569 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
/* Finally load the IV/tweak into STATE1. */
573 HW_DESC_INIT(&desc[*seq_size]);
574 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
575 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
576 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
577 HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
578 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
579 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
580 iv_dma_addr, CC_AES_BLOCK_SIZE,
585 SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
590 #if SSI_CC_HAS_MULTI2
/*
 * MULTI2 variant of the setup-descriptor builder: loads the system key
 * (KEY0), then the data key with the round count (STATE0), then the IV
 * (STATE1).  NOTE(review): *seq_size increments between descriptors are
 * not visible in this extract.
 */
591 static inline void ssi_blkcipher_create_multi2_setup_desc(
592 struct crypto_tfm *tfm,
593 struct blkcipher_req_ctx *req_ctx,
595 struct cc_hw_desc desc[],
596 unsigned int *seq_size)
598 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
600 int direction = req_ctx->gen_ctx.op_type;
601 /* Load system key */
602 HW_DESC_INIT(&desc[*seq_size]);
603 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
604 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
605 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
606 CC_MULTI2_SYSTEM_KEY_SIZE,
608 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
609 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Load data key (stored right after the system key) + round count. */
613 HW_DESC_INIT(&desc[*seq_size]);
614 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
615 (ctx_p->user.key_dma_addr +
616 CC_MULTI2_SYSTEM_KEY_SIZE),
617 CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
618 HW_DESC_SET_MULTI2_NUM_ROUNDS(&desc[*seq_size],
619 ctx_p->key_round_number);
620 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
621 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
622 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
623 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE0 );
/* Load the IV into STATE1. */
628 HW_DESC_INIT(&desc[*seq_size]);
629 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
630 req_ctx->gen_ctx.iv_dma_addr,
632 HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
633 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
634 HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
635 HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
639 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * Build the data-processing descriptor(s): map the context's setup flow
 * mode to the matching DIN->DOUT flow, then either a single DLLI transfer
 * (contiguous src/dst) or an MLLI path (copy the MLLI table to SRAM via
 * BYPASS, then process through it).  Async requests (areq != NULL) set the
 * queue-last/interrupt indication.  NOTE(review): case labels, breaks and
 * *seq_size increments are missing from this extract.
 */
642 ssi_blkcipher_create_data_desc(
643 struct crypto_tfm *tfm,
644 struct blkcipher_req_ctx *req_ctx,
645 struct scatterlist *dst, struct scatterlist *src,
648 struct cc_hw_desc desc[],
649 unsigned int *seq_size)
651 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
652 unsigned int flow_mode = ctx_p->flow_mode;
654 switch (ctx_p->flow_mode) {
656 flow_mode = DIN_AES_DOUT;
659 flow_mode = DIN_DES_DOUT;
661 #if SSI_CC_HAS_MULTI2
662 case S_DIN_to_MULTI2:
663 flow_mode = DIN_MULTI2_DOUT;
665 #endif /*SSI_CC_HAS_MULTI2*/
667 SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
/* DLLI: one direct src->dst descriptor. */
671 if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)){
672 SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
673 (unsigned long long)sg_dma_address(src),
675 SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
676 (unsigned long long)sg_dma_address(dst),
678 HW_DESC_INIT(&desc[*seq_size]);
679 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
682 HW_DESC_SET_DOUT_DLLI(&desc[*seq_size],
685 NS_BIT, (areq == NULL)? 0:1);
687 HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
689 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
/* MLLI: first BYPASS-copy the MLLI table from DRAM into engine SRAM. */
693 SSI_LOG_DEBUG(" bypass params addr 0x%llX "
694 "length 0x%X addr 0x%08X\n",
695 (unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
696 req_ctx->mlli_params.mlli_len,
697 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
698 HW_DESC_INIT(&desc[*seq_size]);
699 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
700 req_ctx->mlli_params.mlli_dma_addr,
701 req_ctx->mlli_params.mlli_len,
703 HW_DESC_SET_DOUT_SRAM(&desc[*seq_size],
704 ctx_p->drvdata->mlli_sram_addr,
705 req_ctx->mlli_params.mlli_len);
706 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], BYPASS);
/* Then process through the SRAM-resident MLLI table. */
709 HW_DESC_INIT(&desc[*seq_size]);
710 HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_MLLI,
711 ctx_p->drvdata->mlli_sram_addr,
712 req_ctx->in_mlli_nents, NS_BIT);
/* out_nents == 0 means in-place: dout reuses the input MLLI table. */
713 if (req_ctx->out_nents == 0) {
714 SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
716 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
717 (unsigned int)ctx_p->drvdata->mlli_sram_addr);
718 HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
719 ctx_p->drvdata->mlli_sram_addr,
720 req_ctx->in_mlli_nents,
721 NS_BIT,(areq == NULL)? 0:1);
/* Distinct dst: its MLLI entries follow the input entries in SRAM. */
723 SSI_LOG_DEBUG(" din/dout params "
724 "addr 0x%08X addr 0x%08X\n",
725 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
726 (unsigned int)ctx_p->drvdata->mlli_sram_addr +
727 (u32)LLI_ENTRY_BYTE_SIZE *
729 HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
730 (ctx_p->drvdata->mlli_sram_addr +
731 LLI_ENTRY_BYTE_SIZE *
732 req_ctx->in_mlli_nents),
733 req_ctx->out_mlli_nents, NS_BIT,(areq == NULL)? 0:1);
736 HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
738 HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
/*
 * Common completion: unmap the request's DMA buffers, maintain the
 * driver's inflight counter, and complete the async request if any.
 * Returns the completion error code (0 here unless set by lines not
 * visible in this extract).
 */
743 static int ssi_blkcipher_complete(struct device *dev,
744 struct ssi_ablkcipher_ctx *ctx_p,
745 struct blkcipher_req_ctx *req_ctx,
746 struct scatterlist *dst, struct scatterlist *src,
749 void __iomem *cc_base)
751 int completion_error = 0;
752 u32 inflight_counter;
753 DECL_CYCLE_COUNT_RESOURCES;
756 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
757 END_CYCLE_COUNT(STAT_OP_TYPE_GENERIC, STAT_PHASE_4);
760 /*Set the inflight couter value to local variable*/
761 inflight_counter = ctx_p->drvdata->inflight_counter;
762 /*Decrease the inflight counter*/
/* NOTE(review): non-atomic read-modify-write on a shared counter —
 * presumably serialized elsewhere; verify locking in the full driver. */
763 if(ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
764 ctx_p->drvdata->inflight_counter--;
767 ablkcipher_request_complete(areq, completion_error);
770 return completion_error;
/*
 * Core request path shared by sync and async wrappers:
 *   phase 0 - validate length, temporarily switch CTS->CBC for aligned
 *             sizes, fill in the ssi_req callback/context;
 *   phase 1 - map src/dst/IV for DMA;
 *   phase 2 - build the setup + data descriptor sequence;
 *   phase 3 - push it via send_request() (sync when areq == NULL).
 * NOTE(review): several interior lines (returns, else branches, argument
 * lists of the create_* calls) are missing from this extract.
 */
773 static int ssi_blkcipher_process(
774 struct crypto_tfm *tfm,
775 struct blkcipher_req_ctx *req_ctx,
776 struct scatterlist *dst, struct scatterlist *src,
778 void *info, //req info
781 enum drv_crypto_direction direction)
783 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
784 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
785 struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
786 struct ssi_crypto_req ssi_req = {};
787 int rc, seq_len = 0,cts_restore_flag = 0;
788 DECL_CYCLE_COUNT_RESOURCES;
790 SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
791 ((direction==DRV_CRYPTO_DIRECTION_ENCRYPT)?"Encrypt":"Decrypt"),
794 CHECK_AND_RETURN_UPON_FIPS_ERROR();
795 /* STAT_PHASE_0: Init and sanity checks */
798 /* TODO: check data length according to mode */
799 if (unlikely(validate_data_size(ctx_p, nbytes))) {
800 SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
801 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
805 /* No data to process is valid */
808 /*For CTS in case of data size aligned to 16 use CBC mode*/
/* NOTE(review): mutating ctx_p->cipher_mode is shared per-tfm state;
 * concurrent requests on the same tfm could race — verify upstream. */
809 if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)){
811 ctx_p->cipher_mode = DRV_CIPHER_CBC;
812 cts_restore_flag = 1;
815 /* Setup DX request structure */
816 ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
817 ssi_req.user_arg = (void *)areq;
819 #ifdef ENABLE_CYCLE_COUNT
820 ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
821 STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;
825 /* Setup request context */
826 req_ctx->gen_ctx.op_type = direction;
828 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_0);
830 /* STAT_PHASE_1: Map buffers */
833 rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
834 if (unlikely(rc != 0)) {
835 SSI_LOG_ERR("map_request() failed\n");
839 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_1);
841 /* STAT_PHASE_2: Create sequence */
844 /* Setup processing */
845 #if SSI_CC_HAS_MULTI2
846 if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
847 ssi_blkcipher_create_multi2_setup_desc(tfm,
853 #endif /*SSI_CC_HAS_MULTI2*/
855 ssi_blkcipher_create_setup_desc(tfm,
862 /* Data processing */
863 ssi_blkcipher_create_data_desc(tfm,
870 /* do we need to generate IV? */
871 if (req_ctx->is_giv == true) {
872 ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
873 ssi_req.ivgen_dma_addr_len = 1;
874 /* set the IV size (8/16 B long)*/
875 ssi_req.ivgen_size = ivsize;
877 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_2);
879 /* STAT_PHASE_3: Lock HW and push sequence */
/* Last arg: 1 = async (interrupt on completion), 0 = synchronous wait. */
882 rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (areq == NULL)? 0:1);
884 if (unlikely(rc != -EINPROGRESS)) {
885 /* Failed to send the request or request completed synchronously */
886 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
889 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
/* Sync path: on error unmap here; on success let complete() do it. */
892 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
893 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
895 END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
896 rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst,
898 ctx_p->drvdata->cc_base);
/* Restore the per-tfm CTS mode if we temporarily switched to CBC. */
903 if (cts_restore_flag != 0)
904 ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;
/*
 * Async completion callback (registered as ssi_req.user_cb): recover the
 * ablkcipher request from the opaque pointer and finish it via the common
 * completion path.
 */
909 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
911 struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
912 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
913 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
914 struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
915 unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
917 CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
919 ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src,
920 ivsize, areq, cc_base);
/*
 * Sync (blkcipher) tfm init: allocate the per-tfm request context used by
 * the synchronous encrypt/decrypt wrappers, then run the common init.
 */
925 static int ssi_sblkcipher_init(struct crypto_tfm *tfm)
927 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
929 /* Allocate sync ctx buffer */
930 ctx_p->sync_ctx = kmalloc(sizeof(struct blkcipher_req_ctx), GFP_KERNEL|GFP_DMA);
931 if (!ctx_p->sync_ctx) {
932 SSI_LOG_ERR("Allocating sync ctx buffer in context failed\n");
935 SSI_LOG_DEBUG("Allocated sync ctx buffer in context ctx_p->sync_ctx=@%p\n",
938 return ssi_blkcipher_init(tfm);
/* Sync tfm teardown: free the sync request context, then common exit. */
942 static void ssi_sblkcipher_exit(struct crypto_tfm *tfm)
944 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
946 kfree(ctx_p->sync_ctx);
/* NOTE(review): logs the pointer after kfree — harmless but use-after-free
 * in spirit; upstream may have reordered these lines. */
947 SSI_LOG_DEBUG("Free sync ctx buffer in context ctx_p->sync_ctx=@%p\n", ctx_p->sync_ctx);
949 ssi_blkcipher_exit(tfm);
/*
 * Synchronous encrypt entry point: runs the common process path with
 * areq == NULL, which makes send_request() wait for completion.
 */
953 static int ssi_sblkcipher_encrypt(struct blkcipher_desc *desc,
954 struct scatterlist *dst, struct scatterlist *src,
957 struct crypto_blkcipher *blk_tfm = desc->tfm;
958 struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
959 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
960 struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
961 unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
963 req_ctx->backup_info = desc->info;
964 req_ctx->is_giv = false;
966 return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_ENCRYPT);
/*
 * Synchronous decrypt entry point; mirror of ssi_sblkcipher_encrypt()
 * with the decrypt direction.
 */
969 static int ssi_sblkcipher_decrypt(struct blkcipher_desc *desc,
970 struct scatterlist *dst, struct scatterlist *src,
973 struct crypto_blkcipher *blk_tfm = desc->tfm;
974 struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
975 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
976 struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
977 unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
979 req_ctx->backup_info = desc->info;
980 req_ctx->is_giv = false;
982 return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_DECRYPT);
986 /* Async wrap functions */
/*
 * Async tfm init: reserve per-request context space in the crypto-API
 * request, then run the common init.
 */
988 static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
990 struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
992 ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
994 return ssi_blkcipher_init(tfm);
/* Async setkey: thin forwarder to the common setkey implementation.
 * NOTE(review): the key-pointer parameter line is missing from this
 * extract. */
998 static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
1000 unsigned int keylen)
1002 return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
/*
 * Async encrypt entry point: passes the request itself as areq so the
 * common path completes it via ssi_ablkcipher_complete().
 */
1005 static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
1007 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1008 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1009 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1010 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1012 req_ctx->backup_info = req->info;
1013 req_ctx->is_giv = false;
1015 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
/*
 * Async decrypt entry point; mirror of ssi_ablkcipher_encrypt() with the
 * decrypt direction.
 */
1018 static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
1020 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1021 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1022 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1023 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1025 req_ctx->backup_info = req->info;
1026 req_ctx->is_giv = false;
1027 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
1031 /* DX Block cipher alg */
1032 static struct ssi_alg_template blkcipher_algs[] = {
1033 /* Async template */
1034 #if SSI_CC_HAS_AES_XTS
1037 .driver_name = "xts-aes-dx",
1038 .blocksize = AES_BLOCK_SIZE,
1039 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1040 .template_ablkcipher = {
1041 .setkey = ssi_ablkcipher_setkey,
1042 .encrypt = ssi_ablkcipher_encrypt,
1043 .decrypt = ssi_ablkcipher_decrypt,
1044 .min_keysize = AES_MIN_KEY_SIZE * 2,
1045 .max_keysize = AES_MAX_KEY_SIZE * 2,
1046 .ivsize = AES_BLOCK_SIZE,
1049 .cipher_mode = DRV_CIPHER_XTS,
1050 .flow_mode = S_DIN_to_AES,
1051 .synchronous = false,
1055 .driver_name = "xts-aes-du512-dx",
1056 .blocksize = AES_BLOCK_SIZE,
1057 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1058 .template_ablkcipher = {
1059 .setkey = ssi_ablkcipher_setkey,
1060 .encrypt = ssi_ablkcipher_encrypt,
1061 .decrypt = ssi_ablkcipher_decrypt,
1062 .min_keysize = AES_MIN_KEY_SIZE * 2,
1063 .max_keysize = AES_MAX_KEY_SIZE * 2,
1064 .ivsize = AES_BLOCK_SIZE,
1066 .cipher_mode = DRV_CIPHER_XTS,
1067 .flow_mode = S_DIN_to_AES,
1068 .synchronous = false,
1072 .driver_name = "xts-aes-du4096-dx",
1073 .blocksize = AES_BLOCK_SIZE,
1074 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1075 .template_ablkcipher = {
1076 .setkey = ssi_ablkcipher_setkey,
1077 .encrypt = ssi_ablkcipher_encrypt,
1078 .decrypt = ssi_ablkcipher_decrypt,
1079 .min_keysize = AES_MIN_KEY_SIZE * 2,
1080 .max_keysize = AES_MAX_KEY_SIZE * 2,
1081 .ivsize = AES_BLOCK_SIZE,
1083 .cipher_mode = DRV_CIPHER_XTS,
1084 .flow_mode = S_DIN_to_AES,
1085 .synchronous = false,
1087 #endif /*SSI_CC_HAS_AES_XTS*/
1088 #if SSI_CC_HAS_AES_ESSIV
1090 .name = "essiv(aes)",
1091 .driver_name = "essiv-aes-dx",
1092 .blocksize = AES_BLOCK_SIZE,
1093 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1094 .template_ablkcipher = {
1095 .setkey = ssi_ablkcipher_setkey,
1096 .encrypt = ssi_ablkcipher_encrypt,
1097 .decrypt = ssi_ablkcipher_decrypt,
1098 .min_keysize = AES_MIN_KEY_SIZE * 2,
1099 .max_keysize = AES_MAX_KEY_SIZE * 2,
1100 .ivsize = AES_BLOCK_SIZE,
1102 .cipher_mode = DRV_CIPHER_ESSIV,
1103 .flow_mode = S_DIN_to_AES,
1104 .synchronous = false,
1107 .name = "essiv(aes)",
1108 .driver_name = "essiv-aes-du512-dx",
1109 .blocksize = AES_BLOCK_SIZE,
1110 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1111 .template_ablkcipher = {
1112 .setkey = ssi_ablkcipher_setkey,
1113 .encrypt = ssi_ablkcipher_encrypt,
1114 .decrypt = ssi_ablkcipher_decrypt,
1115 .min_keysize = AES_MIN_KEY_SIZE * 2,
1116 .max_keysize = AES_MAX_KEY_SIZE * 2,
1117 .ivsize = AES_BLOCK_SIZE,
1119 .cipher_mode = DRV_CIPHER_ESSIV,
1120 .flow_mode = S_DIN_to_AES,
1121 .synchronous = false,
1124 .name = "essiv(aes)",
1125 .driver_name = "essiv-aes-du4096-dx",
1126 .blocksize = AES_BLOCK_SIZE,
1127 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1128 .template_ablkcipher = {
1129 .setkey = ssi_ablkcipher_setkey,
1130 .encrypt = ssi_ablkcipher_encrypt,
1131 .decrypt = ssi_ablkcipher_decrypt,
1132 .min_keysize = AES_MIN_KEY_SIZE * 2,
1133 .max_keysize = AES_MAX_KEY_SIZE * 2,
1134 .ivsize = AES_BLOCK_SIZE,
1136 .cipher_mode = DRV_CIPHER_ESSIV,
1137 .flow_mode = S_DIN_to_AES,
1138 .synchronous = false,
1140 #endif /*SSI_CC_HAS_AES_ESSIV*/
1141 #if SSI_CC_HAS_AES_BITLOCKER
1143 .name = "bitlocker(aes)",
1144 .driver_name = "bitlocker-aes-dx",
1145 .blocksize = AES_BLOCK_SIZE,
1146 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1147 .template_ablkcipher = {
1148 .setkey = ssi_ablkcipher_setkey,
1149 .encrypt = ssi_ablkcipher_encrypt,
1150 .decrypt = ssi_ablkcipher_decrypt,
1151 .min_keysize = AES_MIN_KEY_SIZE * 2,
1152 .max_keysize = AES_MAX_KEY_SIZE * 2,
1153 .ivsize = AES_BLOCK_SIZE,
1155 .cipher_mode = DRV_CIPHER_BITLOCKER,
1156 .flow_mode = S_DIN_to_AES,
1157 .synchronous = false,
1160 .name = "bitlocker(aes)",
1161 .driver_name = "bitlocker-aes-du512-dx",
1162 .blocksize = AES_BLOCK_SIZE,
1163 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1164 .template_ablkcipher = {
1165 .setkey = ssi_ablkcipher_setkey,
1166 .encrypt = ssi_ablkcipher_encrypt,
1167 .decrypt = ssi_ablkcipher_decrypt,
1168 .min_keysize = AES_MIN_KEY_SIZE * 2,
1169 .max_keysize = AES_MAX_KEY_SIZE * 2,
1170 .ivsize = AES_BLOCK_SIZE,
1172 .cipher_mode = DRV_CIPHER_BITLOCKER,
1173 .flow_mode = S_DIN_to_AES,
1174 .synchronous = false,
1177 .name = "bitlocker(aes)",
1178 .driver_name = "bitlocker-aes-du4096-dx",
1179 .blocksize = AES_BLOCK_SIZE,
1180 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1181 .template_ablkcipher = {
1182 .setkey = ssi_ablkcipher_setkey,
1183 .encrypt = ssi_ablkcipher_encrypt,
1184 .decrypt = ssi_ablkcipher_decrypt,
1185 .min_keysize = AES_MIN_KEY_SIZE * 2,
1186 .max_keysize = AES_MAX_KEY_SIZE * 2,
1187 .ivsize = AES_BLOCK_SIZE,
1189 .cipher_mode = DRV_CIPHER_BITLOCKER,
1190 .flow_mode = S_DIN_to_AES,
1191 .synchronous = false,
1193 #endif /*SSI_CC_HAS_AES_BITLOCKER*/
1196 .driver_name = "ecb-aes-dx",
1197 .blocksize = AES_BLOCK_SIZE,
1198 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1199 .template_ablkcipher = {
1200 .setkey = ssi_ablkcipher_setkey,
1201 .encrypt = ssi_ablkcipher_encrypt,
1202 .decrypt = ssi_ablkcipher_decrypt,
1203 .min_keysize = AES_MIN_KEY_SIZE,
1204 .max_keysize = AES_MAX_KEY_SIZE,
1207 .cipher_mode = DRV_CIPHER_ECB,
1208 .flow_mode = S_DIN_to_AES,
1209 .synchronous = false,
1213 .driver_name = "cbc-aes-dx",
1214 .blocksize = AES_BLOCK_SIZE,
1215 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1216 .template_ablkcipher = {
1217 .setkey = ssi_ablkcipher_setkey,
1218 .encrypt = ssi_ablkcipher_encrypt,
1219 .decrypt = ssi_ablkcipher_decrypt,
1220 .min_keysize = AES_MIN_KEY_SIZE,
1221 .max_keysize = AES_MAX_KEY_SIZE,
1222 .ivsize = AES_BLOCK_SIZE,
1224 .cipher_mode = DRV_CIPHER_CBC,
1225 .flow_mode = S_DIN_to_AES,
1226 .synchronous = false,
1230 .driver_name = "ofb-aes-dx",
1231 .blocksize = AES_BLOCK_SIZE,
1232 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1233 .template_ablkcipher = {
1234 .setkey = ssi_ablkcipher_setkey,
1235 .encrypt = ssi_ablkcipher_encrypt,
1236 .decrypt = ssi_ablkcipher_decrypt,
1237 .min_keysize = AES_MIN_KEY_SIZE,
1238 .max_keysize = AES_MAX_KEY_SIZE,
1239 .ivsize = AES_BLOCK_SIZE,
1241 .cipher_mode = DRV_CIPHER_OFB,
1242 .flow_mode = S_DIN_to_AES,
1243 .synchronous = false,
1245 #if SSI_CC_HAS_AES_CTS
1247 .name = "cts1(cbc(aes))",
1248 .driver_name = "cts1-cbc-aes-dx",
1249 .blocksize = AES_BLOCK_SIZE,
1250 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1251 .template_ablkcipher = {
1252 .setkey = ssi_ablkcipher_setkey,
1253 .encrypt = ssi_ablkcipher_encrypt,
1254 .decrypt = ssi_ablkcipher_decrypt,
1255 .min_keysize = AES_MIN_KEY_SIZE,
1256 .max_keysize = AES_MAX_KEY_SIZE,
1257 .ivsize = AES_BLOCK_SIZE,
1259 .cipher_mode = DRV_CIPHER_CBC_CTS,
1260 .flow_mode = S_DIN_to_AES,
1261 .synchronous = false,
1266 .driver_name = "ctr-aes-dx",
1268 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1269 .template_ablkcipher = {
1270 .setkey = ssi_ablkcipher_setkey,
1271 .encrypt = ssi_ablkcipher_encrypt,
1272 .decrypt = ssi_ablkcipher_decrypt,
1273 .min_keysize = AES_MIN_KEY_SIZE,
1274 .max_keysize = AES_MAX_KEY_SIZE,
1275 .ivsize = AES_BLOCK_SIZE,
1277 .cipher_mode = DRV_CIPHER_CTR,
1278 .flow_mode = S_DIN_to_AES,
1279 .synchronous = false,
1282 .name = "cbc(des3_ede)",
1283 .driver_name = "cbc-3des-dx",
1284 .blocksize = DES3_EDE_BLOCK_SIZE,
1285 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1286 .template_ablkcipher = {
1287 .setkey = ssi_ablkcipher_setkey,
1288 .encrypt = ssi_ablkcipher_encrypt,
1289 .decrypt = ssi_ablkcipher_decrypt,
1290 .min_keysize = DES3_EDE_KEY_SIZE,
1291 .max_keysize = DES3_EDE_KEY_SIZE,
1292 .ivsize = DES3_EDE_BLOCK_SIZE,
1294 .cipher_mode = DRV_CIPHER_CBC,
1295 .flow_mode = S_DIN_to_DES,
1296 .synchronous = false,
1299 .name = "ecb(des3_ede)",
1300 .driver_name = "ecb-3des-dx",
1301 .blocksize = DES3_EDE_BLOCK_SIZE,
1302 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1303 .template_ablkcipher = {
1304 .setkey = ssi_ablkcipher_setkey,
1305 .encrypt = ssi_ablkcipher_encrypt,
1306 .decrypt = ssi_ablkcipher_decrypt,
1307 .min_keysize = DES3_EDE_KEY_SIZE,
1308 .max_keysize = DES3_EDE_KEY_SIZE,
1311 .cipher_mode = DRV_CIPHER_ECB,
1312 .flow_mode = S_DIN_to_DES,
1313 .synchronous = false,
1317 .driver_name = "cbc-des-dx",
1318 .blocksize = DES_BLOCK_SIZE,
1319 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1320 .template_ablkcipher = {
1321 .setkey = ssi_ablkcipher_setkey,
1322 .encrypt = ssi_ablkcipher_encrypt,
1323 .decrypt = ssi_ablkcipher_decrypt,
1324 .min_keysize = DES_KEY_SIZE,
1325 .max_keysize = DES_KEY_SIZE,
1326 .ivsize = DES_BLOCK_SIZE,
1328 .cipher_mode = DRV_CIPHER_CBC,
1329 .flow_mode = S_DIN_to_DES,
1330 .synchronous = false,
1334 .driver_name = "ecb-des-dx",
1335 .blocksize = DES_BLOCK_SIZE,
1336 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1337 .template_ablkcipher = {
1338 .setkey = ssi_ablkcipher_setkey,
1339 .encrypt = ssi_ablkcipher_encrypt,
1340 .decrypt = ssi_ablkcipher_decrypt,
1341 .min_keysize = DES_KEY_SIZE,
1342 .max_keysize = DES_KEY_SIZE,
1345 .cipher_mode = DRV_CIPHER_ECB,
1346 .flow_mode = S_DIN_to_DES,
1347 .synchronous = false,
1349 #if SSI_CC_HAS_MULTI2
1351 .name = "cbc(multi2)",
1352 .driver_name = "cbc-multi2-dx",
1353 .blocksize = CC_MULTI2_BLOCK_SIZE,
1354 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1355 .template_ablkcipher = {
1356 .setkey = ssi_ablkcipher_setkey,
1357 .encrypt = ssi_ablkcipher_encrypt,
1358 .decrypt = ssi_ablkcipher_decrypt,
1359 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1360 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1361 .ivsize = CC_MULTI2_IV_SIZE,
1363 .cipher_mode = DRV_MULTI2_CBC,
1364 .flow_mode = S_DIN_to_MULTI2,
1365 .synchronous = false,
1368 .name = "ofb(multi2)",
1369 .driver_name = "ofb-multi2-dx",
1371 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1372 .template_ablkcipher = {
1373 .setkey = ssi_ablkcipher_setkey,
1374 .encrypt = ssi_ablkcipher_encrypt,
1375 .decrypt = ssi_ablkcipher_encrypt,
1376 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1377 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1378 .ivsize = CC_MULTI2_IV_SIZE,
1380 .cipher_mode = DRV_MULTI2_OFB,
1381 .flow_mode = S_DIN_to_MULTI2,
1382 .synchronous = false,
1384 #endif /*SSI_CC_HAS_MULTI2*/
1388 struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1390 struct ssi_crypto_alg *t_alg;
1391 struct crypto_alg *alg;
1393 t_alg = kzalloc(sizeof(struct ssi_crypto_alg), GFP_KERNEL);
1395 SSI_LOG_ERR("failed to allocate t_alg\n");
1396 return ERR_PTR(-ENOMEM);
1399 alg = &t_alg->crypto_alg;
1401 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1402 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1403 template->driver_name);
1404 alg->cra_module = THIS_MODULE;
1405 alg->cra_priority = SSI_CRA_PRIO;
1406 alg->cra_blocksize = template->blocksize;
1407 alg->cra_alignmask = 0;
1408 alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1410 alg->cra_init = template->synchronous? ssi_sblkcipher_init:ssi_ablkcipher_init;
1411 alg->cra_exit = template->synchronous? ssi_sblkcipher_exit:ssi_blkcipher_exit;
1412 alg->cra_type = template->synchronous? &crypto_blkcipher_type:&crypto_ablkcipher_type;
1413 if(template->synchronous) {
1414 alg->cra_blkcipher = template->template_sblkcipher;
1415 alg->cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1418 alg->cra_ablkcipher = template->template_ablkcipher;
1419 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
1423 t_alg->cipher_mode = template->cipher_mode;
1424 t_alg->flow_mode = template->flow_mode;
1429 int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1431 struct ssi_crypto_alg *t_alg, *n;
1432 struct ssi_blkcipher_handle *blkcipher_handle =
1433 drvdata->blkcipher_handle;
1435 dev = &drvdata->plat_dev->dev;
1437 if (blkcipher_handle != NULL) {
1438 /* Remove registered algs */
1439 list_for_each_entry_safe(t_alg, n,
1440 &blkcipher_handle->blkcipher_alg_list,
1442 crypto_unregister_alg(&t_alg->crypto_alg);
1443 list_del(&t_alg->entry);
1446 kfree(blkcipher_handle);
1447 drvdata->blkcipher_handle = NULL;
1454 int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1456 struct ssi_blkcipher_handle *ablkcipher_handle;
1457 struct ssi_crypto_alg *t_alg;
1461 ablkcipher_handle = kmalloc(sizeof(struct ssi_blkcipher_handle),
1463 if (ablkcipher_handle == NULL)
1466 drvdata->blkcipher_handle = ablkcipher_handle;
1468 INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1471 SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1472 for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1473 SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1474 t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1475 if (IS_ERR(t_alg)) {
1476 rc = PTR_ERR(t_alg);
1477 SSI_LOG_ERR("%s alg allocation failed\n",
1478 blkcipher_algs[alg].driver_name);
1481 t_alg->drvdata = drvdata;
1483 SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1484 rc = crypto_register_alg(&t_alg->crypto_alg);
1485 SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1486 t_alg->crypto_alg.cra_driver_name, rc);
1487 if (unlikely(rc != 0)) {
1488 SSI_LOG_ERR("%s alg registration failed\n",
1489 t_alg->crypto_alg.cra_driver_name);
1493 list_add_tail(&t_alg->entry,
1494 &ablkcipher_handle->blkcipher_alg_list);
1495 SSI_LOG_DEBUG("Registered %s\n",
1496 t_alg->crypto_alg.cra_driver_name);
1502 ssi_ablkcipher_free(drvdata);