2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
13 * You should have received a copy of the GNU General Public License
14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/semaphore.h>
21 #include <crypto/algapi.h>
22 #include <crypto/internal/skcipher.h>
23 #include <crypto/aes.h>
24 #include <crypto/ctr.h>
25 #include <crypto/des.h>
27 #include "ssi_config.h"
28 #include "ssi_driver.h"
29 #include "cc_lli_defs.h"
30 #include "ssi_buffer_mgr.h"
31 #include "ssi_cipher.h"
32 #include "ssi_request_mgr.h"
33 #include "ssi_sysfs.h"
34 #include "ssi_fips_local.h"
/* Upper bound on the number of HW descriptors one cipher operation builds. */
36 #define MAX_ABLKCIPHER_SEQ_LEN 6
/* Shorthands for the per-alg-type template members of the template union. */
38 #define template_ablkcipher template_u.ablkcipher
39 #define template_sblkcipher template_u.blkcipher
/* AES-XTS data-unit size limits, enforced in validate_data_size(). */
41 #define SSI_MIN_AES_XTS_SIZE 0x10
42 #define SSI_MAX_AES_XTS_SIZE 0x2000
/*
 * Per-driver handle: anchors the list of registered block-cipher algs.
 * NOTE(review): closing braces of these structs fall in gaps of this view.
 */
43 struct ssi_blkcipher_handle {
44 	struct list_head blkcipher_alg_list;
/* User-supplied key material; key_dma_addr is its dma_map_single() mapping. */
47 struct cc_user_key_info {
49 	dma_addr_t key_dma_addr;
/* HW (protected) key slots; key2_slot is only used by two-key modes. */
51 struct cc_hw_key_info {
52 	enum HwCryptoKey key1_slot;
53 	enum HwCryptoKey key2_slot;
/* Per-tfm cipher context shared by the sync and async code paths. */
56 struct ssi_ablkcipher_ctx {
57 	struct ssi_drvdata *drvdata;
	/* Request context used by the synchronous (blkcipher) wrappers. */
63 	struct blkcipher_req_ctx *sync_ctx;
64 	struct cc_user_key_info user;
65 	struct cc_hw_key_info hw;
	/* SW SHA-256 transform, allocated only for ESSIV key derivation. */
66 	struct crypto_shash *shash_tfm;
69 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
/*
 * validate_keys_sizes() - check that @size is a legal key length for the
 * context's flow mode (AES / DES / MULTI2) and cipher mode.
 * Two-key modes (XTS/ESSIV/BITLOCKER) only accept doubled AES key sizes;
 * single-key AES modes reject those.
 * NOTE(review): the return statements sit in gaps of this view — presumably
 * 0 on success and an error otherwise; confirm against the full file.
 */
72 static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, uint32_t size) {
73 	switch (ctx_p->flow_mode){
	/* Plain AES: 128/192-bit keys are valid only for single-key modes. */
76 	case CC_AES_128_BIT_KEY_SIZE:
77 	case CC_AES_192_BIT_KEY_SIZE:
78 		if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
79 			   (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
80 			   (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
83 	case CC_AES_256_BIT_KEY_SIZE:
	/* Doubled key sizes are reserved for the two-key modes below. */
85 	case (CC_AES_192_BIT_KEY_SIZE*2):
86 	case (CC_AES_256_BIT_KEY_SIZE*2):
87 		if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
88 			   (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
89 			   (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
	/* DES flow: accept single-DES or triple-DES key lengths. */
96 		if (likely(size == DES3_EDE_KEY_SIZE ||
97 			   size == DES_KEY_SIZE))
100 #if SSI_CC_HAS_MULTI2
101 	case S_DIN_to_MULTI2:
102 		if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
/*
 * validate_data_size() - check that the request length @size is legal for
 * the context's flow mode and cipher mode.
 * NOTE(review): return statements fall in gaps of this view; comments
 * describe only the visible conditions.
 */
114 static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
115 	switch (ctx_p->flow_mode){
117 		switch (ctx_p->cipher_mode){
		/* XTS: bounded by SSI_MIN/MAX_AES_XTS_SIZE and block-aligned. */
119 			if ((size >= SSI_MIN_AES_XTS_SIZE) &&
120 			    (size <= SSI_MAX_AES_XTS_SIZE) &&
121 			    IS_ALIGNED(size, AES_BLOCK_SIZE))
		/* CTS needs at least one full AES block. */
124 		case DRV_CIPHER_CBC_CTS:
125 			if (likely(size >= AES_BLOCK_SIZE))
		/* ESSIV/BITLOCKER (and the mode on the hidden line above):
		 * any multiple of the AES block size. */
133 		case DRV_CIPHER_ESSIV:
134 		case DRV_CIPHER_BITLOCKER:
135 			if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
	/* DES flow: multiple of the DES block size. */
143 		if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
146 #if SSI_CC_HAS_MULTI2
147 	case S_DIN_to_MULTI2:
148 		switch (ctx_p->cipher_mode) {
150 			if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
159 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * get_max_keysize() - return the registered max_keysize of the algorithm
 * backing @tfm, reading from the ablkcipher or blkcipher template depending
 * on the alg type flags. Used to size/map the context key buffer.
 */
167 static unsigned int get_max_keysize(struct crypto_tfm *tfm)
169 	struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
171 	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER) {
172 		return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
175 	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER) {
176 		return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
/*
 * ssi_blkcipher_init() - common tfm init for sync and async wrappers.
 * Copies the cipher/flow modes from the registered alg into the context,
 * allocates a DMA-able key buffer sized for the alg's max keysize, maps it
 * for device reads, and allocates a SW SHA-256 transform when the mode is
 * ESSIV (used later for key2 derivation in setkey).
 * NOTE(review): error-path cleanup lines (e.g. freeing the key buffer when
 * the DMA mapping fails) fall in gaps of this view — verify in full file.
 */
182 static int ssi_blkcipher_init(struct crypto_tfm *tfm)
184 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
185 	struct crypto_alg *alg = tfm->__crt_alg;
186 	struct ssi_crypto_alg *ssi_alg =
187 			container_of(alg, struct ssi_crypto_alg, crypto_alg);
190 	unsigned int max_key_buf_size = get_max_keysize(tfm);
192 	SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p,
193 						crypto_tfm_alg_name(tfm));
195 	CHECK_AND_RETURN_UPON_FIPS_ERROR();
196 	ctx_p->cipher_mode = ssi_alg->cipher_mode;
197 	ctx_p->flow_mode = ssi_alg->flow_mode;
198 	ctx_p->drvdata = ssi_alg->drvdata;
199 	dev = &ctx_p->drvdata->plat_dev->dev;
201 	/* Allocate key buffer, cache line aligned */
202 	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL|GFP_DMA);
203 	if (!ctx_p->user.key) {
204 		SSI_LOG_ERR("Allocating key buffer in context failed\n");
207 	SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
	/* Map the key buffer once for the context lifetime (device reads it). */
211 	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
212 					     max_key_buf_size, DMA_TO_DEVICE);
213 	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
214 		SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
215 			max_key_buf_size, ctx_p->user.key);
218 	SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr, max_key_buf_size);
219 	SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
220 		max_key_buf_size, ctx_p->user.key,
221 		(unsigned long long)ctx_p->user.key_dma_addr);
223 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
224 		/* Alloc hash tfm for essiv */
225 		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
226 		if (IS_ERR(ctx_p->shash_tfm)) {
227 			SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
228 			return PTR_ERR(ctx_p->shash_tfm);
/*
 * ssi_blkcipher_exit() - tfm teardown: releases everything allocated in
 * ssi_blkcipher_init() in reverse order (ESSIV shash, DMA mapping, key
 * buffer).
 */
235 static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
237 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
238 	struct device *dev = &ctx_p->drvdata->plat_dev->dev;
239 	unsigned int max_key_buf_size = get_max_keysize(tfm);
241 	SSI_LOG_DEBUG("Clearing context @%p for %s\n",
242 		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
244 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
245 		/* Free hash tfm for essiv */
246 		crypto_free_shash(ctx_p->shash_tfm);
247 		ctx_p->shash_tfm = NULL;
250 	/* Unmap key buffer */
251 	SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
252 	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
254 	SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n",
255 		(unsigned long long)ctx_p->user.key_dma_addr);
257 	/* Free key buffer in context */
258 	kfree(ctx_p->user.key);
259 	SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
/* Layout of a 3DES key blob: three independent single-DES keys. */
263 typedef struct tdes_keys{
264         u8      key1[DES_KEY_SIZE];
265         u8      key2[DES_KEY_SIZE];
266         u8      key3[DES_KEY_SIZE];
/* 32 bytes of zeros; NOTE(review): its user is not visible in this view. */
269 static const u8 zero_buff[] = {0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
270 			       0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
271 			       0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
272 			       0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
274 /* The function verifies that tdes keys are not weak.*/
/*
 * FIPS check: a 3DES key degenerates to DES when adjacent sub-keys repeat,
 * so reject key1 == key2 or key3 == key2. Compiled out (and presumably
 * returns success) without CCREE_FIPS_SUPPORT.
 */
275 static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
277 #ifdef CCREE_FIPS_SUPPORT
278 	tdes_keys_t *tdes_key = (tdes_keys_t*)key;
280 	/* verify key1 != key2 and key3 != key2*/
281 	if (unlikely( (memcmp((u8*)tdes_key->key1, (u8*)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
282 		      (memcmp((u8*)tdes_key->key3, (u8*)tdes_key->key2, sizeof(tdes_key->key3)) == 0) )) {
285 #endif /* CCREE_FIPS_SUPPORT */
290 /* The function verifies that xts keys are not weak.*/
/*
 * FIPS check: an XTS key whose tweak half equals its data half is weak
 * (IEEE 1619 prohibition); reject it. Compiled out without
 * CCREE_FIPS_SUPPORT.
 */
291 static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
293 #ifdef CCREE_FIPS_SUPPORT
294 	/* Weak key is define as key that its first half (128/256 lsb) equals its second half (128/256 msb) */
295 	int singleKeySize = keylen >> 1;
297 	if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0)) {
300 #endif /* CCREE_FIPS_SUPPORT */
305 static enum HwCryptoKey hw_key_to_cc_hw_key(int slot_num)
/*
 * ssi_blkcipher_setkey() - common setkey for sync and async wrappers.
 *
 * Two paths:
 *  1. HW-key path (ssi_is_hw_key()): @key is an arm_hw_key_info naming
 *     protected key slots; resolve slot numbers, require AES flow, and for
 *     two-key modes require two distinct slots. No key bytes are copied.
 *  2. Normal path: validate size and weak-key rules (DES parity via
 *     des_ekey, FIPS XTS/3DES checks), then copy the key into the
 *     DMA-mapped context buffer with cpu/device sync around the write.
 *     For ESSIV, key2 is derived as SHA-256(key1) in software.
 *
 * NOTE(review): several return statements and the MULTI2 keylen adjustment
 * fall in gaps of this view; comments describe only visible lines.
 */
320 static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
324 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
325 	struct device *dev = &ctx_p->drvdata->plat_dev->dev;
326 	u32 tmp[DES_EXPKEY_WORDS];
327 	unsigned int max_key_buf_size = get_max_keysize(tfm);
328 	DECL_CYCLE_COUNT_RESOURCES;
330 	SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
331 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
332 	dump_byte_array("key", (uint8_t *)key, keylen);
334 	CHECK_AND_RETURN_UPON_FIPS_ERROR();
336 	SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");
338 	/* STAT_PHASE_0: Init and sanity checks */
341 #if SSI_CC_HAS_MULTI2
342 	/*last byte of key buffer is round number and should not be a part of key size*/
343 	if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
346 #endif /*SSI_CC_HAS_MULTI2*/
348 	if (unlikely(validate_keys_sizes(ctx_p,keylen) != 0)) {
349 		SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
350 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
354 	if (ssi_is_hw_key(tfm)) {
355 		/* setting HW key slots */
356 		struct arm_hw_key_info *hki = (struct arm_hw_key_info*)key;
358 		if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
359 			SSI_LOG_ERR("HW key not supported for non-AES flows\n");
363 		ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
364 		if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
365 			SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
		/* Two-key modes need a second, distinct HW slot. */
369 		if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
370 		    (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
371 		    (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
372 			if (unlikely(hki->hw_key1 == hki->hw_key2)) {
373 				SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
376 			ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
377 			if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
378 				SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
383 		ctx_p->keylen = keylen;
384 		END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
385 		SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");
	/* Normal (software-provided) key path: weak-key screening first. */
391 	if (ctx_p->flow_mode == S_DIN_to_DES) {
		/* des_ekey() returns 0 for weak DES keys; honor REQ_WEAK_KEY. */
392 		if (unlikely(!des_ekey(tmp, key)) &&
393 		    (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
394 			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
395 			SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak DES key");
399 	if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
400 	    ssi_fips_verify_xts_keys(key, keylen) != 0) {
401 		SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
404 	if ((ctx_p->flow_mode == S_DIN_to_DES) &&
405 	    (keylen == DES3_EDE_KEY_SIZE) &&
406 	    ssi_fips_verify_3des_keys(key, keylen) != 0) {
407 		SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
412 	END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
414 	/* STAT_PHASE_1: Copy key to ctx */
	/* Take the buffer back from the device before the CPU writes it. */
416 	SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
417 	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
418 				max_key_buf_size, DMA_TO_DEVICE);
419 #if SSI_CC_HAS_MULTI2
420 	if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
421 		memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
		/* Round count travels as the byte after the key material. */
422 		ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
423 		if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
424 		    ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
425 			crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
426 			SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
430 #endif /*SSI_CC_HAS_MULTI2*/
432 		memcpy(ctx_p->user.key, key, keylen);
		/* Zero-pad past a 24-byte (192-bit) key up to the AES max. */
434 			memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
436 		if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
437 			/* sha256 for key2 - use sw implementation */
438 			int key_len = keylen >> 1;
440 			SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
441 			desc->tfm = ctx_p->shash_tfm;
			/* key2 (second half of the buffer) = SHA-256(key1). */
443 			err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
445 				SSI_LOG_ERR("Failed to hash ESSIV key.\n");
	/* Hand the updated buffer back to the device. */
450 	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
451 				   max_key_buf_size, DMA_TO_DEVICE);
452 	SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr ,max_key_buf_size);
453 	ctx_p->keylen = keylen;
455 	END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_1);
457 	 SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
/*
 * ssi_blkcipher_create_setup_desc() - append the cipher "setup" HW
 * descriptors (state/IV load plus key load) to @desc, advancing *seq_size.
 *
 * Per cipher mode:
 *  - CBC/CTS/CTR/OFB-family: one state-load descriptor (IV), then one key
 *    descriptor (HW slot or DMA from the mapped key buffer).
 *  - XTS/ESSIV/BITLOCKER: key1 (first half) to the AES engine, key2
 *    (second half) as XEX key to AES2 with the data-unit size, then the IV
 *    via SETUP_LOAD_STATE1.
 * The XEX data-unit size defaults to @nbytes but is overridden by the
 * CRYPTO_ALG_BULK_DU_512/4096 alg flags.
 *
 * NOTE(review): break statements and some case labels sit in gaps of this
 * view.
 */
462 ssi_blkcipher_create_setup_desc(
463 	struct crypto_tfm *tfm,
464 	struct blkcipher_req_ctx *req_ctx,
468 	unsigned int *seq_size)
470 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
471 	int cipher_mode = ctx_p->cipher_mode;
472 	int flow_mode = ctx_p->flow_mode;
473 	int direction = req_ctx->gen_ctx.op_type;
474 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
475 	unsigned int key_len = ctx_p->keylen;
476 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
477 	unsigned int du_size = nbytes;
479 	struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
	/* Fixed data-unit-size variants override the per-request du_size. */
481 	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
483 	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
486 	switch (cipher_mode) {
488 	case DRV_CIPHER_CBC_CTS:
491 		/* Load cipher state */
492 		HW_DESC_INIT(&desc[*seq_size]);
493 		HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
496 		HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
497 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
498 		HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
		/* CTR/OFB load the counter/state differently from CBC. */
499 		if ((cipher_mode == DRV_CIPHER_CTR) ||
500 		    (cipher_mode == DRV_CIPHER_OFB) ) {
501 			HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
504 			HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
		/* Load key (single-key modes). */
511 		HW_DESC_INIT(&desc[*seq_size]);
512 		HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
513 		HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
514 		if (flow_mode == S_DIN_to_AES) {
516 			if (ssi_is_hw_key(tfm)) {
517 				HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
519 				HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
				/* 192-bit keys were zero-padded to the AES max
				 * in setkey; feed the padded length. */
521 						     ((key_len == 24) ? AES_MAX_KEY_SIZE : key_len),
524 			HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len);
527 			HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
528 					     key_dma_addr, key_len,
530 			HW_DESC_SET_KEY_SIZE_DES(&desc[*seq_size], key_len);
532 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
533 		HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
537 	case DRV_CIPHER_ESSIV:
538 	case DRV_CIPHER_BITLOCKER:
		/* Two-key modes: key1 = first half of the key buffer. */
540 		HW_DESC_INIT(&desc[*seq_size]);
541 		HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
542 		HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
543 		if (ssi_is_hw_key(tfm)) {
544 			HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
546 			HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
547 					     key_dma_addr, key_len/2,
550 		HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
551 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
552 		HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
		/* key2 (tweak/XEX key) = second half, loaded into AES2. */
556 		HW_DESC_INIT(&desc[*seq_size]);
557 		HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
558 		HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
559 		if (ssi_is_hw_key(tfm)) {
560 			HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key2_slot);
562 			HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
563 					     (key_dma_addr+key_len/2), key_len/2,
566 		HW_DESC_SET_XEX_DATA_UNIT_SIZE(&desc[*seq_size], du_size);
567 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], S_DIN_to_AES2);
568 		HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
569 		HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		/* Finally load the IV/tweak state. */
573 		HW_DESC_INIT(&desc[*seq_size]);
574 		HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
575 		HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
576 		HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
577 		HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
578 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
579 		HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
580 				     iv_dma_addr, CC_AES_BLOCK_SIZE,
585 		SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
590 #if SSI_CC_HAS_MULTI2
/*
 * ssi_blkcipher_create_multi2_setup_desc() - MULTI2 variant of the setup
 * sequence: load the system key, then the data key (with the round count
 * from setkey), then the IV/state.
 */
591 static inline void ssi_blkcipher_create_multi2_setup_desc(
592 	struct crypto_tfm *tfm,
593 	struct blkcipher_req_ctx *req_ctx,
596 	unsigned int *seq_size)
598 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
600 	int direction = req_ctx->gen_ctx.op_type;
601 	/* Load system key */
602 	HW_DESC_INIT(&desc[*seq_size]);
603 	HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
604 	HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
605 	HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
606 						CC_MULTI2_SYSTEM_KEY_SIZE,
608 	HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
609 	HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
	/* Load data key (stored right after the system key in the buffer). */
613 	HW_DESC_INIT(&desc[*seq_size]);
614 	HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
615 					(ctx_p->user.key_dma_addr +
616 						CC_MULTI2_SYSTEM_KEY_SIZE),
617 				CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
618 	HW_DESC_SET_MULTI2_NUM_ROUNDS(&desc[*seq_size],
619 						ctx_p->key_round_number);
620 	HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
621 	HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
622 	HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
623 	HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE0 );
	/* Load the IV into STATE1. */
628 	HW_DESC_INIT(&desc[*seq_size]);
629 	HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
630 			     req_ctx->gen_ctx.iv_dma_addr,
632 	HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
633 	HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
634 	HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
635 	HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
639 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * ssi_blkcipher_create_data_desc() - append the data-movement descriptors.
 *
 * Maps the context flow mode to the corresponding DIN->DOUT engine flow,
 * then either:
 *  - DLLI path (single contiguous src/dst): one DIN/DOUT descriptor; or
 *  - MLLI path (scatter lists): a BYPASS descriptor first copies the MLLI
 *    table into device SRAM, then a DMA_MLLI DIN/DOUT descriptor consumes
 *    it. In-place requests (out_nents == 0) reuse the input MLLI for
 *    output; otherwise the output MLLI follows the input entries in SRAM.
 * For async requests (@areq != NULL) the last descriptor raises a
 * completion interrupt (QUEUE_LAST_IND / the ack bit on DOUT).
 */
642 ssi_blkcipher_create_data_desc(
643 	struct crypto_tfm *tfm,
644 	struct blkcipher_req_ctx *req_ctx,
645 	struct scatterlist *dst, struct scatterlist *src,
649 	unsigned int *seq_size)
651 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
652 	unsigned int flow_mode = ctx_p->flow_mode;
654 	switch (ctx_p->flow_mode) {
656 		flow_mode = DIN_AES_DOUT;
659 		flow_mode = DIN_DES_DOUT;
661 #if SSI_CC_HAS_MULTI2
662 	case S_DIN_to_MULTI2:
663 		flow_mode = DIN_MULTI2_DOUT;
665 #endif /*SSI_CC_HAS_MULTI2*/
667 		SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
	/* Direct (DLLI) path: src and dst are each one DMA segment. */
671 	if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)){
672 		SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
673 			(unsigned long long)sg_dma_address(src),
675 		SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
676 			(unsigned long long)sg_dma_address(dst),
678 		HW_DESC_INIT(&desc[*seq_size]);
679 		HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
682 		HW_DESC_SET_DOUT_DLLI(&desc[*seq_size],
			/* Interrupt-on-completion only for async requests. */
685 				      NS_BIT, (areq == NULL)? 0:1);
687 			HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
689 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
		/* MLLI path: first push the MLLI table into device SRAM. */
693 		SSI_LOG_DEBUG(" bypass params addr 0x%llX "
694 			     "length 0x%X addr 0x%08X\n",
695 			(unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
696 			req_ctx->mlli_params.mlli_len,
697 			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
698 		HW_DESC_INIT(&desc[*seq_size]);
699 		HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
700 				     req_ctx->mlli_params.mlli_dma_addr,
701 				     req_ctx->mlli_params.mlli_len,
703 		HW_DESC_SET_DOUT_SRAM(&desc[*seq_size],
704 				      ctx_p->drvdata->mlli_sram_addr,
705 				      req_ctx->mlli_params.mlli_len);
706 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], BYPASS);
		/* Then the actual data descriptor reading MLLI from SRAM. */
709 		HW_DESC_INIT(&desc[*seq_size]);
710 		HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_MLLI,
711 			ctx_p->drvdata->mlli_sram_addr,
712 				     req_ctx->in_mlli_nents, NS_BIT);
713 		if (req_ctx->out_nents == 0) {
			/* In-place: output uses the same MLLI entries. */
714 			SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
716 			(unsigned int)ctx_p->drvdata->mlli_sram_addr,
717 			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
718 			HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
719 			ctx_p->drvdata->mlli_sram_addr,
720 					      req_ctx->in_mlli_nents,
721 					      NS_BIT,(areq == NULL)? 0:1);
723 			SSI_LOG_DEBUG(" din/dout params "
724 			"addr 0x%08X addr 0x%08X\n",
725 				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
726 				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
727 				(uint32_t)LLI_ENTRY_BYTE_SIZE *
				/* Output MLLI sits after the input entries. */
729 			HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
730 					      (ctx_p->drvdata->mlli_sram_addr +
731 					       LLI_ENTRY_BYTE_SIZE *
732 						req_ctx->in_mlli_nents),
733 					      req_ctx->out_mlli_nents, NS_BIT,(areq == NULL)? 0:1);
736 			HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
738 		HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
/*
 * ssi_blkcipher_complete() - common completion: unmap the request buffers,
 * decrement the BYPASS inflight counter, and for async requests call
 * ablkcipher_request_complete(). Returns the completion status
 * (completion_error, 0 in the visible path).
 * NOTE(review): the overwrite of @info with req_ctx->backup_info suggests
 * IV write-back to the caller — the consuming lines are not visible here.
 */
743 static int ssi_blkcipher_complete(struct device *dev,
744                                   struct ssi_ablkcipher_ctx *ctx_p,
745                                   struct blkcipher_req_ctx *req_ctx,
746                                   struct scatterlist *dst, struct scatterlist *src,
747                                   void *info, //req info
750                                   void __iomem *cc_base)
752 	int completion_error = 0;
753 	uint32_t inflight_counter;
754 	DECL_CYCLE_COUNT_RESOURCES;
757 	ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
758 	info = req_ctx->backup_info;
759 	END_CYCLE_COUNT(STAT_OP_TYPE_GENERIC, STAT_PHASE_4);
762 	/*Set the inflight couter value to local variable*/
763 	inflight_counter =  ctx_p->drvdata->inflight_counter;
764 	/*Decrease the inflight counter*/
765 	if(ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
766 		ctx_p->drvdata->inflight_counter--;
	/* Async path: signal the crypto API that the request is done. */
769 		ablkcipher_request_complete(areq, completion_error);
772 	return completion_error;
/*
 * ssi_blkcipher_process() - core en/decrypt path shared by the sync and
 * async wrappers.
 *
 * Phases: (0) validate the data size and, for CTS requests whose length is
 * block-aligned, temporarily fall back to plain CBC (restored on exit via
 * cts_restore_flag); (1) DMA-map IV/src/dst; (2) build the setup + data
 * descriptor sequence (MULTI2 has its own setup builder); (3) submit via
 * send_request(). Async requests (@areq != NULL) return -EINPROGRESS and
 * complete in ssi_ablkcipher_complete(); the sync path unmaps and calls
 * ssi_blkcipher_complete() inline.
 * NOTE(review): some early-return and error-path lines fall in gaps of
 * this view.
 */
775 static int ssi_blkcipher_process(
776 	struct crypto_tfm *tfm,
777 	struct blkcipher_req_ctx *req_ctx,
778 	struct scatterlist *dst, struct scatterlist *src,
780 	void *info, //req info
783 	enum drv_crypto_direction direction)
785 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
786 	struct device *dev = &ctx_p->drvdata->plat_dev->dev;
787 	HwDesc_s desc[MAX_ABLKCIPHER_SEQ_LEN];
788 	struct ssi_crypto_req ssi_req = {};
789 	int rc, seq_len = 0,cts_restore_flag = 0;
790 	DECL_CYCLE_COUNT_RESOURCES;
792 	SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
793 		((direction==DRV_CRYPTO_DIRECTION_ENCRYPT)?"Encrypt":"Decrypt"),
796 	CHECK_AND_RETURN_UPON_FIPS_ERROR();
797 	/* STAT_PHASE_0: Init and sanity checks */
800 	/* TODO: check data length according to mode */
801 	if (unlikely(validate_data_size(ctx_p, nbytes))) {
802 		SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
803 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
807 	/* No data to process is valid */
810 	/*For CTS in case of data size aligned to 16 use CBC mode*/
811 	if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)){
813 		ctx_p->cipher_mode = DRV_CIPHER_CBC;
814 		cts_restore_flag = 1;
817 	/* Setup DX request structure */
818 	ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
819 	ssi_req.user_arg = (void *)areq;
821 #ifdef ENABLE_CYCLE_COUNT
822 	ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
823 		STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;
827 	/* Setup request context */
828 	req_ctx->gen_ctx.op_type = direction;
830 	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_0);
832 	/* STAT_PHASE_1: Map buffers */
835 	rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
836 	if (unlikely(rc != 0)) {
837 		SSI_LOG_ERR("map_request() failed\n");
841 	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_1);
843 	/* STAT_PHASE_2: Create sequence */
846 	/* Setup processing */
847 #if SSI_CC_HAS_MULTI2
848 	if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
849 		ssi_blkcipher_create_multi2_setup_desc(tfm,
855 #endif /*SSI_CC_HAS_MULTI2*/
857 		ssi_blkcipher_create_setup_desc(tfm,
864 	/* Data processing */
865 	ssi_blkcipher_create_data_desc(tfm,
872 	/* do we need to generate IV? */
873 	if (req_ctx->is_giv == true) {
874 		ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
875 		ssi_req.ivgen_dma_addr_len = 1;
876 		/* set the IV size (8/16 B long)*/
877 		ssi_req.ivgen_size = ivsize;
879 	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_2);
881 	/* STAT_PHASE_3: Lock HW and push sequence */
	/* Last arg: 1 = async (interrupt completion), 0 = synchronous. */
884 	rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (areq == NULL)? 0:1);
886 		if (unlikely(rc != -EINPROGRESS)) {
887 			/* Failed to send the request or request completed synchronously */
888 			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
891 		END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
		/* Sync-path error: undo the buffer mapping. */
894 			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
895 			END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
897 			END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
			/* Sync-path success: finish inline (no completion cb). */
898 			rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst, src, info, ivsize, NULL, ctx_p->drvdata->cc_base);
	/* Restore the original mode after a CTS->CBC fallback. */
903 	if (cts_restore_flag != 0)
904 		ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;
/*
 * ssi_ablkcipher_complete() - async completion callback registered in
 * ssi_blkcipher_process() (ssi_req.user_cb); @ssi_req is the original
 * ablkcipher_request. Delegates to the common ssi_blkcipher_complete().
 */
909 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
911 	struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
912 	struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
913 	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
914 	struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
915 	unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
917 	CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
919 	ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src, areq->info, ivsize, areq, cc_base);
/*
 * ssi_sblkcipher_init() - sync (blkcipher) tfm init: allocates the single
 * reusable request context (sync wrappers have no per-request ctx), then
 * runs the common init.
 */
924 static int ssi_sblkcipher_init(struct crypto_tfm *tfm)
926 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
928 	/* Allocate sync ctx buffer */
929 	ctx_p->sync_ctx = kmalloc(sizeof(struct blkcipher_req_ctx), GFP_KERNEL|GFP_DMA);
930 	if (!ctx_p->sync_ctx) {
931 		SSI_LOG_ERR("Allocating sync ctx buffer in context failed\n");
934 	SSI_LOG_DEBUG("Allocated sync ctx buffer in context ctx_p->sync_ctx=@%p\n",
937 	return ssi_blkcipher_init(tfm);
/* Sync tfm teardown: free the sync request ctx, then common exit. */
941 static void ssi_sblkcipher_exit(struct crypto_tfm *tfm)
943 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
945 	kfree(ctx_p->sync_ctx);
946 	SSI_LOG_DEBUG("Free sync ctx buffer in context ctx_p->sync_ctx=@%p\n", ctx_p->sync_ctx);
948 	ssi_blkcipher_exit(tfm);
/*
 * Sync encrypt wrapper: runs the common process path with the shared
 * sync_ctx and areq == NULL, so completion happens inline.
 */
952 static int ssi_sblkcipher_encrypt(struct blkcipher_desc *desc,
953                         struct scatterlist *dst, struct scatterlist *src,
956 	struct crypto_blkcipher *blk_tfm = desc->tfm;
957 	struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
958 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
959 	struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
960 	unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
	/* Preserve the caller's IV pointer; used by completion. */
962 	req_ctx->backup_info = desc->info;
963 	req_ctx->is_giv = false;
965 	return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_ENCRYPT);
/* Sync decrypt wrapper; mirror of ssi_sblkcipher_encrypt(). */
968 static int ssi_sblkcipher_decrypt(struct blkcipher_desc *desc,
969                         struct scatterlist *dst, struct scatterlist *src,
972 	struct crypto_blkcipher *blk_tfm = desc->tfm;
973 	struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
974 	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
975 	struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
976 	unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
	/* Preserve the caller's IV pointer; used by completion. */
978 	req_ctx->backup_info = desc->info;
979 	req_ctx->is_giv = false;
981 	return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_DECRYPT);
985 /* Async wrap functions */
/*
 * Async tfm init: declare the per-request context size for the crypto API,
 * then run the common init.
 */
987 static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
989 	struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
991 	ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
993 	return ssi_blkcipher_init(tfm);
/* Async setkey wrapper: unwrap to crypto_tfm and use the common setkey. */
997 static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
1001 	return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
/*
 * Async encrypt entry point: passes @req as areq so the request completes
 * via ssi_ablkcipher_complete() (returns -EINPROGRESS on submit).
 */
1004 static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
1006 	struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1007 	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1008 	struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1009 	unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
	/* Preserve the caller's IV pointer; used by completion. */
1011 	req_ctx->backup_info = req->info;
1012 	req_ctx->is_giv = false;
1014 	return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
/* Async decrypt entry point; mirror of ssi_ablkcipher_encrypt(). */
1017 static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
1019 	struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1020 	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1021 	struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1022 	unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
	/* Preserve the caller's IV pointer; used by completion. */
1024 	req_ctx->backup_info = req->info;
1025 	req_ctx->is_giv = false;
1026 	return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
1030 /* DX Block cipher alg */
1031 static struct ssi_alg_template blkcipher_algs[] = {
1032 /* Async template */
1033 #if SSI_CC_HAS_AES_XTS
1036 .driver_name = "xts-aes-dx",
1037 .blocksize = AES_BLOCK_SIZE,
1038 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1039 .template_ablkcipher = {
1040 .setkey = ssi_ablkcipher_setkey,
1041 .encrypt = ssi_ablkcipher_encrypt,
1042 .decrypt = ssi_ablkcipher_decrypt,
1043 .min_keysize = AES_MIN_KEY_SIZE * 2,
1044 .max_keysize = AES_MAX_KEY_SIZE * 2,
1045 .ivsize = AES_BLOCK_SIZE,
1048 .cipher_mode = DRV_CIPHER_XTS,
1049 .flow_mode = S_DIN_to_AES,
1050 .synchronous = false,
1054 .driver_name = "xts-aes-du512-dx",
1055 .blocksize = AES_BLOCK_SIZE,
1056 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1057 .template_ablkcipher = {
1058 .setkey = ssi_ablkcipher_setkey,
1059 .encrypt = ssi_ablkcipher_encrypt,
1060 .decrypt = ssi_ablkcipher_decrypt,
1061 .min_keysize = AES_MIN_KEY_SIZE * 2,
1062 .max_keysize = AES_MAX_KEY_SIZE * 2,
1063 .ivsize = AES_BLOCK_SIZE,
1065 .cipher_mode = DRV_CIPHER_XTS,
1066 .flow_mode = S_DIN_to_AES,
1067 .synchronous = false,
1071 .driver_name = "xts-aes-du4096-dx",
1072 .blocksize = AES_BLOCK_SIZE,
1073 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1074 .template_ablkcipher = {
1075 .setkey = ssi_ablkcipher_setkey,
1076 .encrypt = ssi_ablkcipher_encrypt,
1077 .decrypt = ssi_ablkcipher_decrypt,
1078 .min_keysize = AES_MIN_KEY_SIZE * 2,
1079 .max_keysize = AES_MAX_KEY_SIZE * 2,
1080 .ivsize = AES_BLOCK_SIZE,
1082 .cipher_mode = DRV_CIPHER_XTS,
1083 .flow_mode = S_DIN_to_AES,
1084 .synchronous = false,
1086 #endif /*SSI_CC_HAS_AES_XTS*/
1087 #if SSI_CC_HAS_AES_ESSIV
1089 .name = "essiv(aes)",
1090 .driver_name = "essiv-aes-dx",
1091 .blocksize = AES_BLOCK_SIZE,
1092 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1093 .template_ablkcipher = {
1094 .setkey = ssi_ablkcipher_setkey,
1095 .encrypt = ssi_ablkcipher_encrypt,
1096 .decrypt = ssi_ablkcipher_decrypt,
1097 .min_keysize = AES_MIN_KEY_SIZE * 2,
1098 .max_keysize = AES_MAX_KEY_SIZE * 2,
1099 .ivsize = AES_BLOCK_SIZE,
1101 .cipher_mode = DRV_CIPHER_ESSIV,
1102 .flow_mode = S_DIN_to_AES,
1103 .synchronous = false,
1106 .name = "essiv(aes)",
1107 .driver_name = "essiv-aes-du512-dx",
1108 .blocksize = AES_BLOCK_SIZE,
1109 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1110 .template_ablkcipher = {
1111 .setkey = ssi_ablkcipher_setkey,
1112 .encrypt = ssi_ablkcipher_encrypt,
1113 .decrypt = ssi_ablkcipher_decrypt,
1114 .min_keysize = AES_MIN_KEY_SIZE * 2,
1115 .max_keysize = AES_MAX_KEY_SIZE * 2,
1116 .ivsize = AES_BLOCK_SIZE,
1118 .cipher_mode = DRV_CIPHER_ESSIV,
1119 .flow_mode = S_DIN_to_AES,
1120 .synchronous = false,
1123 .name = "essiv(aes)",
1124 .driver_name = "essiv-aes-du4096-dx",
1125 .blocksize = AES_BLOCK_SIZE,
1126 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1127 .template_ablkcipher = {
1128 .setkey = ssi_ablkcipher_setkey,
1129 .encrypt = ssi_ablkcipher_encrypt,
1130 .decrypt = ssi_ablkcipher_decrypt,
1131 .min_keysize = AES_MIN_KEY_SIZE * 2,
1132 .max_keysize = AES_MAX_KEY_SIZE * 2,
1133 .ivsize = AES_BLOCK_SIZE,
1135 .cipher_mode = DRV_CIPHER_ESSIV,
1136 .flow_mode = S_DIN_to_AES,
1137 .synchronous = false,
1139 #endif /*SSI_CC_HAS_AES_ESSIV*/
1140 #if SSI_CC_HAS_AES_BITLOCKER
1142 .name = "bitlocker(aes)",
1143 .driver_name = "bitlocker-aes-dx",
1144 .blocksize = AES_BLOCK_SIZE,
1145 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1146 .template_ablkcipher = {
1147 .setkey = ssi_ablkcipher_setkey,
1148 .encrypt = ssi_ablkcipher_encrypt,
1149 .decrypt = ssi_ablkcipher_decrypt,
1150 .min_keysize = AES_MIN_KEY_SIZE * 2,
1151 .max_keysize = AES_MAX_KEY_SIZE * 2,
1152 .ivsize = AES_BLOCK_SIZE,
1154 .cipher_mode = DRV_CIPHER_BITLOCKER,
1155 .flow_mode = S_DIN_to_AES,
1156 .synchronous = false,
1159 .name = "bitlocker(aes)",
1160 .driver_name = "bitlocker-aes-du512-dx",
1161 .blocksize = AES_BLOCK_SIZE,
1162 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1163 .template_ablkcipher = {
1164 .setkey = ssi_ablkcipher_setkey,
1165 .encrypt = ssi_ablkcipher_encrypt,
1166 .decrypt = ssi_ablkcipher_decrypt,
1167 .min_keysize = AES_MIN_KEY_SIZE * 2,
1168 .max_keysize = AES_MAX_KEY_SIZE * 2,
1169 .ivsize = AES_BLOCK_SIZE,
1171 .cipher_mode = DRV_CIPHER_BITLOCKER,
1172 .flow_mode = S_DIN_to_AES,
1173 .synchronous = false,
1176 .name = "bitlocker(aes)",
1177 .driver_name = "bitlocker-aes-du4096-dx",
1178 .blocksize = AES_BLOCK_SIZE,
1179 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1180 .template_ablkcipher = {
1181 .setkey = ssi_ablkcipher_setkey,
1182 .encrypt = ssi_ablkcipher_encrypt,
1183 .decrypt = ssi_ablkcipher_decrypt,
1184 .min_keysize = AES_MIN_KEY_SIZE * 2,
1185 .max_keysize = AES_MAX_KEY_SIZE * 2,
1186 .ivsize = AES_BLOCK_SIZE,
1188 .cipher_mode = DRV_CIPHER_BITLOCKER,
1189 .flow_mode = S_DIN_to_AES,
1190 .synchronous = false,
1192 #endif /*SSI_CC_HAS_AES_BITLOCKER*/
1195 .driver_name = "ecb-aes-dx",
1196 .blocksize = AES_BLOCK_SIZE,
1197 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1198 .template_ablkcipher = {
1199 .setkey = ssi_ablkcipher_setkey,
1200 .encrypt = ssi_ablkcipher_encrypt,
1201 .decrypt = ssi_ablkcipher_decrypt,
1202 .min_keysize = AES_MIN_KEY_SIZE,
1203 .max_keysize = AES_MAX_KEY_SIZE,
1206 .cipher_mode = DRV_CIPHER_ECB,
1207 .flow_mode = S_DIN_to_AES,
1208 .synchronous = false,
1212 .driver_name = "cbc-aes-dx",
1213 .blocksize = AES_BLOCK_SIZE,
1214 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1215 .template_ablkcipher = {
1216 .setkey = ssi_ablkcipher_setkey,
1217 .encrypt = ssi_ablkcipher_encrypt,
1218 .decrypt = ssi_ablkcipher_decrypt,
1219 .min_keysize = AES_MIN_KEY_SIZE,
1220 .max_keysize = AES_MAX_KEY_SIZE,
1221 .ivsize = AES_BLOCK_SIZE,
1223 .cipher_mode = DRV_CIPHER_CBC,
1224 .flow_mode = S_DIN_to_AES,
1225 .synchronous = false,
1229 .driver_name = "ofb-aes-dx",
1230 .blocksize = AES_BLOCK_SIZE,
1231 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1232 .template_ablkcipher = {
1233 .setkey = ssi_ablkcipher_setkey,
1234 .encrypt = ssi_ablkcipher_encrypt,
1235 .decrypt = ssi_ablkcipher_decrypt,
1236 .min_keysize = AES_MIN_KEY_SIZE,
1237 .max_keysize = AES_MAX_KEY_SIZE,
1238 .ivsize = AES_BLOCK_SIZE,
1240 .cipher_mode = DRV_CIPHER_OFB,
1241 .flow_mode = S_DIN_to_AES,
1242 .synchronous = false,
1244 #if SSI_CC_HAS_AES_CTS
1246 .name = "cts1(cbc(aes))",
1247 .driver_name = "cts1-cbc-aes-dx",
1248 .blocksize = AES_BLOCK_SIZE,
1249 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1250 .template_ablkcipher = {
1251 .setkey = ssi_ablkcipher_setkey,
1252 .encrypt = ssi_ablkcipher_encrypt,
1253 .decrypt = ssi_ablkcipher_decrypt,
1254 .min_keysize = AES_MIN_KEY_SIZE,
1255 .max_keysize = AES_MAX_KEY_SIZE,
1256 .ivsize = AES_BLOCK_SIZE,
1258 .cipher_mode = DRV_CIPHER_CBC_CTS,
1259 .flow_mode = S_DIN_to_AES,
1260 .synchronous = false,
1265 .driver_name = "ctr-aes-dx",
1267 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1268 .template_ablkcipher = {
1269 .setkey = ssi_ablkcipher_setkey,
1270 .encrypt = ssi_ablkcipher_encrypt,
1271 .decrypt = ssi_ablkcipher_decrypt,
1272 .min_keysize = AES_MIN_KEY_SIZE,
1273 .max_keysize = AES_MAX_KEY_SIZE,
1274 .ivsize = AES_BLOCK_SIZE,
1276 .cipher_mode = DRV_CIPHER_CTR,
1277 .flow_mode = S_DIN_to_AES,
1278 .synchronous = false,
1281 .name = "cbc(des3_ede)",
1282 .driver_name = "cbc-3des-dx",
1283 .blocksize = DES3_EDE_BLOCK_SIZE,
1284 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1285 .template_ablkcipher = {
1286 .setkey = ssi_ablkcipher_setkey,
1287 .encrypt = ssi_ablkcipher_encrypt,
1288 .decrypt = ssi_ablkcipher_decrypt,
1289 .min_keysize = DES3_EDE_KEY_SIZE,
1290 .max_keysize = DES3_EDE_KEY_SIZE,
1291 .ivsize = DES3_EDE_BLOCK_SIZE,
1293 .cipher_mode = DRV_CIPHER_CBC,
1294 .flow_mode = S_DIN_to_DES,
1295 .synchronous = false,
1298 .name = "ecb(des3_ede)",
1299 .driver_name = "ecb-3des-dx",
1300 .blocksize = DES3_EDE_BLOCK_SIZE,
1301 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1302 .template_ablkcipher = {
1303 .setkey = ssi_ablkcipher_setkey,
1304 .encrypt = ssi_ablkcipher_encrypt,
1305 .decrypt = ssi_ablkcipher_decrypt,
1306 .min_keysize = DES3_EDE_KEY_SIZE,
1307 .max_keysize = DES3_EDE_KEY_SIZE,
1310 .cipher_mode = DRV_CIPHER_ECB,
1311 .flow_mode = S_DIN_to_DES,
1312 .synchronous = false,
1316 .driver_name = "cbc-des-dx",
1317 .blocksize = DES_BLOCK_SIZE,
1318 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1319 .template_ablkcipher = {
1320 .setkey = ssi_ablkcipher_setkey,
1321 .encrypt = ssi_ablkcipher_encrypt,
1322 .decrypt = ssi_ablkcipher_decrypt,
1323 .min_keysize = DES_KEY_SIZE,
1324 .max_keysize = DES_KEY_SIZE,
1325 .ivsize = DES_BLOCK_SIZE,
1327 .cipher_mode = DRV_CIPHER_CBC,
1328 .flow_mode = S_DIN_to_DES,
1329 .synchronous = false,
1333 .driver_name = "ecb-des-dx",
1334 .blocksize = DES_BLOCK_SIZE,
1335 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1336 .template_ablkcipher = {
1337 .setkey = ssi_ablkcipher_setkey,
1338 .encrypt = ssi_ablkcipher_encrypt,
1339 .decrypt = ssi_ablkcipher_decrypt,
1340 .min_keysize = DES_KEY_SIZE,
1341 .max_keysize = DES_KEY_SIZE,
1344 .cipher_mode = DRV_CIPHER_ECB,
1345 .flow_mode = S_DIN_to_DES,
1346 .synchronous = false,
1348 #if SSI_CC_HAS_MULTI2
1350 .name = "cbc(multi2)",
1351 .driver_name = "cbc-multi2-dx",
1352 .blocksize = CC_MULTI2_BLOCK_SIZE,
1353 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1354 .template_ablkcipher = {
1355 .setkey = ssi_ablkcipher_setkey,
1356 .encrypt = ssi_ablkcipher_encrypt,
1357 .decrypt = ssi_ablkcipher_decrypt,
1358 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1359 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1360 .ivsize = CC_MULTI2_IV_SIZE,
1362 .cipher_mode = DRV_MULTI2_CBC,
1363 .flow_mode = S_DIN_to_MULTI2,
1364 .synchronous = false,
1367 .name = "ofb(multi2)",
1368 .driver_name = "ofb-multi2-dx",
1370 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1371 .template_ablkcipher = {
1372 .setkey = ssi_ablkcipher_setkey,
1373 .encrypt = ssi_ablkcipher_encrypt,
1374 .decrypt = ssi_ablkcipher_encrypt,
1375 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1376 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1377 .ivsize = CC_MULTI2_IV_SIZE,
1379 .cipher_mode = DRV_MULTI2_OFB,
1380 .flow_mode = S_DIN_to_MULTI2,
1381 .synchronous = false,
1383 #endif /*SSI_CC_HAS_MULTI2*/
1387 struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1389 struct ssi_crypto_alg *t_alg;
1390 struct crypto_alg *alg;
1392 t_alg = kzalloc(sizeof(struct ssi_crypto_alg), GFP_KERNEL);
1394 SSI_LOG_ERR("failed to allocate t_alg\n");
1395 return ERR_PTR(-ENOMEM);
1398 alg = &t_alg->crypto_alg;
1400 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1401 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1402 template->driver_name);
1403 alg->cra_module = THIS_MODULE;
1404 alg->cra_priority = SSI_CRA_PRIO;
1405 alg->cra_blocksize = template->blocksize;
1406 alg->cra_alignmask = 0;
1407 alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1409 alg->cra_init = template->synchronous? ssi_sblkcipher_init:ssi_ablkcipher_init;
1410 alg->cra_exit = template->synchronous? ssi_sblkcipher_exit:ssi_blkcipher_exit;
1411 alg->cra_type = template->synchronous? &crypto_blkcipher_type:&crypto_ablkcipher_type;
1412 if(template->synchronous) {
1413 alg->cra_blkcipher = template->template_sblkcipher;
1414 alg->cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1417 alg->cra_ablkcipher = template->template_ablkcipher;
1418 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
1422 t_alg->cipher_mode = template->cipher_mode;
1423 t_alg->flow_mode = template->flow_mode;
1428 int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1430 struct ssi_crypto_alg *t_alg, *n;
1431 struct ssi_blkcipher_handle *blkcipher_handle =
1432 drvdata->blkcipher_handle;
1434 dev = &drvdata->plat_dev->dev;
1436 if (blkcipher_handle != NULL) {
1437 /* Remove registered algs */
1438 list_for_each_entry_safe(t_alg, n,
1439 &blkcipher_handle->blkcipher_alg_list,
1441 crypto_unregister_alg(&t_alg->crypto_alg);
1442 list_del(&t_alg->entry);
1445 kfree(blkcipher_handle);
1446 drvdata->blkcipher_handle = NULL;
1453 int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1455 struct ssi_blkcipher_handle *ablkcipher_handle;
1456 struct ssi_crypto_alg *t_alg;
1460 ablkcipher_handle = kmalloc(sizeof(struct ssi_blkcipher_handle),
1462 if (ablkcipher_handle == NULL)
1465 drvdata->blkcipher_handle = ablkcipher_handle;
1467 INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1470 SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1471 for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1472 SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1473 t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1474 if (IS_ERR(t_alg)) {
1475 rc = PTR_ERR(t_alg);
1476 SSI_LOG_ERR("%s alg allocation failed\n",
1477 blkcipher_algs[alg].driver_name);
1480 t_alg->drvdata = drvdata;
1482 SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1483 rc = crypto_register_alg(&t_alg->crypto_alg);
1484 SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1485 t_alg->crypto_alg.cra_driver_name, rc);
1486 if (unlikely(rc != 0)) {
1487 SSI_LOG_ERR("%s alg registration failed\n",
1488 t_alg->crypto_alg.cra_driver_name);
1492 list_add_tail(&t_alg->entry,
1493 &ablkcipher_handle->blkcipher_alg_list);
1494 SSI_LOG_DEBUG("Registered %s\n",
1495 t_alg->crypto_alg.cra_driver_name);
1501 ssi_ablkcipher_free(drvdata);