/* drivers/staging/ccree/ssi_cipher.c
 * (imported from karo-tx-linux.git, merge of tag 'iio-for-4.13a')
 */
1 /*
2  * Copyright (C) 2012-2017 ARM Limited or its affiliates.
3  *
4  * This program is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU General Public License version 2 as
6  * published by the Free Software Foundation.
7  *
8  * This program is distributed in the hope that it will be useful,
9  * but WITHOUT ANY WARRANTY; without even the implied warranty of
10  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11  * GNU General Public License for more details.
12  *
13  * You should have received a copy of the GNU General Public License
14  * along with this program; if not, see <http://www.gnu.org/licenses/>.
15  */
16
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/semaphore.h>
21 #include <crypto/algapi.h>
22 #include <crypto/internal/skcipher.h>
23 #include <crypto/aes.h>
24 #include <crypto/ctr.h>
25 #include <crypto/des.h>
26
27 #include "ssi_config.h"
28 #include "ssi_driver.h"
29 #include "cc_lli_defs.h"
30 #include "ssi_buffer_mgr.h"
31 #include "ssi_cipher.h"
32 #include "ssi_request_mgr.h"
33 #include "ssi_sysfs.h"
34 #include "ssi_fips_local.h"
35
36 #define MAX_ABLKCIPHER_SEQ_LEN 6
37
38 #define template_ablkcipher     template_u.ablkcipher
39 #define template_sblkcipher     template_u.blkcipher
40
41 #define SSI_MIN_AES_XTS_SIZE 0x10
42 #define SSI_MAX_AES_XTS_SIZE 0x2000
/* Per-driver-instance handle: list of blkcipher algorithms registered by
 * this driver (see blkcipher_alg_list usage elsewhere in this file).
 */
struct ssi_blkcipher_handle {
	struct list_head blkcipher_alg_list;
};

/* User-supplied key material staged in a DMA-able buffer. */
struct cc_user_key_info {
	u8 *key;			/* kmalloc'ed (GFP_DMA) key staging buffer */
	dma_addr_t key_dma_addr;	/* DMA mapping of @key */
};
/* HW key-slot selectors, used when the key lives in CryptoCell key slots
 * rather than in @cc_user_key_info (see ssi_is_hw_key() path in setkey).
 */
struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;	/* slot of the first key */
	enum cc_hw_crypto_key key2_slot;	/* slot of the second key (XTS/ESSIV/BITLOCKER) */
};

/* Per-tfm cipher context, stored in the crypto_tfm context area. */
struct ssi_ablkcipher_ctx {
	struct ssi_drvdata *drvdata;	/* back-reference to driver data */
	int keylen;			/* current key length in bytes */
	int key_round_number;		/* MULTI2 round count (last byte of key blob) */
	int cipher_mode;		/* DRV_CIPHER_* mode */
	int flow_mode;			/* S_DIN_to_* engine selector */
	unsigned int flags;
	struct blkcipher_req_ctx *sync_ctx;
	struct cc_user_key_info user;	/* user key buffer + DMA mapping */
	struct cc_hw_key_info hw;	/* selected HW key slots */
	struct crypto_shash *shash_tfm;	/* sw sha256 for ESSIV key2 derivation */
};
68
69 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
70
71
72 static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, u32 size) {
73         switch (ctx_p->flow_mode){
74         case S_DIN_to_AES:
75                 switch (size){
76                 case CC_AES_128_BIT_KEY_SIZE:
77                 case CC_AES_192_BIT_KEY_SIZE:
78                         if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
79                                    (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
80                                    (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
81                                 return 0;
82                         break;
83                 case CC_AES_256_BIT_KEY_SIZE:
84                         return 0;
85                 case (CC_AES_192_BIT_KEY_SIZE*2):
86                 case (CC_AES_256_BIT_KEY_SIZE*2):
87                         if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
88                                    (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
89                                    (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
90                                 return 0;
91                         break;
92                 default:
93                         break;
94                 }
95         case S_DIN_to_DES:
96                 if (likely(size == DES3_EDE_KEY_SIZE ||
97                     size == DES_KEY_SIZE))
98                         return 0;
99                 break;
100 #if SSI_CC_HAS_MULTI2
101         case S_DIN_to_MULTI2:
102                 if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
103                         return 0;
104                 break;
105 #endif
106         default:
107                 break;
108
109         }
110         return -EINVAL;
111 }
112
113
114 static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
115         switch (ctx_p->flow_mode){
116         case S_DIN_to_AES:
117                 switch (ctx_p->cipher_mode){
118                 case DRV_CIPHER_XTS:
119                         if ((size >= SSI_MIN_AES_XTS_SIZE) &&
120                             (size <= SSI_MAX_AES_XTS_SIZE) &&
121                             IS_ALIGNED(size, AES_BLOCK_SIZE))
122                                 return 0;
123                         break;
124                 case DRV_CIPHER_CBC_CTS:
125                         if (likely(size >= AES_BLOCK_SIZE))
126                                 return 0;
127                         break;
128                 case DRV_CIPHER_OFB:
129                 case DRV_CIPHER_CTR:
130                                 return 0;
131                 case DRV_CIPHER_ECB:
132                 case DRV_CIPHER_CBC:
133                 case DRV_CIPHER_ESSIV:
134                 case DRV_CIPHER_BITLOCKER:
135                         if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
136                                 return 0;
137                         break;
138                 default:
139                         break;
140                 }
141                 break;
142         case S_DIN_to_DES:
143                 if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
144                                 return 0;
145                 break;
146 #if SSI_CC_HAS_MULTI2
147         case S_DIN_to_MULTI2:
148                 switch (ctx_p->cipher_mode) {
149                 case DRV_MULTI2_CBC:
150                         if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
151                                 return 0;
152                         break;
153                 case DRV_MULTI2_OFB:
154                         return 0;
155                 default:
156                         break;
157                 }
158                 break;
159 #endif /*SSI_CC_HAS_MULTI2*/
160         default:
161                 break;
162
163         }
164         return -EINVAL;
165 }
166
167 static unsigned int get_max_keysize(struct crypto_tfm *tfm)
168 {
169         struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
170
171         if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER) {
172                 return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
173         }
174
175         if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER) {
176                 return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
177         }
178
179         return 0;
180 }
181
182 static int ssi_blkcipher_init(struct crypto_tfm *tfm)
183 {
184         struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
185         struct crypto_alg *alg = tfm->__crt_alg;
186         struct ssi_crypto_alg *ssi_alg =
187                         container_of(alg, struct ssi_crypto_alg, crypto_alg);
188         struct device *dev;
189         int rc = 0;
190         unsigned int max_key_buf_size = get_max_keysize(tfm);
191
192         SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p,
193                                                 crypto_tfm_alg_name(tfm));
194
195         CHECK_AND_RETURN_UPON_FIPS_ERROR();
196         ctx_p->cipher_mode = ssi_alg->cipher_mode;
197         ctx_p->flow_mode = ssi_alg->flow_mode;
198         ctx_p->drvdata = ssi_alg->drvdata;
199         dev = &ctx_p->drvdata->plat_dev->dev;
200
201         /* Allocate key buffer, cache line aligned */
202         ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL|GFP_DMA);
203         if (!ctx_p->user.key) {
204                 SSI_LOG_ERR("Allocating key buffer in context failed\n");
205                 rc = -ENOMEM;
206         }
207         SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
208                       ctx_p->user.key);
209
210         /* Map key buffer */
211         ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
212                                              max_key_buf_size, DMA_TO_DEVICE);
213         if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
214                 SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
215                         max_key_buf_size, ctx_p->user.key);
216                 return -ENOMEM;
217         }
218         SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr, max_key_buf_size);
219         SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
220                 max_key_buf_size, ctx_p->user.key,
221                 (unsigned long long)ctx_p->user.key_dma_addr);
222
223         if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
224                 /* Alloc hash tfm for essiv */
225                 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
226                 if (IS_ERR(ctx_p->shash_tfm)) {
227                         SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
228                         return PTR_ERR(ctx_p->shash_tfm);
229                 }
230         }
231
232         return rc;
233 }
234
/*
 * ssi_blkcipher_exit() - crypto_tfm cra_exit handler.
 *
 * Tears down what ssi_blkcipher_init() set up, in reverse order:
 * the ESSIV sha256 tfm (if allocated), the DMA mapping of the key
 * staging buffer, and finally the buffer itself.
 */
static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
{
        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
        unsigned int max_key_buf_size = get_max_keysize(tfm);

        SSI_LOG_DEBUG("Clearing context @%p for %s\n",
                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* Free hash tfm for essiv */
                crypto_free_shash(ctx_p->shash_tfm);
                ctx_p->shash_tfm = NULL;
        }

        /* Unmap key buffer (restore full address first — the 48-bit macros
         * store/strip high address bits around mapping on some configs) */
        SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                                                                DMA_TO_DEVICE);
        SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n",
                (unsigned long long)ctx_p->user.key_dma_addr);

        /* Free key buffer in context */
        kfree(ctx_p->user.key);
        /* NOTE(review): logs the pointer value after kfree(); only the value
         * is printed (no dereference) so this is harmless, but reordering the
         * log before kfree() would be cleaner. */
        SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
}
261
262
/* Layout of a triple-DES key blob: three consecutive single-DES keys
 * (K1, K2, K3), as consumed by ssi_fips_verify_3des_keys(). */
typedef struct tdes_keys{
        u8      key1[DES_KEY_SIZE];
        u8      key2[DES_KEY_SIZE];
        u8      key3[DES_KEY_SIZE];
}tdes_keys_t;

/* 32 bytes of zeros; reference buffer for all-zero comparisons.
 * NOTE(review): no user visible in this chunk — presumably used by
 * FIPS/weak-key checks later in the file; confirm before removing. */
static const u8 zero_buff[] = {0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
273
274 /* The function verifies that tdes keys are not weak.*/
275 static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
276 {
277 #ifdef CCREE_FIPS_SUPPORT
278         tdes_keys_t *tdes_key = (tdes_keys_t*)key;
279
280         /* verify key1 != key2 and key3 != key2*/
281         if (unlikely( (memcmp((u8*)tdes_key->key1, (u8*)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
282                       (memcmp((u8*)tdes_key->key3, (u8*)tdes_key->key2, sizeof(tdes_key->key3)) == 0) )) {
283                 return -ENOEXEC;
284         }
285 #endif /* CCREE_FIPS_SUPPORT */
286
287         return 0;
288 }
289
290 /* The function verifies that xts keys are not weak.*/
291 static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
292 {
293 #ifdef CCREE_FIPS_SUPPORT
294         /* Weak key is define as key that its first half (128/256 lsb) equals its second half (128/256 msb) */
295         int singleKeySize = keylen >> 1;
296
297         if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0)) {
298                 return -ENOEXEC;
299         }
300 #endif /* CCREE_FIPS_SUPPORT */
301
302         return 0;
303 }
304
305 static enum cc_hw_crypto_key hw_key_to_cc_hw_key(int slot_num)
306 {
307         switch (slot_num) {
308         case 0:
309                 return KFDE0_KEY;
310         case 1:
311                 return KFDE1_KEY;
312         case 2:
313                 return KFDE2_KEY;
314         case 3:
315                 return KFDE3_KEY;
316         }
317         return END_OF_KEYS;
318 }
319
/*
 * ssi_blkcipher_setkey() - common setkey handler for all cipher modes.
 *
 * Validates the key size for the configured flow/cipher mode, performs
 * weak-key checks (DES, FIPS XTS/3DES), then either records HW key-slot
 * numbers (when the "key" is actually a slot descriptor) or copies the key
 * into the DMA-mapped staging buffer. For ESSIV the second key half is
 * derived as sha256 of the first half using the software shash allocated
 * at init time.
 *
 * Return: 0 on success, -EINVAL for bad/weak keys, or an error from the
 * ESSIV digest.
 */
static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
                                const u8 *key,
                                unsigned int keylen)
{
        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
        u32 tmp[DES_EXPKEY_WORDS];
        unsigned int max_key_buf_size = get_max_keysize(tfm);
        DECL_CYCLE_COUNT_RESOURCES;

        SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", (u8 *)key, keylen);

        CHECK_AND_RETURN_UPON_FIPS_ERROR();

        SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");

        /* STAT_PHASE_0: Init and sanity checks */
        START_CYCLE_COUNT();

#if SSI_CC_HAS_MULTI2
        /*last byte of key buffer is round number and should not be a part of key size*/
        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
                keylen -=1;
        }
#endif /*SSI_CC_HAS_MULTI2*/

        if (unlikely(validate_keys_sizes(ctx_p,keylen) != 0)) {
                SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if (ssi_is_hw_key(tfm)) {
                /* setting HW key slots: @key is a slot descriptor, not
                 * key material, so nothing is copied to the staging buffer */
                struct arm_hw_key_info *hki = (struct arm_hw_key_info*)key;

                if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
                        SSI_LOG_ERR("HW key not supported for non-AES flows\n");
                        return -EINVAL;
                }

                ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
                if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
                        SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
                        return -EINVAL;
                }

                /* two-key modes additionally need a distinct second slot */
                if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
                    (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
                    (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
                        if (unlikely(hki->hw_key1 == hki->hw_key2)) {
                                SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
                                return -EINVAL;
                        }
                        ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
                        if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
                                SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
                                return -EINVAL;
                        }
                }

                ctx_p->keylen = keylen;
                END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
                SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");

                return 0;
        }

        // verify weak keys
        if (ctx_p->flow_mode == S_DIN_to_DES) {
                /* reject weak DES keys only when the caller asked for it */
                if (unlikely(!des_ekey(tmp, key)) &&
                    (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        SSI_LOG_DEBUG("ssi_blkcipher_setkey:  weak DES key");
                        return -EINVAL;
                }
        }
        if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
            ssi_fips_verify_xts_keys(key, keylen) != 0) {
                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
                return -EINVAL;
        }
        if ((ctx_p->flow_mode == S_DIN_to_DES) &&
            (keylen == DES3_EDE_KEY_SIZE) &&
            ssi_fips_verify_3des_keys(key, keylen) != 0) {
                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
                return -EINVAL;
        }


        END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);

        /* STAT_PHASE_1: Copy key to ctx */
        START_CYCLE_COUNT();
        /* take the buffer back from the device before the CPU writes it */
        SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                                        max_key_buf_size, DMA_TO_DEVICE);
#if SSI_CC_HAS_MULTI2
        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
                /* MULTI2: key blob = system+data key, then 1 round-count byte */
                memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
                ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
                if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
                    ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
                        crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                        SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
                        return -EINVAL;
                }
        } else
#endif /*SSI_CC_HAS_MULTI2*/
        {
                memcpy(ctx_p->user.key, key, keylen);
                /* zero-pad 192-bit AES keys to the full buffer; presumably
                 * the HW loads a fixed AES_MAX_KEY_SIZE word in that case
                 * (see the key_len == 24 special case in the setup desc) */
                if (keylen == 24)
                        memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

                if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                        /* sha256 for key2 - use sw implementation */
                        int key_len = keylen >> 1;
                        int err;
                        SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
                        desc->tfm = ctx_p->shash_tfm;

                        /* derive second half: sha256(first half) */
                        err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
                        if (err) {
                                SSI_LOG_ERR("Failed to hash ESSIV key.\n");
                                return err;
                        }
                }
        }
        /* hand the updated buffer back to the device */
        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                                        max_key_buf_size, DMA_TO_DEVICE);
        SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr ,max_key_buf_size);
        ctx_p->keylen = keylen;

        END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_1);

         SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
        return 0;
}
460
/*
 * ssi_blkcipher_create_setup_desc() - build the HW descriptor sequence that
 * loads the cipher state (IV/counter/tweak) and the key(s) into the engine.
 *
 * @tfm:      transform whose context holds key material and modes
 * @req_ctx:  per-request context (direction, IV DMA address)
 * @ivsize:   IV length in bytes
 * @nbytes:   request payload length (default XTS/ESSIV data-unit size)
 * @desc:     descriptor array to append to
 * @seq_size: in/out count of descriptors used so far
 *
 * Descriptors are appended at desc[*seq_size] and *seq_size is advanced.
 */
static inline void
ssi_blkcipher_create_setup_desc(
        struct crypto_tfm *tfm,
        struct blkcipher_req_ctx *req_ctx,
        unsigned int ivsize,
        unsigned int nbytes,
        struct cc_hw_desc desc[],
        unsigned int *seq_size)
{
        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = ctx_p->keylen;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
        unsigned int du_size = nbytes;

        struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);

        /* fixed data-unit algorithms override the per-request DU size */
        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
                du_size = 512;
        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
                du_size = 4096;

        switch (cipher_mode) {
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Load cipher state */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                     iv_dma_addr, ivsize,
                                     NS_BIT);
                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
                /* CTR/OFB state goes to STATE1; CBC-family IV goes to STATE0 */
                if ((cipher_mode == DRV_CIPHER_CTR) ||
                    (cipher_mode == DRV_CIPHER_OFB) ) {
                        HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
                                               SETUP_LOAD_STATE1);
                } else {
                        HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
                                               SETUP_LOAD_STATE0);
                }
                (*seq_size)++;
                /*FALLTHROUGH*/
        case DRV_CIPHER_ECB:
                /* Load key (ECB needs no state, hence the fallthrough) */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
                if (flow_mode == S_DIN_to_AES) {

                        if (ssi_is_hw_key(tfm)) {
                                /* key already in a HW slot; no DMA needed */
                                HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
                        } else {
                                /* 192-bit keys are stored zero-padded to the
                                 * max size (see setkey), so load the full word */
                                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                                     key_dma_addr,
                                                     ((key_len == 24) ? AES_MAX_KEY_SIZE : key_len),
                                                     NS_BIT);
                        }
                        HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len);
                } else {
                        /*des*/
                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                             key_dma_addr, key_len,
                                             NS_BIT);
                        HW_DESC_SET_KEY_SIZE_DES(&desc[*seq_size], key_len);
                }
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
        case DRV_CIPHER_BITLOCKER:
                /* Two-key modes: keylen covers both keys, each key_len/2 */
                /* Load AES key */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
                if (ssi_is_hw_key(tfm)) {
                        HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
                } else {
                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                             key_dma_addr, key_len/2,
                                             NS_BIT);
                }
                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
                (*seq_size)++;

                /* load XEX key (second half of the buffer / second slot) */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
                if (ssi_is_hw_key(tfm)) {
                        HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key2_slot);
                } else {
                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                             (key_dma_addr+key_len/2), key_len/2,
                                             NS_BIT);
                }
                HW_DESC_SET_XEX_DATA_UNIT_SIZE(&desc[*seq_size], du_size);
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], S_DIN_to_AES2);
                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
                (*seq_size)++;

                /* Set state (tweak/IV) */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                     iv_dma_addr, CC_AES_BLOCK_SIZE,
                                     NS_BIT);
                (*seq_size)++;
                break;
        default:
                SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
                BUG();
        }
}
589
#if SSI_CC_HAS_MULTI2
/*
 * ssi_blkcipher_create_multi2_setup_desc() - build the descriptor sequence
 * that programs the MULTI2 engine: system key, data key (with the round
 * count recorded at setkey time), then the IV.
 *
 * Descriptors are appended at desc[*seq_size] and *seq_size is advanced.
 */
static inline void ssi_blkcipher_create_multi2_setup_desc(
        struct crypto_tfm *tfm,
        struct blkcipher_req_ctx *req_ctx,
        unsigned int ivsize,
        struct cc_hw_desc desc[],
        unsigned int *seq_size)
{
        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        int direction = req_ctx->gen_ctx.op_type;
        /* Load system key (first part of the staged key buffer) */
        HW_DESC_INIT(&desc[*seq_size]);
        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
                                                CC_MULTI2_SYSTEM_KEY_SIZE,
                                                NS_BIT);
        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
        (*seq_size)++;

        /* load data key (follows the system key in the buffer) */
        HW_DESC_INIT(&desc[*seq_size]);
        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                        (ctx_p->user.key_dma_addr +
                                                CC_MULTI2_SYSTEM_KEY_SIZE),
                                CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
        HW_DESC_SET_MULTI2_NUM_ROUNDS(&desc[*seq_size],
                                                ctx_p->key_round_number);
        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE0 );
        (*seq_size)++;


        /* Set state (IV) */
        HW_DESC_INIT(&desc[*seq_size]);
        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                             req_ctx->gen_ctx.iv_dma_addr,
                             ivsize, NS_BIT);
        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
        (*seq_size)++;

}
#endif /*SSI_CC_HAS_MULTI2*/
640
/*
 * ssi_blkcipher_create_data_desc() - build the data-processing descriptor(s).
 *
 * For DLLI (contiguous DMA) buffers a single DIN->DOUT descriptor is
 * emitted. For MLLI (scatter) buffers, a BYPASS descriptor first copies the
 * MLLI table into SRAM, then a second descriptor processes via that table.
 * @areq non-NULL marks the last descriptor so completion is signalled.
 *
 * Descriptors are appended at desc[*seq_size] and *seq_size is advanced.
 */
static inline void
ssi_blkcipher_create_data_desc(
        struct crypto_tfm *tfm,
        struct blkcipher_req_ctx *req_ctx,
        struct scatterlist *dst, struct scatterlist *src,
        unsigned int nbytes,
        void *areq,
        struct cc_hw_desc desc[],
        unsigned int *seq_size)
{
        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        unsigned int flow_mode = ctx_p->flow_mode;

        /* translate the setup flow (S_DIN_to_*) into a data flow (DIN_*_DOUT) */
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                flow_mode = DIN_AES_DOUT;
                break;
        case S_DIN_to_DES:
                flow_mode = DIN_DES_DOUT;
                break;
#if SSI_CC_HAS_MULTI2
        case S_DIN_to_MULTI2:
                flow_mode = DIN_MULTI2_DOUT;
                break;
#endif /*SSI_CC_HAS_MULTI2*/
        default:
                SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
                return;
        }
        /* Process */
        if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)){
                /* contiguous buffers: one direct DIN->DOUT descriptor */
                SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
                             (unsigned long long)sg_dma_address(src),
                             nbytes);
                SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
                             (unsigned long long)sg_dma_address(dst),
                             nbytes);
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                     sg_dma_address(src),
                                     nbytes, NS_BIT);
                HW_DESC_SET_DOUT_DLLI(&desc[*seq_size],
                                      sg_dma_address(dst),
                                      nbytes,
                                      NS_BIT, (areq == NULL)? 0:1);
                if (areq != NULL) {
                        HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
                }
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        } else {
                /* bypass: first copy the MLLI table from DRAM into SRAM */
                SSI_LOG_DEBUG(" bypass params addr 0x%llX "
                             "length 0x%X addr 0x%08X\n",
                        (unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
                        req_ctx->mlli_params.mlli_len,
                        (unsigned int)ctx_p->drvdata->mlli_sram_addr);
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
                                     req_ctx->mlli_params.mlli_dma_addr,
                                     req_ctx->mlli_params.mlli_len,
                                     NS_BIT);
                HW_DESC_SET_DOUT_SRAM(&desc[*seq_size],
                                      ctx_p->drvdata->mlli_sram_addr,
                                      req_ctx->mlli_params.mlli_len);
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], BYPASS);
                (*seq_size)++;

                /* then process via the SRAM-resident MLLI table */
                HW_DESC_INIT(&desc[*seq_size]);
                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_MLLI,
                        ctx_p->drvdata->mlli_sram_addr,
                                     req_ctx->in_mlli_nents, NS_BIT);
                if (req_ctx->out_nents == 0) {
                        /* in-place: output reuses the input MLLI table */
                        SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
                                     "addr 0x%08X\n",
                        (unsigned int)ctx_p->drvdata->mlli_sram_addr,
                        (unsigned int)ctx_p->drvdata->mlli_sram_addr);
                        HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
                        ctx_p->drvdata->mlli_sram_addr,
                                              req_ctx->in_mlli_nents,
                                              NS_BIT,(areq == NULL)? 0:1);
                } else {
                        /* distinct dst: output table follows the input table
                         * in SRAM, offset by the input entries */
                        SSI_LOG_DEBUG(" din/dout params "
                                     "addr 0x%08X addr 0x%08X\n",
                                (unsigned int)ctx_p->drvdata->mlli_sram_addr,
                                (unsigned int)ctx_p->drvdata->mlli_sram_addr +
                                (u32)LLI_ENTRY_BYTE_SIZE *
                                                        req_ctx->in_nents);
                        HW_DESC_SET_DOUT_MLLI(&desc[*seq_size],
                                (ctx_p->drvdata->mlli_sram_addr +
                                LLI_ENTRY_BYTE_SIZE *
                                                req_ctx->in_mlli_nents),
                                req_ctx->out_mlli_nents, NS_BIT,(areq == NULL)? 0:1);
                }
                if (areq != NULL) {
                        HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
                }
                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        }
}
742
743 static int ssi_blkcipher_complete(struct device *dev,
744                                   struct ssi_ablkcipher_ctx *ctx_p,
745                                   struct blkcipher_req_ctx *req_ctx,
746                                   struct scatterlist *dst, struct scatterlist *src,
747                                   unsigned int ivsize,
748                                   void *areq,
749                                   void __iomem *cc_base)
750 {
751         int completion_error = 0;
752         u32 inflight_counter;
753         DECL_CYCLE_COUNT_RESOURCES;
754
755         START_CYCLE_COUNT();
756         ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
757         END_CYCLE_COUNT(STAT_OP_TYPE_GENERIC, STAT_PHASE_4);
758
759
760         /*Set the inflight couter value to local variable*/
761         inflight_counter =  ctx_p->drvdata->inflight_counter;
762         /*Decrease the inflight counter*/
763         if(ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
764                 ctx_p->drvdata->inflight_counter--;
765
766         if(areq){
767                 ablkcipher_request_complete(areq, completion_error);
768                 return 0;
769         }
770         return completion_error;
771 }
772
/*
 * ssi_blkcipher_process() - core processing routine shared by the sync and
 * async entry points.
 *
 * Validates the request, maps the buffers for DMA, builds the HW descriptor
 * sequence (setup + data) and pushes it to the request queue. For async
 * requests (areq != NULL) completion is reported via ssi_ablkcipher_complete;
 * for sync requests completion is handled inline.
 *
 * @tfm:       crypto transform
 * @req_ctx:   per-request context
 * @dst:       destination scatterlist
 * @src:       source scatterlist
 * @nbytes:    number of bytes to process
 * @info:      request IV buffer
 * @ivsize:    IV length in bytes
 * @areq:      async request, or NULL for the synchronous path
 * @direction: DRV_CRYPTO_DIRECTION_ENCRYPT or _DECRYPT
 *
 * Return: 0 on (synchronous) success, -EINPROGRESS for a queued async
 * request, negative error code otherwise.
 */
static int ssi_blkcipher_process(
	struct crypto_tfm *tfm,
	struct blkcipher_req_ctx *req_ctx,
	struct scatterlist *dst, struct scatterlist *src,
	unsigned int nbytes,
	void *info, //req info
	unsigned int ivsize,
	void *areq,
	enum drv_crypto_direction direction)
{
	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = &ctx_p->drvdata->plat_dev->dev;
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct ssi_crypto_req ssi_req = {};
	int rc, seq_len = 0, cts_restore_flag = 0;
	DECL_CYCLE_COUNT_RESOURCES;

	SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? "Encrypt" : "Decrypt"),
		     areq, info, nbytes);

	CHECK_AND_RETURN_UPON_FIPS_ERROR();
	/* STAT_PHASE_0: Init and sanity checks */
	START_CYCLE_COUNT();

	/* TODO: check data length according to mode */
	if (unlikely(validate_data_size(ctx_p, nbytes))) {
		SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		return -EINVAL;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		return 0;
	}
	/*
	 * For CTS, when the data size is aligned to the AES block size the
	 * result is identical to plain CBC, so temporarily switch the mode
	 * to CBC and restore it on exit (see cts_restore_flag below).
	 */
	if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)) {

		ctx_p->cipher_mode = DRV_CIPHER_CBC;
		cts_restore_flag = 1;
	}

	/* Setup DX request structure: completion callback + its argument */
	ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
	ssi_req.user_arg = (void *)areq;

#ifdef ENABLE_CYCLE_COUNT
	ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
		STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;

#endif

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_0);

	/* STAT_PHASE_1: Map buffers */
	START_CYCLE_COUNT();

	rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
	if (unlikely(rc != 0)) {
		SSI_LOG_ERR("map_request() failed\n");
		goto exit_process;
	}

	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_1);

	/* STAT_PHASE_2: Create sequence */
	START_CYCLE_COUNT();

	/* Setup processing: key/IV load descriptors */
#if SSI_CC_HAS_MULTI2
	if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
		ssi_blkcipher_create_multi2_setup_desc(tfm,
						       req_ctx,
						       ivsize,
						       desc,
						       &seq_len);
	} else
#endif /*SSI_CC_HAS_MULTI2*/
	{
		ssi_blkcipher_create_setup_desc(tfm,
						req_ctx,
						ivsize,
						nbytes,
						desc,
						&seq_len);
	}
	/* Data processing descriptors (DIN/DOUT) */
	ssi_blkcipher_create_data_desc(tfm,
			      req_ctx,
			      dst, src,
			      nbytes,
			      areq,
			      desc, &seq_len);

	/* do we need to generate IV? */
	if (req_ctx->is_giv == true) {
		ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
		ssi_req.ivgen_dma_addr_len = 1;
		/* set the IV size (8/16 B long)*/
		ssi_req.ivgen_size = ivsize;
	}
	END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_2);

	/* STAT_PHASE_3: Lock HW and push sequence */
	START_CYCLE_COUNT();

	rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (areq == NULL) ? 0 : 1);
	if (areq != NULL) {
		/* Async: -EINPROGRESS means the request was queued OK */
		if (unlikely(rc != -EINPROGRESS)) {
			/* Failed to send the request or request completed synchronously */
			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
		}

		END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
	} else {
		/* Sync: send_request() blocked until the HW was done */
		if (rc != 0) {
			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
			END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
		} else {
			END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
			rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst,
						    src, ivsize, NULL,
						    ctx_p->drvdata->cc_base);
		}
	}

exit_process:
	/* Restore the CTS mode if it was temporarily switched to CBC above */
	if (cts_restore_flag != 0)
		ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;

	return rc;
}
908
909 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
910 {
911         struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
912         struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
913         struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
914         struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
915         unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
916
917         CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
918
919         ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src,
920                                ivsize, areq, cc_base);
921 }
922
923
924
925 static int ssi_sblkcipher_init(struct crypto_tfm *tfm)
926 {
927         struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
928
929         /* Allocate sync ctx buffer */
930         ctx_p->sync_ctx = kmalloc(sizeof(struct blkcipher_req_ctx), GFP_KERNEL|GFP_DMA);
931         if (!ctx_p->sync_ctx) {
932                 SSI_LOG_ERR("Allocating sync ctx buffer in context failed\n");
933                 return -ENOMEM;
934         }
935         SSI_LOG_DEBUG("Allocated sync ctx buffer in context ctx_p->sync_ctx=@%p\n",
936                                                                 ctx_p->sync_ctx);
937
938         return ssi_blkcipher_init(tfm);
939 }
940
941
942 static void ssi_sblkcipher_exit(struct crypto_tfm *tfm)
943 {
944         struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
945
946         kfree(ctx_p->sync_ctx);
947         SSI_LOG_DEBUG("Free sync ctx buffer in context ctx_p->sync_ctx=@%p\n", ctx_p->sync_ctx);
948
949         ssi_blkcipher_exit(tfm);
950 }
951
952 #ifdef SYNC_ALGS
953 static int ssi_sblkcipher_encrypt(struct blkcipher_desc *desc,
954                         struct scatterlist *dst, struct scatterlist *src,
955                         unsigned int nbytes)
956 {
957         struct crypto_blkcipher *blk_tfm = desc->tfm;
958         struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
959         struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
960         struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
961         unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
962
963         req_ctx->backup_info = desc->info;
964         req_ctx->is_giv = false;
965
966         return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_ENCRYPT);
967 }
968
969 static int ssi_sblkcipher_decrypt(struct blkcipher_desc *desc,
970                         struct scatterlist *dst, struct scatterlist *src,
971                         unsigned int nbytes)
972 {
973         struct crypto_blkcipher *blk_tfm = desc->tfm;
974         struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
975         struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
976         struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
977         unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
978
979         req_ctx->backup_info = desc->info;
980         req_ctx->is_giv = false;
981
982         return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_DECRYPT);
983 }
984 #endif
985
986 /* Async wrap functions */
987
988 static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
989 {
990         struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
991
992         ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
993
994         return ssi_blkcipher_init(tfm);
995 }
996
997
998 static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
999                                 const u8 *key,
1000                                 unsigned int keylen)
1001 {
1002         return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
1003 }
1004
1005 static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
1006 {
1007         struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1008         struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1009         struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1010         unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1011
1012         req_ctx->backup_info = req->info;
1013         req_ctx->is_giv = false;
1014
1015         return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
1016 }
1017
1018 static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
1019 {
1020         struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1021         struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1022         struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1023         unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1024
1025         req_ctx->backup_info = req->info;
1026         req_ctx->is_giv = false;
1027         return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
1028 }
1029
1030
1031 /* DX Block cipher alg */
/*
 * Table of block cipher algorithm templates registered by this driver.
 * Each entry is turned into a crypto_alg by ssi_ablkcipher_create_alg();
 * "du512"/"du4096" variants carry a CRYPTO_ALG_BULK_DU_* flag in .type.
 * All entries here are async (.synchronous = false).
 */
static struct ssi_alg_template blkcipher_algs[] = {
/* Async template */
#if SSI_CC_HAS_AES_XTS
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			/* XTS takes two keys, hence the doubled key sizes */
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			.geniv = "eseqiv",
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
#endif /*SSI_CC_HAS_AES_XTS*/
#if SSI_CC_HAS_AES_ESSIV
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
#endif /*SSI_CC_HAS_AES_ESSIV*/
#if SSI_CC_HAS_AES_BITLOCKER
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
#endif /*SSI_CC_HAS_AES_BITLOCKER*/
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0, /* ECB uses no IV */
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
#if SSI_CC_HAS_AES_CTS
	{
		.name = "cts1(cbc(aes))",
		.driver_name = "cts1-cbc-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
#endif
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-dx",
		.blocksize = 1, /* CTR is a stream mode */
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.synchronous = false,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-dx",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.synchronous = false,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-dx",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0, /* ECB uses no IV */
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.synchronous = false,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-dx",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.synchronous = false,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-dx",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0, /* ECB uses no IV */
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.synchronous = false,
	},
#if SSI_CC_HAS_MULTI2
	{
		.name = "cbc(multi2)",
		.driver_name = "cbc-multi2-dx",
		.blocksize = CC_MULTI2_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.ivsize = CC_MULTI2_IV_SIZE,
			},
		.cipher_mode = DRV_MULTI2_CBC,
		.flow_mode = S_DIN_to_MULTI2,
		.synchronous = false,
	},
	{
		.name = "ofb(multi2)",
		.driver_name = "ofb-multi2-dx",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			/*
			 * NOTE(review): .decrypt points at the encrypt
			 * handler; presumably deliberate since OFB en/decrypt
			 * are the same XOR-keystream operation — confirm.
			 */
			.decrypt = ssi_ablkcipher_encrypt,
			.min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.ivsize = CC_MULTI2_IV_SIZE,
			},
		.cipher_mode = DRV_MULTI2_OFB,
		.flow_mode = S_DIN_to_MULTI2,
		.synchronous = false,
	},
#endif /*SSI_CC_HAS_MULTI2*/
};
1386
1387 static
1388 struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1389 {
1390         struct ssi_crypto_alg *t_alg;
1391         struct crypto_alg *alg;
1392
1393         t_alg = kzalloc(sizeof(struct ssi_crypto_alg), GFP_KERNEL);
1394         if (!t_alg) {
1395                 SSI_LOG_ERR("failed to allocate t_alg\n");
1396                 return ERR_PTR(-ENOMEM);
1397         }
1398
1399         alg = &t_alg->crypto_alg;
1400
1401         snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1402         snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1403                  template->driver_name);
1404         alg->cra_module = THIS_MODULE;
1405         alg->cra_priority = SSI_CRA_PRIO;
1406         alg->cra_blocksize = template->blocksize;
1407         alg->cra_alignmask = 0;
1408         alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1409
1410         alg->cra_init = template->synchronous? ssi_sblkcipher_init:ssi_ablkcipher_init;
1411         alg->cra_exit = template->synchronous? ssi_sblkcipher_exit:ssi_blkcipher_exit;
1412         alg->cra_type = template->synchronous? &crypto_blkcipher_type:&crypto_ablkcipher_type;
1413         if(template->synchronous) {
1414                 alg->cra_blkcipher = template->template_sblkcipher;
1415                 alg->cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1416                                 template->type;
1417         } else {
1418                 alg->cra_ablkcipher = template->template_ablkcipher;
1419                 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
1420                                 template->type;
1421         }
1422
1423         t_alg->cipher_mode = template->cipher_mode;
1424         t_alg->flow_mode = template->flow_mode;
1425
1426         return t_alg;
1427 }
1428
1429 int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1430 {
1431         struct ssi_crypto_alg *t_alg, *n;
1432         struct ssi_blkcipher_handle *blkcipher_handle =
1433                                                 drvdata->blkcipher_handle;
1434         struct device *dev;
1435         dev = &drvdata->plat_dev->dev;
1436
1437         if (blkcipher_handle != NULL) {
1438                 /* Remove registered algs */
1439                 list_for_each_entry_safe(t_alg, n,
1440                                 &blkcipher_handle->blkcipher_alg_list,
1441                                          entry) {
1442                         crypto_unregister_alg(&t_alg->crypto_alg);
1443                         list_del(&t_alg->entry);
1444                         kfree(t_alg);
1445                 }
1446                 kfree(blkcipher_handle);
1447                 drvdata->blkcipher_handle = NULL;
1448         }
1449         return 0;
1450 }
1451
1452
1453
1454 int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1455 {
1456         struct ssi_blkcipher_handle *ablkcipher_handle;
1457         struct ssi_crypto_alg *t_alg;
1458         int rc = -ENOMEM;
1459         int alg;
1460
1461         ablkcipher_handle = kmalloc(sizeof(struct ssi_blkcipher_handle),
1462                 GFP_KERNEL);
1463         if (ablkcipher_handle == NULL)
1464                 return -ENOMEM;
1465
1466         drvdata->blkcipher_handle = ablkcipher_handle;
1467
1468         INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1469
1470         /* Linux crypto */
1471         SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1472         for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1473                 SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1474                 t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1475                 if (IS_ERR(t_alg)) {
1476                         rc = PTR_ERR(t_alg);
1477                         SSI_LOG_ERR("%s alg allocation failed\n",
1478                                  blkcipher_algs[alg].driver_name);
1479                         goto fail0;
1480                 }
1481                 t_alg->drvdata = drvdata;
1482
1483                 SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1484                 rc = crypto_register_alg(&t_alg->crypto_alg);
1485                 SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1486                         t_alg->crypto_alg.cra_driver_name, rc);
1487                 if (unlikely(rc != 0)) {
1488                         SSI_LOG_ERR("%s alg registration failed\n",
1489                                 t_alg->crypto_alg.cra_driver_name);
1490                         kfree(t_alg);
1491                         goto fail0;
1492                 } else {
1493                         list_add_tail(&t_alg->entry,
1494                                       &ablkcipher_handle->blkcipher_alg_list);
1495                         SSI_LOG_DEBUG("Registered %s\n",
1496                                         t_alg->crypto_alg.cra_driver_name);
1497                 }
1498         }
1499         return 0;
1500
1501 fail0:
1502         ssi_ablkcipher_free(drvdata);
1503         return rc;
1504 }