/*
 * Source: arch/x86/crypto/serpent_avx_glue.c
 * (karo-tx-linux.git, merge tag 'for-linus-v3.10-rc1' — gitweb blob capture)
 */
1 /*
2  * Glue Code for AVX assembler versions of Serpent Cipher
3  *
4  * Copyright (C) 2012 Johannes Goetzfried
5  *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
6  *
7  * Glue code based on serpent_sse2_glue.c by:
8  *  Copyright (C) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
9  *
10  * This program is free software; you can redistribute it and/or modify
11  * it under the terms of the GNU General Public License as published by
12  * the Free Software Foundation; either version 2 of the License, or
13  * (at your option) any later version.
14  *
15  * This program is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18  * GNU General Public License for more details.
19  *
20  * You should have received a copy of the GNU General Public License
21  * along with this program; if not, write to the Free Software
22  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
23  * USA
24  *
25  */
26
27 #include <linux/module.h>
28 #include <linux/hardirq.h>
29 #include <linux/types.h>
30 #include <linux/crypto.h>
31 #include <linux/err.h>
32 #include <crypto/algapi.h>
33 #include <crypto/serpent.h>
34 #include <crypto/cryptd.h>
35 #include <crypto/b128ops.h>
36 #include <crypto/ctr.h>
37 #include <crypto/lrw.h>
38 #include <crypto/xts.h>
39 #include <asm/xcr.h>
40 #include <asm/xsave.h>
41 #include <asm/crypto/serpent-avx.h>
42 #include <asm/crypto/ablk_helper.h>
43 #include <asm/crypto/glue_helper.h>
44
45 static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
46 {
47         be128 ctrblk;
48
49         le128_to_be128(&ctrblk, iv);
50         le128_inc(iv);
51
52         __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
53         u128_xor(dst, src, (u128 *)&ctrblk);
54 }
55
/*
 * ECB encryption dispatch table: the glue helper uses the 8-way AVX
 * implementation while at least SERPENT_PARALLEL_BLOCKS blocks remain,
 * then falls back to the one-block generic C implementation.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
68
/*
 * CTR dispatch table: 8-way AVX keystream generation for full parallel
 * chunks, serpent_crypt_ctr() for the remaining blocks.
 */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr) }
	} }
};
81
/*
 * ECB decryption dispatch table: 8-way AVX path first, one-block
 * generic fallback for the tail.
 */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
94
/*
 * CBC decryption dispatch table. CBC decryption is parallelizable
 * (unlike encryption), so the 8-way AVX path is usable here too.
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
107
/* blkcipher .encrypt hook for "__ecb-serpent-avx". */
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}
113
/* blkcipher .decrypt hook for "__ecb-serpent-avx". */
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}
119
/*
 * blkcipher .encrypt hook for "__cbc-serpent-avx". CBC encryption is
 * inherently sequential, so only the one-block function is passed.
 */
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				     dst, src, nbytes);
}
126
/* blkcipher .decrypt hook for "__cbc-serpent-avx" (parallel 8-way path). */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}
133
/* blkcipher hook for "__ctr-serpent-avx"; CTR en/decryption are the same. */
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}
139
/*
 * Enable the FPU/AVX state if @nbytes covers at least a parallel chunk
 * and it is not already enabled; returns the new fpu_enabled state.
 */
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}
145
/* Release the FPU if serpent_fpu_begin() enabled it (no-op otherwise). */
static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
150
/* Per-request state threaded through the LRW/XTS crypt callbacks. */
struct crypt_priv {
	struct serpent_ctx *ctx;	/* expanded cipher key */
	bool fpu_enabled;		/* FPU held across callback calls */
};
155
156 static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
157 {
158         const unsigned int bsize = SERPENT_BLOCK_SIZE;
159         struct crypt_priv *ctx = priv;
160         int i;
161
162         ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
163
164         if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
165                 serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
166                 return;
167         }
168
169         for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
170                 __serpent_encrypt(ctx->ctx, srcdst, srcdst);
171 }
172
173 static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
174 {
175         const unsigned int bsize = SERPENT_BLOCK_SIZE;
176         struct crypt_priv *ctx = priv;
177         int i;
178
179         ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
180
181         if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
182                 serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
183                 return;
184         }
185
186         for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
187                 __serpent_decrypt(ctx->ctx, srcdst, srcdst);
188 }
189
/* Transform context for lrw(serpent): tweak table plus cipher key. */
struct serpent_lrw_ctx {
	struct lrw_table_ctx lrw_table;	/* precomputed GF(2^128) tweak table */
	struct serpent_ctx serpent_ctx;	/* expanded serpent key */
};
194
195 static int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
196                               unsigned int keylen)
197 {
198         struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
199         int err;
200
201         err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
202                                                         SERPENT_BLOCK_SIZE);
203         if (err)
204                 return err;
205
206         return lrw_init_table(&ctx->lrw_table, key + keylen -
207                                                 SERPENT_BLOCK_SIZE);
208 }
209
/* blkcipher .encrypt hook for "__lrw-serpent-avx". */
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	/* The FPU may be held across the whole walk; must not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
235
/* blkcipher .decrypt hook for "__lrw-serpent-avx". */
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	/* The FPU may be held across the whole walk; must not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
261
262 static void lrw_exit_tfm(struct crypto_tfm *tfm)
263 {
264         struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
265
266         lrw_free_table(&ctx->lrw_table);
267 }
268
/* Transform context for xts(serpent): separate tweak and data keys. */
struct serpent_xts_ctx {
	struct serpent_ctx tweak_ctx;	/* key for tweak encryption */
	struct serpent_ctx crypt_ctx;	/* key for data encryption */
};
273
274 static int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
275                               unsigned int keylen)
276 {
277         struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
278         u32 *flags = &tfm->crt_flags;
279         int err;
280
281         /* key consists of keys of equal size concatenated, therefore
282          * the length must be even
283          */
284         if (keylen % 2) {
285                 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
286                 return -EINVAL;
287         }
288
289         /* first half of xts-key is for crypt */
290         err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
291         if (err)
292                 return err;
293
294         /* second half of xts-key is for tweak */
295         return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
296 }
297
/* blkcipher .encrypt hook for "__xts-serpent-avx". */
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	/* The FPU may be held across the whole walk; must not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
324
/*
 * blkcipher .decrypt hook for "__xts-serpent-avx". The tweak is always
 * computed with the encryption direction, per the XTS specification.
 */
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	/* The FPU may be held across the whole walk; must not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
351
/*
 * Algorithm registrations. The first five ("__"-prefixed, priority 0)
 * are the internal synchronous implementations; they may only run with
 * the FPU usable. The last five are the user-visible async algorithms:
 * via the ablk helper they dispatch to cryptd when the FPU cannot be
 * used in the current context.
 */
static struct crypto_alg serpent_algs[10] = { {
	/* Internal synchronous ECB. */
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	/* Internal synchronous CBC. */
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	/* Internal synchronous CTR (stream cipher: blocksize 1). */
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	/* Internal synchronous LRW (key carries an extra tweak block). */
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	/* Internal synchronous XTS (key is two concatenated keys). */
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	/* User-visible async ECB, backed by the internal alg via cryptd. */
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* User-visible async CBC. */
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			/* __ablk_encrypt: encrypt synchronously when possible. */
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* User-visible async CTR; decrypt == encrypt by construction. */
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	/* User-visible async LRW. */
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* User-visible async XTS. */
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
566
567 static int __init serpent_init(void)
568 {
569         u64 xcr0;
570
571         if (!cpu_has_avx || !cpu_has_osxsave) {
572                 printk(KERN_INFO "AVX instructions are not detected.\n");
573                 return -ENODEV;
574         }
575
576         xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
577         if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
578                 printk(KERN_INFO "AVX detected but unusable.\n");
579                 return -ENODEV;
580         }
581
582         return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
583 }
584
/* Module exit: unregister everything serpent_init() registered. */
static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
589
module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
/* Allow auto-loading on requests for the generic "serpent" name. */
MODULE_ALIAS("serpent");