TOMOYO Linux Cross Reference
Linux/arch/arm64/crypto/aes-glue.c

/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#include "aes-ctr-fallback.h"

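/*
 * This file is shared between two modules (an inference from the
 * USE_V8_CRYPTO_EXTENSIONS switch and the arm64 crypto Makefile): built
 * with the macro defined it becomes the "ce" glue driving the ARMv8 Crypto
 * Extensions AES instructions; built without it, the plain NEON glue. The
 * macros below select the matching entry points, and the CE variant
 * registers at a higher priority (300 vs 200) so it is preferred on
 * hardware that supports both.
 */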
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE                    "ce"
#define PRIO                    300
#define aes_setkey              ce_aes_setkey
#define aes_expandkey           ce_aes_expandkey
#define aes_ecb_encrypt         ce_aes_ecb_encrypt
#define aes_ecb_decrypt         ce_aes_ecb_decrypt
#define aes_cbc_encrypt         ce_aes_cbc_encrypt
#define aes_cbc_decrypt         ce_aes_cbc_decrypt
#define aes_ctr_encrypt         ce_aes_ctr_encrypt
#define aes_xts_encrypt         ce_aes_xts_encrypt
#define aes_xts_decrypt         ce_aes_xts_decrypt
#define aes_mac_update          ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE                    "neon"
#define PRIO                    200
#define aes_setkey              crypto_aes_set_key
#define aes_expandkey           crypto_aes_expand_key
#define aes_ecb_encrypt         neon_aes_ecb_encrypt
#define aes_ecb_decrypt         neon_aes_ecb_decrypt
#define aes_cbc_encrypt         neon_aes_cbc_encrypt
#define aes_cbc_decrypt         neon_aes_cbc_decrypt
#define aes_ctr_encrypt         neon_aes_ctr_encrypt
#define aes_xts_encrypt         neon_aes_xts_encrypt
#define aes_xts_decrypt         neon_aes_xts_decrypt
#define aes_mac_update          neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
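/*
 * Shared calling convention (as this version of the asm appears to work):
 * "rounds" is derived from the key length, "blocks" counts full AES blocks,
 * and "first" is nonzero on the first call of a walk, letting the asm load
 * the round keys (and IV, where applicable) into NEON registers once and
 * keep that state live across subsequent calls inside a single
 * kernel_neon_begin()/kernel_neon_end() section.
 */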
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
                               int blocks, u8 dg[], int enc_before,
                               int enc_after);

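/*
 * Per-transform contexts: XTS carries two expanded keys, the data key and
 * the tweak key. The MAC transforms declare a flexible consts[] array
 * behind the expanded key; cmac/xcbc size it to two blocks via cra_ctxsize
 * below, while cbcmac leaves it empty.
 */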
struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};

struct mac_tfm_ctx {
        struct crypto_aes_ctx key;
        u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
        unsigned int len;
        u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                               unsigned int key_len)
{
        return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

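/*
 * XTS takes a double-length key: the first half is expanded as the data
 * key (key1), the second half as the tweak key (key2). xts_verify_key()
 * rejects odd key lengths and, in FIPS mode, keys whose two halves are
 * identical.
 */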
static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int ret;

        ret = xts_verify_key(tfm, in_key, key_len);
        if (ret)
                return ret;

        ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                    key_len / 2);
        if (!ret)
                return 0;

        crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        return -EINVAL;
}

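/*
 * The block mode handlers below all follow the same pattern: walk the
 * scatterlists with skcipher_walk_virt(), hand each run of full blocks to
 * the asm routine, and report any sub-block remainder back to the walker
 * via skcipher_walk_done(). Note that rounds = 6 + key_length / 4 yields
 * 10/12/14 rounds for 128/192/256-bit keys.
 */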
static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks, first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv,
                                first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks, walk.iv,
                                first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

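/*
 * CTR is a stream cipher, so requests need not be a multiple of the block
 * size. Full blocks go to the asm as usual; any remaining tail is handled
 * by passing blocks == -1, which makes the asm emit one block of keystream
 * into a stack buffer that is then XORed with the partial input.
 */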
static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        first = 1;
        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv,
                                first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
                first = 0;
        }
        if (walk.nbytes) {
                u8 __aligned(8) tail[AES_BLOCK_SIZE];
                unsigned int nbytes = walk.nbytes;
                u8 *tdst = walk.dst.virt.addr;
                u8 *tsrc = walk.src.virt.addr;

                /*
                 * Tell aes_ctr_encrypt() to process a tail block.
                 */
                blocks = -1;

                aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
                                blocks, walk.iv, first);
                crypto_xor_cpy(tdst, tsrc, tail, nbytes);
                err = skcipher_walk_done(&walk, 0);
        }
        kernel_neon_end();

        return err;
}

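/*
 * Synchronous CTR entry point for the non-internal "ctr(aes)" algorithm:
 * when SIMD is not usable in the current context, fall back to the scalar
 * CTR implementation from aes-ctr-fallback.h instead of touching NEON
 * state.
 */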
static int ctr_encrypt_sync(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

        if (!may_use_simd())
                return aes_ctr_encrypt_fallback(ctx, req);

        return ctr_encrypt(req);
}

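/*
 * In XTS the tweak is always produced by encrypting the IV with key2,
 * which is why both xts_encrypt() and xts_decrypt() pass key2.key_enc to
 * the asm; only the data key switches between key_enc and key_dec.
 */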
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_enc, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_dec, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

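/*
 * The "__" algorithms are marked CRYPTO_ALG_INTERNAL: they touch NEON
 * state and may only run where SIMD is usable, so they are not exposed
 * directly. aes_init() wraps each of them in a simd skcipher that handles
 * the fallback. The one exception is the plain "ctr(aes)" entry at
 * PRIO - 1, which is safe in any context via ctr_encrypt_sync().
 */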
static struct skcipher_alg aes_algs[] = { {
        .base = {
                .cra_name               = "__ecb(aes)",
                .cra_driver_name        = "__ecb-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ecb_encrypt,
        .decrypt        = ecb_decrypt,
}, {
        .base = {
                .cra_name               = "__cbc(aes)",
                .cra_driver_name        = "__cbc-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = cbc_encrypt,
        .decrypt        = cbc_decrypt,
}, {
        .base = {
                .cra_name               = "__ctr(aes)",
                .cra_driver_name        = "__ctr-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ctr_encrypt,
        .decrypt        = ctr_encrypt,
}, {
        .base = {
                .cra_name               = "ctr(aes)",
                .cra_driver_name        = "ctr-aes-" MODE,
                .cra_priority           = PRIO - 1,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ctr_encrypt_sync,
        .decrypt        = ctr_encrypt_sync,
}, {
        .base = {
                .cra_name               = "__xts(aes)",
                .cra_driver_name        = "__xts-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_xts_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .setkey         = xts_set_key,
        .encrypt        = xts_encrypt,
        .decrypt        = xts_decrypt,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                         unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        int err;

        err = aes_expandkey(&ctx->key, in_key, key_len);
        if (err)
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

        return err;
}

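/*
 * Doubling in GF(2^128) as used by CMAC subkey generation: shift the
 * 128-bit value left by one and, if a bit fell off the top, reduce by the
 * field polynomial x^128 + x^7 + x^2 + x + 1 (the 0x87 constant).
 */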
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
        u64 a = be64_to_cpu(x->a);
        u64 b = be64_to_cpu(x->b);

        y->a = cpu_to_be64((a << 1) | (b >> 63));
        y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

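/*
 * CMAC subkey derivation (NIST SP 800-38B): L = E_K(0^128), then
 * K1 = dbl(L) for complete final blocks and K2 = dbl(K1) for padded ones.
 * Both subkeys are stored in the consts[] area behind the expanded key.
 */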
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        be128 *consts = (be128 *)ctx->consts;
        u8 *rk = (u8 *)ctx->key.key_enc;
        int rounds = 6 + key_len / 4;
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        /* encrypt the zero vector */
        kernel_neon_begin();
        aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
        kernel_neon_end();

        cmac_gf128_mul_by_x(consts, consts);
        cmac_gf128_mul_by_x(consts + 1, consts);

        return 0;
}

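/*
 * XCBC-MAC (RFC 3566) derives three keys by encrypting the constants
 * 0x01..01, 0x02..02 and 0x03..03 under the user key: K1 becomes the
 * actual CBC-MAC key (re-expanded via cbcmac_setkey() below), while K2 and
 * K3 land in consts[] and play the same role as the CMAC subkeys.
 */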
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        static u8 const ks[3][AES_BLOCK_SIZE] = {
                { [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
        };

        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        u8 *rk = (u8 *)ctx->key.key_enc;
        int rounds = 6 + key_len / 4;
        u8 key[AES_BLOCK_SIZE];
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        kernel_neon_begin();
        aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
        aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
        kernel_neon_end();

        return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        memset(ctx->dg, 0, AES_BLOCK_SIZE);
        ctx->len = 0;

        return 0;
}

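/*
 * Core MAC block processing: XOR each input block into the digest and
 * encrypt. enc_before forces an encryption of the pending digest first,
 * and enc_after controls whether the final block is encrypted or left
 * pending. When SIMD is unavailable, fall back to the scalar
 * __aes_arm64_encrypt() cipher.
 */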
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
                          u8 dg[], int enc_before, int enc_after)
{
        int rounds = 6 + ctx->key_length / 4;

        if (may_use_simd()) {
                kernel_neon_begin();
                aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
                               enc_after);
                kernel_neon_end();
        } else {
                if (enc_before)
                        __aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);

                while (blocks--) {
                        crypto_xor(dg, in, AES_BLOCK_SIZE);
                        in += AES_BLOCK_SIZE;

                        if (blocks || enc_after)
                                __aes_arm64_encrypt(ctx->key_enc, dg, dg,
                                                    rounds);
                }
        }
}

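/*
 * Buffering rule for mac_update(): partial input is XORed into dg at
 * offset ctx->len without encrypting. A final full block is deliberately
 * left unencrypted and recorded as ctx->len == AES_BLOCK_SIZE, so that
 * cmac_final() can still fold the proper subkey into the last block.
 */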
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        while (len > 0) {
                unsigned int l;

                if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
                    (ctx->len + len) > AES_BLOCK_SIZE) {

                        int blocks = len / AES_BLOCK_SIZE;

                        len %= AES_BLOCK_SIZE;

                        mac_do_update(&tctx->key, p, blocks, ctx->dg,
                                      (ctx->len != 0), (len != 0));

                        p += blocks * AES_BLOCK_SIZE;

                        if (!len) {
                                ctx->len = AES_BLOCK_SIZE;
                                break;
                        }
                        ctx->len = 0;
                }

                l = min(len, AES_BLOCK_SIZE - ctx->len);

                if (l <= AES_BLOCK_SIZE) {
                        crypto_xor(ctx->dg + ctx->len, p, l);
                        ctx->len += l;
                        len -= l;
                        p += l;
                }
        }

        return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

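/*
 * CMAC finalization: an incomplete final block is padded with a single
 * 0x80 byte (10* padding) and masked with K2 (consts + AES_BLOCK_SIZE); a
 * complete final block is masked with K1 (consts). The subkey is absorbed
 * as one extra block before the last encryption.
 */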
static int cmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
        u8 *consts = tctx->consts;

        if (ctx->len != AES_BLOCK_SIZE) {
                ctx->dg[ctx->len] ^= 0x80;
                consts += AES_BLOCK_SIZE;
        }

        mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

static struct shash_alg mac_algs[] = { {
        .base.cra_name          = "cmac(aes)",
        .base.cra_driver_name   = "cmac-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx) +
                                  2 * AES_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cmac_final,
        .setkey                 = cmac_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name          = "xcbc(aes)",
        .base.cra_driver_name   = "xcbc-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx) +
                                  2 * AES_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cmac_final,
        .setkey                 = xcbc_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name          = "cbcmac(aes)",
        .base.cra_driver_name   = "cbcmac-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx),
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cbcmac_final,
        .setkey                 = cbcmac_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

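/*
 * Module init/exit: register the skciphers and MAC shashes, then create a
 * simd wrapper for every CRYPTO_ALG_INTERNAL entry, stripping the "__"
 * prefix to form the public name (e.g. "__ecb(aes)" becomes "ecb(aes)").
 * The wrapper calls the internal algorithm directly when SIMD is usable
 * and defers to cryptd otherwise (the usual simd-wrapper arrangement).
 */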
static void aes_exit(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
                if (aes_simd_algs[i])
                        simd_skcipher_free(aes_simd_algs[i]);

        crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        if (err)
                return err;

        err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        if (err)
                goto unregister_ciphers;

        for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
                if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
                        continue;

                algname = aes_algs[i].base.cra_name + 2;
                drvname = aes_algs[i].base.cra_driver_name + 2;
                basename = aes_algs[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aes_simd_algs[i] = simd;
        }

        return 0;

unregister_simds:
        aes_exit();
        /* aes_exit() already dropped every registration; don't fall through */
        return err;
unregister_ciphers:
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        return err;
}
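
/*
 * The CE build binds only to CPUs that advertise the AES feature; the NEON
 * build loads unconditionally and also exports two of its asm helpers,
 * which the bit-sliced aes-neonbs driver borrows for the modes it cannot
 * implement on its own.
 */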
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);

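/*
 * Usage sketch (illustrative only, not part of this file): kernel users
 * reach these implementations through the generic crypto API rather than
 * by calling the functions above directly, along these lines:
 *
 *      struct crypto_skcipher *tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *      struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *      crypto_skcipher_setkey(tfm, key, 64);     (a double-length XTS key)
 *      skcipher_request_set_crypt(req, sg_src, sg_dst, len, iv);
 *      err = crypto_skcipher_encrypt(req);
 *
 * Error checking and completion handling for async results (-EINPROGRESS,
 * e.g. via crypto_wait_req()) are omitted; the simd wrappers registered
 * above are asynchronous algorithms.
 */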
