1 From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
2 From: Ard Biesheuvel <ardb@kernel.org>
3 Date: Fri, 8 Nov 2019 13:22:08 +0100
4 Subject: [PATCH] crypto: chacha - move existing library code into lib/crypto
6 commit 5fb8ef25803ef33e2eb60b626435828b937bed75 upstream.
8 Currently, our generic ChaCha implementation consists of a permute
9 function in lib/chacha.c that operates on the 64-byte ChaCha state
10 directly [and which is always included into the core kernel since it
11 is used by the /dev/random driver], and the crypto API plumbing to
12 expose it as a skcipher.
14 In order to support in-kernel users that need the ChaCha streamcipher
15 but have no need [or tolerance] for going through the abstractions of
16 the crypto API, let's expose the streamcipher bits via a library API
17 as well, in a way that permits the implementation to be superseded by
18 an architecture specific one if provided.
20 So move the streamcipher code into a separate module in lib/crypto,
21 and expose the init() and crypt() routines to users of the library.
23 Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
24 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
25 Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
27 arch/arm/crypto/chacha-neon-glue.c | 2 +-
28 arch/arm64/crypto/chacha-neon-glue.c | 2 +-
29 arch/x86/crypto/chacha_glue.c | 2 +-
31 crypto/chacha_generic.c | 60 ++--------------------
32 include/crypto/chacha.h | 77 ++++++++++++++++++++++------
33 include/crypto/internal/chacha.h | 53 +++++++++++++++++++
35 lib/crypto/Kconfig | 26 ++++++++++
36 lib/crypto/Makefile | 4 ++
37 lib/{ => crypto}/chacha.c | 20 ++++----
38 lib/crypto/libchacha.c | 35 +++++++++++++
39 12 files changed, 199 insertions(+), 86 deletions(-)
40 create mode 100644 include/crypto/internal/chacha.h
41 rename lib/{ => crypto}/chacha.c (88%)
42 create mode 100644 lib/crypto/libchacha.c
44 --- a/arch/arm/crypto/chacha-neon-glue.c
45 +++ b/arch/arm/crypto/chacha-neon-glue.c
49 #include <crypto/algapi.h>
50 -#include <crypto/chacha.h>
51 +#include <crypto/internal/chacha.h>
52 #include <crypto/internal/simd.h>
53 #include <crypto/internal/skcipher.h>
54 #include <linux/kernel.h>
55 --- a/arch/arm64/crypto/chacha-neon-glue.c
56 +++ b/arch/arm64/crypto/chacha-neon-glue.c
60 #include <crypto/algapi.h>
61 -#include <crypto/chacha.h>
62 +#include <crypto/internal/chacha.h>
63 #include <crypto/internal/simd.h>
64 #include <crypto/internal/skcipher.h>
65 #include <linux/kernel.h>
66 --- a/arch/x86/crypto/chacha_glue.c
67 +++ b/arch/x86/crypto/chacha_glue.c
71 #include <crypto/algapi.h>
72 -#include <crypto/chacha.h>
73 +#include <crypto/internal/chacha.h>
74 #include <crypto/internal/simd.h>
75 #include <crypto/internal/skcipher.h>
76 #include <linux/kernel.h>
79 @@ -1393,6 +1393,7 @@ config CRYPTO_SALSA20
81 config CRYPTO_CHACHA20
82 tristate "ChaCha stream cipher algorithms"
83 + select CRYPTO_LIB_CHACHA_GENERIC
84 select CRYPTO_BLKCIPHER
86 The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms.
87 --- a/crypto/chacha_generic.c
88 +++ b/crypto/chacha_generic.c
91 #include <asm/unaligned.h>
92 #include <crypto/algapi.h>
93 -#include <crypto/chacha.h>
94 +#include <crypto/internal/chacha.h>
95 #include <crypto/internal/skcipher.h>
96 #include <linux/module.h>
98 -static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src,
99 - unsigned int bytes, int nrounds)
101 - /* aligned to potentially speed up crypto_xor() */
102 - u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
104 - while (bytes >= CHACHA_BLOCK_SIZE) {
105 - chacha_block(state, stream, nrounds);
106 - crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
107 - bytes -= CHACHA_BLOCK_SIZE;
108 - dst += CHACHA_BLOCK_SIZE;
109 - src += CHACHA_BLOCK_SIZE;
112 - chacha_block(state, stream, nrounds);
113 - crypto_xor_cpy(dst, src, stream, bytes);
117 static int chacha_stream_xor(struct skcipher_request *req,
118 const struct chacha_ctx *ctx, const u8 *iv)
120 @@ -48,8 +29,8 @@ static int chacha_stream_xor(struct skci
121 if (nbytes < walk.total)
122 nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
124 - chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
125 - nbytes, ctx->nrounds);
126 + chacha_crypt_generic(state, walk.dst.virt.addr,
127 + walk.src.virt.addr, nbytes, ctx->nrounds);
128 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
131 @@ -58,41 +39,10 @@ static int chacha_stream_xor(struct skci
133 void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
135 - state[0] = 0x61707865; /* "expa" */
136 - state[1] = 0x3320646e; /* "nd 3" */
137 - state[2] = 0x79622d32; /* "2-by" */
138 - state[3] = 0x6b206574; /* "te k" */
139 - state[4] = ctx->key[0];
140 - state[5] = ctx->key[1];
141 - state[6] = ctx->key[2];
142 - state[7] = ctx->key[3];
143 - state[8] = ctx->key[4];
144 - state[9] = ctx->key[5];
145 - state[10] = ctx->key[6];
146 - state[11] = ctx->key[7];
147 - state[12] = get_unaligned_le32(iv + 0);
148 - state[13] = get_unaligned_le32(iv + 4);
149 - state[14] = get_unaligned_le32(iv + 8);
150 - state[15] = get_unaligned_le32(iv + 12);
151 + chacha_init_generic(state, ctx->key, iv);
153 EXPORT_SYMBOL_GPL(crypto_chacha_init);
155 -static int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
156 - unsigned int keysize, int nrounds)
158 - struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
161 - if (keysize != CHACHA_KEY_SIZE)
164 - for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
165 - ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
167 - ctx->nrounds = nrounds;
171 int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
172 unsigned int keysize)
174 @@ -126,7 +76,7 @@ int crypto_xchacha_crypt(struct skcipher
176 /* Compute the subkey given the original key and first 128 nonce bits */
177 crypto_chacha_init(state, ctx, req->iv);
178 - hchacha_block(state, subctx.key, ctx->nrounds);
179 + hchacha_block_generic(state, subctx.key, ctx->nrounds);
180 subctx.nrounds = ctx->nrounds;
182 /* Build the real IV */
183 --- a/include/crypto/chacha.h
184 +++ b/include/crypto/chacha.h
186 #ifndef _CRYPTO_CHACHA_H
187 #define _CRYPTO_CHACHA_H
189 -#include <crypto/skcipher.h>
190 +#include <asm/unaligned.h>
191 #include <linux/types.h>
192 -#include <linux/crypto.h>
194 /* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
195 #define CHACHA_IV_SIZE 16
197 /* 192-bit nonce, then 64-bit stream position */
198 #define XCHACHA_IV_SIZE 32
205 -void chacha_block(u32 *state, u8 *stream, int nrounds);
206 +void chacha_block_generic(u32 *state, u8 *stream, int nrounds);
207 static inline void chacha20_block(u32 *state, u8 *stream)
209 - chacha_block(state, stream, 20);
210 + chacha_block_generic(state, stream, 20);
212 -void hchacha_block(const u32 *in, u32 *out, int nrounds);
214 -void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
215 +void hchacha_block_arch(const u32 *state, u32 *out, int nrounds);
216 +void hchacha_block_generic(const u32 *state, u32 *out, int nrounds);
218 +static inline void hchacha_block(const u32 *state, u32 *out, int nrounds)
220 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
221 +		hchacha_block_arch(state, out, nrounds);
222 +	else
223 +		hchacha_block_generic(state, out, nrounds);
226 -int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
227 - unsigned int keysize);
228 -int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
229 - unsigned int keysize);
230 +void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
231 +static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
233 + state[0] = 0x61707865; /* "expa" */
234 + state[1] = 0x3320646e; /* "nd 3" */
235 + state[2] = 0x79622d32; /* "2-by" */
236 + state[3] = 0x6b206574; /* "te k" */
237 +	state[4] = key[0];
238 +	state[5] = key[1];
239 +	state[6] = key[2];
240 +	state[7] = key[3];
241 +	state[8] = key[4];
242 +	state[9] = key[5];
243 +	state[10] = key[6];
244 + state[11] = key[7];
245 + state[12] = get_unaligned_le32(iv + 0);
246 + state[13] = get_unaligned_le32(iv + 4);
247 + state[14] = get_unaligned_le32(iv + 8);
248 + state[15] = get_unaligned_le32(iv + 12);
251 -int crypto_chacha_crypt(struct skcipher_request *req);
252 -int crypto_xchacha_crypt(struct skcipher_request *req);
253 +static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
255 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
256 +		chacha_init_arch(state, key, iv);
257 +	else
258 +		chacha_init_generic(state, key, iv);
261 +void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
262 + unsigned int bytes, int nrounds);
263 +void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
264 + unsigned int bytes, int nrounds);
266 +static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
267 + unsigned int bytes, int nrounds)
269 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
270 +		chacha_crypt_arch(state, dst, src, bytes, nrounds);
271 +	else
272 +		chacha_crypt_generic(state, dst, src, bytes, nrounds);
275 +static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
276 + unsigned int bytes)
278 + chacha_crypt(state, dst, src, bytes, 20);
281 #endif /* _CRYPTO_CHACHA_H */
283 +++ b/include/crypto/internal/chacha.h
285 +/* SPDX-License-Identifier: GPL-2.0 */
287 +#ifndef _CRYPTO_INTERNAL_CHACHA_H
288 +#define _CRYPTO_INTERNAL_CHACHA_H
290 +#include <crypto/chacha.h>
291 +#include <crypto/internal/skcipher.h>
292 +#include <linux/crypto.h>
299 +void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
301 +static inline int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
302 + unsigned int keysize, int nrounds)
304 + struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
307 + if (keysize != CHACHA_KEY_SIZE)
310 + for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
311 + ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
313 + ctx->nrounds = nrounds;
317 +static inline int chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
318 + unsigned int keysize)
320 + return chacha_setkey(tfm, key, keysize, 20);
323 +static inline int chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
324 + unsigned int keysize)
326 + return chacha_setkey(tfm, key, keysize, 12);
329 +int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
330 + unsigned int keysize);
331 +int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
332 + unsigned int keysize);
334 +int crypto_chacha_crypt(struct skcipher_request *req);
335 +int crypto_xchacha_crypt(struct skcipher_request *req);
337 +#endif /* _CRYPTO_INTERNAL_CHACHA_H */
340 @@ -26,8 +26,7 @@ endif
342 lib-y := ctype.o string.o vsprintf.o cmdline.o \
343 rbtree.o radix-tree.o timerqueue.o xarray.o \
345 - sha1.o chacha.o irq_regs.o argv_split.o \
346 + idr.o extable.o sha1.o irq_regs.o argv_split.o \
347 flex_proportions.o ratelimit.o show_mem.o \
348 is_single_threaded.o plist.o decompress.o kobject_uevent.o \
349 earlycpio.o seq_buf.o siphash.o dec_and_lock.o \
350 --- a/lib/crypto/Kconfig
351 +++ b/lib/crypto/Kconfig
352 @@ -8,6 +8,32 @@ config CRYPTO_LIB_AES
353 config CRYPTO_LIB_ARC4
356 +config CRYPTO_ARCH_HAVE_LIB_CHACHA
359 + Declares whether the architecture provides an arch-specific
360 + accelerated implementation of the ChaCha library interface,
361 + either builtin or as a module.
363 +config CRYPTO_LIB_CHACHA_GENERIC
365 + select CRYPTO_ALGAPI
367 + This symbol can be depended upon by arch implementations of the
368 + ChaCha library interface that require the generic code as a
369 + fallback, e.g., for SIMD implementations. If no arch specific
370 + implementation is enabled, this implementation serves the users
371 + of CRYPTO_LIB_CHACHA.
373 +config CRYPTO_LIB_CHACHA
374 + tristate "ChaCha library interface"
375 + depends on CRYPTO_ARCH_HAVE_LIB_CHACHA || !CRYPTO_ARCH_HAVE_LIB_CHACHA
376 + select CRYPTO_LIB_CHACHA_GENERIC if CRYPTO_ARCH_HAVE_LIB_CHACHA=n
378 + Enable the ChaCha library interface. This interface may be fulfilled
379 + by either the generic implementation or an arch-specific one, if one
380 + is available and enabled.
382 config CRYPTO_LIB_DES
385 --- a/lib/crypto/Makefile
386 +++ b/lib/crypto/Makefile
388 # SPDX-License-Identifier: GPL-2.0
390 +# chacha is used by the /dev/random driver which is always builtin
392 +obj-$(CONFIG_CRYPTO_LIB_CHACHA_GENERIC) += libchacha.o
394 obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
400 -// SPDX-License-Identifier: GPL-2.0-or-later
402 - * The "hash function" used as the core of the ChaCha stream cipher (RFC7539)
404 - * Copyright (C) 2015 Martin Willi
407 -#include <linux/kernel.h>
408 -#include <linux/export.h>
409 -#include <linux/bitops.h>
410 -#include <linux/cryptohash.h>
411 -#include <asm/unaligned.h>
412 -#include <crypto/chacha.h>
414 -static void chacha_permute(u32 *x, int nrounds)
418 - /* whitelist the allowed round counts */
419 - WARN_ON_ONCE(nrounds != 20 && nrounds != 12);
421 - for (i = 0; i < nrounds; i += 2) {
422 - x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 16);
423 - x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 16);
424 - x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 16);
425 - x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 16);
427 - x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 12);
428 - x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 12);
429 - x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 12);
430 - x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 12);
432 - x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 8);
433 - x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 8);
434 - x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 8);
435 - x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 8);
437 - x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 7);
438 - x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 7);
439 - x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 7);
440 - x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 7);
442 - x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 16);
443 - x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 16);
444 - x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 16);
445 - x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 16);
447 - x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 12);
448 - x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 12);
449 - x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 12);
450 - x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 12);
452 - x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 8);
453 - x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 8);
454 - x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 8);
455 - x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 8);
457 - x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 7);
458 - x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 7);
459 - x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 7);
460 - x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 7);
465 - * chacha_block - generate one keystream block and increment block counter
466 - * @state: input state matrix (16 32-bit words)
467 - * @stream: output keystream block (64 bytes)
468 - * @nrounds: number of rounds (20 or 12; 20 is recommended)
470 - * This is the ChaCha core, a function from 64-byte strings to 64-byte strings.
471 - * The caller has already converted the endianness of the input. This function
472 - * also handles incrementing the block counter in the input matrix.
474 -void chacha_block(u32 *state, u8 *stream, int nrounds)
479 - memcpy(x, state, 64);
481 - chacha_permute(x, nrounds);
483 - for (i = 0; i < ARRAY_SIZE(x); i++)
484 - put_unaligned_le32(x[i] + state[i], &stream[i * sizeof(u32)]);
488 -EXPORT_SYMBOL(chacha_block);
491 - * hchacha_block - abbreviated ChaCha core, for XChaCha
492 - * @in: input state matrix (16 32-bit words)
493 - * @out: output (8 32-bit words)
494 - * @nrounds: number of rounds (20 or 12; 20 is recommended)
496 - * HChaCha is the ChaCha equivalent of HSalsa and is an intermediate step
497 - * towards XChaCha (see https://cr.yp.to/snuffle/xsalsa-20081128.pdf). HChaCha
498 - * skips the final addition of the initial state, and outputs only certain words
499 - * of the state. It should not be used for streaming directly.
501 -void hchacha_block(const u32 *in, u32 *out, int nrounds)
507 - chacha_permute(x, nrounds);
509 - memcpy(&out[0], &x[0], 16);
510 - memcpy(&out[4], &x[12], 16);
512 -EXPORT_SYMBOL(hchacha_block);
514 +++ b/lib/crypto/chacha.c
516 +// SPDX-License-Identifier: GPL-2.0-or-later
518 + * The "hash function" used as the core of the ChaCha stream cipher (RFC7539)
520 + * Copyright (C) 2015 Martin Willi
523 +#include <linux/bug.h>
524 +#include <linux/kernel.h>
525 +#include <linux/export.h>
526 +#include <linux/bitops.h>
527 +#include <linux/string.h>
528 +#include <linux/cryptohash.h>
529 +#include <asm/unaligned.h>
530 +#include <crypto/chacha.h>
532 +static void chacha_permute(u32 *x, int nrounds)
536 + /* whitelist the allowed round counts */
537 + WARN_ON_ONCE(nrounds != 20 && nrounds != 12);
539 + for (i = 0; i < nrounds; i += 2) {
540 + x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 16);
541 + x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 16);
542 + x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 16);
543 + x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 16);
545 + x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 12);
546 + x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 12);
547 + x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 12);
548 + x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 12);
550 + x[0] += x[4]; x[12] = rol32(x[12] ^ x[0], 8);
551 + x[1] += x[5]; x[13] = rol32(x[13] ^ x[1], 8);
552 + x[2] += x[6]; x[14] = rol32(x[14] ^ x[2], 8);
553 + x[3] += x[7]; x[15] = rol32(x[15] ^ x[3], 8);
555 + x[8] += x[12]; x[4] = rol32(x[4] ^ x[8], 7);
556 + x[9] += x[13]; x[5] = rol32(x[5] ^ x[9], 7);
557 + x[10] += x[14]; x[6] = rol32(x[6] ^ x[10], 7);
558 + x[11] += x[15]; x[7] = rol32(x[7] ^ x[11], 7);
560 + x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 16);
561 + x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 16);
562 + x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 16);
563 + x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 16);
565 + x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 12);
566 + x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 12);
567 + x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 12);
568 + x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 12);
570 + x[0] += x[5]; x[15] = rol32(x[15] ^ x[0], 8);
571 + x[1] += x[6]; x[12] = rol32(x[12] ^ x[1], 8);
572 + x[2] += x[7]; x[13] = rol32(x[13] ^ x[2], 8);
573 + x[3] += x[4]; x[14] = rol32(x[14] ^ x[3], 8);
575 + x[10] += x[15]; x[5] = rol32(x[5] ^ x[10], 7);
576 + x[11] += x[12]; x[6] = rol32(x[6] ^ x[11], 7);
577 + x[8] += x[13]; x[7] = rol32(x[7] ^ x[8], 7);
578 + x[9] += x[14]; x[4] = rol32(x[4] ^ x[9], 7);
583 + * chacha_block - generate one keystream block and increment block counter
584 + * @state: input state matrix (16 32-bit words)
585 + * @stream: output keystream block (64 bytes)
586 + * @nrounds: number of rounds (20 or 12; 20 is recommended)
588 + * This is the ChaCha core, a function from 64-byte strings to 64-byte strings.
589 + * The caller has already converted the endianness of the input. This function
590 + * also handles incrementing the block counter in the input matrix.
592 +void chacha_block_generic(u32 *state, u8 *stream, int nrounds)
597 + memcpy(x, state, 64);
599 + chacha_permute(x, nrounds);
601 + for (i = 0; i < ARRAY_SIZE(x); i++)
602 + put_unaligned_le32(x[i] + state[i], &stream[i * sizeof(u32)]);
606 +EXPORT_SYMBOL(chacha_block_generic);
609 + * hchacha_block_generic - abbreviated ChaCha core, for XChaCha
610 + * @state: input state matrix (16 32-bit words)
611 + * @stream: output (8 32-bit words)
612 + * @nrounds: number of rounds (20 or 12; 20 is recommended)
614 + * HChaCha is the ChaCha equivalent of HSalsa and is an intermediate step
615 + * towards XChaCha (see https://cr.yp.to/snuffle/xsalsa-20081128.pdf). HChaCha
616 + * skips the final addition of the initial state, and outputs only certain words
617 + * of the state. It should not be used for streaming directly.
619 +void hchacha_block_generic(const u32 *state, u32 *stream, int nrounds)
623 + memcpy(x, state, 64);
625 + chacha_permute(x, nrounds);
627 + memcpy(&stream[0], &x[0], 16);
628 + memcpy(&stream[4], &x[12], 16);
630 +EXPORT_SYMBOL(hchacha_block_generic);
632 +++ b/lib/crypto/libchacha.c
634 +// SPDX-License-Identifier: GPL-2.0-or-later
636 + * The ChaCha stream cipher (RFC7539)
638 + * Copyright (C) 2015 Martin Willi
641 +#include <linux/kernel.h>
642 +#include <linux/export.h>
643 +#include <linux/module.h>
645 +#include <crypto/algapi.h> // for crypto_xor_cpy
646 +#include <crypto/chacha.h>
648 +void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
649 + unsigned int bytes, int nrounds)
651 + /* aligned to potentially speed up crypto_xor() */
652 + u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
654 + while (bytes >= CHACHA_BLOCK_SIZE) {
655 + chacha_block_generic(state, stream, nrounds);
656 + crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
657 + bytes -= CHACHA_BLOCK_SIZE;
658 + dst += CHACHA_BLOCK_SIZE;
659 + src += CHACHA_BLOCK_SIZE;
662 + chacha_block_generic(state, stream, nrounds);
663 + crypto_xor_cpy(dst, src, stream, bytes);
666 +EXPORT_SYMBOL(chacha_crypt_generic);
668 +MODULE_LICENSE("GPL");