author     Jason A. Donenfeld <Jason@zx2c4.com>    2018-08-23 18:08:03 -0700
committer  Jason A. Donenfeld <Jason@zx2c4.com>    2018-08-28 23:20:13 -0600
commit     470a0a36d579980431361f23e8f319d5c68aa4af (patch)
tree       624317ee7c194f1a8ec61137726adb1215ff276a /src/crypto/chacha20.c
parent     4e71a11616a7763219e23bd34708751a702c80c7 (diff)
crypto: use unaligned helpers

This is not useful for WireGuard, but for the general use case we probably
want it this way, and the speed difference is mostly lost in the noise.

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Diffstat (limited to 'src/crypto/chacha20.c')
-rw-r--r--	src/crypto/chacha20.c	7
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/src/crypto/chacha20.c b/src/crypto/chacha20.c
index 815d777..c23928e 100644
--- a/src/crypto/chacha20.c
+++ b/src/crypto/chacha20.c
@@ -5,6 +5,7 @@
#include "chacha20.h"
+#include <asm/unaligned.h>
#include <linux/kernel.h>
#include <crypto/algapi.h>
@@ -210,9 +211,9 @@ static void hchacha20_generic(u8 derived_key[CHACHA20_KEY_SIZE], const u8 nonce[
 	__le32 *out = (__force __le32 *)derived_key;
 	u32 x[] = {
 		EXPAND_32_BYTE_K,
-		le32_to_cpup((__le32 *)(key + 0)), le32_to_cpup((__le32 *)(key + 4)), le32_to_cpup((__le32 *)(key + 8)), le32_to_cpup((__le32 *)(key + 12)),
-		le32_to_cpup((__le32 *)(key + 16)), le32_to_cpup((__le32 *)(key + 20)), le32_to_cpup((__le32 *)(key + 24)), le32_to_cpup((__le32 *)(key + 28)),
-		le32_to_cpup((__le32 *)(nonce + 0)), le32_to_cpup((__le32 *)(nonce + 4)), le32_to_cpup((__le32 *)(nonce + 8)), le32_to_cpup((__le32 *)(nonce + 12))
+		get_unaligned_le32(key + 0), get_unaligned_le32(key + 4), get_unaligned_le32(key + 8), get_unaligned_le32(key + 12),
+		get_unaligned_le32(key + 16), get_unaligned_le32(key + 20), get_unaligned_le32(key + 24), get_unaligned_le32(key + 28),
+		get_unaligned_le32(nonce + 0), get_unaligned_le32(nonce + 4), get_unaligned_le32(nonce + 8), get_unaligned_le32(nonce + 12)
 	};
 	TWENTY_ROUNDS(x);
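
The point of the change: key and nonce arrive as plain byte arrays with no alignment guarantee, so casting them to __le32 * and dereferencing, as the removed lines did, is undefined behaviour in C and can fault on strict-alignment architectures. The kernel's get_unaligned_le32() helper performs the load safely, with a per-architecture implementation. The sketch below is a hypothetical, portable userspace equivalent for illustration only (the name load_le32_unaligned is made up); it is not the kernel helper.

/*
 * Hypothetical userspace sketch (not the kernel's get_unaligned_le32()):
 * read a 32-bit little-endian value from a byte pointer without assuming
 * any alignment. memcpy() avoids the undefined behaviour of the old
 * le32_to_cpup((__le32 *)(key + n)) pattern, which dereferences a possibly
 * misaligned pointer.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t load_le32_unaligned(const uint8_t *p)
{
	uint32_t v;

	memcpy(&v, p, sizeof(v));	/* byte-wise copy, any alignment is fine */
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	v = __builtin_bswap32(v);	/* bytes are little endian, convert to host order */
#endif
	return v;
}

int main(void)
{
	/* A u8 buffer with no alignment guarantee, like key/nonce above. */
	uint8_t buf[8] = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 };

	printf("%08x %08x\n",
	       load_le32_unaligned(buf + 0),
	       load_le32_unaligned(buf + 4));	/* prints 04030201 08070605 */
	return 0;
}

On architectures that handle unaligned accesses in hardware (x86, modern ARM), such a memcpy-based load typically compiles down to a single load instruction, which is consistent with the commit message's note that the speed difference is mostly lost in the noise.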