author    Jason A. Donenfeld <Jason@zx2c4.com>    2018-09-27 00:28:47 +0200
committer Jason A. Donenfeld <Jason@zx2c4.com>    2018-10-02 03:41:49 +0200
commit    cfa6a82310de91038a153a2b5d011d5f08573285 (patch)
tree      e4bfddbd855be754b3bb6e9453f8e25d8dcdad38 /src/crypto
parent    ae383dd701c0f27349893d4fbd48c7b72a39b92a (diff)
crypto: prefer IS_ENABLED to ifdefs
Suggested-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
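
For context, and not part of the patch itself: IS_ENABLED(CONFIG_FOO) expands to a compile-time constant 1 or 0, so a branch guarded by it is still parsed and type-checked even when the option is off, and is then eliminated as dead code; an #ifdef, by contrast, hides the branch from the compiler entirely and lets it bit-rot. Because the dead call is constant-folded away, the linker never sees a reference to the missing asm routine, which is also why the asmlinkage declarations in this patch can lose their #ifdef guards. A minimal sketch of the idiom, with hypothetical names (CONFIG_FOO, foo_usable(), foo_accel()):

    #include <linux/kconfig.h>  /* IS_ENABLED() */
    #include <linux/linkage.h>  /* asmlinkage */
    #include <linux/types.h>    /* bool */

    struct foo_state;

    /* Hypothetical accelerated routine. The declaration stays visible
     * even when CONFIG_FOO is off; no reference to the symbol survives
     * constant folding in that case, so the linker never needs it. */
    asmlinkage void foo_accel(struct foo_state *state);

    static bool foo_usable(void); /* hypothetical runtime capability check */

    static bool foo_arch(struct foo_state *state)
    {
    	/* IS_ENABLED(CONFIG_FOO) folds to 0 or 1 at compile time: the
    	 * disabled branch is still type-checked, then dropped as dead
    	 * code, unlike an #ifdef'd branch, which the compiler never
    	 * sees at all. */
    	if (IS_ENABLED(CONFIG_FOO) && foo_usable()) {
    		foo_accel(state);
    		return true;
    	}
    	return false;
    }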
Diffstat (limited to 'src/crypto')
-rw-r--r--  src/crypto/zinc/blake2s/blake2s-x86_64-glue.h     |  13
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-arm-glue.h      |  55
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-mips-glue.h     |   1
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-x86_64-glue.h   |  43
-rw-r--r--  src/crypto/zinc/curve25519/curve25519-arm-glue.h  |   8
-rw-r--r--  src/crypto/zinc/poly1305/poly1305-arm-glue.h      |  36
-rw-r--r--  src/crypto/zinc/poly1305/poly1305-mips-glue.h     |   1
-rw-r--r--  src/crypto/zinc/poly1305/poly1305-x86_64-glue.h   |  51
8 files changed, 73 insertions(+), 135 deletions(-)
diff --git a/src/crypto/zinc/blake2s/blake2s-x86_64-glue.h b/src/crypto/zinc/blake2s/blake2s-x86_64-glue.h
index ffbdc1b..b1e86d1 100644
--- a/src/crypto/zinc/blake2s/blake2s-x86_64-glue.h
+++ b/src/crypto/zinc/blake2s/blake2s-x86_64-glue.h
@@ -8,16 +8,12 @@
#include <asm/fpu/api.h>
#include <asm/simd.h>
-#ifdef CONFIG_AS_AVX
asmlinkage void blake2s_compress_avx(struct blake2s_state *state,
const u8 *block, const size_t nblocks,
const u32 inc);
-#endif
-#ifdef CONFIG_AS_AVX512
asmlinkage void blake2s_compress_avx512(struct blake2s_state *state,
const u8 *block, const size_t nblocks,
const u32 inc);
-#endif
static bool blake2s_use_avx __ro_after_init;
static bool blake2s_use_avx512 __ro_after_init;
@@ -41,21 +37,18 @@ static void __init blake2s_fpu_init(void)
static inline bool blake2s_arch(struct blake2s_state *state, const u8 *block,
size_t nblocks, const u32 inc)
{
-#ifdef CONFIG_AS_AVX512
- if (blake2s_use_avx512 && irq_fpu_usable()) {
+ if (IS_ENABLED(CONFIG_AS_AVX512) && blake2s_use_avx512 &&
+ irq_fpu_usable()) {
kernel_fpu_begin();
blake2s_compress_avx512(state, block, nblocks, inc);
kernel_fpu_end();
return true;
}
-#endif
-#ifdef CONFIG_AS_AVX
- if (blake2s_use_avx && irq_fpu_usable()) {
+ if (IS_ENABLED(CONFIG_AS_AVX) && blake2s_use_avx && irq_fpu_usable()) {
kernel_fpu_begin();
blake2s_compress_avx(state, block, nblocks, inc);
kernel_fpu_end();
return true;
}
-#endif
return false;
}
diff --git a/src/crypto/zinc/chacha20/chacha20-arm-glue.h b/src/crypto/zinc/chacha20/chacha20-arm-glue.h
index 86cce85..b8d9e12 100644
--- a/src/crypto/zinc/chacha20/chacha20-arm-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-arm-glue.h
@@ -12,13 +12,9 @@
asmlinkage void chacha20_arm(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
-#if defined(CONFIG_ARM)
asmlinkage void hchacha20_arm(const u32 state[16], u32 out[8]);
-#endif
-#if defined(CONFIG_KERNEL_MODE_NEON)
asmlinkage void chacha20_neon(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
-#endif
static bool chacha20_use_neon __ro_after_init;
@@ -42,15 +38,13 @@ static void __init chacha20_fpu_init(void)
}
static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
- const u8 *src, size_t len,
+ const u8 *src, const size_t len,
simd_context_t *simd_context)
{
-#if defined(CONFIG_KERNEL_MODE_NEON)
- if (chacha20_use_neon && len >= CHACHA20_BLOCK_SIZE * 3 &&
- simd_use(simd_context))
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && chacha20_use_neon &&
+ len >= CHACHA20_BLOCK_SIZE * 3 && simd_use(simd_context))
chacha20_neon(dst, src, len, state->key, state->counter);
else
-#endif
chacha20_arm(dst, src, len, state->key, state->counter);
state->counter[0] += (len + 63) / 64;
@@ -62,27 +56,26 @@ static inline bool hchacha20_arch(u32 derived_key[CHACHA20_KEY_WORDS],
const u8 key[HCHACHA20_KEY_SIZE],
simd_context_t *simd_context)
{
-#if defined(CONFIG_ARM)
- u32 x[] = { CHACHA20_CONSTANT_EXPA,
- CHACHA20_CONSTANT_ND_3,
- CHACHA20_CONSTANT_2_BY,
- CHACHA20_CONSTANT_TE_K,
- get_unaligned_le32(key + 0),
- get_unaligned_le32(key + 4),
- get_unaligned_le32(key + 8),
- get_unaligned_le32(key + 12),
- get_unaligned_le32(key + 16),
- get_unaligned_le32(key + 20),
- get_unaligned_le32(key + 24),
- get_unaligned_le32(key + 28),
- get_unaligned_le32(nonce + 0),
- get_unaligned_le32(nonce + 4),
- get_unaligned_le32(nonce + 8),
- get_unaligned_le32(nonce + 12)
- };
- hchacha20_arm(x, derived_key);
- return true;
-#else
+ if (IS_ENABLED(CONFIG_ARM)) {
+ u32 x[] = { CHACHA20_CONSTANT_EXPA,
+ CHACHA20_CONSTANT_ND_3,
+ CHACHA20_CONSTANT_2_BY,
+ CHACHA20_CONSTANT_TE_K,
+ get_unaligned_le32(key + 0),
+ get_unaligned_le32(key + 4),
+ get_unaligned_le32(key + 8),
+ get_unaligned_le32(key + 12),
+ get_unaligned_le32(key + 16),
+ get_unaligned_le32(key + 20),
+ get_unaligned_le32(key + 24),
+ get_unaligned_le32(key + 28),
+ get_unaligned_le32(nonce + 0),
+ get_unaligned_le32(nonce + 4),
+ get_unaligned_le32(nonce + 8),
+ get_unaligned_le32(nonce + 12)
+ };
+ hchacha20_arm(x, derived_key);
+ return true;
+ }
return false;
-#endif
}
diff --git a/src/crypto/zinc/chacha20/chacha20-mips-glue.h b/src/crypto/zinc/chacha20/chacha20-mips-glue.h
index 6e70dd6..13e9e8d 100644
--- a/src/crypto/zinc/chacha20/chacha20-mips-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-mips-glue.h
@@ -5,6 +5,7 @@
asmlinkage void chacha20_mips(u32 state[16], u8 *out, const u8 *in,
const size_t len);
+
static void __init chacha20_fpu_init(void)
{
}
diff --git a/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h b/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
index 1b7fc05..03075c9 100644
--- a/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
@@ -8,22 +8,16 @@
#include <asm/processor.h>
#include <asm/intel-family.h>
-#ifdef CONFIG_AS_SSSE3
asmlinkage void hchacha20_ssse3(u32 *derived_key, const u8 *nonce,
const u8 *key);
asmlinkage void chacha20_ssse3(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
-#endif
-#ifdef CONFIG_AS_AVX2
asmlinkage void chacha20_avx2(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
-#endif
-#ifdef CONFIG_AS_AVX512
asmlinkage void chacha20_avx512(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
asmlinkage void chacha20_avx512vl(u8 *out, const u8 *in, const size_t len,
const u32 key[8], const u32 counter[4]);
-#endif
static bool chacha20_use_ssse3 __ro_after_init;
static bool chacha20_use_avx2 __ro_after_init;
@@ -60,34 +54,22 @@ static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
const u8 *src, const size_t len,
simd_context_t *simd_context)
{
- if (!chacha20_use_ssse3 || len <= CHACHA20_BLOCK_SIZE ||
- !simd_use(simd_context))
+ if (!IS_ENABLED(CONFIG_AS_SSSE3) || !chacha20_use_ssse3 ||
+ len <= CHACHA20_BLOCK_SIZE || !simd_use(simd_context))
return false;
-#ifdef CONFIG_AS_AVX512
- if (chacha20_use_avx512 && len >= CHACHA20_BLOCK_SIZE * 8) {
+ if (IS_ENABLED(CONFIG_AS_AVX512) && chacha20_use_avx512 &&
+ len >= CHACHA20_BLOCK_SIZE * 8)
chacha20_avx512(dst, src, len, state->key, state->counter);
- goto success;
- }
- if (chacha20_use_avx512vl && len >= CHACHA20_BLOCK_SIZE * 4) {
+ else if (IS_ENABLED(CONFIG_AS_AVX512) && chacha20_use_avx512vl &&
+ len >= CHACHA20_BLOCK_SIZE * 4)
chacha20_avx512vl(dst, src, len, state->key, state->counter);
- goto success;
- }
-#endif
-#ifdef CONFIG_AS_AVX2
- if (chacha20_use_avx2 && len >= CHACHA20_BLOCK_SIZE * 4) {
+ else if (IS_ENABLED(CONFIG_AS_AVX2) && chacha20_use_avx2 &&
+ len >= CHACHA20_BLOCK_SIZE * 4)
chacha20_avx2(dst, src, len, state->key, state->counter);
- goto success;
- }
-#endif
-#ifdef CONFIG_AS_SSSE3
- if (chacha20_use_ssse3) {
+ else
chacha20_ssse3(dst, src, len, state->key, state->counter);
- goto success;
- }
-#endif
- return false;
-success:
+
state->counter[0] += (len + 63) / 64;
return true;
}
@@ -97,11 +79,10 @@ static inline bool hchacha20_arch(u32 derived_key[CHACHA20_KEY_WORDS],
const u8 key[HCHACHA20_KEY_SIZE],
simd_context_t *simd_context)
{
-#if defined(CONFIG_AS_SSSE3)
- if (chacha20_use_ssse3 && simd_use(simd_context)) {
+ if (IS_ENABLED(CONFIG_AS_SSSE3) && chacha20_use_ssse3 &&
+ simd_use(simd_context)) {
hchacha20_ssse3(derived_key, nonce, key);
return true;
}
-#endif
return false;
}
diff --git a/src/crypto/zinc/curve25519/curve25519-arm-glue.h b/src/crypto/zinc/curve25519/curve25519-arm-glue.h
index 7969fec..1e6769c 100644
--- a/src/crypto/zinc/curve25519/curve25519-arm-glue.h
+++ b/src/crypto/zinc/curve25519/curve25519-arm-glue.h
@@ -7,11 +7,9 @@
#include <asm/neon.h>
#include <asm/simd.h>
-#if defined(CONFIG_KERNEL_MODE_NEON) && !defined(CONFIG_CPU_BIG_ENDIAN)
asmlinkage void curve25519_neon(u8 mypublic[CURVE25519_KEY_SIZE],
const u8 secret[CURVE25519_KEY_SIZE],
const u8 basepoint[CURVE25519_KEY_SIZE]);
-#endif
static bool curve25519_use_neon __ro_after_init;
@@ -24,14 +22,14 @@ static inline bool curve25519_arch(u8 mypublic[CURVE25519_KEY_SIZE],
const u8 secret[CURVE25519_KEY_SIZE],
const u8 basepoint[CURVE25519_KEY_SIZE])
{
-#if defined(CONFIG_KERNEL_MODE_NEON) && !defined(CONFIG_CPU_BIG_ENDIAN)
- if (curve25519_use_neon && may_use_simd()) {
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+ !IS_ENABLED(CONFIG_CPU_BIG_ENDIAN) && curve25519_use_neon &&
+ may_use_simd()) {
kernel_neon_begin();
curve25519_neon(mypublic, secret, basepoint);
kernel_neon_end();
return true;
}
-#endif
return false;
}
diff --git a/src/crypto/zinc/poly1305/poly1305-arm-glue.h b/src/crypto/zinc/poly1305/poly1305-arm-glue.h
index 9d34d21..15ad53f 100644
--- a/src/crypto/zinc/poly1305/poly1305-arm-glue.h
+++ b/src/crypto/zinc/poly1305/poly1305-arm-glue.h
@@ -10,11 +10,9 @@ asmlinkage void poly1305_init_arm(void *ctx, const u8 key[16]);
asmlinkage void poly1305_blocks_arm(void *ctx, const u8 *inp, const size_t len,
const u32 padbit);
asmlinkage void poly1305_emit_arm(void *ctx, u8 mac[16], const u32 nonce[4]);
-#if defined(CONFIG_KERNEL_MODE_NEON)
asmlinkage void poly1305_blocks_neon(void *ctx, const u8 *inp, const size_t len,
const u32 padbit);
asmlinkage void poly1305_emit_neon(void *ctx, u8 mac[16], const u32 nonce[4]);
-#endif
static bool poly1305_use_neon __ro_after_init;
@@ -52,7 +50,6 @@ struct poly1305_arch_internal {
};
#endif
-#if defined(CONFIG_KERNEL_MODE_NEON)
static void convert_to_base2_64(void *ctx)
{
struct poly1305_arch_internal *state = ctx;
@@ -68,10 +65,10 @@ static void convert_to_base2_64(void *ctx)
state->h0 = ((u64)state->h[2] << 52) | ((u64)state->h[1] << 26) | state->h[0];
state->h1 = ((u64)state->h[4] << 40) | ((u64)state->h[3] << 14) | (state->h[2] >> 12);
state->h2 = state->h[4] >> 24;
-#if defined(CONFIG_ARM) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
- state->h0 = rol64(state->h0, 32);
- state->h1 = rol64(state->h1, 32);
-#endif
+ if (IS_ENABLED(CONFIG_ARM) && IS_ENABLED(CONFIG_CPU_BIG_ENDIAN)) {
+ state->h0 = rol64(state->h0, 32);
+ state->h1 = rol64(state->h1, 32);
+ }
#define ULT(a, b) ((a ^ ((a ^ b) | ((a - b) ^ b))) >> (sizeof(a) * 8 - 1))
cy = (state->h2 >> 2) + (state->h2 & ~3ULL);
state->h2 &= 3;
@@ -81,7 +78,6 @@ static void convert_to_base2_64(void *ctx)
#undef ULT
state->is_base2_26 = 0;
}
-#endif
static inline bool poly1305_init_arch(void *ctx,
const u8 key[POLY1305_KEY_SIZE])
@@ -94,13 +90,13 @@ static inline bool poly1305_blocks_arch(void *ctx, const u8 *inp,
const size_t len, const u32 padbit,
simd_context_t *simd_context)
{
-#if defined(CONFIG_KERNEL_MODE_NEON)
- if (poly1305_use_neon && simd_use(simd_context)) {
- poly1305_blocks_neon(ctx, inp, len, padbit);
- return true;
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON)) {
+ if (poly1305_use_neon && simd_use(simd_context)) {
+ poly1305_blocks_neon(ctx, inp, len, padbit);
+ return true;
+ }
+ convert_to_base2_64(ctx);
}
- convert_to_base2_64(ctx);
-#endif
poly1305_blocks_arm(ctx, inp, len, padbit);
return true;
@@ -110,13 +106,13 @@ static inline bool poly1305_emit_arch(void *ctx, u8 mac[POLY1305_MAC_SIZE],
const u32 nonce[4],
simd_context_t *simd_context)
{
-#if defined(CONFIG_KERNEL_MODE_NEON)
- if (poly1305_use_neon && simd_use(simd_context)) {
- poly1305_emit_neon(ctx, mac, nonce);
- return true;
+ if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON)) {
+ if (poly1305_use_neon && simd_use(simd_context)) {
+ poly1305_emit_neon(ctx, mac, nonce);
+ return true;
+ }
+ convert_to_base2_64(ctx);
}
- convert_to_base2_64(ctx);
-#endif
poly1305_emit_arm(ctx, mac, nonce);
return true;
diff --git a/src/crypto/zinc/poly1305/poly1305-mips-glue.h b/src/crypto/zinc/poly1305/poly1305-mips-glue.h
index eb38cd5..4a0badb 100644
--- a/src/crypto/zinc/poly1305/poly1305-mips-glue.h
+++ b/src/crypto/zinc/poly1305/poly1305-mips-glue.h
@@ -7,6 +7,7 @@ asmlinkage void poly1305_init_mips(void *ctx, const u8 key[16]);
asmlinkage void poly1305_blocks_mips(void *ctx, const u8 *inp, const size_t len,
const u32 padbit);
asmlinkage void poly1305_emit_mips(void *ctx, u8 mac[16], const u32 nonce[4]);
+
static void __init poly1305_fpu_init(void)
{
}
diff --git a/src/crypto/zinc/poly1305/poly1305-x86_64-glue.h b/src/crypto/zinc/poly1305/poly1305-x86_64-glue.h
index 4926d27..285cb31 100644
--- a/src/crypto/zinc/poly1305/poly1305-x86_64-glue.h
+++ b/src/crypto/zinc/poly1305/poly1305-x86_64-glue.h
@@ -13,20 +13,14 @@ asmlinkage void poly1305_blocks_x86_64(void *ctx, const u8 *inp,
const size_t len, const u32 padbit);
asmlinkage void poly1305_emit_x86_64(void *ctx, u8 mac[POLY1305_MAC_SIZE],
const u32 nonce[4]);
-#ifdef CONFIG_AS_AVX
asmlinkage void poly1305_emit_avx(void *ctx, u8 mac[POLY1305_MAC_SIZE],
const u32 nonce[4]);
asmlinkage void poly1305_blocks_avx(void *ctx, const u8 *inp, const size_t len,
const u32 padbit);
-#endif
-#ifdef CONFIG_AS_AVX2
asmlinkage void poly1305_blocks_avx2(void *ctx, const u8 *inp, const size_t len,
const u32 padbit);
-#endif
-#ifdef CONFIG_AS_AVX512
asmlinkage void poly1305_blocks_avx512(void *ctx, const u8 *inp,
const size_t len, const u32 padbit);
-#endif
static bool poly1305_use_avx __ro_after_init;
static bool poly1305_use_avx2 __ro_after_init;
@@ -104,35 +98,21 @@ static inline bool poly1305_blocks_arch(void *ctx, const u8 *inp,
{
struct poly1305_arch_internal *state = ctx;
- if (!poly1305_use_avx ||
+ if (!IS_ENABLED(CONFIG_AS_AVX) || !poly1305_use_avx ||
(len < (POLY1305_BLOCK_SIZE * 18) && !state->is_base2_26) ||
- !simd_use(simd_context))
- goto scalar;
-
-#ifdef CONFIG_AS_AVX512
- if (poly1305_use_avx512) {
- poly1305_blocks_avx512(ctx, inp, len, padbit);
+ !simd_use(simd_context)) {
+ convert_to_base2_64(ctx);
+ poly1305_blocks_x86_64(ctx, inp, len, padbit);
return true;
}
-#endif
-#ifdef CONFIG_AS_AVX2
- if (poly1305_use_avx2) {
+ if (IS_ENABLED(CONFIG_AS_AVX512) && poly1305_use_avx512)
+ poly1305_blocks_avx512(ctx, inp, len, padbit);
+ else if (IS_ENABLED(CONFIG_AS_AVX2) && poly1305_use_avx2)
poly1305_blocks_avx2(ctx, inp, len, padbit);
- return true;
- }
-#endif
-
-#ifdef CONFIG_AS_AVX
- if (poly1305_use_avx) {
+ else
poly1305_blocks_avx(ctx, inp, len, padbit);
- return true;
- }
-#endif
-scalar:
- convert_to_base2_64(ctx);
- poly1305_blocks_x86_64(ctx, inp, len, padbit);
return true;
}
@@ -142,18 +122,13 @@ static inline bool poly1305_emit_arch(void *ctx, u8 mac[POLY1305_MAC_SIZE],
{
struct poly1305_arch_internal *state = ctx;
- if (!poly1305_use_avx || !state->is_base2_26 ||!simd_use(simd_context))
- goto scalar;
-
-#ifdef CONFIG_AS_AVX
- if (poly1305_use_avx || poly1305_use_avx2 || poly1305_use_avx512) {
- poly1305_emit_avx(ctx, mac, nonce);
+ if (!IS_ENABLED(CONFIG_AS_AVX) || !poly1305_use_avx ||
+ !state->is_base2_26 || !simd_use(simd_context)) {
+ convert_to_base2_64(ctx);
+ poly1305_emit_x86_64(ctx, mac, nonce);
return true;
}
-#endif
-scalar:
- convert_to_base2_64(ctx);
- poly1305_emit_x86_64(ctx, mac, nonce);
+ poly1305_emit_avx(ctx, mac, nonce);
return true;
}