From 31701000f52600cb1dd335dec9333addf1a69494 Mon Sep 17 00:00:00 2001
From: Kees Cook
Date: Tue, 26 Nov 2019 22:08:02 -0800
Subject: crypto: x86 - Regularize glue function prototypes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The crypto glue performed function prototype casting via macros to make
indirect calls to assembly routines. Instead of performing casts at the
call sites (which trips Control Flow Integrity prototype checking), switch
each prototype to a common standard set of arguments which allows the
removal of the existing macros. In order to keep pointer math unchanged,
internal casting between u128 pointers and u8 pointers is added.

Co-developed-by: João Moreira
Signed-off-by: João Moreira
Signed-off-by: Kees Cook
Reviewed-by: Eric Biggers
Signed-off-by: Herbert Xu
---
 crypto/cast6_generic.c   | 18 ++++++++++--------
 crypto/serpent_generic.c |  6 ++++--
 2 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/crypto/cast6_generic.c b/crypto/cast6_generic.c
index a8248f8e..85328522 100644
--- a/crypto/cast6_generic.c
+++ b/crypto/cast6_generic.c
@@ -154,7 +154,7 @@ int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 EXPORT_SYMBOL_GPL(cast6_setkey);
 
 /*forward quad round*/
-static inline void Q(u32 *block, u8 *Kr, u32 *Km)
+static inline void Q(u32 *block, const u8 *Kr, const u32 *Km)
 {
 	u32 I;
 	block[2] ^= F1(block[3], Kr[0], Km[0]);
@@ -164,7 +164,7 @@ static inline void Q(u32 *block, u8 *Kr, u32 *Km)
 }
 
 /*reverse quad round*/
-static inline void QBAR(u32 *block, u8 *Kr, u32 *Km)
+static inline void QBAR(u32 *block, const u8 *Kr, const u32 *Km)
 {
 	u32 I;
 	block[3] ^= F1(block[0], Kr[3], Km[3]);
@@ -173,13 +173,14 @@ static inline void QBAR(u32 *block, u8 *Kr, u32 *Km)
 	block[2] ^= F1(block[3], Kr[0], Km[0]);
 }
 
-void __cast6_encrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+void __cast6_encrypt(const void *ctx, u8 *outbuf, const u8 *inbuf)
 {
+	const struct cast6_ctx *c = ctx;
 	const __be32 *src = (const __be32 *)inbuf;
 	__be32 *dst = (__be32 *)outbuf;
 	u32 block[4];
-	u32 *Km;
-	u8 *Kr;
+	const u32 *Km;
+	const u8 *Kr;
 
 	block[0] = be32_to_cpu(src[0]);
 	block[1] = be32_to_cpu(src[1]);
@@ -211,13 +212,14 @@ static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
 	__cast6_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
 }
 
-void __cast6_decrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+void __cast6_decrypt(const void *ctx, u8 *outbuf, const u8 *inbuf)
 {
+	const struct cast6_ctx *c = ctx;
 	const __be32 *src = (const __be32 *)inbuf;
 	__be32 *dst = (__be32 *)outbuf;
 	u32 block[4];
-	u32 *Km;
-	u8 *Kr;
+	const u32 *Km;
+	const u8 *Kr;
 
 	block[0] = be32_to_cpu(src[0]);
 	block[1] = be32_to_cpu(src[1]);
diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c
index 56fa665a..492c1d0b 100644
--- a/crypto/serpent_generic.c
+++ b/crypto/serpent_generic.c
@@ -449,8 +449,9 @@ int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 }
 EXPORT_SYMBOL_GPL(serpent_setkey);
 
-void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_encrypt(const void *c, u8 *dst, const u8 *src)
 {
+	const struct serpent_ctx *ctx = c;
 	const u32 *k = ctx->expkey;
 	const __le32 *s = (const __le32 *)src;
 	__le32 *d = (__le32 *)dst;
@@ -514,8 +515,9 @@ static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 	__serpent_encrypt(ctx, dst, src);
 }
 
-void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_decrypt(const void *c, u8 *dst, const u8 *src)
 {
+	const struct serpent_ctx *ctx = c;
 	const u32 *k = ctx->expkey;
 	const __le32 *s = (const __le32 *)src;
 	__le32 *d = (__le32 *)dst;
-- 
cgit v1.2.3
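
For readers outside the kernel tree, here is a minimal, self-contained sketch of
the pattern this patch establishes. All names below (glue_fn_t, my_cipher_ctx,
my_cipher_encrypt) are hypothetical and invented purely for illustration; the
real kernel glue code and its macros are not reproduced here. The point it
shows: when the routine itself takes const void * and recovers the typed
context internally, its address can be stored in a common function-pointer
type without a prototype cast, which is what CFI prototype checking requires.

/*
 * Hypothetical sketch only: names are invented for illustration and do not
 * appear in the kernel. Build with any C compiler, e.g. "cc -Wall sketch.c".
 */
typedef unsigned char u8;

struct my_cipher_ctx { int expkey[4]; };	/* stand-in for a real key schedule */

/* Common prototype that every glue routine is expected to match. */
typedef void (*glue_fn_t)(const void *ctx, u8 *dst, const u8 *src);

/*
 * OLD shape: the routine takes a typed context pointer, so storing its
 * address in a glue_fn_t required a cast at the call site, e.g.
 *
 *	#define FN_CAST(fn) ((glue_fn_t)(fn))
 *	glue_fn_t fn = FN_CAST(my_cipher_encrypt_typed);
 *
 * An indirect call through such a mismatched pointer is what CFI
 * prototype checking rejects.
 */
static void my_cipher_encrypt_typed(struct my_cipher_ctx *ctx, u8 *dst,
				    const u8 *src)
{
	dst[0] = src[0] ^ (u8)ctx->expkey[0];	/* placeholder "cipher" */
}

/*
 * NEW shape: the routine itself takes const void * and recovers the typed
 * context internally, so its prototype matches glue_fn_t exactly and no
 * call-site cast is needed.
 */
static void my_cipher_encrypt(const void *c, u8 *dst, const u8 *src)
{
	const struct my_cipher_ctx *ctx = c;

	dst[0] = src[0] ^ (u8)ctx->expkey[0];	/* same placeholder "cipher" */
}

int main(void)
{
	struct my_cipher_ctx ctx = { { 0x5a, 0, 0, 0 } };
	u8 in = 0x12, out = 0;
	glue_fn_t fn = my_cipher_encrypt;	/* assigns with no cast */

	(void)my_cipher_encrypt_typed;		/* old shape kept only for contrast */
	fn(&ctx, &out, &in);			/* indirect call, prototype matches */
	return out == (0x12 ^ 0x5a) ? 0 : 1;
}

The "const struct cast6_ctx *c = ctx;" and "const struct serpent_ctx *ctx = c;"
lines added in the patch play the same role as the internal cast in this
sketch: the typed view of the context is recovered inside each function, so
pointer math is unchanged while every routine shares one CFI-friendly
signature.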