diff options
Diffstat (limited to 'releases/2.6.32.58/crypto-sha512-use-standard-ror64.patch')
-rw-r--r-- | releases/2.6.32.58/crypto-sha512-use-standard-ror64.patch | 86 |
1 files changed, 86 insertions, 0 deletions
diff --git a/releases/2.6.32.58/crypto-sha512-use-standard-ror64.patch b/releases/2.6.32.58/crypto-sha512-use-standard-ror64.patch new file mode 100644 index 0000000..7c6ad3b --- /dev/null +++ b/releases/2.6.32.58/crypto-sha512-use-standard-ror64.patch @@ -0,0 +1,86 @@ +From f2ea0f5f04c97b48c88edccba52b0682fbe45087 Mon Sep 17 00:00:00 2001 +From: Alexey Dobriyan <adobriyan@gmail.com> +Date: Sat, 14 Jan 2012 21:44:49 +0300 +Subject: crypto: sha512 - use standard ror64() + +From: Alexey Dobriyan <adobriyan@gmail.com> + +commit f2ea0f5f04c97b48c88edccba52b0682fbe45087 upstream. + +Use standard ror64() instead of hand-written. +There is no standard ror64, so create it. + +The difference is shift value being "unsigned int" instead of uint64_t +(for which there is no reason). gcc starts to emit native ROR instructions +which it doesn't do for some reason currently. This should make the code +faster. + +Patch survives in-tree crypto test and ping flood with hmac(sha512) on. + +Signed-off-by: Alexey Dobriyan <adobriyan@gmail.com> +Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au> +Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org> + +--- + crypto/sha512_generic.c | 13 ++++--------- + include/linux/bitops.h | 20 ++++++++++++++++++++ + 2 files changed, 24 insertions(+), 9 deletions(-) + +--- a/crypto/sha512_generic.c ++++ b/crypto/sha512_generic.c +@@ -31,11 +31,6 @@ static inline u64 Maj(u64 x, u64 y, u64 + return (x & y) | (z & (x | y)); + } + +-static inline u64 RORu64(u64 x, u64 y) +-{ +- return (x >> y) | (x << (64 - y)); +-} +- + static const u64 sha512_K[80] = { + 0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL, 0xb5c0fbcfec4d3b2fULL, + 0xe9b5dba58189dbbcULL, 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL, +@@ -66,10 +61,10 @@ static const u64 sha512_K[80] = { + 0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL, + }; + +-#define e0(x) (RORu64(x,28) ^ RORu64(x,34) ^ RORu64(x,39)) +-#define e1(x) (RORu64(x,14) ^ RORu64(x,18) ^ RORu64(x,41)) +-#define s0(x) (RORu64(x, 1) ^ RORu64(x, 8) ^ (x >> 7)) +-#define s1(x) (RORu64(x,19) ^ RORu64(x,61) ^ (x >> 6)) ++#define e0(x) (ror64(x,28) ^ ror64(x,34) ^ ror64(x,39)) ++#define e1(x) (ror64(x,14) ^ ror64(x,18) ^ ror64(x,41)) ++#define s0(x) (ror64(x, 1) ^ ror64(x, 8) ^ (x >> 7)) ++#define s1(x) (ror64(x,19) ^ ror64(x,61) ^ (x >> 6)) + + static inline void LOAD_OP(int I, u64 *W, const u8 *input) + { +--- a/include/linux/bitops.h ++++ b/include/linux/bitops.h +@@ -46,6 +46,26 @@ static inline unsigned long hweight_long + } + + /** ++ * rol64 - rotate a 64-bit value left ++ * @word: value to rotate ++ * @shift: bits to roll ++ */ ++static inline __u64 rol64(__u64 word, unsigned int shift) ++{ ++ return (word << shift) | (word >> (64 - shift)); ++} ++ ++/** ++ * ror64 - rotate a 64-bit value right ++ * @word: value to rotate ++ * @shift: bits to roll ++ */ ++static inline __u64 ror64(__u64 word, unsigned int shift) ++{ ++ return (word >> shift) | (word << (64 - shift)); ++} ++ ++/** + * rol32 - rotate a 32-bit value left + * @word: value to rotate + * @shift: bits to roll |