16 #ifndef CRYPTOPP_PPC_CRYPTO_H 17 #define CRYPTOPP_PPC_CRYPTO_H 22 #if defined(CRYPTOPP_ALTIVEC_AVAILABLE) || defined(CRYPTOPP_DOXYGEN_PROCESSING) 31 #if defined(CRYPTOPP_ALTIVEC_AVAILABLE) || defined(CRYPTOPP_DOXYGEN_PROCESSING) 33 typedef __vector
unsigned char uint8x16_p;
34 typedef __vector
unsigned short uint16x8_p;
35 typedef __vector
unsigned int uint32x4_p;
37 #if defined(CRYPTOPP_POWER8_AVAILABLE) 38 typedef __vector
unsigned long long uint64x2_p;
41 #endif // CRYPTOPP_ALTIVEC_AVAILABLE 43 #if defined(CRYPTOPP_ALTIVEC_AVAILABLE) && !defined(CRYPTOPP_POWER7_AVAILABLE) 45 inline uint32x4_p
VectorLoad(
const byte src[16])
50 data = vec_ld(0, src);
55 const uint8x16_p perm = vec_lvsl(0, src);
56 const uint8x16_p low = vec_ld(0, src);
57 const uint8x16_p high = vec_ld(15, src);
58 data = vec_perm(low, high, perm);
61 #if defined(CRYPTOPP_BIG_ENDIAN) 62 return (uint32x4_p)data;
64 const uint8x16_p mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
65 return (uint32x4_p)vec_perm(data, data, mask);
69 inline void VectorStore(
const uint32x4_p data,
byte dest[16])
71 #if defined(CRYPTOPP_LITTLE_ENDIAN) 72 const uint8x16_p mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
73 const uint8x16_p t1 = (uint8x16_p)vec_perm(data, data, mask);
75 const uint8x16_p t1 = (uint8x16_p)data;
85 const uint8x16_p t2 = vec_perm(t1, t1, vec_lvsr(0, dest));
86 vec_ste((uint8x16_p) t2, 0, (
unsigned char*) dest);
87 vec_ste((uint16x8_p) t2, 1, (
unsigned short*)dest);
88 vec_ste((uint32x4_p) t2, 3, (
unsigned int*) dest);
89 vec_ste((uint32x4_p) t2, 4, (
unsigned int*) dest);
90 vec_ste((uint32x4_p) t2, 8, (
unsigned int*) dest);
91 vec_ste((uint32x4_p) t2, 12, (
unsigned int*) dest);
92 vec_ste((uint16x8_p) t2, 14, (
unsigned short*)dest);
93 vec_ste((uint8x16_p) t2, 15, (
unsigned char*) dest);
97 inline uint32x4_p
VectorXor(
const uint32x4_p vec1,
const uint32x4_p vec2)
99 return vec_xor(vec1, vec2);
102 inline uint32x4_p
VectorAdd(
const uint32x4_p vec1,
const uint32x4_p vec2)
104 return vec_add(vec1, vec2);
109 #if defined(CRYPTOPP_POWER7_AVAILABLE) || defined(CRYPTOPP_DOXYGEN_PROCESSING) 118 #if defined(CRYPTOPP_XLC_VERSION) && defined(CRYPTOPP_LITTLE_ENDIAN) 119 vec_st(vec_reve(vec_ld(0, src)), 0, src);
120 #elif defined(CRYPTOPP_LITTLE_ENDIAN) 121 const uint8x16_p mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
122 const uint8x16_p zero = {0};
123 vec_vsx_st(vec_perm(vec_vsx_ld(0, src), zero, mask), 0, src);
136 const uint8x16_p mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
137 return vec_perm(src, src, mask);
149 #if defined(CRYPTOPP_XLC_VERSION) 150 return (uint32x4_p)vec_xl_be(0, src);
152 # if defined(CRYPTOPP_LITTLE_ENDIAN) 153 return (uint32x4_p)
Reverse(vec_vsx_ld(0, src));
155 return (uint32x4_p)vec_vsx_ld(0, src);
170 #if defined(CRYPTOPP_XLC_VERSION) 171 return (uint32x4_p)vec_xl_be(off, src);
173 # if defined(CRYPTOPP_LITTLE_ENDIAN) 174 return (uint32x4_p)
Reverse(vec_vsx_ld(off, src));
176 return (uint32x4_p)vec_vsx_ld(off, src);
215 #if defined(CRYPTOPP_XLC_VERSION) 216 return (uint32x4_p)vec_xl(0, src);
218 return (uint32x4_p)vec_vsx_ld(0, src);
231 #if defined(CRYPTOPP_XLC_VERSION) 232 return (uint32x4_p)vec_xl(0, src);
234 return (uint32x4_p)vec_vsx_ld(0, src);
248 #if defined(CRYPTOPP_XLC_VERSION) 249 return (uint32x4_p)vec_xl(off, src);
251 return (uint32x4_p)vec_vsx_ld(off, src);
267 #if defined(CRYPTOPP_XLC_VERSION) 268 vec_xst_be((uint8x16_p)src, 0, dest);
270 # if defined(CRYPTOPP_LITTLE_ENDIAN) 271 vec_vsx_st(
Reverse((uint8x16_p)src), 0, dest);
273 vec_vsx_st((uint8x16_p)src, 0, dest);
290 #if defined(CRYPTOPP_XLC_VERSION) 291 vec_xst_be((uint8x16_p)src, off, dest);
293 # if defined(CRYPTOPP_LITTLE_ENDIAN) 294 vec_vsx_st(
Reverse((uint8x16_p)src), off, dest);
296 vec_vsx_st((uint8x16_p)src, off, dest);
313 #if defined(CRYPTOPP_XLC_VERSION) 314 vec_xst_be((uint8x16_p)src, 0, dest);
316 # if defined(CRYPTOPP_LITTLE_ENDIAN) 317 vec_vsx_st(
Reverse((uint8x16_p)src), 0, dest);
319 vec_vsx_st((uint8x16_p)src, 0, dest);
337 #if defined(CRYPTOPP_XLC_VERSION) 338 vec_xst_be((uint8x16_p)src, off, dest);
340 # if defined(CRYPTOPP_LITTLE_ENDIAN) 341 vec_vsx_st(
Reverse((uint8x16_p)src), off, dest);
343 vec_vsx_st((uint8x16_p)src, off, dest);
358 template <
class T1,
class T2>
361 return (T1)vec_perm(vec1, vec2, (uint8x16_p)mask);
/// \brief XOR two vectors
/// \tparam T1 a vector type
/// \tparam T2 a vector type
/// \param vec1 the first vector
/// \param vec2 the second vector, cast to T1 before the operation
/// \returns vec1 ^ vec2 as a T1
template <class T1, class T2>
inline T1 VectorXor(const T1& vec1, const T2& vec2)
{
    return (T1)vec_xor(vec1, (T1)vec2);
}
/// \brief Add two vectors
/// \tparam T1 a vector type
/// \tparam T2 a vector type
/// \param vec1 the first vector
/// \param vec2 the second vector, cast to T1 before the operation
/// \returns vec1 + vec2 as a T1 (lane-wise modular addition)
template <class T1, class T2>
inline T1 VectorAdd(const T1& vec1, const T2& vec2)
{
    return (T1)vec_add(vec1, (T1)vec2);
}
415 template <
unsigned int C,
class T1,
class T2>
418 #if defined(CRYPTOPP_LITTLE_ENDIAN) 419 return (T1)vec_sld((uint8x16_p)vec2, (uint8x16_p)vec1, 16-C);
421 return (T1)vec_sld((uint8x16_p)vec1, (uint8x16_p)vec2, C);
425 #endif // CRYPTOPP_POWER7_AVAILABLE 427 #if defined(CRYPTOPP_POWER8_AVAILABLE) || defined(CRYPTOPP_DOXYGEN_PROCESSING) 437 template <
class T1,
class T2>
440 #if defined(CRYPTOPP_XLC_VERSION) 441 return (T1)__vcipher((uint8x16_p)state, (uint8x16_p)key);
442 #elif defined(CRYPTOPP_GCC_VERSION) 443 return (T1)__builtin_crypto_vcipher((uint64x2_p)state, (uint64x2_p)key);
457 template <
class T1,
class T2>
460 #if defined(CRYPTOPP_XLC_VERSION) 461 return (T1)__vcipherlast((uint8x16_p)state, (uint8x16_p)key);
462 #elif defined(CRYPTOPP_GCC_VERSION) 463 return (T1)__builtin_crypto_vcipherlast((uint64x2_p)state, (uint64x2_p)key);
477 template <
class T1,
class T2>
480 #if defined(CRYPTOPP_XLC_VERSION) 481 return (T1)__vncipher((uint8x16_p)state, (uint8x16_p)key);
482 #elif defined(CRYPTOPP_GCC_VERSION) 483 return (T1)__builtin_crypto_vncipher((uint64x2_p)state, (uint64x2_p)key);
497 template <
class T1,
class T2>
500 #if defined(CRYPTOPP_XLC_VERSION) 501 return (T1)__vncipherlast((uint8x16_p)state, (uint8x16_p)key);
502 #elif defined(CRYPTOPP_GCC_VERSION) 503 return (T1)__builtin_crypto_vncipherlast((uint64x2_p)state, (uint64x2_p)key);
517 template <
int func,
int subfunc,
class T>
520 #if defined(CRYPTOPP_XLC_VERSION) 521 return (T)__vshasigmaw((uint32x4_p)vec, func, subfunc);
522 #elif defined(CRYPTOPP_GCC_VERSION) 523 return (T)__builtin_crypto_vshasigmaw((uint32x4_p)vec, func, subfunc);
537 template <
int func,
int subfunc,
class T>
540 #if defined(CRYPTOPP_XLC_VERSION) 541 return (T)__vshasigmad((uint64x2_p)vec, func, subfunc);
542 #elif defined(CRYPTOPP_GCC_VERSION) 543 return (T)__builtin_crypto_vshasigmad((uint64x2_p)vec, func, subfunc);
549 #endif // CRYPTOPP_POWER8_AVAILABLE 553 #endif // CRYPTOPP_PPC_CRYPTO_H T1 VectorEncrypt(const T1 &state, const T2 &key)
One round of AES encryption.
Utility functions for the Crypto++ library.
T1 VectorShiftLeft(const T1 &vec1, const T2 &vec2)
Shift two vectors left.
void VectorStoreBE(const T &src, uint8_t dest[16])
Stores a vector to a byte array.
T1 VectorEncryptLast(const T1 &state, const T2 &key)
Final round of AES encryption.
Library configuration file.
T1 VectorDecryptLast(const T1 &state, const T2 &key)
Final round of AES decryption.
bool IsAlignedOn(const void *ptr, unsigned int alignment)
Determines whether ptr is aligned to a minimum value.
T VectorSHA256(const T &vec)
SHA256 Sigma functions.
T Reverse(const T &src)
Reverse a vector.
T1 VectorAdd(const T1 &vec1, const T2 &vec2)
Add two vectors.
void ReverseByteArrayLE(byte src[16])
Reverse a 16-byte array.
uint32x4_p VectorLoad(const byte src[16])
Loads a vector from a byte array.
uint32x4_p VectorLoadKey(const byte src[16])
Loads a vector from a byte array.
#define CRYPTOPP_ASSERT(exp)
Debugging and diagnostic assertion.
T1 VectorPermute(const T1 &vec1, const T1 &vec2, const T2 &mask)
Permutes two vectors.
T VectorSHA512(const T &vec)
SHA512 Sigma functions.
T1 VectorXor(const T1 &vec1, const T2 &vec2)
XOR two vectors.
T1 VectorDecrypt(const T1 &state, const T2 &key)
One round of AES decryption.
Crypto++ library namespace.
void VectorStore(const T &src, byte dest[16])
Stores a vector to a byte array.
uint32x4_p VectorLoadBE(const uint8_t src[16])
Loads a vector from a byte array.