1 #ifndef CRYPTOPP_MISC_H
2 #define CRYPTOPP_MISC_H
11 #define _interlockedbittestandset CRYPTOPP_DISABLED_INTRINSIC_1
12 #define _interlockedbittestandreset CRYPTOPP_DISABLED_INTRINSIC_2
13 #define _interlockedbittestandset64 CRYPTOPP_DISABLED_INTRINSIC_3
14 #define _interlockedbittestandreset64 CRYPTOPP_DISABLED_INTRINSIC_4
16 #undef _interlockedbittestandset
17 #undef _interlockedbittestandreset
18 #undef _interlockedbittestandset64
19 #undef _interlockedbittestandreset64
20 #define CRYPTOPP_FAST_ROTATE(x) 1
21 #elif _MSC_VER >= 1300
22 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32 | (x) == 64)
24 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32)
26 #elif (defined(__MWERKS__) && TARGET_CPU_PPC) || \
27 (defined(__GNUC__) && (defined(_ARCH_PWR2) || defined(_ARCH_PWR) || defined(_ARCH_PPC) || defined(_ARCH_PPC64) || defined(_ARCH_COM)))
28 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32)
29 #elif defined(__GNUC__) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86) // depend on GCC's peephole optimization to generate rotate instructions
30 #define CRYPTOPP_FAST_ROTATE(x) 1
32 #define CRYPTOPP_FAST_ROTATE(x) 0
39 #if defined(__GNUC__) && defined(__linux__)
40 #define CRYPTOPP_BYTESWAP_AVAILABLE
45 NAMESPACE_BEGIN(CryptoPP)
// Compile-time assertion helper: the array size 2*b-1 is negative (ill-formed)
// when b is false, so instantiating CompileAssert<false> fails to compile.
// NOTE(review): enclosing template declaration restored — lines were dropped in extraction.
template <bool b>
struct CompileAssert
{
	static char dummy[2*b-1];
};
55 #define CRYPTOPP_COMPILE_ASSERT(assertion) CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, __LINE__)
56 #if defined(CRYPTOPP_EXPORTS) || defined(CRYPTOPP_IMPORTS)
57 #define CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, instance)
59 #define CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, instance) static CompileAssert<(assertion)> CRYPTOPP_ASSERT_JOIN(cryptopp_assert_, instance)
61 #define CRYPTOPP_ASSERT_JOIN(X, Y) CRYPTOPP_DO_ASSERT_JOIN(X, Y)
62 #define CRYPTOPP_DO_ASSERT_JOIN(X, Y) X##Y
71 template <
class BASE1,
class BASE2>
72 class CRYPTOPP_NO_VTABLE
TwoBases :
public BASE1,
public BASE2
77 template <
class BASE1,
class BASE2,
class BASE3>
78 class CRYPTOPP_NO_VTABLE
ThreeBases :
public BASE1,
public BASE2,
public BASE3
//! default object factory: creates a T on the heap with its default constructor
// NOTE(review): enclosing struct declaration restored — only the call operator
// survived extraction; confirm against upstream.
template <class T>
struct NewObject
{
	T* operator()() const {return new T;}
};
108 template <
class T,
class F = NewObject<T>,
int instance=0>
112 Singleton(F objectFactory = F()) : m_objectFactory(objectFactory) {}
115 CRYPTOPP_NOINLINE
const T & Ref(CRYPTOPP_NOINLINE_DOTDOTDOT)
const;
121 template <
class T,
class F,
int instance>
125 T *p = s_pObject.m_p;
130 T *newObject = m_objectFactory();
139 s_pObject.m_p = newObject;
145 #if (!__STDC_WANT_SECURE_LIB__)
146 inline void memcpy_s(
void *dest,
size_t sizeInBytes,
const void *src,
size_t count)
148 if (count > sizeInBytes)
150 memcpy(dest, src, count);
153 inline void memmove_s(
void *dest,
size_t sizeInBytes,
const void *src,
size_t count)
155 if (count > sizeInBytes)
157 memmove(dest, src, count);
160 #if __BORLANDC__ >= 0x620
162 #define memcpy_s CryptoPP::memcpy_s
163 #define memmove_s CryptoPP::memmove_s
//! memset wrapper that is safe to call with num == 0
inline void * memset_z(void *ptr, int value, size_t num)
{
// avoid extraneous warning on GCC 4.3.2 Ubuntu 8.10 when num is a constant 0
#if CRYPTOPP_GCC_VERSION >= 30001
	if (__builtin_constant_p(num) && num==0)
		return ptr;
#endif
	return memset(ptr, value, num);
}
//! return the lesser of a and b; ties favor a (same tie behavior as std::min)
template <class T>
inline const T& STDMIN(const T& a, const T& b)
{
	if (b < a)
		return b;
	return a;
}
//! return the smaller of a and b as a T1, comparing safely across the two integer types
template <class T1, class T2>
inline const T1 UnsignedMin(const T1& a, const T2& b)
{
	// Reject type combinations where a correct cross-type comparison is impossible:
	// the wider of the two types must be unsigned (T(-1) > 0 tests unsignedness).
	CRYPTOPP_COMPILE_ASSERT((sizeof(T1)<=sizeof(T2) && T2(-1)>0) || (sizeof(T1)>sizeof(T2) && T1(-1)>0));

	// Compare in the wider type, then narrow the winner to T1.
	if (sizeof(T1)<=sizeof(T2))
		return b < (T2)a ? (T1)b : a;
	return (T1)b < a ? (T1)b : a;
}
//! return the greater of a and b; ties favor a (same tie behavior as std::max)
template <class T>
inline const T& STDMAX(const T& a, const T& b)
{
	if (a < b)
		return b;
	return a;
}
200 #define RETURN_IF_NONZERO(x) size_t returnedValue = x; if (returnedValue) return returnedValue
203 #define GETBYTE(x, y) (unsigned int)byte((x)>>(8*(y)))
208 #define CRYPTOPP_GET_BYTE_AS_BYTE(x, y) byte((x)>>(8*(y)))
//! returns the parity (XOR of all bits) of value: 1 if an odd number of bits are set
// NOTE(review): the loop body and template header were dropped in extraction and
// are restored here (folding the upper half onto the lower half each pass).
template <class T>
unsigned int Parity(T value)
{
	for (unsigned int i=8*sizeof(value)/2; i>0; i/=2)
		value ^= value >> i;
	return (unsigned int)value&1;
}
//! returns the number of significant bytes in value (0 for value == 0)
// NOTE(review): the binary-search body was truncated in extraction; restored.
template <class T>
unsigned int BytePrecision(const T &value)
{
	if (!value)
		return 0;

	// binary search for the highest nonzero byte
	unsigned int l=0, h=8*sizeof(value);
	while (h-l > 8)
	{
		unsigned int t = (l+h)/2;
		if (value >> t)
			l = t;
		else
			h = t;
	}

	return h/8;
}
//! returns the number of significant bits in value (0 for value == 0)
// NOTE(review): the binary-search body was truncated in extraction; restored.
template <class T>
unsigned int BitPrecision(const T &value)
{
	if (!value)
		return 0;

	// binary search for the highest set bit
	unsigned int l=0, h=8*sizeof(value);
	while (h-l > 1)
	{
		unsigned int t = (l+h)/2;
		if (value >> t)
			l = t;
		else
			h = t;
	}

	return h;
}
258 inline unsigned int TrailingZeros(word32 v)
260 #if defined(__GNUC__) && CRYPTOPP_GCC_VERSION >= 30400
261 return __builtin_ctz(v);
262 #elif defined(_MSC_VER) && _MSC_VER >= 1400
263 unsigned long result;
264 _BitScanForward(&result, v);
268 static const int MultiplyDeBruijnBitPosition[32] =
270 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
271 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
273 return MultiplyDeBruijnBitPosition[((word32)((v & -v) * 0x077CB531U)) >> 27];
277 inline unsigned int TrailingZeros(word64 v)
279 #if defined(__GNUC__) && CRYPTOPP_GCC_VERSION >= 30400
280 return __builtin_ctzll(v);
281 #elif defined(_MSC_VER) && _MSC_VER >= 1400 && (defined(_M_X64) || defined(_M_IA64))
282 unsigned long result;
283 _BitScanForward64(&result, v);
286 return word32(v) ? TrailingZeros(word32(v)) : 32 + TrailingZeros(word32(v>>32));
//! keep only the low `size' bits of value; a size >= the bit width returns value unchanged
// NOTE(review): the else branch (returning value unchanged) was dropped in extraction; restored.
template <class T>
inline T Crop(T value, size_t size)
{
	if (size < 8*sizeof(value))
		return T(value & ((T(1) << size) - 1));
	else
		return value;
}
//! convert `from' to type T2, returning false if the value or its sign was not preserved
// NOTE(review): the assignment and return statements were dropped in extraction; restored.
template <class T1, class T2>
inline bool SafeConvert(T1 from, T2 &to)
{
	to = (T2)from;
	// round-trip mismatch or sign flip means the conversion lost information
	if (from != to || (from > 0) != (to > 0))
		return false;
	return true;
}
//! number of whole bytes needed to hold bitCount bits (rounds up)
inline size_t BitsToBytes(size_t bitCount)
{
	return (bitCount + 7) / 8;
}
313 inline size_t BytesToWords(
size_t byteCount)
315 return ((byteCount+WORD_SIZE-1)/WORD_SIZE);
318 inline size_t BitsToWords(
size_t bitCount)
320 return ((bitCount+WORD_BITS-1)/(WORD_BITS));
323 inline size_t BitsToDwords(
size_t bitCount)
325 return ((bitCount+2*WORD_BITS-1)/(2*WORD_BITS));
328 CRYPTOPP_DLL
void CRYPTOPP_API xorbuf(byte *buf,
const byte *mask,
size_t count);
329 CRYPTOPP_DLL
void CRYPTOPP_API xorbuf(byte *output,
const byte *input,
const byte *mask,
size_t count);
331 CRYPTOPP_DLL
bool CRYPTOPP_API VerifyBufsEqual(
const byte *buf1,
const byte *buf2,
size_t count);
//! returns whether n is a positive power of 2 (0 and negatives return false)
// NOTE(review): the `template <class T>' header was dropped in extraction; restored.
template <class T>
inline bool IsPowerOf2(const T &n)
{
	// a power of 2 has exactly one bit set, so n & (n-1) clears it to zero
	return n > 0 && (n & (n-1)) == 0;
}
//! a mod b, where b must be a power of 2; computed with a mask instead of division
template <class T1, class T2>
inline T2 ModPowerOf2(const T1 &a, const T2 &b)
{
	assert(IsPowerOf2(b));
	return T2(a) & (b-1);
}

//! round n down to the nearest multiple of m
// assumes m is a power of 2 (required by ModPowerOf2)
template <class T1, class T2>
inline T1 RoundDownToMultipleOf(const T1 &n, const T2 &m)
{
	return n - ModPowerOf2(n, m);
}

//! round n up to the nearest multiple of m
template <class T1, class T2>
inline T1 RoundUpToMultipleOf(const T1 &n, const T2 &m)
{
	return RoundDownToMultipleOf(n+m-1, m);
}
//! returns the alignment requirement of type T, in bytes
// NOTE(review): the return statements and preprocessor structure were truncated
// in extraction; restored from upstream — confirm branch order.
template <class T>
inline unsigned int GetAlignmentOf(T *dummy=NULL)	// VC60 workaround
{
#ifdef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
	if (sizeof(T) < 16)
		return 1;
#endif
#if (_MSC_VER >= 1300)
	return __alignof(T);
#elif defined(__GNUC__)
	return __alignof__(T);
#elif CRYPTOPP_BOOL_SLOW_WORD64
	// alignment requirement above the natural word size is probably unnecessary
	return UnsignedMin(4U, sizeof(T));
#else
	return sizeof(T);	// same as alignment_of in TR1
#endif
}
382 inline bool IsAlignedOn(
const void *p,
unsigned int alignment)
384 return alignment==1 || (IsPowerOf2(alignment) ? ModPowerOf2((
size_t)p, alignment) == 0 : (size_t)p % alignment == 0);
388 inline bool IsAligned(
const void *p, T *dummy=NULL)
390 return IsAlignedOn(p, GetAlignmentOf<T>());
393 #ifdef IS_LITTLE_ENDIAN
399 inline ByteOrder GetNativeByteOrder()
401 return NativeByteOrder::ToEnum();
404 inline bool NativeByteOrderIs(ByteOrder order)
406 return order == GetNativeByteOrder();
//! convert an integer to its string representation in the given base (digits >= 10 use 'a'..)
// NOTE(review): the conversion loop and sign handling were truncated in extraction; restored.
template <class T>
std::string IntToString(T a, unsigned int base = 10)
{
	if (a == 0)
		return "0";
	bool negate = false;
	if (a < 0)
	{
		negate = true;
		a = 0-a;	// VC .NET does not like -a
	}
	std::string result;
	while (a > 0)
	{
		T digit = a % base;
		result = char((digit < 10 ? '0' : ('a' - 10)) + digit) + result;
		a /= base;
	}
	if (negate)
		result = "-" + result;
	return result;
}
//! subtract b from a, clamping the result at zero instead of wrapping around
template <class T1, class T2>
inline T1 SaturatingSubtract(const T1 &a, const T2 &b)
{
	if (a > b)
		return T1(a - b);
	return T1(0);
}
439 inline CipherDir GetCipherDir(
const T &obj)
441 return obj.IsForwardTransformation() ? ENCRYPTION : DECRYPTION;
444 CRYPTOPP_DLL
void CRYPTOPP_API CallNewHandler();
446 inline void IncrementCounterByOne(byte *inout,
unsigned int s)
448 for (
int i=s-1, carry=1; i>=0 && carry; i--)
452 inline void IncrementCounterByOne(byte *output,
const byte *input,
unsigned int s)
455 for (i=s-1, carry=1; i>=0 && carry; i--)
456 carry = ((output[i] = input[i]+1) == 0);
457 memcpy_s(output, s, input, i+1);
//! swap a and b if c is true, using branch-free XOR arithmetic (constant time)
// NOTE(review): the function body was dropped in extraction; restored from upstream.
template <class T>
inline void ConditionalSwap(bool c, T &a, T &b)
{
	// t is a^b when c is true, 0 when false; XOR-ing it into both swaps or no-ops
	T t = c * (a ^ b);
	a ^= t;
	b ^= t;
}
//! swap two pointers if c is true, using branch-free pointer arithmetic
// NOTE(review): the adjustment statements were dropped in extraction; restored.
template <class T>
inline void ConditionalSwapPointers(bool c, T &a, T &b)
{
	// t is (a-b) when c is true, 0 when false
	ptrdiff_t t = c * (a - b);
	a -= t;
	b += t;
}
//! zero n elements of buf through a volatile pointer so the compiler cannot elide the wipe
// NOTE(review): the wipe loop was dropped in extraction; restored from upstream.
template <class T>
void SecureWipeBuffer(T *buf, size_t n)
{
	// the volatile qualifier forces every store to be emitted
	volatile T *p = buf+n;
	while (p != buf)
		*((volatile T*)(--p)) = 0;
}
#if (_MSC_VER >= 1400 || defined(__GNUC__)) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86)
// x86/x64 fast paths: wipe with `rep stos' (GCC inline asm) or the MSVC __stos intrinsics.
// NOTE(review): the #ifdef __GNUC__/#else/#endif scaffolding inside each
// specialization was dropped in extraction and is restored here.

template<> inline void SecureWipeBuffer(byte *buf, size_t n)
{
	volatile byte *p = buf;
#ifdef __GNUC__
	asm volatile("rep stosb" : "+c"(n), "+D"(p) : "a"(0) : "memory");
#else
	__stosb((byte *)(size_t)p, 0, n);
#endif
}

template<> inline void SecureWipeBuffer(word16 *buf, size_t n)
{
	volatile word16 *p = buf;
#ifdef __GNUC__
	asm volatile("rep stosw" : "+c"(n), "+D"(p) : "a"(0) : "memory");
#else
	__stosw((word16 *)(size_t)p, 0, n);
#endif
}

template<> inline void SecureWipeBuffer(word32 *buf, size_t n)
{
	volatile word32 *p = buf;
#ifdef __GNUC__
	asm volatile("rep stosl" : "+c"(n), "+D"(p) : "a"(0) : "memory");
#else
	__stosd((unsigned long *)(size_t)p, 0, n);
#endif
}

template<> inline void SecureWipeBuffer(word64 *buf, size_t n)
{
#if CRYPTOPP_BOOL_X64
	volatile word64 *p = buf;
#ifdef __GNUC__
	asm volatile("rep stosq" : "+c"(n), "+D"(p) : "a"(0) : "memory");
#else
	__stosq((word64 *)(size_t)p, 0, n);
#endif
#else
	// 32-bit x86 has no 64-bit string store; wipe as twice as many 32-bit words
	SecureWipeBuffer((word32 *)buf, 2*n);
#endif
}

#endif // #if (_MSC_VER >= 1400 || defined(__GNUC__)) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86)
536 inline void SecureWipeArray(T *buf,
size_t n)
538 if (
sizeof(T) % 8 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word64>() == 0)
539 SecureWipeBuffer((word64 *)buf, n * (
sizeof(T)/8));
540 else if (
sizeof(T) % 4 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word32>() == 0)
541 SecureWipeBuffer((word32 *)buf, n * (
sizeof(T)/4));
542 else if (
sizeof(T) % 2 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word16>() == 0)
543 SecureWipeBuffer((word16 *)buf, n * (
sizeof(T)/2));
545 SecureWipeBuffer((byte *)buf, n *
sizeof(T));
549 static std::string StringNarrow(
const wchar_t *str,
bool throwOnError =
true)
552 #pragma warning(push)
553 #pragma warning(disable: 4996) // 'wcstombs': This function or variable may be unsafe.
555 size_t size = wcstombs(NULL, str, 0);
556 if (size ==
size_t(0)-1)
561 return std::string();
563 std::string result(size, 0);
564 wcstombs(&result[0], str, size);
571 #if CRYPTOPP_BOOL_ALIGN16_ENABLED
572 CRYPTOPP_DLL
void * CRYPTOPP_API AlignedAllocate(
size_t size);
573 CRYPTOPP_DLL
void CRYPTOPP_API AlignedDeallocate(
void *p);
576 CRYPTOPP_DLL
void * CRYPTOPP_API UnalignedAllocate(
size_t size);
577 CRYPTOPP_DLL
void CRYPTOPP_API UnalignedDeallocate(
void *p);
// Portable rotate templates. The shift amount is masked to the bit width so that
// y == 0 (after the assert or the modulo) never produces a shift by the full
// width, which is undefined behavior in C++.
// NOTE(review): rotlMod/rotrMod lost their `y %= sizeof(T)*8;' line in extraction; restored.

//! rotate left; y must be a compile-time constant < the bit width of T
template <class T> inline T rotlFixed(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x<<y) | (x>>((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}

//! rotate right; y must be a compile-time constant < the bit width of T
template <class T> inline T rotrFixed(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x>>y) | (x<<((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}

//! rotate left by a runtime amount; y must be < the bit width of T
template <class T> inline T rotlVariable(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x<<y) | (x>>((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}

//! rotate right by a runtime amount; y must be < the bit width of T
template <class T> inline T rotrVariable(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x>>y) | (x<<((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}

//! rotate left by y modulo the bit width of T (any y is valid)
template <class T> inline T rotlMod(T x, unsigned int y)
{
	y %= sizeof(T)*8;
	return T((x<<y) | (x>>((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}

//! rotate right by y modulo the bit width of T (any y is valid)
template <class T> inline T rotrMod(T x, unsigned int y)
{
	y %= sizeof(T)*8;
	return T((x>>y) | (x<<((sizeof(T)*8-y) & (sizeof(T)*8-1))));
}
#ifdef _MSC_VER
// MSVC: use the _lrotl/_lrotr intrinsics for 32-bit rotates.
// The `y ?' guard in the Fixed variants avoids intrinsic behavior for y == 0.
// NOTE(review): the opening #ifdef and function braces were dropped in extraction; restored.

template<> inline word32 rotlFixed<word32>(word32 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _lrotl(x, y) : x;
}

template<> inline word32 rotrFixed<word32>(word32 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _lrotr(x, y) : x;
}

template<> inline word32 rotlVariable<word32>(word32 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _lrotl(x, y);
}

template<> inline word32 rotrVariable<word32>(word32 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _lrotr(x, y);
}

template<> inline word32 rotlMod<word32>(word32 x, unsigned int y)
{
	return _lrotl(x, y);
}

template<> inline word32 rotrMod<word32>(word32 x, unsigned int y)
{
	return _lrotr(x, y);
}

#endif // #ifdef _MSC_VER
#if _MSC_VER >= 1300 && !defined(__INTEL_COMPILER)
// MSVC 7.0+ (excluding ICC): use the _rotl64/_rotr64 intrinsics for 64-bit rotates.
// NOTE(review): function braces were dropped in extraction; restored.

template<> inline word64 rotlFixed<word64>(word64 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotl64(x, y) : x;
}

template<> inline word64 rotrFixed<word64>(word64 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotr64(x, y) : x;
}

template<> inline word64 rotlVariable<word64>(word64 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotl64(x, y);
}

template<> inline word64 rotrVariable<word64>(word64 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotr64(x, y);
}

template<> inline word64 rotlMod<word64>(word64 x, unsigned int y)
{
	return _rotl64(x, y);
}

template<> inline word64 rotrMod<word64>(word64 x, unsigned int y)
{
	return _rotr64(x, y);
}

#endif // #if _MSC_VER >= 1300
#if _MSC_VER >= 1400 && !defined(__INTEL_COMPILER)
// MSVC 8.0+ (excluding ICC): _rotl16/_rotr16 and _rotl8/_rotr8 intrinsics.
// NOTE(review): function braces and several bodies were dropped in extraction; restored.

template<> inline word16 rotlFixed<word16>(word16 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotl16(x, y) : x;
}

template<> inline word16 rotrFixed<word16>(word16 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotr16(x, y) : x;
}

template<> inline word16 rotlVariable<word16>(word16 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotl16(x, y);
}

template<> inline word16 rotrVariable<word16>(word16 x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotr16(x, y);
}

template<> inline word16 rotlMod<word16>(word16 x, unsigned int y)
{
	return _rotl16(x, y);
}

template<> inline word16 rotrMod<word16>(word16 x, unsigned int y)
{
	return _rotr16(x, y);
}

template<> inline byte rotlFixed<byte>(byte x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotl8(x, y) : x;
}

template<> inline byte rotrFixed<byte>(byte x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return y ? _rotr8(x, y) : x;
}

template<> inline byte rotlVariable<byte>(byte x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotl8(x, y);
}

template<> inline byte rotrVariable<byte>(byte x, unsigned int y)
{
	assert(y < 8*sizeof(x));
	return _rotr8(x, y);
}

template<> inline byte rotlMod<byte>(byte x, unsigned int y)
{
	return _rotl8(x, y);
}

template<> inline byte rotrMod<byte>(byte x, unsigned int y)
{
	return _rotr8(x, y);
}

#endif // #if _MSC_VER >= 1400
#if (defined(__MWERKS__) && TARGET_CPU_PPC)
// Metrowerks on PowerPC: use the rlwinm/rlwnm rotate instructions.
// A right rotate by y is a left rotate by 32-y.
// NOTE(review): function braces were dropped in extraction; restored.

template<> inline word32 rotlFixed<word32>(word32 x, unsigned int y)
{
	assert(y < 32);
	return y ? __rlwinm(x,y,0,31) : x;
}

template<> inline word32 rotrFixed<word32>(word32 x, unsigned int y)
{
	assert(y < 32);
	return y ? __rlwinm(x,32-y,0,31) : x;
}

template<> inline word32 rotlVariable<word32>(word32 x, unsigned int y)
{
	assert(y < 32);
	return (__rlwnm(x,y,0,31));
}

template<> inline word32 rotrVariable<word32>(word32 x, unsigned int y)
{
	assert(y < 32);
	return (__rlwnm(x,32-y,0,31));
}

template<> inline word32 rotlMod<word32>(word32 x, unsigned int y)
{
	return (__rlwnm(x,y,0,31));
}

template<> inline word32 rotrMod<word32>(word32 x, unsigned int y)
{
	return (__rlwnm(x,32-y,0,31));
}

#endif // #if (defined(__MWERKS__) && TARGET_CPU_PPC)
808 inline unsigned int GetByte(ByteOrder order, T value,
unsigned int index)
810 if (order == LITTLE_ENDIAN_ORDER)
811 return GETBYTE(value, index);
813 return GETBYTE(value,
sizeof(T)-index-1);
816 inline byte ByteReverse(byte value)
821 inline word16 ByteReverse(word16 value)
823 #ifdef CRYPTOPP_BYTESWAP_AVAILABLE
824 return bswap_16(value);
825 #elif defined(_MSC_VER) && _MSC_VER >= 1300
826 return _byteswap_ushort(value);
828 return rotlFixed(value, 8U);
832 inline word32 ByteReverse(word32 value)
834 #if defined(__GNUC__) && defined(CRYPTOPP_X86_ASM_AVAILABLE)
835 __asm__ (
"bswap %0" :
"=r" (value) :
"0" (value));
837 #elif defined(CRYPTOPP_BYTESWAP_AVAILABLE)
838 return bswap_32(value);
839 #elif defined(__MWERKS__) && TARGET_CPU_PPC
840 return (word32)__lwbrx(&value,0);
841 #elif _MSC_VER >= 1400 || (_MSC_VER >= 1300 && !defined(_DLL))
842 return _byteswap_ulong(value);
843 #elif CRYPTOPP_FAST_ROTATE(32)
845 return (rotrFixed(value, 8U) & 0xff00ff00) | (rotlFixed(value, 8U) & 0x00ff00ff);
848 value = ((value & 0xFF00FF00) >> 8) | ((value & 0x00FF00FF) << 8);
849 return rotlFixed(value, 16U);
853 inline word64 ByteReverse(word64 value)
855 #if defined(__GNUC__) && defined(CRYPTOPP_X86_ASM_AVAILABLE) && defined(__x86_64__)
856 __asm__ (
"bswap %0" :
"=r" (value) :
"0" (value));
858 #elif defined(CRYPTOPP_BYTESWAP_AVAILABLE)
859 return bswap_64(value);
860 #elif defined(_MSC_VER) && _MSC_VER >= 1300
861 return _byteswap_uint64(value);
862 #elif CRYPTOPP_BOOL_SLOW_WORD64
863 return (word64(ByteReverse(word32(value))) << 32) | ByteReverse(word32(value>>32));
865 value = ((value & W64LIT(0xFF00FF00FF00FF00)) >> 8) | ((value & W64LIT(0x00FF00FF00FF00FF)) << 8);
866 value = ((value & W64LIT(0xFFFF0000FFFF0000)) >> 16) | ((value & W64LIT(0x0000FFFF0000FFFF)) << 16);
867 return rotlFixed(value, 32U);
871 inline byte BitReverse(byte value)
873 value = ((value & 0xAA) >> 1) | ((value & 0x55) << 1);
874 value = ((value & 0xCC) >> 2) | ((value & 0x33) << 2);
875 return rotlFixed(value, 4U);
878 inline word16 BitReverse(word16 value)
880 value = ((value & 0xAAAA) >> 1) | ((value & 0x5555) << 1);
881 value = ((value & 0xCCCC) >> 2) | ((value & 0x3333) << 2);
882 value = ((value & 0xF0F0) >> 4) | ((value & 0x0F0F) << 4);
883 return ByteReverse(value);
886 inline word32 BitReverse(word32 value)
888 value = ((value & 0xAAAAAAAA) >> 1) | ((value & 0x55555555) << 1);
889 value = ((value & 0xCCCCCCCC) >> 2) | ((value & 0x33333333) << 2);
890 value = ((value & 0xF0F0F0F0) >> 4) | ((value & 0x0F0F0F0F) << 4);
891 return ByteReverse(value);
894 inline word64 BitReverse(word64 value)
896 #if CRYPTOPP_BOOL_SLOW_WORD64
897 return (word64(BitReverse(word32(value))) << 32) | BitReverse(word32(value>>32));
899 value = ((value & W64LIT(0xAAAAAAAAAAAAAAAA)) >> 1) | ((value & W64LIT(0x5555555555555555)) << 1);
900 value = ((value & W64LIT(0xCCCCCCCCCCCCCCCC)) >> 2) | ((value & W64LIT(0x3333333333333333)) << 2);
901 value = ((value & W64LIT(0xF0F0F0F0F0F0F0F0)) >> 4) | ((value & W64LIT(0x0F0F0F0F0F0F0F0F)) << 4);
902 return ByteReverse(value);
907 inline T BitReverse(T value)
910 return (T)BitReverse((byte)value);
911 else if (
sizeof(T) == 2)
912 return (T)BitReverse((word16)value);
913 else if (
sizeof(T) == 4)
914 return (T)BitReverse((word32)value);
917 assert(
sizeof(T) == 8);
918 return (T)BitReverse((word64)value);
923 inline T ConditionalByteReverse(ByteOrder order, T value)
925 return NativeByteOrderIs(order) ? value : ByteReverse(value);
929 void ByteReverse(T *out,
const T *in,
size_t byteCount)
931 assert(byteCount %
sizeof(T) == 0);
932 size_t count = byteCount/
sizeof(T);
933 for (
size_t i=0; i<count; i++)
934 out[i] = ByteReverse(in[i]);
938 inline void ConditionalByteReverse(ByteOrder order, T *out,
const T *in,
size_t byteCount)
940 if (!NativeByteOrderIs(order))
941 ByteReverse(out, in, byteCount);
943 memcpy_s(out, byteCount, in, byteCount);
947 inline void GetUserKey(ByteOrder order, T *out,
size_t outlen,
const byte *in,
size_t inlen)
949 const size_t U =
sizeof(T);
950 assert(inlen <= outlen*U);
951 memcpy_s(out, outlen*U, in, inlen);
952 memset_z((byte *)out+inlen, 0, outlen*U-inlen);
953 ConditionalByteReverse(order, out, out, RoundUpToMultipleOf(inlen, U));
956 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
957 inline byte UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const byte *)
962 inline word16 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word16 *)
964 return (order == BIG_ENDIAN_ORDER)
965 ? block[1] | (block[0] << 8)
966 : block[0] | (block[1] << 8);
969 inline word32 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word32 *)
971 return (order == BIG_ENDIAN_ORDER)
972 ? word32(block[3]) | (word32(block[2]) << 8) | (word32(block[1]) << 16) | (word32(block[0]) << 24)
973 : word32(block[0]) | (word32(block[1]) << 8) | (word32(block[2]) << 16) | (word32(block[3]) << 24);
976 inline word64 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word64 *)
978 return (order == BIG_ENDIAN_ORDER)
981 (word64(block[6]) << 8) |
982 (word64(block[5]) << 16) |
983 (word64(block[4]) << 24) |
984 (word64(block[3]) << 32) |
985 (word64(block[2]) << 40) |
986 (word64(block[1]) << 48) |
987 (word64(block[0]) << 56))
990 (word64(block[1]) << 8) |
991 (word64(block[2]) << 16) |
992 (word64(block[3]) << 24) |
993 (word64(block[4]) << 32) |
994 (word64(block[5]) << 40) |
995 (word64(block[6]) << 48) |
996 (word64(block[7]) << 56));
999 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, byte value,
const byte *xorBlock)
1001 block[0] = xorBlock ? (value ^ xorBlock[0]) : value;
1004 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word16 value,
const byte *xorBlock)
1006 if (order == BIG_ENDIAN_ORDER)
1010 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1011 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1015 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1016 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1023 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1024 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1028 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1029 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1034 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word32 value,
const byte *xorBlock)
1036 if (order == BIG_ENDIAN_ORDER)
1040 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1041 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1042 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1043 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1047 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1048 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1049 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1050 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1057 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1058 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1059 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1060 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1064 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1065 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1066 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1067 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1072 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word64 value,
const byte *xorBlock)
1074 if (order == BIG_ENDIAN_ORDER)
1078 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1079 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1080 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1081 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1082 block[4] = xorBlock[4] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1083 block[5] = xorBlock[5] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1084 block[6] = xorBlock[6] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1085 block[7] = xorBlock[7] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1089 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1090 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1091 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1092 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1093 block[4] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1094 block[5] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1095 block[6] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1096 block[7] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1103 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1104 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1105 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1106 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1107 block[4] = xorBlock[4] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1108 block[5] = xorBlock[5] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1109 block[6] = xorBlock[6] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1110 block[7] = xorBlock[7] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1114 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1115 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1116 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1117 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1118 block[4] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1119 block[5] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1120 block[6] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1121 block[7] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1125 #endif // #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1128 inline T GetWord(
bool assumeAligned, ByteOrder order,
const byte *block)
1130 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1132 return UnalignedGetWordNonTemplate(order, block, (T*)NULL);
1133 assert(IsAligned<T>(block));
1135 return ConditionalByteReverse(order, *reinterpret_cast<const T *>(block));
1139 inline void GetWord(
bool assumeAligned, ByteOrder order, T &result,
const byte *block)
1141 result = GetWord<T>(assumeAligned, order, block);
1145 inline void PutWord(
bool assumeAligned, ByteOrder order, byte *block, T value,
const byte *xorBlock = NULL)
1147 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1149 return UnalignedPutWordNonTemplate(order, block, value, xorBlock);
1150 assert(IsAligned<T>(block));
1151 assert(IsAligned<T>(xorBlock));
1153 *
reinterpret_cast<T *
>(block) = ConditionalByteReverse(order, value) ^ (xorBlock ? *
reinterpret_cast<const T *
>(xorBlock) : 0);
1156 template <
class T,
class B,
bool A=false>
1161 : m_block((
const byte *)block) {}
1166 CRYPTOPP_COMPILE_ASSERT(
sizeof(U) >=
sizeof(T));
1167 x = GetWord<T>(A, B::ToEnum(), m_block);
1168 m_block +=
sizeof(T);
1173 const byte *m_block;
1176 template <
class T,
class B,
bool A=false>
1180 PutBlock(
const void *xorBlock,
void *block)
1181 : m_xorBlock((
const byte *)xorBlock), m_block((byte *)block) {}
1186 PutWord(A, B::ToEnum(), m_block, (T)x, m_xorBlock);
1187 m_block +=
sizeof(T);
1189 m_xorBlock +=
sizeof(T);
1194 const byte *m_xorBlock;
1198 template <
class T,
class B,
bool GA=false,
bool PA=false>
1207 std::string WordToString(T value, ByteOrder order = BIG_ENDIAN_ORDER)
1209 if (!NativeByteOrderIs(order))
1210 value = ByteReverse(value);
1212 return std::string((
char *)&value,
sizeof(value));
1216 T StringToWord(
const std::string &str, ByteOrder order = BIG_ENDIAN_ORDER)
1219 memcpy_s(&value,
sizeof(value), str.data(), UnsignedMin(str.size(),
sizeof(value)));
1220 return NativeByteOrderIs(order) ? value : ByteReverse(value);
//! shift helpers that return 0 instead of invoking undefined behavior when the
//! shift amount is >= the bit width of T
// NOTE(review): struct declarations were dropped in extraction; reconstructed
// from upstream Crypto++.
template <bool overflow> struct SafeShifter;

//! shift amount overflows: every shift yields 0
template<> struct SafeShifter<true>
{
	template <class T>
	static inline T RightShift(T value, unsigned int bits)
	{
		return 0;
	}

	template <class T>
	static inline T LeftShift(T value, unsigned int bits)
	{
		return 0;
	}
};

//! shift amount is in range: perform the plain shift
template<> struct SafeShifter<false>
{
	template <class T>
	static inline T RightShift(T value, unsigned int bits)
	{
		return value >> bits;
	}

	template <class T>
	static inline T LeftShift(T value, unsigned int bits)
	{
		return value << bits;
	}
};

//! right-shift by a compile-time amount, yielding 0 when bits >= the width of T
template <unsigned int bits, class T>
inline T SafeRightShift(T value)
{
	return SafeShifter<(bits>=(8*sizeof(T)))>::RightShift(value, bits);
}

//! left-shift by a compile-time amount, yielding 0 when bits >= the width of T
template <unsigned int bits, class T>
inline T SafeLeftShift(T value)
{
	return SafeShifter<(bits>=(8*sizeof(T)))>::LeftShift(value, bits);
}
1271 #define CRYPTOPP_BLOCK_1(n, t, s) t* m_##n() {return (t *)(m_aggregate+0);} size_t SS1() {return sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1272 #define CRYPTOPP_BLOCK_2(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS1());} size_t SS2() {return SS1()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1273 #define CRYPTOPP_BLOCK_3(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS2());} size_t SS3() {return SS2()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1274 #define CRYPTOPP_BLOCK_4(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS3());} size_t SS4() {return SS3()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1275 #define CRYPTOPP_BLOCK_5(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS4());} size_t SS5() {return SS4()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1276 #define CRYPTOPP_BLOCK_6(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS5());} size_t SS6() {return SS5()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1277 #define CRYPTOPP_BLOCK_7(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS6());} size_t SS7() {return SS6()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1278 #define CRYPTOPP_BLOCK_8(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS7());} size_t SS8() {return SS7()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1279 #define CRYPTOPP_BLOCKS_END(i) size_t SST() {return SS##i();} void AllocateBlocks() {m_aggregate.New(SST());} AlignedSecByteBlock m_aggregate;
// InvalidArgument: exception thrown when an invalid argument is detected.
// CipherDir: used to specify a direction for a cipher to operate in (encrypt or decrypt).