X-Git-Url: https://git.librecmc.org/?a=blobdiff_plain;f=crypto%2Fmd32_common.h;h=a5b838442b398eb32ac7b901736fe50c850844de;hb=ec1258dd446e39649f846ee870721ca4dfe56cff;hp=4270862d68be7e713d2decab1aef5b380de6465d;hpb=dbd1e384828d002a040ebb8486b0a13444fa1602;p=oweals%2Fopenssl.git

diff --git a/crypto/md32_common.h b/crypto/md32_common.h
index 4270862d68..a5b838442b 100644
--- a/crypto/md32_common.h
+++ b/crypto/md32_common.h
@@ -94,6 +94,8 @@
 * in original (data) byte order, implemented externally (it
 * actually is optional if data and host are of the same
 * "endianess").
+ * HASH_MAKE_STRING
+ * macro converting context variables to an ASCII hash string.
 *
 * Optional macros:
 *
@@ -178,8 +180,17 @@
 #undef ROTATE
 #ifndef PEDANTIC
 # if defined(_MSC_VER)
-# define ROTATE(a,n) _lrotl(a,n)
-# elif defined(__GNUC__) && __GNUC__>=2
+# define ROTATE(a,n) _lrotl(a,n)
+# elif defined(__MWERKS__)
+# if defined(__POWERPC__)
+# define ROTATE(a,n) __rlwinm(a,n,0,31)
+# elif defined(__MC68K__)
+ /* Motorola specific tweak. */
+# define ROTATE(a,n) ( n<24 ? __rol(a,n) : __ror(a,32-n) )
+# else
+# define ROTATE(a,n) __rol(a,n)
+# endif
+# elif defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM)
 /*
 * Some GNU C inline assembler templates. Note that these are
 * rotates by *constant* number of bits! But that's exactly
@@ -211,7 +222,7 @@
 * Engage compiler specific "fetch in reverse byte order"
 * intrinsic function if available.
 */
-# if defined(__GNUC__) && __GNUC__>=2
+# if defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM)
 /* some GNU C inline assembler templates by */
 # if defined(__i386) && !defined(I386_ONLY)
 # define BE_FETCH32(a) ({ register unsigned int l=(a);\
@@ -315,7 +326,7 @@
 # endif
 #endif

-#if !defined(HASH_BLOCK_DATA_ORDER_ALIGNED) && HASH_BLOCK_DATA_ORDER_ALIGNED!=1
+#if !defined(HASH_BLOCK_DATA_ORDER_ALIGNED)
 #ifndef HASH_BLOCK_DATA_ORDER
 #error "HASH_BLOCK_DATA_ORDER must be defined!"
 #endif
@@ -461,13 +472,14 @@ void HASH_UPDATE (HASH_CTX *c, const unsigned char *data, unsigned long len)
 sw=len/HASH_CBLOCK;
 if (sw > 0)
 {
-#if defined(HASH_BLOCK_DATA_ORDER_ALIGNED) && HASH_BLOCK_DATA_ORDER_ALIGNED!=1
+#if defined(HASH_BLOCK_DATA_ORDER_ALIGNED)
 /*
 * Note that HASH_BLOCK_DATA_ORDER_ALIGNED gets defined
 * only if sizeof(HASH_LONG)==4.
 */
 if ((((unsigned long)data)%4) == 0)
 {
+ /* data is properly aligned so that we can cast it: */
 HASH_BLOCK_DATA_ORDER_ALIGNED (c,(HASH_LONG *)data,sw);
 sw*=HASH_CBLOCK;
 data+=sw;
@@ -486,7 +498,7 @@ void HASH_UPDATE (HASH_CTX *c, const unsigned char *data, unsigned long len)
 #endif
 #if defined(HASH_BLOCK_DATA_ORDER)
 {
- HASH_BLOCK_DATA_ORDER(c,(const unsigned char *)data,sw);
+ HASH_BLOCK_DATA_ORDER(c,data,sw);
 sw*=HASH_CBLOCK;
 data+=sw;
 len-=sw;
@@ -510,10 +522,11 @@ void HASH_UPDATE (HASH_CTX *c, const unsigned char *data, unsigned long len)
 }


-void HASH_TRANSFORM (HASH_CTX *c, unsigned char *data)
+void HASH_TRANSFORM (HASH_CTX *c, const unsigned char *data)
 {
-#if defined(HASH_BLOCK_DATA_ORDER_ALIGNED) && HASH_BLOCK_DATA_ORDER_ALIGNED!=1
+#if defined(HASH_BLOCK_DATA_ORDER_ALIGNED)
 if ((((unsigned long)data)%4) == 0)
+ /* data is properly aligned so that we can cast it: */
 HASH_BLOCK_DATA_ORDER_ALIGNED (c,(HASH_LONG *)data,1);
 else
 #if !defined(HASH_BLOCK_DATA_ORDER)
@@ -524,7 +537,7 @@ void HASH_TRANSFORM (HASH_CTX *c, unsigned char *data)
 #endif
 #endif
 #if defined(HASH_BLOCK_DATA_ORDER)
- HASH_BLOCK_DATA_ORDER (c,(const unsigned char *)data,1);
+ HASH_BLOCK_DATA_ORDER (c,data,1);
 #endif
 }

@@ -579,10 +592,11 @@ void HASH_FINAL (unsigned char *md, HASH_CTX *c)
 #endif
 HASH_BLOCK_HOST_ORDER (c,p,1);

- l=c->A; HOST_l2c(l,md);
- l=c->B; HOST_l2c(l,md);
- l=c->C; HOST_l2c(l,md);
- l=c->D; HOST_l2c(l,md);
+#ifndef HASH_MAKE_STRING
+#error "HASH_MAKE_STRING must be defined!"
+#else
+ HASH_MAKE_STRING(c,md);
+#endif

 c->num=0;
 /* clear stuff, HASH_BLOCK may be leaving some stuff on the stack
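
The last hunk above makes HASH_MAKE_STRING mandatory: HASH_FINAL no longer serializes c->A..c->D itself and instead defers to the digest that includes this header. As a minimal sketch only, a digest whose context happens to hold four state words A..D could satisfy the new requirement by mirroring the HOST_l2c pattern that was removed from HASH_FINAL; the field names A..D and the HOST_l2c helper are taken from the code shown above, while the assumption of exactly four words and the local name ll are purely illustrative:

/* Sketch, not part of the patch: one possible HASH_MAKE_STRING for a
 * context with four 32-bit state words, writing each word into the
 * digest buffer through HOST_l2c just as the removed lines did. */
#define HASH_MAKE_STRING(c,s)   do {    \
        unsigned long ll;               \
        ll=(c)->A; HOST_l2c(ll,(s));    \
        ll=(c)->B; HOST_l2c(ll,(s));    \
        ll=(c)->C; HOST_l2c(ll,(s));    \
        ll=(c)->D; HOST_l2c(ll,(s));    \
        } while (0)

A digest with a different state layout would extend the same pattern per word; byte order is still handled entirely by the including file's HOST_l2c definition, so the macro itself stays order-agnostic.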