1 /* crypto/sha/sha_locl.h */
2 /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
5 * This package is an SSL implementation written
6 * by Eric Young (eay@cryptsoft.com).
7 * The implementation was written so as to conform with Netscapes SSL.
9 * This library is free for commercial and non-commercial use as long as
10 * the following conditions are aheared to. The following conditions
11 * apply to all code found in this distribution, be it the RC4, RSA,
12 * lhash, DES, etc., code; not just the SSL code. The SSL documentation
13 * included with this distribution is covered by the same copyright terms
14 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
16 * Copyright remains Eric Young's, and as such any Copyright notices in
17 * the code are not to be removed.
18 * If this package is used in a product, Eric Young should be given attribution
19 * as the author of the parts of the library used.
20 * This can be in the form of a textual message at program startup or
21 * in documentation (online or textual) provided with the package.
23 * Redistribution and use in source and binary forms, with or without
24 * modification, are permitted provided that the following conditions
26 * 1. Redistributions of source code must retain the copyright
27 * notice, this list of conditions and the following disclaimer.
28 * 2. Redistributions in binary form must reproduce the above copyright
29 * notice, this list of conditions and the following disclaimer in the
30 * documentation and/or other materials provided with the distribution.
31 * 3. All advertising materials mentioning features or use of this software
32 * must display the following acknowledgement:
33 * "This product includes cryptographic software written by
34 * Eric Young (eay@cryptsoft.com)"
35 * The word 'cryptographic' can be left out if the rouines from the library
36 * being used are not cryptographic related :-).
37 * 4. If you include any Windows specific code (or a derivative thereof) from
38 * the apps directory (application code) you must include an acknowledgement:
39 * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
41 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
42 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
43 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
44 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
45 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
46 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
47 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
48 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
49 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
50 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
53 * The licence and distribution terms for any publically available version or
54 * derivative of this code cannot be changed. i.e. this code cannot simply be
55 * copied and put under another distribution licence
56 * [including the GNU Public Licence.]
62 #include <openssl/opensslconf.h>
63 #include <openssl/sha.h>
/*
 * Glue that instantiates the generic md32_common.h hash framework for SHA:
 * 32-bit words (SHA_LONG), big-endian message byte order.
 */
66 #define SHA_LONG_LOG2 2 /* default to 32 bits */
69 #define DATA_ORDER_IS_BIG_ENDIAN
71 #define HASH_LONG SHA_LONG
72 #define HASH_LONG_LOG2 SHA_LONG_LOG2
73 #define HASH_CTX SHA_CTX
74 #define HASH_CBLOCK SHA_CBLOCK
75 #define HASH_LBLOCK SHA_LBLOCK
/*
 * Serialize the five 32-bit chaining values h0..h4 into the output digest
 * buffer (s), most-significant byte first, via HOST_l2c.
 * NOTE(review): the 'll' temporary's declaration and the '} while (0)' tail
 * are not visible in this chunk of the file.
 */
76 #define HASH_MAKE_STRING(c,s) do { \
78 ll=(c)->h0; HOST_l2c(ll,(s)); \
79 ll=(c)->h1; HOST_l2c(ll,(s)); \
80 ll=(c)->h2; HOST_l2c(ll,(s)); \
81 ll=(c)->h3; HOST_l2c(ll,(s)); \
82 ll=(c)->h4; HOST_l2c(ll,(s)); \
/* SHA-0 entry-point names (this branch is taken when SHA_0 is defined). */
87 # define HASH_UPDATE SHA_Update
88 # define HASH_TRANSFORM SHA_Transform
89 # define HASH_FINAL SHA_Final
90 # define HASH_INIT SHA_Init
91 # define HASH_BLOCK_HOST_ORDER sha_block_host_order
92 # define HASH_BLOCK_DATA_ORDER sha_block_data_order
/*
 * SHA-0 message-schedule update: plain XOR of the four earlier words.
 * Unlike the SHA-1 variant below, there is no 1-bit left rotation --
 * that rotation is the only difference between SHA-0 and SHA-1.
 */
93 # define Xupdate(a,ix,ia,ib,ic,id) (ix=(a)=(ia^ib^ic^id))
95 void sha_block_host_order (SHA_CTX *c, const void *p,size_t num);
96 void sha_block_data_order (SHA_CTX *c, const void *p,size_t num);
/* SHA-1 entry-point names (this branch is taken when SHA_1 is defined). */
100 # define HASH_UPDATE SHA1_Update
101 # define HASH_TRANSFORM SHA1_Transform
102 # define HASH_FINAL SHA1_Final
103 # define HASH_INIT SHA1_Init
104 # define HASH_BLOCK_HOST_ORDER sha1_block_host_order
105 # define HASH_BLOCK_DATA_ORDER sha1_block_data_order
/*
 * SHA-1 message-schedule update: XOR of four earlier words followed by a
 * 1-bit left rotation (the fix that distinguishes SHA-1 from SHA-0).
 * Two forms: a statement form for a broken compiler, and the normal
 * comma-expression form. NOTE(review): both macro tails ('} while(0)' /
 * closing ')') are elided from this view of the file.
 */
106 # if defined(__MWERKS__) && defined(__MC68K__)
107 /* Metrowerks for Motorola fails otherwise:-( <appro@fy.chalmers.se> */
108 # define Xupdate(a,ix,ia,ib,ic,id) do { (a)=(ia^ib^ic^id); \
109 ix=(a)=ROTATE((a),1); \
112 # define Xupdate(a,ix,ia,ib,ic,id) ( (a)=(ia^ib^ic^id), \
113 ix=(a)=ROTATE((a),1) \
/*
 * On x86/x86_64 and IA-64, redirect the block functions to hand-written
 * assembler and suppress the C implementations below via the
 * DONT_IMPLEMENT_* guards.
 */
118 # if defined(__i386) || defined(__i386__) || defined(_M_IX86) || defined(__INTEL__) \
119 || defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
120 # define sha1_block_host_order sha1_block_asm_host_order
121 # define DONT_IMPLEMENT_BLOCK_HOST_ORDER
122 # define sha1_block_data_order sha1_block_asm_data_order
123 # define DONT_IMPLEMENT_BLOCK_DATA_ORDER
/* x86 asm copes with unaligned input, so it also serves the aligned path */
124 # define HASH_BLOCK_DATA_ORDER_ALIGNED sha1_block_asm_data_order
125 # elif defined(__ia64) || defined(__ia64__) || defined(_M_IA64)
126 # define sha1_block_host_order sha1_block_asm_host_order
127 # define DONT_IMPLEMENT_BLOCK_HOST_ORDER
128 # define sha1_block_data_order sha1_block_asm_data_order
129 # define DONT_IMPLEMENT_BLOCK_DATA_ORDER
132 void sha1_block_host_order (SHA_CTX *c, const void *p,size_t num);
133 void sha1_block_data_order (SHA_CTX *c, const void *p,size_t num);
/* Compile-time guard: exactly one of SHA_0 / SHA_1 must select a branch. */
136 # error "Either SHA_0 or SHA_1 must be defined."
/* Pulls in the generic Update/Final/Transform driver built on the macros above. */
139 #include "md32_common.h"
/*
 * Initial hash value H(0) for SHA-0/SHA-1 as specified in FIPS 180-1,
 * section 7 (the well-known 67452301... constants).
 */
141 #define INIT_DATA_h0 0x67452301UL
142 #define INIT_DATA_h1 0xefcdab89UL
143 #define INIT_DATA_h2 0x98badcfeUL
144 #define INIT_DATA_h3 0x10325476UL
145 #define INIT_DATA_h4 0xc3d2e1f0UL
/*
 * Context initializer (SHA_Init or SHA1_Init depending on the branch above).
 * NOTE(review): the body is elided from this view; presumably it sets
 * h0..h4 to the INIT_DATA_* values and zeroes the length/buffer state --
 * confirm against the full file.
 */
147 int HASH_INIT (SHA_CTX *c)
/* Per-20-round additive constants K(t), FIPS 180-1 section 5. */
160 #define K_00_19 0x5a827999UL
161 #define K_20_39 0x6ed9eba1UL
162 #define K_40_59 0x8f1bbcdcUL
163 #define K_60_79 0xca62c1d6UL
165 /* As pointed out by Wei Dai <weidai@eskimo.com>, F() below can be
166 * simplified to the code in F_00_19. Wei attributes these optimisations
167 * to Peter Gutmann's SHS code, and he attributes it to Rich Schroeppel.
168 * #define F(x,y,z) (((x) & (y)) | ((~(x)) & (z)))
169 * I've just become aware of another tweak to be made, again from Wei Dai,
170 * in F_40_59, (x&a)|(y&a) -> (x|y)&a
/* Round functions f(t): Ch (rewritten per the note above), Parity, Maj, Parity. */
172 #define F_00_19(b,c,d) ((((c) ^ (d)) & (b)) ^ (d))
173 #define F_20_39(b,c,d) ((b) ^ (c) ^ (d))
174 #define F_40_59(b,c,d) (((b) & (c)) | (((b)|(c)) & (d)))
175 #define F_60_79(b,c,d) F_20_39(b,c,d)
177 #ifndef OPENSSL_SMALL_FOOTPRINT
/*
 * Fully-unrolled round macros. Each computes one SHA round into (f),
 * the new value of the "a" register; the callers rotate the register
 * names instead of shuffling values. NOTE(review): each macro's tail
 * (the '(b)=ROTATE((b),30);' step) is elided from this view of the file.
 */
/* Rounds 0-15: message word used directly, no schedule update. */
179 #define BODY_00_15(i,a,b,c,d,e,f,xi) \
180 (f)=xi+(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
/* Rounds 16-19: first schedule updates; result also stored via xi. */
183 #define BODY_16_19(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
184 Xupdate(f,xi,xa,xb,xc,xd); \
185 (f)+=(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
/* Rounds 20-31: Parity round function, K_20_39 constant. */
188 #define BODY_20_31(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
189 Xupdate(f,xi,xa,xb,xc,xd); \
190 (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
/* Rounds 32-39: like 20-31 but the schedule word lives in xa itself. */
193 #define BODY_32_39(i,a,b,c,d,e,f,xa,xb,xc,xd) \
194 Xupdate(f,xa,xa,xb,xc,xd); \
195 (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
/* Rounds 40-59: Maj round function, K_40_59 constant. */
198 #define BODY_40_59(i,a,b,c,d,e,f,xa,xb,xc,xd) \
199 Xupdate(f,xa,xa,xb,xc,xd); \
200 (f)+=(e)+K_40_59+ROTATE((a),5)+F_40_59((b),(c),(d)); \
/* Rounds 60-79: Parity again, K_60_79 constant. */
203 #define BODY_60_79(i,a,b,c,d,e,f,xa,xb,xc,xd) \
204 Xupdate(f,xa,xa,xb,xc,xd); \
205 (f)=xa+(e)+K_60_79+ROTATE((a),5)+F_60_79((b),(c),(d)); \
213 * Originally X was an array. As it's automatic it's natural
214 * to expect a RISC compiler to accommodate at least part of it in
215 * the register bank, isn't it? Unfortunately not all compilers
216 * "find" this expectation reasonable:-( In order to make such
217 * compilers generate better code I replace X[] with a bunch of
218 * X0, X1, etc. See the function body below...
219 * <appro@fy.chalmers.se>
224 * However! Some compilers (most notably HP C) get overwhelmed by
225 * that many local variables so that we have to have the way to
226 * fall down to the original behavior.
231 #ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
/*
 * Compress 'num' 64-byte blocks whose 32-bit words are already in host
 * byte order. Fully unrolled: the five chaining variables plus one
 * temporary (T) rotate through the register names A..E,T each round, so
 * no per-round value shuffling is needed.
 * NOTE(review): the function's opening brace, the W/X() accessor macros,
 * the enclosing for(;;) loop, and the closing braces are elided from this
 * view of the file.
 */
232 void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
235 register unsigned MD32_REG_T A,B,C,D,E,T;
/* 16-word message-schedule window, split into scalars so register-starved
 * compilers can keep part of it in registers (see comment earlier in file). */
237 unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
238 XX8, XX9,XX10,XX11,XX12,XX13,XX14,XX15;
/* Rounds 0-15: consume the sixteen input words W[0..15] directly. */
251 BODY_00_15( 0,A,B,C,D,E,T,W[ 0]);
252 BODY_00_15( 1,T,A,B,C,D,E,W[ 1]);
253 BODY_00_15( 2,E,T,A,B,C,D,W[ 2]);
254 BODY_00_15( 3,D,E,T,A,B,C,W[ 3]);
255 BODY_00_15( 4,C,D,E,T,A,B,W[ 4]);
256 BODY_00_15( 5,B,C,D,E,T,A,W[ 5]);
257 BODY_00_15( 6,A,B,C,D,E,T,W[ 6]);
258 BODY_00_15( 7,T,A,B,C,D,E,W[ 7]);
259 BODY_00_15( 8,E,T,A,B,C,D,W[ 8]);
260 BODY_00_15( 9,D,E,T,A,B,C,W[ 9]);
261 BODY_00_15(10,C,D,E,T,A,B,W[10]);
262 BODY_00_15(11,B,C,D,E,T,A,W[11]);
263 BODY_00_15(12,A,B,C,D,E,T,W[12]);
264 BODY_00_15(13,T,A,B,C,D,E,W[13]);
265 BODY_00_15(14,E,T,A,B,C,D,W[14]);
266 BODY_00_15(15,D,E,T,A,B,C,W[15]);
/* Rounds 16-19: start the schedule, writing expanded words into X(i). */
268 BODY_16_19(16,C,D,E,T,A,B,X( 0),W[ 0],W[ 2],W[ 8],W[13]);
269 BODY_16_19(17,B,C,D,E,T,A,X( 1),W[ 1],W[ 3],W[ 9],W[14]);
270 BODY_16_19(18,A,B,C,D,E,T,X( 2),W[ 2],W[ 4],W[10],W[15]);
271 BODY_16_19(19,T,A,B,C,D,E,X( 3),W[ 3],W[ 5],W[11],X( 0));
/* Rounds 20-31: schedule inputs gradually shift from W[] to X(). */
273 BODY_20_31(20,E,T,A,B,C,D,X( 4),W[ 4],W[ 6],W[12],X( 1));
274 BODY_20_31(21,D,E,T,A,B,C,X( 5),W[ 5],W[ 7],W[13],X( 2));
275 BODY_20_31(22,C,D,E,T,A,B,X( 6),W[ 6],W[ 8],W[14],X( 3));
276 BODY_20_31(23,B,C,D,E,T,A,X( 7),W[ 7],W[ 9],W[15],X( 4));
277 BODY_20_31(24,A,B,C,D,E,T,X( 8),W[ 8],W[10],X( 0),X( 5));
278 BODY_20_31(25,T,A,B,C,D,E,X( 9),W[ 9],W[11],X( 1),X( 6));
279 BODY_20_31(26,E,T,A,B,C,D,X(10),W[10],W[12],X( 2),X( 7));
280 BODY_20_31(27,D,E,T,A,B,C,X(11),W[11],W[13],X( 3),X( 8));
281 BODY_20_31(28,C,D,E,T,A,B,X(12),W[12],W[14],X( 4),X( 9));
282 BODY_20_31(29,B,C,D,E,T,A,X(13),W[13],W[15],X( 5),X(10));
283 BODY_20_31(30,A,B,C,D,E,T,X(14),W[14],X( 0),X( 6),X(11));
284 BODY_20_31(31,T,A,B,C,D,E,X(15),W[15],X( 1),X( 7),X(12));
/* Rounds 32-39: schedule now runs entirely on the circular X() window. */
286 BODY_32_39(32,E,T,A,B,C,D,X( 0),X( 2),X( 8),X(13));
287 BODY_32_39(33,D,E,T,A,B,C,X( 1),X( 3),X( 9),X(14));
288 BODY_32_39(34,C,D,E,T,A,B,X( 2),X( 4),X(10),X(15));
289 BODY_32_39(35,B,C,D,E,T,A,X( 3),X( 5),X(11),X( 0));
290 BODY_32_39(36,A,B,C,D,E,T,X( 4),X( 6),X(12),X( 1));
291 BODY_32_39(37,T,A,B,C,D,E,X( 5),X( 7),X(13),X( 2));
292 BODY_32_39(38,E,T,A,B,C,D,X( 6),X( 8),X(14),X( 3));
293 BODY_32_39(39,D,E,T,A,B,C,X( 7),X( 9),X(15),X( 4));
/* Rounds 40-59: Maj round function. */
295 BODY_40_59(40,C,D,E,T,A,B,X( 8),X(10),X( 0),X( 5));
296 BODY_40_59(41,B,C,D,E,T,A,X( 9),X(11),X( 1),X( 6));
297 BODY_40_59(42,A,B,C,D,E,T,X(10),X(12),X( 2),X( 7));
298 BODY_40_59(43,T,A,B,C,D,E,X(11),X(13),X( 3),X( 8));
299 BODY_40_59(44,E,T,A,B,C,D,X(12),X(14),X( 4),X( 9));
300 BODY_40_59(45,D,E,T,A,B,C,X(13),X(15),X( 5),X(10));
301 BODY_40_59(46,C,D,E,T,A,B,X(14),X( 0),X( 6),X(11));
302 BODY_40_59(47,B,C,D,E,T,A,X(15),X( 1),X( 7),X(12));
303 BODY_40_59(48,A,B,C,D,E,T,X( 0),X( 2),X( 8),X(13));
304 BODY_40_59(49,T,A,B,C,D,E,X( 1),X( 3),X( 9),X(14));
305 BODY_40_59(50,E,T,A,B,C,D,X( 2),X( 4),X(10),X(15));
306 BODY_40_59(51,D,E,T,A,B,C,X( 3),X( 5),X(11),X( 0));
307 BODY_40_59(52,C,D,E,T,A,B,X( 4),X( 6),X(12),X( 1));
308 BODY_40_59(53,B,C,D,E,T,A,X( 5),X( 7),X(13),X( 2));
309 BODY_40_59(54,A,B,C,D,E,T,X( 6),X( 8),X(14),X( 3));
310 BODY_40_59(55,T,A,B,C,D,E,X( 7),X( 9),X(15),X( 4));
311 BODY_40_59(56,E,T,A,B,C,D,X( 8),X(10),X( 0),X( 5));
312 BODY_40_59(57,D,E,T,A,B,C,X( 9),X(11),X( 1),X( 6));
313 BODY_40_59(58,C,D,E,T,A,B,X(10),X(12),X( 2),X( 7));
314 BODY_40_59(59,B,C,D,E,T,A,X(11),X(13),X( 3),X( 8));
/* Rounds 60-79: final Parity rounds. */
316 BODY_60_79(60,A,B,C,D,E,T,X(12),X(14),X( 4),X( 9));
317 BODY_60_79(61,T,A,B,C,D,E,X(13),X(15),X( 5),X(10));
318 BODY_60_79(62,E,T,A,B,C,D,X(14),X( 0),X( 6),X(11));
319 BODY_60_79(63,D,E,T,A,B,C,X(15),X( 1),X( 7),X(12));
320 BODY_60_79(64,C,D,E,T,A,B,X( 0),X( 2),X( 8),X(13));
321 BODY_60_79(65,B,C,D,E,T,A,X( 1),X( 3),X( 9),X(14));
322 BODY_60_79(66,A,B,C,D,E,T,X( 2),X( 4),X(10),X(15));
323 BODY_60_79(67,T,A,B,C,D,E,X( 3),X( 5),X(11),X( 0));
324 BODY_60_79(68,E,T,A,B,C,D,X( 4),X( 6),X(12),X( 1));
325 BODY_60_79(69,D,E,T,A,B,C,X( 5),X( 7),X(13),X( 2));
326 BODY_60_79(70,C,D,E,T,A,B,X( 6),X( 8),X(14),X( 3));
327 BODY_60_79(71,B,C,D,E,T,A,X( 7),X( 9),X(15),X( 4));
328 BODY_60_79(72,A,B,C,D,E,T,X( 8),X(10),X( 0),X( 5));
329 BODY_60_79(73,T,A,B,C,D,E,X( 9),X(11),X( 1),X( 6));
330 BODY_60_79(74,E,T,A,B,C,D,X(10),X(12),X( 2),X( 7));
331 BODY_60_79(75,D,E,T,A,B,C,X(11),X(13),X( 3),X( 8));
332 BODY_60_79(76,C,D,E,T,A,B,X(12),X(14),X( 4),X( 9));
333 BODY_60_79(77,B,C,D,E,T,A,X(13),X(15),X( 5),X(10));
334 BODY_60_79(78,A,B,C,D,E,T,X(14),X( 0),X( 6),X(11));
335 BODY_60_79(79,T,A,B,C,D,E,X(15),X( 1),X( 7),X(12));
/*
 * After 80 rounds the rotating register names have shifted so that
 * E,T,A,B,C hold the new a..e; fold them into the chaining state.
 * The & 0xffffffff keeps the values 32-bit when MD32_REG_T is wider.
 */
337 c->h0=(c->h0+E)&0xffffffffL;
338 c->h1=(c->h1+T)&0xffffffffL;
339 c->h2=(c->h2+A)&0xffffffffL;
340 c->h3=(c->h3+B)&0xffffffffL;
341 c->h4=(c->h4+C)&0xffffffffL;
/* loop once per input block; exit when all blocks are consumed */
343 if (--num == 0) break;
356 #ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
/*
 * Compress 'num' 64-byte blocks of raw big-endian input bytes. Identical
 * round structure to the host-order variant above, but each word is first
 * loaded with HOST_c2l (big-endian bytes -> host 32-bit word), interleaved
 * with the early rounds to hide the load latency.
 * NOTE(review): the opening brace, the X() accessor macro, the enclosing
 * for(;;) loop, and the closing braces are elided from this view.
 */
357 void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
359 const unsigned char *data=p;
/* l is the load temporary for HOST_c2l */
360 register unsigned MD32_REG_T A,B,C,D,E,T,l;
362 unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
363 XX8, XX9,XX10,XX11,XX12,XX13,XX14,XX15;
/* Rounds 0-15: convert words from the byte stream while rounds execute. */
377 HOST_c2l(data,l); X( 0)=l; HOST_c2l(data,l); X( 1)=l;
378 BODY_00_15( 0,A,B,C,D,E,T,X( 0)); HOST_c2l(data,l); X( 2)=l;
379 BODY_00_15( 1,T,A,B,C,D,E,X( 1)); HOST_c2l(data,l); X( 3)=l;
380 BODY_00_15( 2,E,T,A,B,C,D,X( 2)); HOST_c2l(data,l); X( 4)=l;
381 BODY_00_15( 3,D,E,T,A,B,C,X( 3)); HOST_c2l(data,l); X( 5)=l;
382 BODY_00_15( 4,C,D,E,T,A,B,X( 4)); HOST_c2l(data,l); X( 6)=l;
383 BODY_00_15( 5,B,C,D,E,T,A,X( 5)); HOST_c2l(data,l); X( 7)=l;
384 BODY_00_15( 6,A,B,C,D,E,T,X( 6)); HOST_c2l(data,l); X( 8)=l;
385 BODY_00_15( 7,T,A,B,C,D,E,X( 7)); HOST_c2l(data,l); X( 9)=l;
386 BODY_00_15( 8,E,T,A,B,C,D,X( 8)); HOST_c2l(data,l); X(10)=l;
387 BODY_00_15( 9,D,E,T,A,B,C,X( 9)); HOST_c2l(data,l); X(11)=l;
388 BODY_00_15(10,C,D,E,T,A,B,X(10)); HOST_c2l(data,l); X(12)=l;
389 BODY_00_15(11,B,C,D,E,T,A,X(11)); HOST_c2l(data,l); X(13)=l;
390 BODY_00_15(12,A,B,C,D,E,T,X(12)); HOST_c2l(data,l); X(14)=l;
391 BODY_00_15(13,T,A,B,C,D,E,X(13)); HOST_c2l(data,l); X(15)=l;
392 BODY_00_15(14,E,T,A,B,C,D,X(14));
393 BODY_00_15(15,D,E,T,A,B,C,X(15));
/* Rounds 16-19: begin the message-schedule expansion in place. */
395 BODY_16_19(16,C,D,E,T,A,B,X( 0),X( 0),X( 2),X( 8),X(13));
396 BODY_16_19(17,B,C,D,E,T,A,X( 1),X( 1),X( 3),X( 9),X(14));
397 BODY_16_19(18,A,B,C,D,E,T,X( 2),X( 2),X( 4),X(10),X(15));
398 BODY_16_19(19,T,A,B,C,D,E,X( 3),X( 3),X( 5),X(11),X( 0));
/* Rounds 20-31. */
400 BODY_20_31(20,E,T,A,B,C,D,X( 4),X( 4),X( 6),X(12),X( 1));
401 BODY_20_31(21,D,E,T,A,B,C,X( 5),X( 5),X( 7),X(13),X( 2));
402 BODY_20_31(22,C,D,E,T,A,B,X( 6),X( 6),X( 8),X(14),X( 3));
403 BODY_20_31(23,B,C,D,E,T,A,X( 7),X( 7),X( 9),X(15),X( 4));
404 BODY_20_31(24,A,B,C,D,E,T,X( 8),X( 8),X(10),X( 0),X( 5));
405 BODY_20_31(25,T,A,B,C,D,E,X( 9),X( 9),X(11),X( 1),X( 6));
406 BODY_20_31(26,E,T,A,B,C,D,X(10),X(10),X(12),X( 2),X( 7));
407 BODY_20_31(27,D,E,T,A,B,C,X(11),X(11),X(13),X( 3),X( 8));
408 BODY_20_31(28,C,D,E,T,A,B,X(12),X(12),X(14),X( 4),X( 9));
409 BODY_20_31(29,B,C,D,E,T,A,X(13),X(13),X(15),X( 5),X(10));
410 BODY_20_31(30,A,B,C,D,E,T,X(14),X(14),X( 0),X( 6),X(11));
411 BODY_20_31(31,T,A,B,C,D,E,X(15),X(15),X( 1),X( 7),X(12));
/* Rounds 32-39. */
413 BODY_32_39(32,E,T,A,B,C,D,X( 0),X( 2),X( 8),X(13));
414 BODY_32_39(33,D,E,T,A,B,C,X( 1),X( 3),X( 9),X(14));
415 BODY_32_39(34,C,D,E,T,A,B,X( 2),X( 4),X(10),X(15));
416 BODY_32_39(35,B,C,D,E,T,A,X( 3),X( 5),X(11),X( 0));
417 BODY_32_39(36,A,B,C,D,E,T,X( 4),X( 6),X(12),X( 1));
418 BODY_32_39(37,T,A,B,C,D,E,X( 5),X( 7),X(13),X( 2));
419 BODY_32_39(38,E,T,A,B,C,D,X( 6),X( 8),X(14),X( 3));
420 BODY_32_39(39,D,E,T,A,B,C,X( 7),X( 9),X(15),X( 4));
/* Rounds 40-59: Maj round function. */
422 BODY_40_59(40,C,D,E,T,A,B,X( 8),X(10),X( 0),X( 5));
423 BODY_40_59(41,B,C,D,E,T,A,X( 9),X(11),X( 1),X( 6));
424 BODY_40_59(42,A,B,C,D,E,T,X(10),X(12),X( 2),X( 7));
425 BODY_40_59(43,T,A,B,C,D,E,X(11),X(13),X( 3),X( 8));
426 BODY_40_59(44,E,T,A,B,C,D,X(12),X(14),X( 4),X( 9));
427 BODY_40_59(45,D,E,T,A,B,C,X(13),X(15),X( 5),X(10));
428 BODY_40_59(46,C,D,E,T,A,B,X(14),X( 0),X( 6),X(11));
429 BODY_40_59(47,B,C,D,E,T,A,X(15),X( 1),X( 7),X(12));
430 BODY_40_59(48,A,B,C,D,E,T,X( 0),X( 2),X( 8),X(13));
431 BODY_40_59(49,T,A,B,C,D,E,X( 1),X( 3),X( 9),X(14));
432 BODY_40_59(50,E,T,A,B,C,D,X( 2),X( 4),X(10),X(15));
433 BODY_40_59(51,D,E,T,A,B,C,X( 3),X( 5),X(11),X( 0));
434 BODY_40_59(52,C,D,E,T,A,B,X( 4),X( 6),X(12),X( 1));
435 BODY_40_59(53,B,C,D,E,T,A,X( 5),X( 7),X(13),X( 2));
436 BODY_40_59(54,A,B,C,D,E,T,X( 6),X( 8),X(14),X( 3));
437 BODY_40_59(55,T,A,B,C,D,E,X( 7),X( 9),X(15),X( 4));
438 BODY_40_59(56,E,T,A,B,C,D,X( 8),X(10),X( 0),X( 5));
439 BODY_40_59(57,D,E,T,A,B,C,X( 9),X(11),X( 1),X( 6));
440 BODY_40_59(58,C,D,E,T,A,B,X(10),X(12),X( 2),X( 7));
441 BODY_40_59(59,B,C,D,E,T,A,X(11),X(13),X( 3),X( 8));
/* Rounds 60-79: final Parity rounds. */
443 BODY_60_79(60,A,B,C,D,E,T,X(12),X(14),X( 4),X( 9));
444 BODY_60_79(61,T,A,B,C,D,E,X(13),X(15),X( 5),X(10));
445 BODY_60_79(62,E,T,A,B,C,D,X(14),X( 0),X( 6),X(11));
446 BODY_60_79(63,D,E,T,A,B,C,X(15),X( 1),X( 7),X(12));
447 BODY_60_79(64,C,D,E,T,A,B,X( 0),X( 2),X( 8),X(13));
448 BODY_60_79(65,B,C,D,E,T,A,X( 1),X( 3),X( 9),X(14));
449 BODY_60_79(66,A,B,C,D,E,T,X( 2),X( 4),X(10),X(15));
450 BODY_60_79(67,T,A,B,C,D,E,X( 3),X( 5),X(11),X( 0));
451 BODY_60_79(68,E,T,A,B,C,D,X( 4),X( 6),X(12),X( 1));
452 BODY_60_79(69,D,E,T,A,B,C,X( 5),X( 7),X(13),X( 2));
453 BODY_60_79(70,C,D,E,T,A,B,X( 6),X( 8),X(14),X( 3));
454 BODY_60_79(71,B,C,D,E,T,A,X( 7),X( 9),X(15),X( 4));
455 BODY_60_79(72,A,B,C,D,E,T,X( 8),X(10),X( 0),X( 5));
456 BODY_60_79(73,T,A,B,C,D,E,X( 9),X(11),X( 1),X( 6));
457 BODY_60_79(74,E,T,A,B,C,D,X(10),X(12),X( 2),X( 7));
458 BODY_60_79(75,D,E,T,A,B,C,X(11),X(13),X( 3),X( 8));
459 BODY_60_79(76,C,D,E,T,A,B,X(12),X(14),X( 4),X( 9));
460 BODY_60_79(77,B,C,D,E,T,A,X(13),X(15),X( 5),X(10));
461 BODY_60_79(78,A,B,C,D,E,T,X(14),X( 0),X( 6),X(11));
462 BODY_60_79(79,T,A,B,C,D,E,X(15),X( 1),X( 7),X(12));
/* Registers have rotated: E,T,A,B,C hold the new a..e. Fold into state. */
464 c->h0=(c->h0+E)&0xffffffffL;
465 c->h1=(c->h1+T)&0xffffffffL;
466 c->h2=(c->h2+A)&0xffffffffL;
467 c->h3=(c->h3+B)&0xffffffffL;
468 c->h4=(c->h4+C)&0xffffffffL;
/* loop once per input block; exit when all blocks are consumed */
470 if (--num == 0) break;
482 #else /* OPENSSL_SMALL_FOOTPRINT */
/*
 * Small-footprint round macros: instead of rotating register *names* per
 * round (as the unrolled version does), each macro performs one full round
 * and explicitly shifts the values E<-D<-C<-rot30(B)<-A, then computes the
 * new A. T is the round temporary. All operate on fixed variables
 * A,B,C,D,E,T in the enclosing function.
 */
/* Rounds 0-15: message word xi used directly. */
484 #define BODY_00_15(xi) do { \
485 T=E+K_00_19+F_00_19(B,C,D); \
486 E=D, D=C, C=ROTATE(B,30), B=A; \
487 A=ROTATE(A,5)+T+xi; } while(0)
/* Rounds 16-19: expand the schedule into xa first (Ch function, K_00_19). */
489 #define BODY_16_19(xa,xb,xc,xd) do { \
490 Xupdate(T,xa,xa,xb,xc,xd); \
491 T+=E+K_00_19+F_00_19(B,C,D); \
492 E=D, D=C, C=ROTATE(B,30), B=A; \
493 A=ROTATE(A,5)+T; } while(0)
/* Rounds 20-39: Parity function, K_20_39. */
495 #define BODY_20_39(xa,xb,xc,xd) do { \
496 Xupdate(T,xa,xa,xb,xc,xd); \
497 T+=E+K_20_39+F_20_39(B,C,D); \
498 E=D, D=C, C=ROTATE(B,30), B=A; \
499 A=ROTATE(A,5)+T; } while(0)
/* Rounds 40-59: Maj function, K_40_59. */
501 #define BODY_40_59(xa,xb,xc,xd) do { \
502 Xupdate(T,xa,xa,xb,xc,xd); \
503 T+=E+K_40_59+F_40_59(B,C,D); \
504 E=D, D=C, C=ROTATE(B,30), B=A; \
505 A=ROTATE(A,5)+T; } while(0)
/* Rounds 60-79: Parity function, K_60_79; note xa (already updated by
 * Xupdate) is added when forming the new A rather than into T. */
507 #define BODY_60_79(xa,xb,xc,xd) do { \
508 Xupdate(T,xa,xa,xb,xc,xd); \
509 T=E+K_60_79+F_60_79(B,C,D); \
510 E=D, D=C, C=ROTATE(B,30), B=A; \
511 A=ROTATE(A,5)+T+xa; } while(0)
513 #ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
/*
 * Small-footprint variant: compress 'num' 64-byte blocks already in host
 * byte order, using compact loops over a 16-word circular schedule X[]
 * instead of full unrolling.
 * NOTE(review): the opening brace, the declarations of X[]/W/i, the 'for'
 * loop headers for each round group, the outer block loop, and the closing
 * braces are elided from this view of the file.
 */
514 void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
517 register unsigned MD32_REG_T A,B,C,D,E,T;
/* rounds 0-15: copy input words into the schedule window as they are used */
530 { X[i]=W[i]; BODY_00_15(X[i]); }
/* rounds 16-19: first schedule expansions (indices still mostly direct) */
532 { BODY_16_19(X[i], X[i+2], X[i+8], X[(i+13)&15]); }
/* rounds 20-39: circular indexing (&15) over the 16-word window */
534 { BODY_20_39(X[i&15], X[(i+2)&15], X[(i+8)&15],X[(i+13)&15]); }
/* rounds 40-59: same window, offsets shifted by 8 relative to i */
536 { BODY_40_59(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); }
/* rounds 60-79 */
538 { BODY_60_79(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); }
/* values (not names) were shifted each round, so A..E map straight to h0..h4 */
540 c->h0=(c->h0+A)&0xffffffffL;
541 c->h1=(c->h1+B)&0xffffffffL;
542 c->h2=(c->h2+C)&0xffffffffL;
543 c->h3=(c->h3+D)&0xffffffffL;
544 c->h4=(c->h4+E)&0xffffffffL;
/* loop once per input block; exit when all blocks are consumed */
546 if (--num == 0) break;
559 #ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
/*
 * Small-footprint variant for raw big-endian input bytes: identical to the
 * host-order version above except each word is first loaded with HOST_c2l
 * (big-endian bytes -> host 32-bit word, temporary l).
 * NOTE(review): the opening brace, declarations, 'for' loop headers, outer
 * block loop, and closing braces are elided from this view of the file.
 */
560 void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
562 const unsigned char *data=p;
563 register unsigned MD32_REG_T A,B,C,D,E,T,l;
/* rounds 0-15: decode each big-endian word, then run one round on it */
576 { HOST_c2l(data,l); X[i]=l; BODY_00_15(X[i]); }
/* rounds 16-19 */
578 { BODY_16_19(X[i], X[i+2], X[i+8], X[(i+13)&15]); }
/* rounds 20-39: circular indexing (&15) over the 16-word window */
580 { BODY_20_39(X[i&15], X[(i+2)&15], X[(i+8)&15],X[(i+13)&15]); }
/* rounds 40-59 */
582 { BODY_40_59(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); }
/* rounds 60-79 */
584 { BODY_60_79(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); }
/* fold the round result straight into the chaining state */
586 c->h0=(c->h0+A)&0xffffffffL;
587 c->h1=(c->h1+B)&0xffffffffL;
588 c->h2=(c->h2+C)&0xffffffffL;
589 c->h3=(c->h3+D)&0xffffffffL;
590 c->h4=(c->h4+E)&0xffffffffL;
/* loop once per input block; exit when all blocks are consumed */
592 if (--num == 0) break;