crypto/chacha/asm/chacha-c64xplus.pl
#!/usr/bin/env perl
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# ChaCha20 for C64x+.
#
# October 2015
#
# Performance is 3.54 cycles per processed byte, which is ~4.3 times
# faster than code generated by the TI compiler. The compiler also
# disables interrupts for some reason, making interrupt response time
# dependent on input length. This module, on the other hand, is free
# from such limitation.

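# The module provides OpenSSL's usual ChaCha entry point, i.e.
#
#	void ChaCha20_ctr32(unsigned char *out, const unsigned char *inp,
#			    size_t len, const unsigned int key[8],
#			    const unsigned int counter[4]);
#
# with arguments arriving in A4, B4, A6, B6 and A8 per the C6x calling
# convention; see the register assignments just below.
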
$output=pop;
open STDOUT,">$output";

($OUT,$INP,$LEN,$KEYB,$COUNTERA)=("A4","B4","A6","B6","A8");
($KEYA,$COUNTERB,$STEP)=("A7","B7","A3");

@X=  ("A16","B16","A17","B17","A18","B18","A19","B19",
      "A20","B20","A21","B21","A22","B22","A23","B23");
@Y=  ("A24","B24","A25","B25","A26","B26","A27","B27",
      "A28","B28","A29","B29","A30","B30","A31","B31");
@DAT=("A6", "A7", "B6", "B7", "A8", "A9", "B8", "B9",
      "A10","A11","B10","B11","A12","A13","B12","B13");
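
# @X holds the working state of the (first) block, @Y the second block's
# state in the 2x code path (and the saved initial state in the 1x path),
# while @DAT stages input data for the final XOR.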

# yes, overlaps with @DAT, used only in 2x interleave code path...
@K2x=("A6", "B6", "A7", "B7", "A8", "B8", "A9", "B9",
      "A10","B10","A11","B11","A2", "B2", "A13","B13");

$code.=<<___;
        .text

        .if     .ASSEMBLER_VERSION<7000000
        .asg    0,__TI_EABI__
        .endif
        .if     __TI_EABI__
        .asg    ChaCha20_ctr32,_ChaCha20_ctr32
        .endif

        .asg    B3,RA
        .asg    A15,FP
        .asg    B15,SP

        .global _ChaCha20_ctr32
        .align  32
_ChaCha20_ctr32:
        .asmfunc        stack_usage(40+64)
        MV      $LEN,A0                 ; reassign
  [!A0] BNOP    RA                      ; no data
|| [A0] STW     FP,*SP--(40+64)         ; save frame pointer and alloca(40+64)
|| [A0] MV      SP,FP
   [A0] STDW    B13:B12,*SP[4+8]        ; ABI says so
|| [A0] MV      $KEYB,$KEYA
|| [A0] MV      $COUNTERA,$COUNTERB
   [A0] STDW    B11:B10,*SP[3+8]
|| [A0] STDW    A13:A12,*FP[-3]
   [A0] STDW    A11:A10,*FP[-4]
|| [A0] MVK     128,$STEP               ; 2 * input block size

   [A0] LDW     *${KEYA}[0],@Y[4]       ; load key
|| [A0] LDW     *${KEYB}[1],@Y[5]
|| [A0] MVK     0x00007865,@Y[0]        ; synthesize sigma
|| [A0] MVK     0x0000646e,@Y[1]
   [A0] LDW     *${KEYA}[2],@Y[6]
|| [A0] LDW     *${KEYB}[3],@Y[7]
|| [A0] MVKH    0x61700000,@Y[0]
|| [A0] MVKH    0x33200000,@Y[1]
        LDW     *${KEYA}[4],@Y[8]
||      LDW     *${KEYB}[5],@Y[9]
||      MVK     0x00002d32,@Y[2]
||      MVK     0x00006574,@Y[3]
        LDW     *${KEYA}[6],@Y[10]
||      LDW     *${KEYB}[7],@Y[11]
||      MVKH    0x79620000,@Y[2]
||      MVKH    0x6b200000,@Y[3]
        LDW     *${COUNTERA}[0],@Y[12]  ; load counter||nonce
||      LDW     *${COUNTERB}[1],@Y[13]
||      CMPLTU  A0,$STEP,A1             ; is length < 2*blocks?
        LDW     *${COUNTERA}[2],@Y[14]
||      LDW     *${COUNTERB}[3],@Y[15]
|| [A1] BNOP    top1x?
   [A1] MVK     64,$STEP                ; input block size
||      MVK     10,B0                   ; inner loop counter

        DMV     @Y[2],@Y[0],@X[2]:@X[0] ; copy block
||      DMV     @Y[3],@Y[1],@X[3]:@X[1]
||[!A1] STDW    @Y[2]:@Y[0],*FP[-12]    ; offload key material to stack
||[!A1] STDW    @Y[3]:@Y[1],*SP[2]
        DMV     @Y[6],@Y[4],@X[6]:@X[4]
||      DMV     @Y[7],@Y[5],@X[7]:@X[5]
||[!A1] STDW    @Y[6]:@Y[4],*FP[-10]
||[!A1] STDW    @Y[7]:@Y[5],*SP[4]
        DMV     @Y[10],@Y[8],@X[10]:@X[8]
||      DMV     @Y[11],@Y[9],@X[11]:@X[9]
||[!A1] STDW    @Y[10]:@Y[8],*FP[-8]
||[!A1] STDW    @Y[11]:@Y[9],*SP[6]
        DMV     @Y[14],@Y[12],@X[14]:@X[12]
||      DMV     @Y[15],@Y[13],@X[15]:@X[13]
||[!A1] MV      @Y[12],@K2x[12]         ; counter
||[!A1] MV      @Y[13],@K2x[13]
||[!A1] STW     @Y[14],*FP[-6*2]
||[!A1] STW     @Y[15],*SP[8*2]
___
{       ################################################################
        # 2x interleave gives 50% performance improvement
        #
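        # Two 64-byte blocks are processed in parallel: @X carries the
        # block with the current counter value, @Y the one with counter+1
        # (see "adjust counter for 2nd block" below), while the initial
        # state is parked on the stack and in @K2x for the final addition.
        #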
my ($a0,$a1,$a2,$a3) = (0..3);
my ($b0,$b1,$b2,$b3) = (4..7);
my ($c0,$c1,$c2,$c3) = (8..11);
my ($d0,$d1,$d2,$d3) = (12..15);

$code.=<<___;
outer2x?:
        ADD     @X[$b1],@X[$a1],@X[$a1]
||      ADD     @X[$b2],@X[$a2],@X[$a2]
||      ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||       DMV    @Y[2],@Y[0],@K2x[2]:@K2x[0]
||       DMV    @Y[3],@Y[1],@K2x[3]:@K2x[1]
        XOR     @X[$a1],@X[$d1],@X[$d1]
||      XOR     @X[$a2],@X[$d2],@X[$d2]
||      XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||       DMV    @Y[6],@Y[4],@K2x[6]:@K2x[4]
||       DMV    @Y[7],@Y[5],@K2x[7]:@K2x[5]
        SWAP2   @X[$d1],@X[$d1]         ; rotate by 16
||      SWAP2   @X[$d2],@X[$d2]
||      SWAP2   @X[$d0],@X[$d0]
||      SWAP2   @X[$d3],@X[$d3]

        ADD     @X[$d1],@X[$c1],@X[$c1]
||      ADD     @X[$d2],@X[$c2],@X[$c2]
||      ADD     @X[$d0],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c3],@X[$c3]
||       DMV    @Y[10],@Y[8],@K2x[10]:@K2x[8]
||       DMV    @Y[11],@Y[9],@K2x[11]:@K2x[9]
        XOR     @X[$c1],@X[$b1],@X[$b1]
||      XOR     @X[$c2],@X[$b2],@X[$b2]
||      XOR     @X[$c0],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b3],@X[$b3]
||       ADD    1,@Y[12],@Y[12]         ; adjust counter for 2nd block
        ROTL    @X[$b1],12,@X[$b1]
||      ROTL    @X[$b2],12,@X[$b2]
||       MV     @Y[14],@K2x[14]
||       MV     @Y[15],@K2x[15]
top2x?:
        ROTL    @X[$b0],12,@X[$b0]
||      ROTL    @X[$b3],12,@X[$b3]
||       ADD    @Y[$b1],@Y[$a1],@Y[$a1]
||       ADD    @Y[$b2],@Y[$a2],@Y[$a2]
         ADD    @Y[$b0],@Y[$a0],@Y[$a0]
||       ADD    @Y[$b3],@Y[$a3],@Y[$a3]

||      ADD     @X[$b1],@X[$a1],@X[$a1]
||      ADD     @X[$b2],@X[$a2],@X[$a2]
||       XOR    @Y[$a1],@Y[$d1],@Y[$d1]
||       XOR    @Y[$a2],@Y[$d2],@Y[$d2]
         XOR    @Y[$a0],@Y[$d0],@Y[$d0]
||       XOR    @Y[$a3],@Y[$d3],@Y[$d3]
||      ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
||      XOR     @X[$a2],@X[$d2],@X[$d2]
        XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      ROTL    @X[$d1],8,@X[$d1]
||      ROTL    @X[$d2],8,@X[$d2]
||       SWAP2  @Y[$d1],@Y[$d1]         ; rotate by 16
||       SWAP2  @Y[$d2],@Y[$d2]
||       SWAP2  @Y[$d0],@Y[$d0]
||       SWAP2  @Y[$d3],@Y[$d3]
        ROTL    @X[$d0],8,@X[$d0]
||      ROTL    @X[$d3],8,@X[$d3]
||       ADD    @Y[$d1],@Y[$c1],@Y[$c1]
||       ADD    @Y[$d2],@Y[$c2],@Y[$c2]
||       ADD    @Y[$d0],@Y[$c0],@Y[$c0]
||       ADD    @Y[$d3],@Y[$c3],@Y[$c3]
||      BNOP    middle2x1?              ; protect from interrupt

        ADD     @X[$d1],@X[$c1],@X[$c1]
||      ADD     @X[$d2],@X[$c2],@X[$c2]
||       XOR    @Y[$c1],@Y[$b1],@Y[$b1]
||       XOR    @Y[$c2],@Y[$b2],@Y[$b2]
||       XOR    @Y[$c0],@Y[$b0],@Y[$b0]
||       XOR    @Y[$c3],@Y[$b3],@Y[$b3]
        ADD     @X[$d0],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c3],@X[$c3]
||      XOR     @X[$c1],@X[$b1],@X[$b1]
||      XOR     @X[$c2],@X[$b2],@X[$b2]
||      ROTL    @X[$d1],0,@X[$d2]       ; moved to avoid cross-path stall
||      ROTL    @X[$d2],0,@X[$d3]
        XOR     @X[$c0],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b3],@X[$b3]
||      MV      @X[$d0],@X[$d1]
||      MV      @X[$d3],@X[$d0]
||       ROTL   @Y[$b1],12,@Y[$b1]
||       ROTL   @Y[$b2],12,@Y[$b2]
        ROTL    @X[$b1],7,@X[$b0]       ; avoided cross-path stall
||      ROTL    @X[$b2],7,@X[$b1]
        ROTL    @X[$b0],7,@X[$b3]
||      ROTL    @X[$b3],7,@X[$b2]
middle2x1?:

         ROTL   @Y[$b0],12,@Y[$b0]
||       ROTL   @Y[$b3],12,@Y[$b3]
||      ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b1],@X[$a1],@X[$a1]
        ADD     @X[$b2],@X[$a2],@X[$a2]
||      ADD     @X[$b3],@X[$a3],@X[$a3]

||       ADD    @Y[$b1],@Y[$a1],@Y[$a1]
||       ADD    @Y[$b2],@Y[$a2],@Y[$a2]
||      XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
        XOR     @X[$a2],@X[$d2],@X[$d2]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||       ADD    @Y[$b0],@Y[$a0],@Y[$a0]
||       ADD    @Y[$b3],@Y[$a3],@Y[$a3]
||       XOR    @Y[$a1],@Y[$d1],@Y[$d1]
||       XOR    @Y[$a2],@Y[$d2],@Y[$d2]
         XOR    @Y[$a0],@Y[$d0],@Y[$d0]
||       XOR    @Y[$a3],@Y[$d3],@Y[$d3]
||       ROTL   @Y[$d1],8,@Y[$d1]
||       ROTL   @Y[$d2],8,@Y[$d2]
||      SWAP2   @X[$d0],@X[$d0]         ; rotate by 16
||      SWAP2   @X[$d1],@X[$d1]
||      SWAP2   @X[$d2],@X[$d2]
||      SWAP2   @X[$d3],@X[$d3]
         ROTL   @Y[$d0],8,@Y[$d0]
||       ROTL   @Y[$d3],8,@Y[$d3]
||      ADD     @X[$d0],@X[$c2],@X[$c2]
||      ADD     @X[$d1],@X[$c3],@X[$c3]
||      ADD     @X[$d2],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c1],@X[$c1]
||      BNOP    middle2x2?              ; protect from interrupt

         ADD    @Y[$d1],@Y[$c1],@Y[$c1]
||       ADD    @Y[$d2],@Y[$c2],@Y[$c2]
||      XOR     @X[$c2],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b1],@X[$b1]
||      XOR     @X[$c0],@X[$b2],@X[$b2]
||      XOR     @X[$c1],@X[$b3],@X[$b3]
         ADD    @Y[$d0],@Y[$c0],@Y[$c0]
||       ADD    @Y[$d3],@Y[$c3],@Y[$c3]
||       XOR    @Y[$c1],@Y[$b1],@Y[$b1]
||       XOR    @Y[$c2],@Y[$b2],@Y[$b2]
||       ROTL   @Y[$d1],0,@Y[$d2]       ; moved to avoid cross-path stall
||       ROTL   @Y[$d2],0,@Y[$d3]
         XOR    @Y[$c0],@Y[$b0],@Y[$b0]
||       XOR    @Y[$c3],@Y[$b3],@Y[$b3]
||       MV     @Y[$d0],@Y[$d1]
||       MV     @Y[$d3],@Y[$d0]
||      ROTL    @X[$b0],12,@X[$b0]
||      ROTL    @X[$b1],12,@X[$b1]
         ROTL   @Y[$b1],7,@Y[$b0]       ; avoided cross-path stall
||       ROTL   @Y[$b2],7,@Y[$b1]
         ROTL   @Y[$b0],7,@Y[$b3]
||       ROTL   @Y[$b3],7,@Y[$b2]
middle2x2?:

        ROTL    @X[$b2],12,@X[$b2]
||      ROTL    @X[$b3],12,@X[$b3]
||       ADD    @Y[$b0],@Y[$a0],@Y[$a0]
||       ADD    @Y[$b1],@Y[$a1],@Y[$a1]
         ADD    @Y[$b2],@Y[$a2],@Y[$a2]
||       ADD    @Y[$b3],@Y[$a3],@Y[$a3]

||      ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b1],@X[$a1],@X[$a1]
||       XOR    @Y[$a0],@Y[$d0],@Y[$d0]
||       XOR    @Y[$a1],@Y[$d1],@Y[$d1]
         XOR    @Y[$a2],@Y[$d2],@Y[$d2]
||       XOR    @Y[$a3],@Y[$d3],@Y[$d3]
||      ADD     @X[$b2],@X[$a2],@X[$a2]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
        XOR     @X[$a2],@X[$d2],@X[$d2]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      ROTL    @X[$d0],8,@X[$d0]
||      ROTL    @X[$d1],8,@X[$d1]
||       SWAP2  @Y[$d0],@Y[$d0]         ; rotate by 16
||       SWAP2  @Y[$d1],@Y[$d1]
||       SWAP2  @Y[$d2],@Y[$d2]
||       SWAP2  @Y[$d3],@Y[$d3]
        ROTL    @X[$d2],8,@X[$d2]
||      ROTL    @X[$d3],8,@X[$d3]
||       ADD    @Y[$d0],@Y[$c2],@Y[$c2]
||       ADD    @Y[$d1],@Y[$c3],@Y[$c3]
||       ADD    @Y[$d2],@Y[$c0],@Y[$c0]
||       ADD    @Y[$d3],@Y[$c1],@Y[$c1]
||      BNOP    bottom2x1?              ; protect from interrupt

        ADD     @X[$d0],@X[$c2],@X[$c2]
||      ADD     @X[$d1],@X[$c3],@X[$c3]
||       XOR    @Y[$c2],@Y[$b0],@Y[$b0]
||       XOR    @Y[$c3],@Y[$b1],@Y[$b1]
||       XOR    @Y[$c0],@Y[$b2],@Y[$b2]
||       XOR    @Y[$c1],@Y[$b3],@Y[$b3]
        ADD     @X[$d2],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c1],@X[$c1]
||      XOR     @X[$c2],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b1],@X[$b1]
||      ROTL    @X[$d0],0,@X[$d3]       ; moved to avoid cross-path stall
||      ROTL    @X[$d1],0,@X[$d0]
        XOR     @X[$c0],@X[$b2],@X[$b2]
||      XOR     @X[$c1],@X[$b3],@X[$b3]
||      MV      @X[$d2],@X[$d1]
||      MV      @X[$d3],@X[$d2]
||       ROTL   @Y[$b0],12,@Y[$b0]
||       ROTL   @Y[$b1],12,@Y[$b1]
        ROTL    @X[$b0],7,@X[$b1]       ; avoided cross-path stall
||      ROTL    @X[$b1],7,@X[$b2]
        ROTL    @X[$b2],7,@X[$b3]
||      ROTL    @X[$b3],7,@X[$b0]
|| [B0] SUB     B0,1,B0                 ; decrement inner loop counter
bottom2x1?:

         ROTL   @Y[$b2],12,@Y[$b2]
||       ROTL   @Y[$b3],12,@Y[$b3]
|| [B0] ADD     @X[$b1],@X[$a1],@X[$a1] ; modulo-scheduled
|| [B0] ADD     @X[$b2],@X[$a2],@X[$a2]
   [B0] ADD     @X[$b0],@X[$a0],@X[$a0]
|| [B0] ADD     @X[$b3],@X[$a3],@X[$a3]

||       ADD    @Y[$b0],@Y[$a0],@Y[$a0]
||       ADD    @Y[$b1],@Y[$a1],@Y[$a1]
|| [B0] XOR     @X[$a1],@X[$d1],@X[$d1]
|| [B0] XOR     @X[$a2],@X[$d2],@X[$d2]
   [B0] XOR     @X[$a0],@X[$d0],@X[$d0]
|| [B0] XOR     @X[$a3],@X[$d3],@X[$d3]
||       ADD    @Y[$b2],@Y[$a2],@Y[$a2]
||       ADD    @Y[$b3],@Y[$a3],@Y[$a3]
||       XOR    @Y[$a0],@Y[$d0],@Y[$d0]
||       XOR    @Y[$a1],@Y[$d1],@Y[$d1]
         XOR    @Y[$a2],@Y[$d2],@Y[$d2]
||       XOR    @Y[$a3],@Y[$d3],@Y[$d3]
||       ROTL   @Y[$d0],8,@Y[$d0]
||       ROTL   @Y[$d1],8,@Y[$d1]
|| [B0] SWAP2   @X[$d1],@X[$d1]         ; rotate by 16
|| [B0] SWAP2   @X[$d2],@X[$d2]
|| [B0] SWAP2   @X[$d0],@X[$d0]
|| [B0] SWAP2   @X[$d3],@X[$d3]
         ROTL   @Y[$d2],8,@Y[$d2]
||       ROTL   @Y[$d3],8,@Y[$d3]
|| [B0] ADD     @X[$d1],@X[$c1],@X[$c1]
|| [B0] ADD     @X[$d2],@X[$c2],@X[$c2]
|| [B0] ADD     @X[$d0],@X[$c0],@X[$c0]
|| [B0] ADD     @X[$d3],@X[$c3],@X[$c3]
|| [B0] BNOP    top2x?                  ; even protects from interrupt

         ADD    @Y[$d0],@Y[$c2],@Y[$c2]
||       ADD    @Y[$d1],@Y[$c3],@Y[$c3]
|| [B0] XOR     @X[$c1],@X[$b1],@X[$b1]
|| [B0] XOR     @X[$c2],@X[$b2],@X[$b2]
|| [B0] XOR     @X[$c0],@X[$b0],@X[$b0]
|| [B0] XOR     @X[$c3],@X[$b3],@X[$b3]
         ADD    @Y[$d2],@Y[$c0],@Y[$c0]
||       ADD    @Y[$d3],@Y[$c1],@Y[$c1]
||       XOR    @Y[$c2],@Y[$b0],@Y[$b0]
||       XOR    @Y[$c3],@Y[$b1],@Y[$b1]
||       ROTL   @Y[$d0],0,@Y[$d3]       ; moved to avoid cross-path stall
||       ROTL   @Y[$d1],0,@Y[$d0]
         XOR    @Y[$c0],@Y[$b2],@Y[$b2]
||       XOR    @Y[$c1],@Y[$b3],@Y[$b3]
||       MV     @Y[$d2],@Y[$d1]
||       MV     @Y[$d3],@Y[$d2]
|| [B0] ROTL    @X[$b1],12,@X[$b1]
|| [B0] ROTL    @X[$b2],12,@X[$b2]
         ROTL   @Y[$b0],7,@Y[$b1]       ; avoided cross-path stall
||       ROTL   @Y[$b1],7,@Y[$b2]
         ROTL   @Y[$b2],7,@Y[$b3]
||       ROTL   @Y[$b3],7,@Y[$b0]
bottom2x2?:
___
}

$code.=<<___;
        ADD     @K2x[0],@X[0],@X[0]     ; accumulate key material
||      ADD     @K2x[1],@X[1],@X[1]
||      ADD     @K2x[2],@X[2],@X[2]
||      ADD     @K2x[3],@X[3],@X[3]
         ADD    @K2x[0],@Y[0],@Y[0]
||       ADD    @K2x[1],@Y[1],@Y[1]
||       ADD    @K2x[2],@Y[2],@Y[2]
||       ADD    @K2x[3],@Y[3],@Y[3]
||      LDNDW   *${INP}++[8],@DAT[1]:@DAT[0]
        ADD     @K2x[4],@X[4],@X[4]
||      ADD     @K2x[5],@X[5],@X[5]
||      ADD     @K2x[6],@X[6],@X[6]
||      ADD     @K2x[7],@X[7],@X[7]
||      LDNDW   *${INP}[-7],@DAT[3]:@DAT[2]
         ADD    @K2x[4],@Y[4],@Y[4]
||       ADD    @K2x[5],@Y[5],@Y[5]
||       ADD    @K2x[6],@Y[6],@Y[6]
||       ADD    @K2x[7],@Y[7],@Y[7]
||      LDNDW   *${INP}[-6],@DAT[5]:@DAT[4]
        ADD     @K2x[8],@X[8],@X[8]
||      ADD     @K2x[9],@X[9],@X[9]
||      ADD     @K2x[10],@X[10],@X[10]
||      ADD     @K2x[11],@X[11],@X[11]
||      LDNDW   *${INP}[-5],@DAT[7]:@DAT[6]
         ADD    @K2x[8],@Y[8],@Y[8]
||       ADD    @K2x[9],@Y[9],@Y[9]
||       ADD    @K2x[10],@Y[10],@Y[10]
||       ADD    @K2x[11],@Y[11],@Y[11]
||      LDNDW   *${INP}[-4],@DAT[9]:@DAT[8]
        ADD     @K2x[12],@X[12],@X[12]
||      ADD     @K2x[13],@X[13],@X[13]
||      ADD     @K2x[14],@X[14],@X[14]
||      ADD     @K2x[15],@X[15],@X[15]
||      LDNDW   *${INP}[-3],@DAT[11]:@DAT[10]
         ADD    @K2x[12],@Y[12],@Y[12]
||       ADD    @K2x[13],@Y[13],@Y[13]
||       ADD    @K2x[14],@Y[14],@Y[14]
||       ADD    @K2x[15],@Y[15],@Y[15]
||      LDNDW   *${INP}[-2],@DAT[13]:@DAT[12]
         ADD    1,@Y[12],@Y[12]         ; adjust counter for 2nd block
||      ADD     2,@K2x[12],@K2x[12]     ; increment counter
||      LDNDW   *${INP}[-1],@DAT[15]:@DAT[14]

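;; on big-endian targets the key stream is byte-swapped into ChaCha's
;; little-endian wire format before it is XOR-ed with the input data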
        .if     .BIG_ENDIAN
        SWAP2   @X[0],@X[0]
||      SWAP2   @X[1],@X[1]
||      SWAP2   @X[2],@X[2]
||      SWAP2   @X[3],@X[3]
        SWAP2   @X[4],@X[4]
||      SWAP2   @X[5],@X[5]
||      SWAP2   @X[6],@X[6]
||      SWAP2   @X[7],@X[7]
        SWAP2   @X[8],@X[8]
||      SWAP2   @X[9],@X[9]
||      SWAP4   @X[0],@X[1]
||      SWAP4   @X[1],@X[0]
        SWAP2   @X[10],@X[10]
||      SWAP2   @X[11],@X[11]
||      SWAP4   @X[2],@X[3]
||      SWAP4   @X[3],@X[2]
        SWAP2   @X[12],@X[12]
||      SWAP2   @X[13],@X[13]
||      SWAP4   @X[4],@X[5]
||      SWAP4   @X[5],@X[4]
        SWAP2   @X[14],@X[14]
||      SWAP2   @X[15],@X[15]
||      SWAP4   @X[6],@X[7]
||      SWAP4   @X[7],@X[6]
        SWAP4   @X[8],@X[9]
||      SWAP4   @X[9],@X[8]
||       SWAP2  @Y[0],@Y[0]
||       SWAP2  @Y[1],@Y[1]
        SWAP4   @X[10],@X[11]
||      SWAP4   @X[11],@X[10]
||       SWAP2  @Y[2],@Y[2]
||       SWAP2  @Y[3],@Y[3]
        SWAP4   @X[12],@X[13]
||      SWAP4   @X[13],@X[12]
||       SWAP2  @Y[4],@Y[4]
||       SWAP2  @Y[5],@Y[5]
        SWAP4   @X[14],@X[15]
||      SWAP4   @X[15],@X[14]
||       SWAP2  @Y[6],@Y[6]
||       SWAP2  @Y[7],@Y[7]
         SWAP2  @Y[8],@Y[8]
||       SWAP2  @Y[9],@Y[9]
||       SWAP4  @Y[0],@Y[1]
||       SWAP4  @Y[1],@Y[0]
         SWAP2  @Y[10],@Y[10]
||       SWAP2  @Y[11],@Y[11]
||       SWAP4  @Y[2],@Y[3]
||       SWAP4  @Y[3],@Y[2]
         SWAP2  @Y[12],@Y[12]
||       SWAP2  @Y[13],@Y[13]
||       SWAP4  @Y[4],@Y[5]
||       SWAP4  @Y[5],@Y[4]
         SWAP2  @Y[14],@Y[14]
||       SWAP2  @Y[15],@Y[15]
||       SWAP4  @Y[6],@Y[7]
||       SWAP4  @Y[7],@Y[6]
         SWAP4  @Y[8],@Y[9]
||       SWAP4  @Y[9],@Y[8]
         SWAP4  @Y[10],@Y[11]
||       SWAP4  @Y[11],@Y[10]
         SWAP4  @Y[12],@Y[13]
||       SWAP4  @Y[13],@Y[12]
         SWAP4  @Y[14],@Y[15]
||       SWAP4  @Y[15],@Y[14]
        .endif

        XOR     @DAT[0],@X[0],@X[0]     ; xor 1st block
||      XOR     @DAT[3],@X[3],@X[3]
||      XOR     @DAT[2],@X[2],@X[1]
||      XOR     @DAT[1],@X[1],@X[2]
||      LDNDW   *${INP}++[8],@DAT[1]:@DAT[0]
        XOR     @DAT[4],@X[4],@X[4]
||      XOR     @DAT[7],@X[7],@X[7]
||      LDNDW   *${INP}[-7],@DAT[3]:@DAT[2]
        XOR     @DAT[6],@X[6],@X[5]
||      XOR     @DAT[5],@X[5],@X[6]
||      LDNDW   *${INP}[-6],@DAT[5]:@DAT[4]
        XOR     @DAT[8],@X[8],@X[8]
||      XOR     @DAT[11],@X[11],@X[11]
||      LDNDW   *${INP}[-5],@DAT[7]:@DAT[6]
        XOR     @DAT[10],@X[10],@X[9]
||      XOR     @DAT[9],@X[9],@X[10]
||      LDNDW   *${INP}[-4],@DAT[9]:@DAT[8]
        XOR     @DAT[12],@X[12],@X[12]
||      XOR     @DAT[15],@X[15],@X[15]
||      LDNDW   *${INP}[-3],@DAT[11]:@DAT[10]
        XOR     @DAT[14],@X[14],@X[13]
||      XOR     @DAT[13],@X[13],@X[14]
||      LDNDW   *${INP}[-2],@DAT[13]:@DAT[12]
   [A0] SUB     A0,$STEP,A0             ; SUB   A0,128,A0
||      LDNDW   *${INP}[-1],@DAT[15]:@DAT[14]

        XOR     @Y[0],@DAT[0],@DAT[0]   ; xor 2nd block
||      XOR     @Y[1],@DAT[1],@DAT[1]
||      STNDW   @X[2]:@X[0],*${OUT}++[8]
        XOR     @Y[2],@DAT[2],@DAT[2]
||      XOR     @Y[3],@DAT[3],@DAT[3]
||      STNDW   @X[3]:@X[1],*${OUT}[-7]
        XOR     @Y[4],@DAT[4],@DAT[4]
|| [A0] LDDW    *FP[-12],@X[2]:@X[0]    ; re-load key material from stack
|| [A0] LDDW    *SP[2],  @X[3]:@X[1]
        XOR     @Y[5],@DAT[5],@DAT[5]
||      STNDW   @X[6]:@X[4],*${OUT}[-6]
        XOR     @Y[6],@DAT[6],@DAT[6]
||      XOR     @Y[7],@DAT[7],@DAT[7]
||      STNDW   @X[7]:@X[5],*${OUT}[-5]
        XOR     @Y[8],@DAT[8],@DAT[8]
|| [A0] LDDW    *FP[-10],@X[6]:@X[4]
|| [A0] LDDW    *SP[4],  @X[7]:@X[5]
        XOR     @Y[9],@DAT[9],@DAT[9]
||      STNDW   @X[10]:@X[8],*${OUT}[-4]
        XOR     @Y[10],@DAT[10],@DAT[10]
||      XOR     @Y[11],@DAT[11],@DAT[11]
||      STNDW   @X[11]:@X[9],*${OUT}[-3]
        XOR     @Y[12],@DAT[12],@DAT[12]
|| [A0] LDDW    *FP[-8], @X[10]:@X[8]
|| [A0] LDDW    *SP[6],  @X[11]:@X[9]
        XOR     @Y[13],@DAT[13],@DAT[13]
||      STNDW   @X[14]:@X[12],*${OUT}[-2]
        XOR     @Y[14],@DAT[14],@DAT[14]
||      XOR     @Y[15],@DAT[15],@DAT[15]
||      STNDW   @X[15]:@X[13],*${OUT}[-1]

   [A0] MV      @K2x[12],@X[12]
|| [A0] MV      @K2x[13],@X[13]
|| [A0] LDW     *FP[-6*2], @X[14]
|| [A0] LDW     *SP[8*2],  @X[15]

   [A0] DMV     @X[2],@X[0],@Y[2]:@Y[0] ; duplicate key material
||      STNDW   @DAT[1]:@DAT[0],*${OUT}++[8]
   [A0] DMV     @X[3],@X[1],@Y[3]:@Y[1]
||      STNDW   @DAT[3]:@DAT[2],*${OUT}[-7]
   [A0] DMV     @X[6],@X[4],@Y[6]:@Y[4]
||      STNDW   @DAT[5]:@DAT[4],*${OUT}[-6]
||      CMPLTU  A0,$STEP,A1             ; is remaining length < 2*blocks?
||[!A0] BNOP    epilogue?
   [A0] DMV     @X[7],@X[5],@Y[7]:@Y[5]
||      STNDW   @DAT[7]:@DAT[6],*${OUT}[-5]
||[!A1] BNOP    outer2x?
   [A0] DMV     @X[10],@X[8],@Y[10]:@Y[8]
||      STNDW   @DAT[9]:@DAT[8],*${OUT}[-4]
   [A0] DMV     @X[11],@X[9],@Y[11]:@Y[9]
||      STNDW   @DAT[11]:@DAT[10],*${OUT}[-3]
   [A0] DMV     @X[14],@X[12],@Y[14]:@Y[12]
||      STNDW   @DAT[13]:@DAT[12],*${OUT}[-2]
   [A0] DMV     @X[15],@X[13],@Y[15]:@Y[13]
||      STNDW   @DAT[15]:@DAT[14],*${OUT}[-1]
;;===== branch to epilogue? is taken here
   [A1] MVK     64,$STEP
|| [A0] MVK     10,B0                   ; inner loop counter
;;===== branch to outer2x? is taken here
___
{
my ($a0,$a1,$a2,$a3) = (0..3);
my ($b0,$b1,$b2,$b3) = (4..7);
my ($c0,$c1,$c2,$c3) = (8..11);
my ($d0,$d1,$d2,$d3) = (12..15);

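# 1x code path: one 64-byte block at a time, i.e. plain ChaCha double
# rounds (a column round followed by a diagonal round); the rotate by
# 16 is done with SWAP2, the 12/8/7-bit rotates with ROTL.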
$code.=<<___;
top1x?:
        ADD     @X[$b1],@X[$a1],@X[$a1]
||      ADD     @X[$b2],@X[$a2],@X[$a2]
        ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
||      XOR     @X[$a2],@X[$d2],@X[$d2]
        XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      SWAP2   @X[$d1],@X[$d1]         ; rotate by 16
||      SWAP2   @X[$d2],@X[$d2]
        SWAP2   @X[$d0],@X[$d0]
||      SWAP2   @X[$d3],@X[$d3]

||      ADD     @X[$d1],@X[$c1],@X[$c1]
||      ADD     @X[$d2],@X[$c2],@X[$c2]
        ADD     @X[$d0],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c3],@X[$c3]
||      XOR     @X[$c1],@X[$b1],@X[$b1]
||      XOR     @X[$c2],@X[$b2],@X[$b2]
        XOR     @X[$c0],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b3],@X[$b3]
||      ROTL    @X[$b1],12,@X[$b1]
||      ROTL    @X[$b2],12,@X[$b2]
        ROTL    @X[$b0],12,@X[$b0]
||      ROTL    @X[$b3],12,@X[$b3]

        ADD     @X[$b1],@X[$a1],@X[$a1]
||      ADD     @X[$b2],@X[$a2],@X[$a2]
        ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
||      XOR     @X[$a2],@X[$d2],@X[$d2]
        XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      ROTL    @X[$d1],8,@X[$d1]
||      ROTL    @X[$d2],8,@X[$d2]
        ROTL    @X[$d0],8,@X[$d0]
||      ROTL    @X[$d3],8,@X[$d3]
||      BNOP    middle1x?               ; protect from interrupt

        ADD     @X[$d1],@X[$c1],@X[$c1]
||      ADD     @X[$d2],@X[$c2],@X[$c2]
        ADD     @X[$d0],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c3],@X[$c3]
||      XOR     @X[$c1],@X[$b1],@X[$b1]
||      XOR     @X[$c2],@X[$b2],@X[$b2]
||      ROTL    @X[$d1],0,@X[$d2]       ; moved to avoid cross-path stall
||      ROTL    @X[$d2],0,@X[$d3]
        XOR     @X[$c0],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b3],@X[$b3]
||      ROTL    @X[$d0],0,@X[$d1]
||      ROTL    @X[$d3],0,@X[$d0]
        ROTL    @X[$b1],7,@X[$b0]       ; avoided cross-path stall
||      ROTL    @X[$b2],7,@X[$b1]
        ROTL    @X[$b0],7,@X[$b3]
||      ROTL    @X[$b3],7,@X[$b2]
middle1x?:

        ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b1],@X[$a1],@X[$a1]
        ADD     @X[$b2],@X[$a2],@X[$a2]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
        XOR     @X[$a2],@X[$d2],@X[$d2]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      SWAP2   @X[$d0],@X[$d0]         ; rotate by 16
||      SWAP2   @X[$d1],@X[$d1]
        SWAP2   @X[$d2],@X[$d2]
||      SWAP2   @X[$d3],@X[$d3]

||      ADD     @X[$d0],@X[$c2],@X[$c2]
||      ADD     @X[$d1],@X[$c3],@X[$c3]
        ADD     @X[$d2],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c1],@X[$c1]
||      XOR     @X[$c2],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b1],@X[$b1]
        XOR     @X[$c0],@X[$b2],@X[$b2]
||      XOR     @X[$c1],@X[$b3],@X[$b3]
||      ROTL    @X[$b0],12,@X[$b0]
||      ROTL    @X[$b1],12,@X[$b1]
        ROTL    @X[$b2],12,@X[$b2]
||      ROTL    @X[$b3],12,@X[$b3]

        ADD     @X[$b0],@X[$a0],@X[$a0]
||      ADD     @X[$b1],@X[$a1],@X[$a1]
|| [B0] SUB     B0,1,B0                 ; decrement inner loop counter
        ADD     @X[$b2],@X[$a2],@X[$a2]
||      ADD     @X[$b3],@X[$a3],@X[$a3]
||      XOR     @X[$a0],@X[$d0],@X[$d0]
||      XOR     @X[$a1],@X[$d1],@X[$d1]
        XOR     @X[$a2],@X[$d2],@X[$d2]
||      XOR     @X[$a3],@X[$d3],@X[$d3]
||      ROTL    @X[$d0],8,@X[$d0]
||      ROTL    @X[$d1],8,@X[$d1]
        ROTL    @X[$d2],8,@X[$d2]
||      ROTL    @X[$d3],8,@X[$d3]
|| [B0] BNOP    top1x?                  ; even protects from interrupt

        ADD     @X[$d0],@X[$c2],@X[$c2]
||      ADD     @X[$d1],@X[$c3],@X[$c3]
        ADD     @X[$d2],@X[$c0],@X[$c0]
||      ADD     @X[$d3],@X[$c1],@X[$c1]
||      XOR     @X[$c2],@X[$b0],@X[$b0]
||      XOR     @X[$c3],@X[$b1],@X[$b1]
||      ROTL    @X[$d0],0,@X[$d3]       ; moved to avoid cross-path stall
||      ROTL    @X[$d1],0,@X[$d0]
        XOR     @X[$c0],@X[$b2],@X[$b2]
||      XOR     @X[$c1],@X[$b3],@X[$b3]
||      ROTL    @X[$d2],0,@X[$d1]
||      ROTL    @X[$d3],0,@X[$d2]
        ROTL    @X[$b0],7,@X[$b1]       ; avoided cross-path stall
||      ROTL    @X[$b1],7,@X[$b2]
        ROTL    @X[$b2],7,@X[$b3]
||      ROTL    @X[$b3],7,@X[$b0]
||[!B0] CMPLTU  A0,$STEP,A1             ; less than 64 bytes left?
bottom1x?:
___
}

$code.=<<___;
        ADD     @Y[0],@X[0],@X[0]       ; accumulate key material
||      ADD     @Y[1],@X[1],@X[1]
||      ADD     @Y[2],@X[2],@X[2]
||      ADD     @Y[3],@X[3],@X[3]
||[!A1] LDNDW   *${INP}++[8],@DAT[1]:@DAT[0]
|| [A1] BNOP    tail?
        ADD     @Y[4],@X[4],@X[4]
||      ADD     @Y[5],@X[5],@X[5]
||      ADD     @Y[6],@X[6],@X[6]
||      ADD     @Y[7],@X[7],@X[7]
||[!A1] LDNDW   *${INP}[-7],@DAT[3]:@DAT[2]
        ADD     @Y[8],@X[8],@X[8]
||      ADD     @Y[9],@X[9],@X[9]
||      ADD     @Y[10],@X[10],@X[10]
||      ADD     @Y[11],@X[11],@X[11]
||[!A1] LDNDW   *${INP}[-6],@DAT[5]:@DAT[4]
        ADD     @Y[12],@X[12],@X[12]
||      ADD     @Y[13],@X[13],@X[13]
||      ADD     @Y[14],@X[14],@X[14]
||      ADD     @Y[15],@X[15],@X[15]
||[!A1] LDNDW   *${INP}[-5],@DAT[7]:@DAT[6]
  [!A1] LDNDW   *${INP}[-4],@DAT[9]:@DAT[8]
  [!A1] LDNDW   *${INP}[-3],@DAT[11]:@DAT[10]
        LDNDW   *${INP}[-2],@DAT[13]:@DAT[12]
        LDNDW   *${INP}[-1],@DAT[15]:@DAT[14]

        .if     .BIG_ENDIAN
        SWAP2   @X[0],@X[0]
||      SWAP2   @X[1],@X[1]
||      SWAP2   @X[2],@X[2]
||      SWAP2   @X[3],@X[3]
        SWAP2   @X[4],@X[4]
||      SWAP2   @X[5],@X[5]
||      SWAP2   @X[6],@X[6]
||      SWAP2   @X[7],@X[7]
        SWAP2   @X[8],@X[8]
||      SWAP2   @X[9],@X[9]
||      SWAP4   @X[0],@X[1]
||      SWAP4   @X[1],@X[0]
        SWAP2   @X[10],@X[10]
||      SWAP2   @X[11],@X[11]
||      SWAP4   @X[2],@X[3]
||      SWAP4   @X[3],@X[2]
        SWAP2   @X[12],@X[12]
||      SWAP2   @X[13],@X[13]
||      SWAP4   @X[4],@X[5]
||      SWAP4   @X[5],@X[4]
        SWAP2   @X[14],@X[14]
||      SWAP2   @X[15],@X[15]
||      SWAP4   @X[6],@X[7]
||      SWAP4   @X[7],@X[6]
        SWAP4   @X[8],@X[9]
||      SWAP4   @X[9],@X[8]
        SWAP4   @X[10],@X[11]
||      SWAP4   @X[11],@X[10]
        SWAP4   @X[12],@X[13]
||      SWAP4   @X[13],@X[12]
        SWAP4   @X[14],@X[15]
||      SWAP4   @X[15],@X[14]
        .else
        NOP     1
        .endif

        XOR     @X[0],@DAT[0],@DAT[0]   ; xor with input
||      XOR     @X[1],@DAT[1],@DAT[1]
||      XOR     @X[2],@DAT[2],@DAT[2]
||      XOR     @X[3],@DAT[3],@DAT[3]
|| [A0] SUB     A0,$STEP,A0             ; SUB   A0,64,A0
        XOR     @X[4],@DAT[4],@DAT[4]
||      XOR     @X[5],@DAT[5],@DAT[5]
||      XOR     @X[6],@DAT[6],@DAT[6]
||      XOR     @X[7],@DAT[7],@DAT[7]
||      STNDW   @DAT[1]:@DAT[0],*${OUT}++[8]
        XOR     @X[8],@DAT[8],@DAT[8]
||      XOR     @X[9],@DAT[9],@DAT[9]
||      XOR     @X[10],@DAT[10],@DAT[10]
||      XOR     @X[11],@DAT[11],@DAT[11]
||      STNDW   @DAT[3]:@DAT[2],*${OUT}[-7]
        XOR     @X[12],@DAT[12],@DAT[12]
||      XOR     @X[13],@DAT[13],@DAT[13]
||      XOR     @X[14],@DAT[14],@DAT[14]
||      XOR     @X[15],@DAT[15],@DAT[15]
||      STNDW   @DAT[5]:@DAT[4],*${OUT}[-6]
|| [A0] BNOP    top1x?
   [A0] DMV     @Y[2],@Y[0],@X[2]:@X[0] ; duplicate key material
|| [A0] DMV     @Y[3],@Y[1],@X[3]:@X[1]
||      STNDW   @DAT[7]:@DAT[6],*${OUT}[-5]
   [A0] DMV     @Y[6],@Y[4],@X[6]:@X[4]
|| [A0] DMV     @Y[7],@Y[5],@X[7]:@X[5]
||      STNDW   @DAT[9]:@DAT[8],*${OUT}[-4]
   [A0] DMV     @Y[10],@Y[8],@X[10]:@X[8]
|| [A0] DMV     @Y[11],@Y[9],@X[11]:@X[9]
|| [A0] ADD     1,@Y[12],@Y[12]         ; increment counter
||      STNDW   @DAT[11]:@DAT[10],*${OUT}[-3]
   [A0] DMV     @Y[14],@Y[12],@X[14]:@X[12]
|| [A0] DMV     @Y[15],@Y[13],@X[15]:@X[13]
||      STNDW   @DAT[13]:@DAT[12],*${OUT}[-2]
   [A0] MVK     10,B0                   ; inner loop counter
||      STNDW   @DAT[15]:@DAT[14],*${OUT}[-1]
;;===== branch to top1x? is taken here

epilogue?:
        LDDW    *FP[-4],A11:A10         ; ABI says so
        LDDW    *FP[-3],A13:A12
||      LDDW    *SP[3+8],B11:B10
        LDDW    *SP[4+8],B13:B12
||      BNOP    RA
        LDW     *++SP(40+64),FP         ; restore frame pointer
        NOP     4

tail?:
        LDBU    *${INP}++[1],B24        ; load byte by byte
||      SUB     A0,1,A0
||      SUB     A0,1,B1
  [!B1] BNOP    epilogue?               ; interrupts are disabled for whole time
|| [A0] LDBU    *${INP}++[1],B24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
  [!B1] BNOP    epilogue?
|| [A0] LDBU    *${INP}++[1],B24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
  [!B1] BNOP    epilogue?
||      ROTL    @X[0],0,A24
|| [A0] LDBU    *${INP}++[1],B24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
  [!B1] BNOP    epilogue?
||      ROTL    @X[0],24,A24
|| [A0] LDBU    *${INP}++[1],A24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
  [!B1] BNOP    epilogue?
||      ROTL    @X[0],16,A24
|| [A0] LDBU    *${INP}++[1],A24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,B25
        STB     B25,*${OUT}++[1]        ; store byte by byte
||[!B1] BNOP    epilogue?
||      ROTL    @X[0],8,A24
|| [A0] LDBU    *${INP}++[1],A24
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,B25
        STB     B25,*${OUT}++[1]
___
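# Tail processing: the last, partial block is handled byte by byte.
# Each key-stream word in @X is rotated so that successive bytes land
# in A24/B24 (picked to match @X[i]'s register file and avoid cross-path
# penalties), XOR-ed with a byte loaded from input and stored out.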
sub TAIL_STEP {
my $Xi= shift;
my $T = ($Xi=~/^B/?"B24":"A24");        # match @X[i] to avoid cross path
my $D = $T; $D=~tr/AB/BA/;
my $O = $D; $O=~s/24/25/;

$code.=<<___;
||[!B1] BNOP    epilogue?
||      ROTL    $Xi,0,$T
|| [A0] LDBU    *${INP}++[1],$D
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,$O
        STB     $O,*${OUT}++[1]
||[!B1] BNOP    epilogue?
||      ROTL    $Xi,24,$T
|| [A0] LDBU    *${INP}++[1],$T
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,$O
        STB     $O,*${OUT}++[1]
||[!B1] BNOP    epilogue?
||      ROTL    $Xi,16,$T
|| [A0] LDBU    *${INP}++[1],$T
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,$O
        STB     $O,*${OUT}++[1]
||[!B1] BNOP    epilogue?
||      ROTL    $Xi,8,$T
|| [A0] LDBU    *${INP}++[1],$T
|| [A0] SUB     A0,1,A0
||      SUB     B1,1,B1
||      XOR     A24,B24,$O
        STB     $O,*${OUT}++[1]
___
}
        foreach (1..14) { TAIL_STEP(@X[$_]); }
$code.=<<___;
||[!B1] BNOP    epilogue?
||      ROTL    @X[15],0,B24
||      XOR     A24,B24,A25
        STB     A25,*${OUT}++[1]
||      ROTL    @X[15],24,B24
||      XOR     A24,B24,A25
        STB     A25,*${OUT}++[1]
||      ROTL    @X[15],16,B24
||      XOR     A24,B24,A25
        STB     A25,*${OUT}++[1]
||      XOR     A24,B24,A25
        STB     A25,*${OUT}++[1]
||      XOR     A24,B24,B25
        STB     B25,*${OUT}++[1]
        .endasmfunc

        .sect   .const
        .cstring "ChaCha20 for C64x+, CRYPTOGAMS by <appro\@openssl.org>"
        .align  4
___

print $code;
close STDOUT;