use broken-out patches for the coldfire to make it easier to follow differences again...
target/linux/coldfire/patches/006-mcfv4e_arch_lib_mods.patch
From a9faf34ba120d9d39ff0c7656ee3de12a110e22a Mon Sep 17 00:00:00 2001
From: Kurt Mahan <kmahan@freescale.com>
Date: Wed, 31 Oct 2007 16:57:05 -0600
Subject: [PATCH] Core Coldfire/MCF5445x arch lib changes.

LTIBName: mcfv4e-arch-lib-mods
Signed-off-by: Kurt Mahan <kmahan@freescale.com>
---
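Note on the checksum.c change: the stock m68k assembler implementation is
not usable on ColdFire, so the CONFIG_COLDFIRE branch below replaces it
with a portable C do_csum().  The essential technique is manual carry
folding of 32-bit adds down to a 16-bit one's-complement sum.  A
standalone sketch of just that folding (illustration only, not kernel
code; the data and names are invented for the example):

  #include <stdint.h>
  #include <stdio.h>

  int main(void)
  {
          /* two 32-bit words of packet data */
          uint32_t words[2] = { 0xffff0001u, 0x00020003u };
          uint32_t sum = 0, carry = 0;

          for (int i = 0; i < 2; i++) {
                  uint32_t w = words[i];
                  sum += carry;      /* fold the previous carry back in */
                  sum += w;          /* may wrap around */
                  carry = (w > sum); /* wrapped iff result < addend */
          }
          sum += carry;
          /* fold 32 -> 16 bits; the second fold absorbs any new carry */
          sum = (sum & 0xffff) + (sum >> 16);
          sum = (sum & 0xffff) + (sum >> 16);
          printf("0x%04x\n", sum); /* prints 0x0006 */
          return 0;
  }

Folding twice after the loop guarantees the result fits in 16 bits even
when the first fold itself carries; 0x0006 is the one's-complement sum of
the four 16-bit words 0xffff, 0x0001, 0x0002 and 0x0003.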
 arch/m68k/lib/checksum.c  |  124 +++++++++++++++++++++++
 arch/m68k/lib/muldi3.c    |   10 ++
 arch/m68k/lib/semaphore.S |   25 +++++
 arch/m68k/lib/string.c    |   64 ++++++++++++
 arch/m68k/lib/uaccess.c   |  242 +++++++++++++++++++++++++++++++++++++++++++++
 5 files changed, 465 insertions(+), 0 deletions(-)

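muldi3.c likewise trades the mulu.l register-pair asm for plain C:
ColdFire has no mulu.l variant that produces the full 64-bit product in a
register pair, so the compiler's 64-bit arithmetic does the job.  A
minimal standalone equivalent of that 32x32->64 split (illustration only;
umul_32x32 is an invented name):

  #include <stdint.h>
  #include <stdio.h>

  static void umul_32x32(uint32_t u, uint32_t v,
                         uint32_t *hi, uint32_t *lo)
  {
          /* let the compiler widen, then split the product */
          uint64_t x = (uint64_t)u * v;

          *lo = (uint32_t)x;         /* low 32 bits */
          *hi = (uint32_t)(x >> 32); /* high 32 bits */
  }

  int main(void)
  {
          uint32_t hi, lo;

          umul_32x32(0xffffffffu, 0xffffffffu, &hi, &lo);
          /* 0xffffffff * 0xffffffff = 0xfffffffe00000001 */
          printf("hi=0x%08x lo=0x%08x\n", hi, lo);
          return 0;
  }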
--- a/arch/m68k/lib/checksum.c
+++ b/arch/m68k/lib/checksum.c
@@ -39,8 +39,131 @@
  * computes a partial checksum, e.g. for TCP/UDP fragments
  */
 
+#ifdef CONFIG_COLDFIRE
+
+static inline unsigned short from32to16(unsigned long x)
+{
+       /* add up 16-bit and 16-bit for 16+c bit */
+       x = (x & 0xffff) + (x >> 16);
+       /* add up carry.. */
+       x = (x & 0xffff) + (x >> 16);
+       return x;
+}
+
+static unsigned long do_csum(const unsigned char *buff, int len)
+{
+       int odd, count;
+       unsigned long result = 0;
+
+       if (len <= 0)
+               goto out;
+       odd = 1 & (unsigned long) buff;
+       if (odd) {
+               result = *buff;
+               len--;
+               buff++;
+       }
+       count = len >> 1;               /* nr of 16-bit words.. */
+       if (count) {
+               if (2 & (unsigned long) buff) {
+                       result += *(unsigned short *) buff;
+                       count--;
+                       len -= 2;
+                       buff += 2;
+               }
+               count >>= 1;            /* nr of 32-bit words.. */
+               if (count) {
+                       unsigned long carry = 0;
+                       do {
+                               unsigned long w = *(unsigned long *) buff;
+                               count--;
+                               buff += 4;
+                               result += carry;
+                               result += w;
+                               carry = (w > result);
+                       } while (count);
+                       result += carry;
+                       result = (result & 0xffff) + (result >> 16);
+               }
+               if (len & 2) {
+                       result += *(unsigned short *) buff;
+                       buff += 2;
+               }
+       }
+       if (len & 1)
+               result += (*buff << 8);
+       result = from32to16(result);
+       if (odd)
+               result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
+out:
+       return result;
+}
+
+/*
+ *     This is a version of ip_compute_csum() optimized for IP headers,
+ *     which always checksum on 4 octet boundaries.
+ */
+__sum16 ip_fast_csum(const void *iph, unsigned int ihl)
+{
+       return ~do_csum(iph, ihl*4);
+}
+EXPORT_SYMBOL(ip_fast_csum);
+
+/*
+ * computes the checksum of a memory block at buff, length len,
+ * and adds in "sum" (32-bit)
+ *
+ * returns a 32-bit number suitable for feeding into itself
+ * or csum_tcpudp_magic
+ *
+ * this function must be called with even lengths, except
+ * for the last fragment, which may be odd
+ *
+ * it's best to have buff aligned on a 32-bit boundary
+ */
 __wsum csum_partial(const void *buff, int len, __wsum sum)
 {
+       unsigned int result = do_csum(buff, len);
+
+       /* add in old sum, and carry.. */
+       result += sum;
+       if (sum > result)
+               result += 1;
+       return result;
+}
+EXPORT_SYMBOL(csum_partial);
+
+/*
+ * copy from fs while checksumming, otherwise like csum_partial
+ */
+
+__wsum
+csum_partial_copy_from_user(const void __user *src, void *dst, int len,
+                           __wsum sum, int *csum_err)
+{
+       if (csum_err) *csum_err = 0;
+       memcpy(dst, src, len);
+       return csum_partial(dst, len, sum);
+}
+EXPORT_SYMBOL(csum_partial_copy_from_user);
+
+/*
+ * copy from ds while checksumming, otherwise like csum_partial
+ */
+
+__wsum
+csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
+{
+       memcpy(dst, src, len);
+       return csum_partial(dst, len, sum);
+}
+EXPORT_SYMBOL(csum_partial_copy_nocheck);
+
+#else /* !CONFIG_COLDFIRE */
+
+unsigned int
+csum_partial(const unsigned char *buff, int len, unsigned int sum)
+{
        unsigned long tmp1, tmp2;
          /*
           * Experiments with ethernet and slip connections show that buff
@@ -423,3 +546,4 @@ csum_partial_copy_nocheck(const void *sr
     return(sum);
 }
 EXPORT_SYMBOL(csum_partial_copy_nocheck);
+#endif /* CONFIG_COLDFIRE */
--- a/arch/m68k/lib/muldi3.c
+++ b/arch/m68k/lib/muldi3.c
@@ -21,12 +21,22 @@ Boston, MA 02111-1307, USA.  */
 
 #define BITS_PER_UNIT 8
 
+#ifdef CONFIG_COLDFIRE
+#define umul_ppmm(w1, w0, u, v) \
+  do { \
+       unsigned long long x; \
+       x = (unsigned long long)(u) * (v); \
+       w0 = (unsigned long)(x & 0x00000000ffffffffULL); \
+       w1 = (unsigned long)(x >> 32); \
+     } while (0)
+#else /* CONFIG_COLDFIRE */
 #define umul_ppmm(w1, w0, u, v) \
   __asm__ ("mulu%.l %3,%1:%0"                                          \
            : "=d" ((USItype)(w0)),                                     \
              "=d" ((USItype)(w1))                                      \
            : "%0" ((USItype)(u)),                                      \
              "dmi" ((USItype)(v)))
+#endif /* CONFIG_COLDFIRE */
 
 #define __umulsidi3(u, v) \
   ({DIunion __w;                                                       \
--- a/arch/m68k/lib/semaphore.S
+++ b/arch/m68k/lib/semaphore.S
@@ -16,11 +16,24 @@
  * there is contention on the semaphore.
  */
 ENTRY(__down_failed)
+#ifndef CONFIG_COLDFIRE
        moveml %a0/%d0/%d1,-(%sp)
+#else
+       movel %a0,-(%sp)
+       movel %d0,-(%sp)
+       movel %d1,-(%sp)
+#endif
        movel %a1,-(%sp)
        jbsr __down
        movel (%sp)+,%a1
+#ifndef CONFIG_COLDFIRE
        moveml (%sp)+,%a0/%d0/%d1
+#else
+       movel (%sp)+,%d1
+       movel (%sp)+,%d0
+       movel (%sp)+,%a0
+#endif
+
        rts
 
 ENTRY(__down_failed_interruptible)
@@ -44,10 +57,22 @@ ENTRY(__down_failed_trylock)
        rts
 
 ENTRY(__up_wakeup)
+#ifndef CONFIG_COLDFIRE
        moveml %a0/%d0/%d1,-(%sp)
+#else
+       movel %a0,-(%sp)
+       movel %d0,-(%sp)
+       movel %d1,-(%sp)
+#endif
        movel %a1,-(%sp)
        jbsr __up
        movel (%sp)+,%a1
+#ifndef CONFIG_COLDFIRE
        moveml (%sp)+,%a0/%d0/%d1
+#else
+       movel (%sp)+,%d1
+       movel (%sp)+,%d0
+       movel (%sp)+,%a0
+#endif
        rts
 
--- a/arch/m68k/lib/string.c
+++ b/arch/m68k/lib/string.c
@@ -15,6 +15,7 @@ char *strcpy(char *dest, const char *src
 }
 EXPORT_SYMBOL(strcpy);
 
+#ifndef CONFIG_COLDFIRE
 void *memset(void *s, int c, size_t count)
 {
        void *xs = s;
@@ -143,6 +144,69 @@ void *memcpy(void *to, const void *from,
 }
 EXPORT_SYMBOL(memcpy);
 
+#else /* CONFIG_COLDFIRE */
+
+void *memset(void *s, int c, size_t count)
+{
+  unsigned long x;
+  void *originalTo = s;  /* remember the start of the area for the return */
+
+  for (x = 0; x < count; x++)
+    *(unsigned char *)s++ = (unsigned char)c;
+
+  return originalTo;
+}
+EXPORT_SYMBOL(memset);
+
+void *memcpy(void *to, const void *from, size_t n)
+{
+  void *xto = to;
+  size_t temp;
+
+  if (!n)
+    return xto;
+  if ((long) to & 1) {
+      char *cto = to;
+      const char *cfrom = from;
+      *cto++ = *cfrom++;
+      to = cto;
+      from = cfrom;
+      n--;
+    }
+  if (n > 2 && (long) to & 2) {
+      short *sto = to;
+      const short *sfrom = from;
+      *sto++ = *sfrom++;
+      to = sto;
+      from = sfrom;
+      n -= 2;
+    }
+  temp = n >> 2;
+  if (temp) {
+      long *lto = to;
+      const long *lfrom = from;
+      for (; temp; temp--)
+       *lto++ = *lfrom++;
+      to = lto;
+      from = lfrom;
+    }
+  if (n & 2) {
+      short *sto = to;
+      const short *sfrom = from;
+      *sto++ = *sfrom++;
+      to = sto;
+      from = sfrom;
+    }
+  if (n & 1) {
+      char *cto = to;
+      const char *cfrom = from;
+      *cto = *cfrom;
+    }
+  return xto;
+}
+EXPORT_SYMBOL(memcpy);
+#endif /* CONFIG_COLDFIRE */
+
 void *memmove(void *dest, const void *src, size_t n)
 {
        void *xdest = dest;
--- a/arch/m68k/lib/uaccess.c
+++ b/arch/m68k/lib/uaccess.c
@@ -5,6 +5,7 @@
  */
 
 #include <linux/module.h>
+#ifndef CONFIG_COLDFIRE
 #include <asm/uaccess.h>
 
 unsigned long __generic_copy_from_user(void *to, const void __user *from,
@@ -220,3 +221,244 @@ unsigned long __clear_user(void __user *
     return res;
 }
 EXPORT_SYMBOL(__clear_user);
+
+#else /* CONFIG_COLDFIRE */
+
+#include <asm/cf_uaccess.h>
+
+unsigned long __generic_copy_from_user(void *to, const void *from,
+               unsigned long n)
+{
+    unsigned long tmp;
+    __asm__ __volatile__
+       ("   tstl %2\n"
+        "   jeq 2f\n"
+        "1: movel (%1)+,%3\n"
+        "   movel %3,(%0)+\n"
+        "   subql #1,%2\n"
+        "   jne 1b\n"
+        "2: movel %4,%2\n"
+        "   bclr #1,%2\n"
+        "   jeq 4f\n"
+        "3: movew (%1)+,%3\n"
+        "   movew %3,(%0)+\n"
+        "4: bclr #0,%2\n"
+        "   jeq 6f\n"
+        "5: moveb (%1)+,%3\n"
+        "   moveb %3,(%0)+\n"
+        "6:\n"
+        ".section .fixup,\"ax\"\n"
+        "   .even\n"
+        "7: movel %2,%%d0\n"
+        "71:clrl (%0)+\n"
+        "   subql #1,%%d0\n"
+        "   jne 71b\n"
+        "   lsll #2,%2\n"
+        "   addl %4,%2\n"
+        "   btst #1,%4\n"
+        "   jne 81f\n"
+        "   btst #0,%4\n"
+        "   jne 91f\n"
+        "   jra 6b\n"
+        "8: addql #2,%2\n"
+        "81:clrw (%0)+\n"
+        "   btst #0,%4\n"
+        "   jne 91f\n"
+        "   jra 6b\n"
+        "9: addql #1,%2\n"
+        "91:clrb (%0)+\n"
+        "   jra 6b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "   .align 4\n"
+        "   .long 1b,7b\n"
+        "   .long 3b,8b\n"
+        "   .long 5b,9b\n"
+        ".previous"
+        : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
+        : "d"(n & 3), "0"(to), "1"(from), "2"(n/4)
+        : "d0", "memory");
+    return n;
+}
+EXPORT_SYMBOL(__generic_copy_from_user);
+
+
+unsigned long __generic_copy_to_user(void *to, const void *from,
+               unsigned long n)
+{
+    unsigned long tmp;
+    __asm__ __volatile__
+       ("   tstl %2\n"
+        "   jeq 3f\n"
+        "1: movel (%1)+,%3\n"
+        "22:movel %3,(%0)+\n"
+        "2: subql #1,%2\n"
+        "   jne 1b\n"
+        "3: movel %4,%2\n"
+        "   bclr #1,%2\n"
+        "   jeq 4f\n"
+        "   movew (%1)+,%3\n"
+        "24:movew %3,(%0)+\n"
+        "4: bclr #0,%2\n"
+        "   jeq 5f\n"
+        "   moveb (%1)+,%3\n"
+        "25:moveb %3,(%0)+\n"
+        "5:\n"
+        ".section .fixup,\"ax\"\n"
+        "   .even\n"
+        "60:addql #1,%2\n"
+        "6: lsll #2,%2\n"
+        "   addl %4,%2\n"
+        "   jra 5b\n"
+        "7: addql #2,%2\n"
+        "   jra 5b\n"
+        "8: addql #1,%2\n"
+        "   jra 5b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "   .align 4\n"
+        "   .long 1b,60b\n"
+        "   .long 22b,6b\n"
+        "   .long 2b,6b\n"
+        "   .long 24b,7b\n"
+        "   .long 3b,60b\n"
+        "   .long 4b,7b\n"
+        "   .long 25b,8b\n"
+        "   .long 5b,8b\n"
+        ".previous"
+        : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
+        : "r"(n & 3), "0"(to), "1"(from), "2"(n / 4)
+        : "memory");
+    return n;
+}
+EXPORT_SYMBOL(__generic_copy_to_user);
+
+/*
+ * Copy a null terminated string from userspace.
+ */
+
+long strncpy_from_user(char *dst, const char *src, long count)
+{
+       long res = -EFAULT;
+       if (!(access_ok(VERIFY_READ, src, 1))) /* --tym-- */
+               return res;
+    if (count == 0) return count;
+    __asm__ __volatile__
+       ("1: moveb (%2)+,%%d0\n"
+        "12:moveb %%d0,(%1)+\n"
+        "   jeq 2f\n"
+        "   subql #1,%3\n"
+        "   jne 1b\n"
+        "2: subl %3,%0\n"
+        "3:\n"
+        ".section .fixup,\"ax\"\n"
+        "   .even\n"
+        "4: movel %4,%0\n"
+        "   jra 3b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "   .align 4\n"
+        "   .long 1b,4b\n"
+        "   .long 12b,4b\n"
+        ".previous"
+        : "=d"(res), "=a"(dst), "=a"(src), "=d"(count)
+        : "i"(-EFAULT), "0"(count), "1"(dst), "2"(src), "3"(count)
+        : "d0", "memory");
+    return res;
+}
+EXPORT_SYMBOL(strncpy_from_user);
+
+/*
+ * Return the size of a string (including the ending 0)
+ *
+ * Return 0 on exception, a value greater than N if too long
+ */
+long strnlen_user(const char *src, long n)
+{
+    long res = -EFAULT;
+    if (!(access_ok(VERIFY_READ, src, 1))) /* --tym-- */
+       return res;
+
+       res = -(long)src;
+       __asm__ __volatile__
+               ("1:\n"
+                "   tstl %2\n"
+                "   jeq 3f\n"
+                "2: moveb (%1)+,%%d0\n"
+                "22:\n"
+                "   subql #1,%2\n"
+                "   tstb %%d0\n"
+                "   jne 1b\n"
+                "   jra 4f\n"
+                "3:\n"
+                "   addql #1,%0\n"
+                "4:\n"
+                "   addl %1,%0\n"
+                "5:\n"
+                ".section .fixup,\"ax\"\n"
+                "   .even\n"
+                "6: moveq %3,%0\n"
+                "   jra 5b\n"
+                ".previous\n"
+                ".section __ex_table,\"a\"\n"
+                "   .align 4\n"
+                "   .long 2b,6b\n"
+                "   .long 22b,6b\n"
+                ".previous"
+                : "=d"(res), "=a"(src), "=d"(n)
+                : "i"(0), "0"(res), "1"(src), "2"(n)
+                : "d0");
+       return res;
+}
+EXPORT_SYMBOL(strnlen_user);
+
+
+/*
+ * Zero Userspace
+ */
+
+unsigned long __clear_user(void *to, unsigned long n)
+{
+    __asm__ __volatile__
+       ("   tstl %1\n"
+        "   jeq 3f\n"
+        "1: movel %3,(%0)+\n"
+        "2: subql #1,%1\n"
+        "   jne 1b\n"
+        "3: movel %2,%1\n"
+        "   bclr #1,%1\n"
+        "   jeq 4f\n"
+        "24:movew %3,(%0)+\n"
+        "4: bclr #0,%1\n"
+        "   jeq 5f\n"
+        "25:moveb %3,(%0)+\n"
+        "5:\n"
+        ".section .fixup,\"ax\"\n"
+        "   .even\n"
+        "61:addql #1,%1\n"
+        "6: lsll #2,%1\n"
+        "   addl %2,%1\n"
+        "   jra 5b\n"
+        "7: addql #2,%1\n"
+        "   jra 5b\n"
+        "8: addql #1,%1\n"
+        "   jra 5b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "   .align 4\n"
+        "   .long 1b,61b\n"
+        "   .long 2b,6b\n"
+        "   .long 3b,61b\n"
+        "   .long 24b,7b\n"
+        "   .long 4b,7b\n"
+        "   .long 25b,8b\n"
+        "   .long 5b,8b\n"
+        ".previous"
+        : "=a"(to), "=d"(n)
+        : "r"(n & 3), "d"(0), "0"(to), "1"(n/4));
+    return n;
+}
+EXPORT_SYMBOL(__clear_user);
+
+#endif /* CONFIG_COLDFIRE */
+
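Note on the uaccess.c routines: as in the generic kernel versions, the
__generic_copy_*_user() asm returns the number of bytes that could not be
copied, with 0 meaning success; the .fixup/__ex_table sections pair each
faultable access with recovery code that computes that residue.  Typical
caller-side use (illustration only; example_read, kbuf, ubuf and len are
invented names):

  static long example_read(void __user *ubuf, void *kbuf, size_t len)
  {
          /* nonzero return means some bytes were left uncopied */
          if (copy_from_user(kbuf, ubuf, len))
                  return -EFAULT;
          return len;
  }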