Subversion Repositories Kolibri OS

Rev

Rev 3584 | Blame | Last modification | View Log | RSS feed

  1. /* 64-bit multiplication and division
  2.    Copyright (C) 1989, 1992-1999, 2000, 2001, 2002, 2003, 2004
  3.    Free Software Foundation, Inc.
  4.    This file is part of the GNU C Library.
  5.  
  6.    The GNU C Library is free software; you can redistribute it and/or
  7.    modify it under the terms of the GNU Lesser General Public
  8.    License as published by the Free Software Foundation; either
  9.    version 2.1 of the License, or (at your option) any later version.
  10.  
  11.    The GNU C Library is distributed in the hope that it will be useful,
  12.    but WITHOUT ANY WARRANTY; without even the implied warranty of
  13.    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  14.    Lesser General Public License for more details.
  15.  
  16.    You should have received a copy of the GNU Lesser General Public
  17.    License along with the GNU C Library; if not, write to the Free
  18.    Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  19.    02111-1307 USA.  */
  20.  
  21. #include <endian.h>
  22. #include <stdlib.h>
  23.  
/* Force a 32-bit i386 configuration for the KolibriOS build.
   NOTE(review): __WORDSIZE and __i386__ are reserved (double-underscore)
   identifiers normally owned by the compiler/libc; defining them here is a
   deliberate port-time hack — confirm the toolchain does not predefine
   conflicting values.  */
#define __WORDSIZE 32
#define __i386__

#if __WORDSIZE == 32

/* GCC "mode" attributes pin exact machine widths regardless of the base
   type: QI = 8 bits, SI = 32 bits, DI = 64 bits.  */
typedef unsigned int UQItype    __attribute__ ((mode (QI)));
typedef          int SItype     __attribute__ ((mode (SI)));
typedef unsigned int USItype    __attribute__ ((mode (SI)));
typedef          int DItype     __attribute__ ((mode (DI)));
typedef unsigned int UDItype    __attribute__ ((mode (DI)));
/* Type names expected by longlong.h: W = word, HW = half-word,
   DW = double-word, leading U = unsigned.  */
#define Wtype SItype
#define HWtype SItype
#define DWtype DItype
#define UWtype USItype
#define UHWtype USItype
#define UDWtype UDItype
#define W_TYPE_SIZE 32

#include "longlong.h"
  43.  
/* Lookup table used by longlong.h's generic count_leading_zeros()
   fallback: __clz_tab[i] is the bit-length of i, i.e. the position of
   the highest set bit plus one (0 for i == 0, 8 for 128..255).  */
const
unsigned char __clz_tab[] =
{
  0,1,2,2,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
  6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
  7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
  7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
  8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
  8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
  8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
  8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
};
  56.  
/* Word layout of a 64-bit value.  Member order must match the target's
   byte order so that type-punning through DWunion below reads the halves
   correctly.  */
#if __BYTE_ORDER == __BIG_ENDIAN
struct DWstruct { Wtype high, low;};
#elif __BYTE_ORDER == __LITTLE_ENDIAN
struct DWstruct { Wtype low, high;};
#else
#error Unhandled endianity
#endif
/* View a 64-bit value either whole (.ll) or as two 32-bit halves (.s).  */
typedef union { struct DWstruct s; DWtype ll; } DWunion;
  65.  
/* Prototypes of exported functions (the compiler emits calls to these
   for 64-bit / and % on this 32-bit target).  */
extern DWtype __divdi3 (DWtype u, DWtype v);      /* signed quotient   */
extern DWtype __moddi3 (DWtype u, DWtype v);      /* signed remainder  */
extern UDWtype __udivdi3 (UDWtype u, UDWtype v);  /* unsigned quotient */
extern UDWtype __umoddi3 (UDWtype u, UDWtype v);  /* unsigned remainder */
  71.  
/* Core worker: compute N / D; return the 64-bit quotient and, when RP is
   non-null, store the 64-bit remainder through *RP.  All four exported
   entry points funnel into this function.  It performs schoolbook long
   division on 32-bit "digits" using the longlong.h primitives:
   udiv_qrnnd (2-word / 1-word divide), count_leading_zeros,
   sub_ddmmss (2-word subtract) and umul_ppmm (1x1 -> 2-word multiply).
   Division by zero is deliberately routed into a 1/0 trap below.  */
static UDWtype
__udivmoddi4 (UDWtype n, UDWtype d, UDWtype *rp)
{
  DWunion ww;                   /* assembles the 64-bit quotient */
  DWunion nn, dd;               /* numerator/denominator as word pairs */
  DWunion rr;                   /* assembles the 64-bit remainder */
  UWtype d0, d1, n0, n1, n2;    /* low/high words of d and n */
  UWtype q0, q1;                /* low/high quotient words */
  UWtype b, bm;                 /* normalization shift counts */

  nn.ll = n;
  dd.ll = d;

  d0 = dd.s.low;
  d1 = dd.s.high;
  n0 = nn.s.low;
  n1 = nn.s.high;

#if !UDIV_NEEDS_NORMALIZATION
  /* Variant for udiv_qrnnd implementations that do NOT require the
     divisor's most significant bit to be set.  */
  if (d1 == 0)
    {
      if (d0 > n1)
        {
          /* 0q = nn / 0D */

          udiv_qrnnd (q0, n0, n1, n0, d0);
          q1 = 0;

          /* Remainder in n0.  */
        }
      else
        {
          /* qq = NN / 0d */

          if (d0 == 0)
            d0 = 1 / d0;        /* Divide intentionally by zero.  */

          udiv_qrnnd (q1, n1, 0, n1, d0);
          udiv_qrnnd (q0, n0, n1, n0, d0);

          /* Remainder in n0.  */
        }

      if (rp != 0)
        {
          rr.s.low = n0;
          rr.s.high = 0;
          *rp = rr.ll;
        }
    }

#else /* UDIV_NEEDS_NORMALIZATION */
  /* Variant for udiv_qrnnd implementations that require a normalized
     divisor (most significant bit set); shift counts undo it at the end.  */
  if (d1 == 0)
    {
      if (d0 > n1)
        {
          /* 0q = nn / 0D */

          count_leading_zeros (bm, d0);

          if (bm != 0)
            {
              /* Normalize, i.e. make the most significant bit of the
                 denominator set.  */

              d0 = d0 << bm;
              n1 = (n1 << bm) | (n0 >> (W_TYPE_SIZE - bm));
              n0 = n0 << bm;
            }

          udiv_qrnnd (q0, n0, n1, n0, d0);
          q1 = 0;

          /* Remainder in n0 >> bm.  */
        }
      else
        {
          /* qq = NN / 0d */

          if (d0 == 0)
            d0 = 1 / d0;        /* Divide intentionally by zero.  */

          count_leading_zeros (bm, d0);

          if (bm == 0)
            {
              /* From (n1 >= d0) /\ (the most significant bit of d0 is set),
                 conclude (the most significant bit of n1 is set) /\ (the
                 leading quotient digit q1 = 1).

                 This special case is necessary, not an optimization.
                 (Shifts counts of W_TYPE_SIZE are undefined.)  */

              n1 -= d0;
              q1 = 1;
            }
          else
            {
              /* Normalize.  */

              b = W_TYPE_SIZE - bm;

              d0 = d0 << bm;
              n2 = n1 >> b;
              n1 = (n1 << bm) | (n0 >> b);
              n0 = n0 << bm;

              udiv_qrnnd (q1, n1, n2, n1, d0);
            }

          /* n1 != d0...  */

          udiv_qrnnd (q0, n0, n1, n0, d0);

          /* Remainder in n0 >> bm.  */
        }

      if (rp != 0)
        {
          /* Shift right by bm to undo the normalization above.  */
          rr.s.low = n0 >> bm;
          rr.s.high = 0;
          *rp = rr.ll;
        }
    }
#endif /* UDIV_NEEDS_NORMALIZATION */

  else
    {
      /* Divisor occupies both words (d1 != 0).  */
      if (d1 > n1)
        {
          /* 00 = nn / DD */

          q0 = 0;
          q1 = 0;

          /* Remainder in n1n0.  */
          if (rp != 0)
            {
              rr.s.low = n0;
              rr.s.high = n1;
              *rp = rr.ll;
            }
        }
      else
        {
          /* 0q = NN / dd */

          count_leading_zeros (bm, d1);
          if (bm == 0)
            {
              /* From (n1 >= d1) /\ (the most significant bit of d1 is set),
                 conclude (the most significant bit of n1 is set) /\ (the
                 quotient digit q0 = 0 or 1).

                 This special case is necessary, not an optimization.  */

              /* The condition on the next line takes advantage of that
                 n1 >= d1 (true due to program flow).  */
              if (n1 > d1 || n0 >= d0)
                {
                  q0 = 1;
                  sub_ddmmss (n1, n0, n1, n0, d1, d0);
                }
              else
                q0 = 0;

              q1 = 0;

              if (rp != 0)
                {
                  rr.s.low = n0;
                  rr.s.high = n1;
                  *rp = rr.ll;
                }
            }
          else
            {
              UWtype m1, m0;
              /* Normalize.  */

              b = W_TYPE_SIZE - bm;

              d1 = (d1 << bm) | (d0 >> b);
              d0 = d0 << bm;
              n2 = n1 >> b;
              n1 = (n1 << bm) | (n0 >> b);
              n0 = n0 << bm;

              /* Estimate the quotient digit from the top words, then
                 correct it downward by at most one after checking the
                 full product q0 * d0 against the remaining numerator.  */
              udiv_qrnnd (q0, n1, n2, n1, d1);
              umul_ppmm (m1, m0, q0, d0);

              if (m1 > n1 || (m1 == n1 && m0 > n0))
                {
                  q0--;
                  sub_ddmmss (m1, m0, m1, m0, d1, d0);
                }

              q1 = 0;

              /* Remainder in (n1n0 - m1m0) >> bm.  */
              if (rp != 0)
                {
                  sub_ddmmss (n1, n0, n1, n0, m1, m0);
                  rr.s.low = (n1 << b) | (n0 >> bm);
                  rr.s.high = n1 >> bm;
                  *rp = rr.ll;
                }
            }
        }
    }

  ww.s.low = q0;
  ww.s.high = q1;
  return ww.ll;
}
  288.  
  289. DWtype
  290. __divdi3 (DWtype u, DWtype v)
  291. {
  292.   Wtype c = 0;
  293.   DWtype w;
  294.  
  295.   if (u < 0)
  296.     {
  297.       c = ~c;
  298.       u = -u;
  299.     }
  300.   if (v < 0)
  301.     {
  302.       c = ~c;
  303.       v = -v;
  304.     }
  305.   w = __udivmoddi4 (u, v, NULL);
  306.   if (c)
  307.     w = -w;
  308.   return w;
  309. }
  310.  
  311. DWtype
  312. __moddi3 (DWtype u, DWtype v)
  313. {
  314.   Wtype c = 0;
  315.   DWtype w;
  316.  
  317.   if (u < 0)
  318.     {
  319.       c = ~c;
  320.       u = -u;
  321.     }
  322.   if (v < 0)
  323.     v = -v;
  324.   __udivmoddi4 (u, v, &w);
  325.   if (c)
  326.     w = -w;
  327.   return w;
  328. }
  329.  
  330. UDWtype
  331. __udivdi3 (UDWtype u, UDWtype v)
  332. {
  333.   return __udivmoddi4 (u, v, NULL);
  334. }
  335.  
  336. UDWtype
  337. __umoddi3 (UDWtype u, UDWtype v)
  338. {
  339.   UDWtype w;
  340.  
  341.   __udivmoddi4 (u, v, &w);
  342.   return w;
  343. }
  344.  
  345. #endif
  346.