/* SPDX-License-Identifier: GPL-2.0 */
	.file	"shr_Xsig.S"
/*---------------------------------------------------------------------------+
 |  shr_Xsig.S                                                               |
 |                                                                           |
 |  12 byte right shift function                                             |
 |                                                                           |
 | Copyright (C) 1992,1994,1995                                              |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail [email protected]                |
 |                                                                           |
 | Call from C as:                                                           |
 |   void shr_Xsig(Xsig *arg, unsigned nr)                                   |
 |                                                                           |
 |   Extended shift right function.                                          |
 |   Fastest for small shifts.                                               |
 |   Shifts the 12 byte quantity pointed to by the first arg (arg)           |
 |   right by the number of bits specified by the second arg (nr).           |
 |                                                                           |
 +---------------------------------------------------------------------------*/

#include "fpu_emu.h"

.text
  23. SYM_FUNC_START(shr_Xsig)
  24. push %ebp
  25. movl %esp,%ebp
  26. pushl %esi
  27. movl PARAM2,%ecx
  28. movl PARAM1,%esi
  29. cmpl $32,%ecx /* shrd only works for 0..31 bits */
  30. jnc L_more_than_31
  31. /* less than 32 bits */
  32. pushl %ebx
  33. movl (%esi),%eax /* lsl */
  34. movl 4(%esi),%ebx /* midl */
  35. movl 8(%esi),%edx /* msl */
  36. shrd %cl,%ebx,%eax
  37. shrd %cl,%edx,%ebx
  38. shr %cl,%edx
  39. movl %eax,(%esi)
  40. movl %ebx,4(%esi)
  41. movl %edx,8(%esi)
  42. popl %ebx
  43. popl %esi
  44. leave
  45. RET
  46. L_more_than_31:
  47. cmpl $64,%ecx
  48. jnc L_more_than_63
  49. subb $32,%cl
  50. movl 4(%esi),%eax /* midl */
  51. movl 8(%esi),%edx /* msl */
  52. shrd %cl,%edx,%eax
  53. shr %cl,%edx
  54. movl %eax,(%esi)
  55. movl %edx,4(%esi)
  56. movl $0,8(%esi)
  57. popl %esi
  58. leave
  59. RET
  60. L_more_than_63:
  61. cmpl $96,%ecx
  62. jnc L_more_than_95
  63. subb $64,%cl
  64. movl 8(%esi),%eax /* msl */
  65. shr %cl,%eax
  66. xorl %edx,%edx
  67. movl %eax,(%esi)
  68. movl %edx,4(%esi)
  69. movl %edx,8(%esi)
  70. popl %esi
  71. leave
  72. RET
  73. L_more_than_95:
  74. xorl %eax,%eax
  75. movl %eax,(%esi)
  76. movl %eax,4(%esi)
  77. movl %eax,8(%esi)
  78. popl %esi
  79. leave
  80. RET
  81. SYM_FUNC_END(shr_Xsig)