/* SPDX-License-Identifier: GPL-2.0 */
/*---------------------------------------------------------------------------+
 |  reg_norm.S                                                               |
 |                                                                           |
 | Copyright (C) 1992,1993,1994,1995,1997                                    |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail   billm@suburbia.net             |
 |                                                                           |
 | Normalize the value in an FPU_REG.                                        |
 |                                                                           |
 | Call from C as:                                                           |
 |    int FPU_normalize(FPU_REG *n)                                          |
 |                                                                           |
 |    int FPU_normalize_nuo(FPU_REG *n)                                      |
 |                                                                           |
 | Return value is the tag of the answer, OR-ed with FPU_Exception if        |
 |  one was raised, or -1 on internal error.                                 |
 |                                                                           |
 +---------------------------------------------------------------------------*/
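/*
 * Illustrative caller sketch (not taken from the emulator sources; the
 * variable names below are assumptions made only for the example):
 *
 *	FPU_REG tmp;
 *	int tag;
 *
 *	...fill tmp's significand and internal-form exponent...
 *	tag = FPU_normalize(&tmp);
 *	if (tag & FPU_Exception)
 *		...underflow or overflow was signalled...
 *	...otherwise tag is TAG_Valid or TAG_Zero and tmp is normalized...
 */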

#include "fpu_emu.h"


.text
SYM_FUNC_START(FPU_normalize)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx

	movl	PARAM1,%ebx

	movl	SIGH(%ebx),%edx
	movl	SIGL(%ebx),%eax

	orl	%edx,%edx	/* ms bits */
	js	L_done		/* Already normalized */
	jnz	L_shift_1	/* Shift left 1 - 31 bits */

	orl	%eax,%eax
	jz	L_zero		/* The contents are zero */

	movl	%eax,%edx
	xorl	%eax,%eax
	subw	$32,EXP(%ebx)	/* This can cause an underflow */

/* We need to shift left by 1 - 31 bits */
L_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	shld	%cl,%eax,%edx
	shl	%cl,%eax
	subw	%cx,EXP(%ebx)	/* This can cause an underflow */

	movl	%edx,SIGH(%ebx)
	movl	%eax,SIGL(%ebx)
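/*
 * Worked example (illustrative values): if the 64-bit significand is
 * 0x00012345:00000000, bsrl finds bit 16 as the highest set bit of SIGH,
 * so the sequence above leaves 31 - 16 = 15 in %ecx; shld/shl then shift
 * the whole significand left by 15 places, setting bit 63, and EXP(%ebx)
 * is reduced by 15 to compensate.
 */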

L_done:
	cmpw	EXP_OVER,EXP(%ebx)
	jge	L_overflow

	cmpw	EXP_UNDER,EXP(%ebx)
	jle	L_underflow

L_exit_valid:
	movl	TAG_Valid,%eax

	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	andw	$0x7fff,EXP(%ebx)
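	/*
	 * Up to this point the exponent has been handled in the emulator's
	 * internal form; adding EXTENDED_Ebias converts it to the biased
	 * 80x87 extended form, and the andw keeps it within the 15-bit
	 * exponent field.
	 */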

L_exit:
	popl	%ebx
	leave
	RET


L_zero:
	movw	$0,EXP(%ebx)
	movl	TAG_Zero,%eax
	jmp	L_exit

L_underflow:
	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	push	%ebx
	call	arith_underflow	/* its %eax return value becomes ours */
	pop	%ebx
	jmp	L_exit

L_overflow:
	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	push	%ebx
	call	arith_overflow	/* its %eax return value becomes ours */
	pop	%ebx
	jmp	L_exit
SYM_FUNC_END(FPU_normalize)



/* Normalise without reporting underflow or overflow */
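/*
 * Unlike FPU_normalize above, this variant calls neither arith_underflow()
 * nor arith_overflow(), and it leaves the exponent in internal form (no
 * EXTENDED_Ebias is added); the return value is simply TAG_Valid or
 * TAG_Zero, never OR-ed with FPU_Exception.
 */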
SYM_FUNC_START(FPU_normalize_nuo)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx

	movl	PARAM1,%ebx

	movl	SIGH(%ebx),%edx
	movl	SIGL(%ebx),%eax

	orl	%edx,%edx	/* ms bits */
	js	L_exit_nuo_valid	/* Already normalized */
	jnz	L_nuo_shift_1	/* Shift left 1 - 31 bits */

	orl	%eax,%eax
	jz	L_exit_nuo_zero	/* The contents are zero */

	movl	%eax,%edx
	xorl	%eax,%eax
	subw	$32,EXP(%ebx)	/* This can cause an underflow */

/* We need to shift left by 1 - 31 bits */
L_nuo_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	shld	%cl,%eax,%edx
	shl	%cl,%eax
	subw	%cx,EXP(%ebx)	/* This can cause an underflow */

	movl	%edx,SIGH(%ebx)
	movl	%eax,SIGL(%ebx)

L_exit_nuo_valid:
	movl	TAG_Valid,%eax

	popl	%ebx
	leave
	RET

L_exit_nuo_zero:
	movl	TAG_Zero,%eax
	movw	EXP_UNDER,EXP(%ebx)

	popl	%ebx
	leave
	RET
SYM_FUNC_END(FPU_normalize_nuo)