Index: /trunk/src/VBox/Runtime/common/math/bignum-amd64-x86.asm
===================================================================
--- /trunk/src/VBox/Runtime/common/math/bignum-amd64-x86.asm	(revision 60697)
+++ /trunk/src/VBox/Runtime/common/math/bignum-amd64-x86.asm	(revision 60698)
@@ -26,7 +26,23 @@
 
 
+;*********************************************************************************************************************************
+;*  Header Files                                                                                                                 *
+;*********************************************************************************************************************************
 %define RT_ASM_WITH_SEH64
 %include "iprt/asmdefs.mac"
 %include "internal/bignum.mac"
+
+
+;*********************************************************************************************************************************
+;*  Defined Constants And Macros                                                                                                 *
+;*********************************************************************************************************************************
+%ifdef RT_ARCH_AMD64
+ %macro sahf 0
+  %error "SAHF not supported on ancient AMD64"
+ %endmacro
+ %macro lahf 0
+  %error "LAHF not supported on ancient AMD64"
+ %endmacro
+%endif
 
 
@@ -67,8 +83,8 @@
         xor     r11d, r11d              ; index register.
 
-%if RTBIGNUM_ELEMENT_SIZE == 4
+ %if RTBIGNUM_ELEMENT_SIZE == 4
         add     cUsed, 1                ; cUsed = RT_ALIGN(cUsed, 2) / 2
         shr     cUsed, 1
-%endif
+ %endif
         cmp     cUsed, 8                ; Skip the big loop if small number.
         jb      .small_job
@@ -106,4 +122,5 @@
         jnz     .big_loop
 
+ %if 0 ; Ancient AMD CPUs don't have lahf/sahf in 64-bit mode, thus the mess in the %else.
         lahf                            ; Save CF
         and     cUsed, 7                ; Up to seven odd rounds.
@@ -111,5 +128,14 @@
         sahf                            ; Restore CF.
         jmp     .small_loop             ; Skip CF=1 (clc).
-
+ %else
+        jnc     .no_carry
+        and     cUsed, 7                ; Up to seven odd rounds.
+        jz      .done
+        stc
+        jmp     .small_loop             ; Skip CF=1 (clc).
+.no_carry:
+        and     cUsed, 7                ; Up to seven odd rounds.
+        jz      .done
+ %endif
 .small_job:
         clc
@@ -247,8 +273,8 @@
         xor     r11d, r11d              ; index register.
 
-%if RTBIGNUM_ELEMENT_SIZE == 4
+ %if RTBIGNUM_ELEMENT_SIZE == 4
         add     cUsed, 1                ; cUsed = RT_ALIGN(cUsed, 2) / 2
         shr     cUsed, 1
-%endif
+ %endif
         cmp     cUsed, 8                ; Skip the big loop if small number.
         jb      .small_job
@@ -278,4 +304,5 @@
         jnz     .big_loop
 
+ %if 0 ; Ancient AMD CPUs don't have lahf/sahf in 64-bit mode, thus the mess in the %else.
         lahf                            ; Save CF
         and     cUsed, 7                ; Up to seven odd rounds.
@@ -283,5 +310,14 @@
         sahf                            ; Restore CF.
         jmp     .small_loop             ; Skip CF=1 (clc).
-
+ %else
+        jnc     .no_carry
+        and     cUsed, 7                ; Up to seven odd rounds.
+        jz      .done
+        stc
+        jmp     .small_loop             ; Skip CF=1 (clc).
+.no_carry:
+        and     cUsed, 7                ; Up to seven odd rounds.
+        jz      .done
+ %endif
 .small_job:
         clc
@@ -451,5 +487,5 @@
 
         ; More to do?
-        lahf                            ; save carry flag (uCarry no longer used on x86).
+        pushf                           ; save carry flag (uCarry no longer used on x86).
 %ifdef RT_ARCH_AMD64
         mov     cUsed, r11d
@@ -459,8 +495,8 @@
         and     cUsed, 7
         jz      .restore_cf_and_return  ; Jump if we're good and done.
-        sahf                            ; Restore CF.
+        popf                            ; Restore CF.
         jmp     .small_loop             ; Deal with the odd rounds.
 .restore_cf_and_return:
-        sahf
+        popf
         jmp     .carry_to_eax
 
