Index: /trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp
===================================================================
--- /trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp	(revision 72660)
+++ /trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp	(revision 72661)
@@ -2785,8 +2785,14 @@
 
     Log4(("hmR0SvmImportGuestState: fExtrn=%#RX64 fWhat=%#RX64\n", pCtx->fExtrn, fWhat));
-    if (pCtx->fExtrn & HMSVM_CPUMCTX_EXTRN_ALL)
-    {
-        fWhat &= pCtx->fExtrn;
-
+
+    /*
+     * We disable interrupts to make the updating of the state and in particular
+ * the fExtrn modification atomic wrt preemption hooks.
+     */
+    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
+
+    fWhat &= pCtx->fExtrn;
+    if (fWhat & pCtx->fExtrn)
+    {
 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM
         if (fWhat & CPUMCTX_EXTRN_HWVIRT)
@@ -2799,5 +2805,4 @@
                 pCtx->hwvirt.fGif = pVmcbCtrl->IntCtrl.n.u1VGif;
             }
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_HWVIRT);
         }
 
@@ -2807,8 +2812,5 @@
                 && VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_INTERRUPT_NESTED_GUEST))
                 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_INTERRUPT_NESTED_GUEST);
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_HM_SVM_HWVIRT_VIRQ);
-        }
-#else
-        ASMAtomicUoAndU64(&pCtx->fExtrn, ~(CPUMCTX_EXTRN_HWVIRT | CPUMCTX_EXTRN_HM_SVM_HWVIRT_VIRQ));
+        }
 #endif
 
@@ -2819,30 +2821,17 @@
             else if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_INHIBIT_INTERRUPTS))
                 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_INHIBIT_INTERRUPTS);
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_HM_SVM_INT_SHADOW);
         }
 
         if (fWhat & CPUMCTX_EXTRN_RIP)
-        {
             pCtx->rip = pVmcbGuest->u64RIP;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_RIP);
-        }
 
         if (fWhat & CPUMCTX_EXTRN_RFLAGS)
-        {
             pCtx->eflags.u32 = pVmcbGuest->u64RFlags;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_RFLAGS);
-        }
 
         if (fWhat & CPUMCTX_EXTRN_RSP)
-        {
             pCtx->rsp = pVmcbGuest->u64RSP;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_RSP);
-        }
 
         if (fWhat & CPUMCTX_EXTRN_RAX)
-        {
             pCtx->rax = pVmcbGuest->u64RAX;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_RAX);
-        }
 
         if (fWhat & CPUMCTX_EXTRN_SREG_MASK)
@@ -2865,5 +2854,4 @@
                 }
                 HMSVM_ASSERT_SEG_GRANULARITY(pCtx, cs);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_CS);
             }
             if (fWhat & CPUMCTX_EXTRN_SS)
@@ -2881,5 +2869,4 @@
                 if (pCtx->ss.Attr.n.u2Dpl != uCpl)
                     pCtx->ss.Attr.n.u2Dpl = uCpl & 0x3;
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_SS);
             }
             if (fWhat & CPUMCTX_EXTRN_DS)
@@ -2887,5 +2874,4 @@
                 HMSVM_SEG_REG_COPY_FROM_VMCB(pCtx, pVmcbGuest, DS, ds);
                 HMSVM_ASSERT_SEG_GRANULARITY(pCtx, ds);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_DS);
             }
             if (fWhat & CPUMCTX_EXTRN_ES)
@@ -2893,5 +2879,4 @@
                 HMSVM_SEG_REG_COPY_FROM_VMCB(pCtx, pVmcbGuest, ES, es);
                 HMSVM_ASSERT_SEG_GRANULARITY(pCtx, es);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_ES);
             }
             if (fWhat & CPUMCTX_EXTRN_FS)
@@ -2899,5 +2884,4 @@
                 HMSVM_SEG_REG_COPY_FROM_VMCB(pCtx, pVmcbGuest, FS, fs);
                 HMSVM_ASSERT_SEG_GRANULARITY(pCtx, fs);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_FS);
             }
             if (fWhat & CPUMCTX_EXTRN_GS)
@@ -2905,5 +2889,4 @@
                 HMSVM_SEG_REG_COPY_FROM_VMCB(pCtx, pVmcbGuest, GS, gs);
                 HMSVM_ASSERT_SEG_GRANULARITY(pCtx, gs);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_GS);
             }
         }
@@ -2927,12 +2910,8 @@
                         pCtx->tr.Attr.n.u4Type = X86_SEL_TYPE_SYS_286_TSS_BUSY;
                 }
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_TR);
             }
 
             if (fWhat & CPUMCTX_EXTRN_LDTR)
-            {
                 HMSVM_SEG_REG_COPY_FROM_VMCB(pCtx, pVmcbGuest, LDTR, ldtr);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_LDTR);
-            }
 
             if (fWhat & CPUMCTX_EXTRN_GDTR)
@@ -2940,5 +2919,4 @@
                 pCtx->gdtr.cbGdt = pVmcbGuest->GDTR.u32Limit;
                 pCtx->gdtr.pGdt  = pVmcbGuest->GDTR.u64Base;
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_GDTR);
             }
 
@@ -2947,5 +2925,4 @@
                 pCtx->idtr.cbIdt = pVmcbGuest->IDTR.u32Limit;
                 pCtx->idtr.pIdt  = pVmcbGuest->IDTR.u64Base;
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_IDTR);
             }
         }
@@ -2957,5 +2934,4 @@
             pCtx->msrCSTAR  = pVmcbGuest->u64CSTAR;
             pCtx->msrSFMASK = pVmcbGuest->u64SFMASK;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_SYSCALL_MSRS);
         }
 
@@ -2965,12 +2941,8 @@
             pCtx->SysEnter.eip = pVmcbGuest->u64SysEnterEIP;
             pCtx->SysEnter.esp = pVmcbGuest->u64SysEnterESP;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_SYSENTER_MSRS);
         }
 
         if (fWhat & CPUMCTX_EXTRN_KERNEL_GS_BASE)
-        {
             pCtx->msrKERNELGSBASE = pVmcbGuest->u64KernelGSBase;
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_KERNEL_GS_BASE);
-        }
 
         if (fWhat & CPUMCTX_EXTRN_DR_MASK)
@@ -2982,5 +2954,4 @@
                 else
                     CPUMSetHyperDR6(pVCpu, pVmcbGuest->u64DR6);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_DR6);
             }
 
@@ -2991,5 +2962,4 @@
                 else
                     Assert(pVmcbGuest->u64DR7 == CPUMGetHyperDR7(pVCpu));
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_DR7);
             }
         }
@@ -3002,13 +2972,11 @@
                 uint64_t const uCr0 = (pCtx->cr0          & ~(X86_CR0_TS | X86_CR0_MP))
                                     | (pVmcbGuest->u64CR0 &  (X86_CR0_TS | X86_CR0_MP));
+                VMMRZCallRing3Disable(pVCpu); /* CPUM has log statements and calls into PGM. */
                 CPUMSetGuestCR0(pVCpu, uCr0);
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_CR0);
+                VMMRZCallRing3Enable(pVCpu);
             }
 
             if (fWhat & CPUMCTX_EXTRN_CR2)
-            {
                 pCtx->cr2 = pVmcbGuest->u64CR2;
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_CR2);
-            }
 
             if (fWhat & CPUMCTX_EXTRN_CR3)
@@ -3018,30 +2986,25 @@
                 {
                     CPUMSetGuestCR3(pVCpu, pVmcbGuest->u64CR3);
-                    if (VMMRZCallRing3IsEnabled(pVCpu))
-                    {
-                        Log4(("hmR0SvmImportGuestState: Calling PGMUpdateCR3\n"));
-                        PGMUpdateCR3(pVCpu, pVmcbGuest->u64CR3);
-                    }
-                    else
-                    {
-                        Log4(("hmR0SvmImportGuestState: Setting VMCPU_FF_HM_UPDATE_CR3\n"));
-                        VMCPU_FF_SET(pVCpu, VMCPU_FF_HM_UPDATE_CR3);
-                    }
+                    VMCPU_FF_SET(pVCpu, VMCPU_FF_HM_UPDATE_CR3);
                 }
-                ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_CR3);
             }
 
             /* Changes to CR4 are always intercepted. */
         }
+
+        /* Update fExtrn. */
+        pCtx->fExtrn &= ~fWhat;
 
         /* If everything has been imported, clear the HM keeper bit. */
         if (!(pCtx->fExtrn & HMSVM_CPUMCTX_EXTRN_ALL))
         {
-            ASMAtomicUoAndU64(&pCtx->fExtrn, ~CPUMCTX_EXTRN_KEEPER_HM);
+            pCtx->fExtrn &= ~CPUMCTX_EXTRN_KEEPER_HM;
             Assert(!pCtx->fExtrn);
         }
     }
     else
-        Assert(!pCtx->fExtrn);
+        Assert(!pCtx->fExtrn || (pCtx->fExtrn & HMSVM_CPUMCTX_EXTRN_ALL));
+
+    ASMSetFlags(fSavedFlags);
 
     /*
