Changeset 99685 in vbox
- Timestamp:
- May 8, 2023 9:59:40 PM (17 months ago)
- Location:
- trunk/src/VBox/VMM
- Files:
- 3 edited
VMMAll/IEMAll.cpp (modified) (9 diffs)
-
VMMAll/IEMAllInstructionsOneByte.cpp.h (modified) (1 diff)
-
include/IEMOpHlp.h (modified) (1 diff)
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r99220 r99685 8771 8771 * @param pVCpu The cross context virtual CPU structure of the calling thread. 8772 8772 * @param bRm The ModRM byte. 8773 * @param cbImm The size of any immediate following the 8774 * effective address opcode bytes. Important for 8775 * RIP relative addressing. 8773 * @param cbImmAndRspOffset - First byte: The size of any immediate 8774 * following the effective address opcode bytes 8775 * (only for RIP relative addressing). 8776 * - Second byte: RSP displacement (for POP [ESP]). 8776 8777 * @param pGCPtrEff Where to return the effective address. 8777 8778 */ 8778 VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint 8_t cbImm, PRTGCPTR pGCPtrEff) RT_NOEXCEPT8779 VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff) RT_NOEXCEPT 8779 8780 { 8780 8781 Log5(("iemOpHlpCalcRmEffAddr: bRm=%#x\n", bRm)); … … 8866 8867 case 2: u32EffAddr += pVCpu->cpum.GstCtx.edx; break; 8867 8868 case 3: u32EffAddr += pVCpu->cpum.GstCtx.ebx; break; 8868 case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp ; SET_SS_DEF(); break;8869 case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break; 8869 8870 case 5: 8870 8871 if ((bRm & X86_MODRM_MOD_MASK) != 0) … … 8931 8932 { 8932 8933 IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64EffAddr); 8933 u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + cbImm;8934 u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (cbImmAndRspOffset & UINT32_C(0xff)); 8934 8935 } 8935 8936 else … … 8988 8989 case 2: u64EffAddr += pVCpu->cpum.GstCtx.rdx; break; 8989 8990 case 3: u64EffAddr += pVCpu->cpum.GstCtx.rbx; break; 8990 case 4: u64EffAddr += pVCpu->cpum.GstCtx.rsp ; SET_SS_DEF(); break;8991 case 4: u64EffAddr += pVCpu->cpum.GstCtx.rsp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break; 8991 8992 case 6: u64EffAddr += pVCpu->cpum.GstCtx.rsi; break; 8992 8993 case 7: u64EffAddr += pVCpu->cpum.GstCtx.rdi; break; … … 9063 9064 
9064 9065 9066 #ifdef IEM_WITH_SETJMP 9065 9067 /** 9066 9068 * Calculates the effective address of a ModR/M memory operand. … … 9068 9070 * Meant to be used via IEM_MC_CALC_RM_EFF_ADDR. 9069 9071 * 9070 * @return Strict VBox status code. 9072 * May longjmp on internal error. 9073 * 9074 * @return The effective address. 9071 9075 * @param pVCpu The cross context virtual CPU structure of the calling thread. 9072 9076 * @param bRm The ModRM byte. 9073 * @param cbImm The size of any immediate following the 9074 * effective address opcode bytes. Important for 9075 * RIP relative addressing. 9076 * @param pGCPtrEff Where to return the effective address. 9077 * @param offRsp RSP displacement. 9078 */ 9079 VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff, int8_t offRsp) RT_NOEXCEPT 9080 { 9081 Log5(("iemOpHlpCalcRmEffAddr: bRm=%#x\n", bRm)); 9082 # define SET_SS_DEF() \ 9083 do \ 9084 { \ 9085 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SEG_MASK)) \ 9086 pVCpu->iem.s.iEffSeg = X86_SREG_SS; \ 9087 } while (0) 9088 9089 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT) 9090 { 9091 /** @todo Check the effective address size crap! */ 9092 if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_16BIT) 9093 { 9094 uint16_t u16EffAddr; 9095 9096 /* Handle the disp16 form with no registers first. */ 9097 if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 6) 9098 IEM_OPCODE_GET_NEXT_U16(&u16EffAddr); 9099 else 9100 { 9101 /* Get the displacment. */ 9102 switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK) 9103 { 9104 case 0: u16EffAddr = 0; break; 9105 case 1: IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16EffAddr); break; 9106 case 2: IEM_OPCODE_GET_NEXT_U16(&u16EffAddr); break; 9107 default: AssertFailedReturn(VERR_IEM_IPE_1); /* (caller checked for these) */ 9108 } 9109 9110 /* Add the base and index registers to the disp. 
*/ 9111 switch (bRm & X86_MODRM_RM_MASK) 9112 { 9113 case 0: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.si; break; 9114 case 1: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.di; break; 9115 case 2: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.si; SET_SS_DEF(); break; 9116 case 3: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.di; SET_SS_DEF(); break; 9117 case 4: u16EffAddr += pVCpu->cpum.GstCtx.si; break; 9118 case 5: u16EffAddr += pVCpu->cpum.GstCtx.di; break; 9119 case 6: u16EffAddr += pVCpu->cpum.GstCtx.bp; SET_SS_DEF(); break; 9120 case 7: u16EffAddr += pVCpu->cpum.GstCtx.bx; break; 9121 } 9122 } 9123 9124 *pGCPtrEff = u16EffAddr; 9125 } 9126 else 9127 { 9128 Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT); 9129 uint32_t u32EffAddr; 9130 9131 /* Handle the disp32 form with no registers first. */ 9132 if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5) 9133 IEM_OPCODE_GET_NEXT_U32(&u32EffAddr); 9134 else 9135 { 9136 /* Get the register (or SIB) value. */ 9137 switch ((bRm & X86_MODRM_RM_MASK)) 9138 { 9139 case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break; 9140 case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break; 9141 case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break; 9142 case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break; 9143 case 4: /* SIB */ 9144 { 9145 uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib); 9146 9147 /* Get the index and scale it. 
*/ 9148 switch ((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK) 9149 { 9150 case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break; 9151 case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break; 9152 case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break; 9153 case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break; 9154 case 4: u32EffAddr = 0; /*none */ break; 9155 case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; break; 9156 case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break; 9157 case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break; 9158 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9159 } 9160 u32EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK; 9161 9162 /* add base */ 9163 switch (bSib & X86_SIB_BASE_MASK) 9164 { 9165 case 0: u32EffAddr += pVCpu->cpum.GstCtx.eax; break; 9166 case 1: u32EffAddr += pVCpu->cpum.GstCtx.ecx; break; 9167 case 2: u32EffAddr += pVCpu->cpum.GstCtx.edx; break; 9168 case 3: u32EffAddr += pVCpu->cpum.GstCtx.ebx; break; 9169 case 4: 9170 u32EffAddr += pVCpu->cpum.GstCtx.esp + offRsp; 9171 SET_SS_DEF(); 9172 break; 9173 case 5: 9174 if ((bRm & X86_MODRM_MOD_MASK) != 0) 9175 { 9176 u32EffAddr += pVCpu->cpum.GstCtx.ebp; 9177 SET_SS_DEF(); 9178 } 9179 else 9180 { 9181 uint32_t u32Disp; 9182 IEM_OPCODE_GET_NEXT_U32(&u32Disp); 9183 u32EffAddr += u32Disp; 9184 } 9185 break; 9186 case 6: u32EffAddr += pVCpu->cpum.GstCtx.esi; break; 9187 case 7: u32EffAddr += pVCpu->cpum.GstCtx.edi; break; 9188 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9189 } 9190 break; 9191 } 9192 case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; SET_SS_DEF(); break; 9193 case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break; 9194 case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break; 9195 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9196 } 9197 9198 /* Get and add the displacement. 
*/ 9199 switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK) 9200 { 9201 case 0: 9202 break; 9203 case 1: 9204 { 9205 int8_t i8Disp; IEM_OPCODE_GET_NEXT_S8(&i8Disp); 9206 u32EffAddr += i8Disp; 9207 break; 9208 } 9209 case 2: 9210 { 9211 uint32_t u32Disp; IEM_OPCODE_GET_NEXT_U32(&u32Disp); 9212 u32EffAddr += u32Disp; 9213 break; 9214 } 9215 default: 9216 AssertFailedReturn(VERR_IEM_IPE_2); /* (caller checked for these) */ 9217 } 9218 9219 } 9220 if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT) 9221 *pGCPtrEff = u32EffAddr; 9222 else 9223 { 9224 Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_16BIT); 9225 *pGCPtrEff = u32EffAddr & UINT16_MAX; 9226 } 9227 } 9228 } 9229 else 9230 { 9231 uint64_t u64EffAddr; 9232 9233 /* Handle the rip+disp32 form with no registers first. */ 9234 if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5) 9235 { 9236 IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64EffAddr); 9237 u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + cbImm; 9238 } 9239 else 9240 { 9241 /* Get the register (or SIB) value. 
*/ 9242 switch ((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB) 9243 { 9244 case 0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break; 9245 case 1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break; 9246 case 2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break; 9247 case 3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break; 9248 case 5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; SET_SS_DEF(); break; 9249 case 6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break; 9250 case 7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break; 9251 case 8: u64EffAddr = pVCpu->cpum.GstCtx.r8; break; 9252 case 9: u64EffAddr = pVCpu->cpum.GstCtx.r9; break; 9253 case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break; 9254 case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break; 9255 case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break; 9256 case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break; 9257 case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break; 9258 /* SIB */ 9259 case 4: 9260 case 12: 9261 { 9262 uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib); 9263 9264 /* Get the index and scale it. 
*/ 9265 switch (((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK) | pVCpu->iem.s.uRexIndex) 9266 { 9267 case 0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break; 9268 case 1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break; 9269 case 2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break; 9270 case 3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break; 9271 case 4: u64EffAddr = 0; /*none */ break; 9272 case 5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; break; 9273 case 6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break; 9274 case 7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break; 9275 case 8: u64EffAddr = pVCpu->cpum.GstCtx.r8; break; 9276 case 9: u64EffAddr = pVCpu->cpum.GstCtx.r9; break; 9277 case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break; 9278 case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break; 9279 case 12: u64EffAddr = pVCpu->cpum.GstCtx.r12; break; 9280 case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break; 9281 case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break; 9282 case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break; 9283 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9284 } 9285 u64EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK; 9286 9287 /* add base */ 9288 switch ((bSib & X86_SIB_BASE_MASK) | pVCpu->iem.s.uRexB) 9289 { 9290 case 0: u64EffAddr += pVCpu->cpum.GstCtx.rax; break; 9291 case 1: u64EffAddr += pVCpu->cpum.GstCtx.rcx; break; 9292 case 2: u64EffAddr += pVCpu->cpum.GstCtx.rdx; break; 9293 case 3: u64EffAddr += pVCpu->cpum.GstCtx.rbx; break; 9294 case 4: u64EffAddr += pVCpu->cpum.GstCtx.rsp + offRsp; SET_SS_DEF(); break; 9295 case 6: u64EffAddr += pVCpu->cpum.GstCtx.rsi; break; 9296 case 7: u64EffAddr += pVCpu->cpum.GstCtx.rdi; break; 9297 case 8: u64EffAddr += pVCpu->cpum.GstCtx.r8; break; 9298 case 9: u64EffAddr += pVCpu->cpum.GstCtx.r9; break; 9299 case 10: u64EffAddr += pVCpu->cpum.GstCtx.r10; break; 9300 case 11: u64EffAddr += pVCpu->cpum.GstCtx.r11; break; 9301 case 12: u64EffAddr += pVCpu->cpum.GstCtx.r12; break; 9302 case 14: u64EffAddr += pVCpu->cpum.GstCtx.r14; 
break; 9303 case 15: u64EffAddr += pVCpu->cpum.GstCtx.r15; break; 9304 /* complicated encodings */ 9305 case 5: 9306 case 13: 9307 if ((bRm & X86_MODRM_MOD_MASK) != 0) 9308 { 9309 if (!pVCpu->iem.s.uRexB) 9310 { 9311 u64EffAddr += pVCpu->cpum.GstCtx.rbp; 9312 SET_SS_DEF(); 9313 } 9314 else 9315 u64EffAddr += pVCpu->cpum.GstCtx.r13; 9316 } 9317 else 9318 { 9319 uint32_t u32Disp; 9320 IEM_OPCODE_GET_NEXT_U32(&u32Disp); 9321 u64EffAddr += (int32_t)u32Disp; 9322 } 9323 break; 9324 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9325 } 9326 break; 9327 } 9328 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 9329 } 9330 9331 /* Get and add the displacement. */ 9332 switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK) 9333 { 9334 case 0: 9335 break; 9336 case 1: 9337 { 9338 int8_t i8Disp; 9339 IEM_OPCODE_GET_NEXT_S8(&i8Disp); 9340 u64EffAddr += i8Disp; 9341 break; 9342 } 9343 case 2: 9344 { 9345 uint32_t u32Disp; 9346 IEM_OPCODE_GET_NEXT_U32(&u32Disp); 9347 u64EffAddr += (int32_t)u32Disp; 9348 break; 9349 } 9350 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* (caller checked for these) */ 9351 } 9352 9353 } 9354 9355 if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_64BIT) 9356 *pGCPtrEff = u64EffAddr; 9357 else 9358 { 9359 Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT); 9360 *pGCPtrEff = u64EffAddr & UINT32_MAX; 9361 } 9362 } 9363 9364 Log5(("iemOpHlpCalcRmEffAddr: EffAddr=%#010RGv\n", *pGCPtrEff)); 9365 return VINF_SUCCESS; 9366 } 9367 9368 9369 #ifdef IEM_WITH_SETJMP 9370 /** 9371 * Calculates the effective address of a ModR/M memory operand. 9372 * 9373 * Meant to be used via IEM_MC_CALC_RM_EFF_ADDR. 9374 * 9375 * May longjmp on internal error. 9376 * 9377 * @return The effective address. 9378 * @param pVCpu The cross context virtual CPU structure of the calling thread. 9379 * @param bRm The ModRM byte. 9380 * @param cbImm The size of any immediate following the 9381 * effective address opcode bytes. Important for 9382 * RIP relative addressing. 
9383 */ 9384 RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm) IEM_NOEXCEPT_MAY_LONGJMP 9077 * @param cbImmAndRspOffset - First byte: The size of any immediate 9078 * following the effective address opcode bytes 9079 * (only for RIP relative addressing). 9080 * - Second byte: RSP displacement (for POP [ESP]). 9081 */ 9082 RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset) IEM_NOEXCEPT_MAY_LONGJMP 9385 9083 { 9386 9084 Log5(("iemOpHlpCalcRmEffAddrJmp: bRm=%#x\n", bRm)); … … 9472 9170 case 2: u32EffAddr += pVCpu->cpum.GstCtx.edx; break; 9473 9171 case 3: u32EffAddr += pVCpu->cpum.GstCtx.ebx; break; 9474 case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp ; SET_SS_DEF(); break;9172 case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break; 9475 9173 case 5: 9476 9174 if ((bRm & X86_MODRM_MOD_MASK) != 0) … … 9536 9234 { 9537 9235 IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64EffAddr); 9538 u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + cbImm;9236 u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (cbImmAndRspOffset & UINT32_C(0xff)); 9539 9237 } 9540 9238 else … … 9593 9291 case 2: u64EffAddr += pVCpu->cpum.GstCtx.rdx; break; 9594 9292 case 3: u64EffAddr += pVCpu->cpum.GstCtx.rbx; break; 9595 case 4: u64EffAddr += pVCpu->cpum.GstCtx.rsp ; SET_SS_DEF(); break;9293 case 4: u64EffAddr += pVCpu->cpum.GstCtx.rsp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break; 9596 9294 case 6: u64EffAddr += pVCpu->cpum.GstCtx.rsi; break; 9597 9295 case 7: u64EffAddr += pVCpu->cpum.GstCtx.rdi; break; -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h
r99583 r99685 5214 5214 switch (pVCpu->iem.s.enmEffOpSize) 5215 5215 { 5216 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr Ex(pVCpu, bRm, 0, &GCPtrEff, 2); break;5217 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr Ex(pVCpu, bRm, 0, &GCPtrEff, 4); break;5218 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr Ex(pVCpu, bRm, 0, &GCPtrEff, 8); break;5216 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break; 5217 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break; 5218 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break; 5219 5219 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5220 5220 } -
trunk/src/VBox/VMM/include/IEMOpHlp.h
r99330 r99685 652 652 } while (0) 653 653 654 VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff) RT_NOEXCEPT; 655 VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff, int8_t offRsp) RT_NOEXCEPT; 654 VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff) RT_NOEXCEPT; 656 655 #ifdef IEM_WITH_SETJMP 657 RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint 8_t cbImm) IEM_NOEXCEPT_MAY_LONGJMP;656 RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset) IEM_NOEXCEPT_MAY_LONGJMP; 658 657 #endif 659 658
Note: See TracChangeset for help on using the changeset viewer.