Mirror of https://github.com/asmjit/asmjit.git (synced 2025-12-18 13:04:36 +03:00)
Fixed ASMJIT_FALL_THROUGH detection.
Renamed ASMJIT_FALL_THROUGH to ASMJIT_FALLTHROUGH.
@@ -716,15 +716,15 @@
 #endif
 // [@CC_EXPECT}@]
 
-// [@CC_FALL_THROUGH{@]
-// \def ASMJIT_FALL_THROUGH
+// [@CC_FALLTHROUGH{@]
+// \def ASMJIT_FALLTHROUGH
 // The code falls through annotation (switch / case).
-#if ASMJIT_CC_CLANG_GE(3, 3, 0)
-# define ASMJIT_FALL_THROUGH [[clang::fallthrough]]
+#if ASMJIT_CC_CLANG && __cplusplus >= 201103L
+# define ASMJIT_FALLTHROUGH [[clang::fallthrough]]
 #else
-# define ASMJIT_FALL_THROUGH (void)0
+# define ASMJIT_FALLTHROUGH (void)0
 #endif
-// [@CC_FALL_THROUGH}@]
+// [@CC_FALLTHROUGH}@]
 
 // [@CC_UNUSED{@]
 // \def ASMJIT_UNUSED(x)
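
For reference, the renamed macro is consumed inside switch statements exactly as the hunks that follow do: it replaces an implicit fall-through with an explicit annotation that clang can check. A minimal, self-contained sketch under that assumption (the guard mirrors the definition above; encode() and its case values are illustrative, not asmjit's real dispatch code):

// Illustrative macro, same shape as the definition in the hunk above.
#if defined(__clang__) && __cplusplus >= 201103L
# define ASMJIT_FALLTHROUGH [[clang::fallthrough]]
#else
# define ASMJIT_FALLTHROUGH (void)0
#endif

// Hypothetical encoder fragment: the annotation documents the intentional
// fall-through and silences clang's -Wimplicit-fallthrough diagnostic.
static int encode(int encodingId, int operandSize) {
  int opCode = 0;
  switch (encodingId) {
    case 1:
      opCode += (operandSize != 1); // Adjust the opcode, then fall through on purpose.
      ASMJIT_FALLTHROUGH;

    case 2:
      opCode += 0x10;
      break;

    default:
      break;
  }
  return opCode;
}

Under GCC or pre-C++11 clang the macro expands to (void)0, so the annotation compiles away to a harmless no-op statement.
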
@@ -660,7 +660,7 @@ static void X86Assembler_dumpRegister(StringBuilder& sb, uint32_t type, uint32_t
 goto _EmitNE;
 
 index -= 4;
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86RegTypeGpbHi:
 if (index >= 4)
@@ -1060,14 +1060,14 @@ static ASMJIT_INLINE Error X86Assembler_emit(Assembler* self_, uint32_t code, co
 
 case kX86InstEncodingIdX86Op_66H:
 ADD_66H_P(true);
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdX86Op:
 goto _EmitX86Op;
 
 case kX86InstEncodingIdX86Rm_B:
 opCode += o0->getSize() != 1;
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdX86Rm:
 ADD_66H_P_BY_SIZE(o0->getSize());
@@ -1857,7 +1857,7 @@ static ASMJIT_INLINE Error X86Assembler_emit(Assembler* self_, uint32_t code, co
 EMIT_BYTE(imLen == 1 ? 0x6A : 0x68);
 goto _EmitImm;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdX86Pop:
 if (encoded == ENC_OPS(Reg, None, None)) {
@@ -2104,7 +2104,7 @@ _GroupPop_Gp:
 rmMem = x86OpMem(o1);
 goto _EmitX86M;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdX86Xadd:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -2257,7 +2257,7 @@ _EmitFpArith_Mem:
 opCode += 1;
 goto _EmitFpuOp;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdFpuR:
 if (encoded == ENC_OPS(Reg, None, None)) {
@@ -2564,11 +2564,11 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdExtRm_PQ:
 ADD_66H_P(o0->isRegType(kX86RegTypeXmm) || o1->isRegType(kX86RegTypeXmm));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdExtRm_Q:
 ADD_REX_W(o0->isRegType(kX86RegTypeGpq) || o1->isRegType(kX86RegTypeGpq) || (o1->isMem() && o1->getSize() == 8));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdExtRm:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -2780,7 +2780,7 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdAvxMr_P:
 ADD_VEX_L(x86IsYmm(o0) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxMr:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -2798,7 +2798,7 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdAvxMri_P:
 ADD_VEX_L(x86IsYmm(o0) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxMri:
 imVal = static_cast<const Imm*>(o2)->getInt64();
@@ -2819,7 +2819,7 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdAvxRm_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRm:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -2837,7 +2837,7 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdAvxRmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRmi:
 imVal = static_cast<const Imm*>(o2)->getInt64();
@@ -2858,7 +2858,7 @@ _EmitMmMovD:
 
 case kX86InstEncodingIdAvxRvm_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvm:
 if (encoded == ENC_OPS(Reg, Reg, Reg)) {
@@ -2877,7 +2877,7 @@ _EmitAvxRvm:
 
 case kX86InstEncodingIdAvxRvmr_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvmr:
 if (!o3->isReg())
@@ -2901,7 +2901,7 @@ _EmitAvxRvm:
 
 case kX86InstEncodingIdAvxRvmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvmi:
 if (!o3->isImm())
@@ -2977,7 +2977,7 @@ _EmitAvxRvm:
 
 case kX86InstEncodingIdAvxRmMr_P:
 ADD_VEX_L(x86IsYmm(o0) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRmMr:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -3006,7 +3006,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdAvxRvmRmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvmRmi:
 if (encoded == ENC_OPS(Reg, Reg, Reg)) {
@@ -3072,7 +3072,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdAvxRvmMvr_P:
 ADD_VEX_L(x86IsYmm(o0) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvmMvr:
 if (encoded == ENC_OPS(Reg, Reg, Reg)) {
@@ -3100,7 +3100,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdAvxRvmVmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvmVmi:
 if (encoded == ENC_OPS(Reg, Reg, Reg)) {
@@ -3152,7 +3152,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdAvxVmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxVmi:
 imVal = static_cast<const Imm*>(o3)->getInt64();
@@ -3173,7 +3173,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdAvxRvrmRvmr_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdAvxRvrmRvmr:
 if (encoded == ENC_OPS(Reg, Reg, Reg) && o3->isReg()) {
@@ -3262,7 +3262,7 @@ _AvxRmMr_AfterRegRegCheck:
 case kX86InstEncodingIdFma4_P:
 // It's fine to just check the first operand, second is just for sanity.
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdFma4:
 if (encoded == ENC_OPS(Reg, Reg, Reg) && o3->isReg()) {
@@ -3303,7 +3303,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdXopRm_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(o1));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdXopRm:
 if (encoded == ENC_OPS(Reg, Reg, None)) {
@@ -3387,7 +3387,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdXopRvmr_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdXopRvmr:
 if (!o3->isReg())
@@ -3411,7 +3411,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdXopRvmi_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdXopRvmi:
 if (!o3->isImm())
@@ -3435,7 +3435,7 @@ _AvxRmMr_AfterRegRegCheck:
 
 case kX86InstEncodingIdXopRvrmRvmr_P:
 ADD_VEX_L(x86IsYmm(static_cast<const X86Reg*>(o0)) | x86IsYmm(static_cast<const X86Reg*>(o1)));
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstEncodingIdXopRvrmRvmr:
 if (encoded == ENC_OPS(Reg, Reg, Reg) && o3->isReg()) {
@@ -495,7 +495,7 @@ static ASMJIT_INLINE const X86SpecialInst* X86SpecialInst_get(uint32_t instId, c
 return NULL;
 if (opCount == 3 && !(opList[0].isVar() && opList[1].isVar() && opList[2].isVarOrMem()))
 return NULL;
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86InstIdMul:
 return x86SpecialInstMul;
@@ -928,7 +928,7 @@ void X86Context::emitConvertVarToVar(uint32_t dstType, uint32_t dstIndex, uint32
 compiler->emit(kX86InstIdCvtpd2ps, x86::xmm(dstIndex), x86::xmm(srcIndex));
 return;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86VarTypeXmmSs:
 if (srcType == kX86VarTypeXmmSd || srcType == kX86VarTypeXmmPd || srcType == kX86VarTypeYmmPd) {
@@ -947,7 +947,7 @@ void X86Context::emitConvertVarToVar(uint32_t dstType, uint32_t dstIndex, uint32
 compiler->emit(kX86InstIdCvtps2pd, x86::xmm(dstIndex), x86::xmm(srcIndex));
 return;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kX86VarTypeXmmSd:
 if (srcType == kX86VarTypeXmmSs || srcType == kX86VarTypeXmmPs || srcType == kX86VarTypeYmmPs) {
@@ -5514,7 +5514,7 @@ _NextGroup:
 ASMJIT_PROPAGATE_ERROR(cAlloc.run(static_cast<X86CallNode*>(node_)));
 break;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 
 case kHLNodeTypeHint:
 case kHLNodeTypeRet: {
@@ -265,7 +265,7 @@ static Error X86FuncDecl_initFunc(X86FuncDecl* self, uint32_t arch,
 self->_rets[1]._varType = static_cast<uint8_t>(ret - 2);
 self->_rets[1]._regIndex = kX86RegIndexDx;
 }
-ASMJIT_FALL_THROUGH;
+ASMJIT_FALLTHROUGH;
 #endif // ASMJIT_BUILD_X86
 
 case kVarTypeInt8: